| code | repo_name | path | language | license | size |
|---|---|---|---|---|---|
| stringlengths 3–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int64 3–1.05M |
# -*- coding: utf-8 -*-
"""SQL-Alchemy wrapper for Maraschino database"""
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy.ext.declarative import declarative_base
from maraschino import DATABASE
engine = create_engine('sqlite:///%s' % (DATABASE), convert_unicode=True)
db_session = scoped_session(sessionmaker(autocommit=False,
autoflush=False,
bind=engine))
Base = declarative_base()
Base.query = db_session.query_property()
def init_db():
"""Initialize database"""
import maraschino.models
Base.metadata.create_all(bind=engine)
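# Illustrative usage sketch (not part of the original module; ``WidgetSetting``
# is a hypothetical model name): models declared against ``Base`` get the
# ``query`` property above, and the scoped session handles writes once
# ``init_db()`` has created the tables.
#
#     from maraschino.models import WidgetSetting  # hypothetical model
#     init_db()
#     db_session.add(WidgetSetting(key='example', value='1'))
#     db_session.commit()
#     settings = WidgetSetting.query.all()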
| N3MIS15/maraschino-webcam | maraschino/database.py | Python | mit | 687 |
# -*- coding: utf-8 -*-
#
# Copyright 2009: Johannes Raggam, BlueDynamics Alliance
# http://bluedynamics.com
# GNU Lesser General Public License Version 2 or later
__author__ = """Johannes Raggam <johannes@raggam.co.at>"""
__docformat__ = 'plaintext'
from activities.runtime.interfaces import IExecution
from zope.interface import implements
from zope.component import getGlobalSiteManager
import sys
class Diagnosis(object):
implements(IExecution)
name = "diagnosis"
def __call__(self, action_info, stereotype_info, data):
if data['patient'].health < 30:
data['diagnosis'] = "acute"
else:
data['diagnosis'] = "normal"
return data
class DataAcquisition(object):
implements(IExecution)
name = "data-acquisition"
def __call__(self, action_info, stereotype_info, data):
data['name'] = data['patient'].name
return data
class DataVerification(object):
implements(IExecution)
name = "data-verification"
def __call__(self, action_info, stereotype_info, data):
# delete this key, because at the same time a diagnosis is made
# which leads to different values and a conflict when trying to merge
del data['diagnosis']
return data
class Therapy(object):
implements(IExecution)
name = "therapy"
def __call__(self, action_info, tgv_dict, data):
if 'variation' in tgv_dict.keys() and \
tgv_dict['variation'] == "acute":
data['patient'].health += 30
else:
data['patient'].health += 40
return data
gsm = getGlobalSiteManager()
gsm.registerUtility(component=Diagnosis(), name=Diagnosis.name)
gsm.registerUtility(component=DataAcquisition(), name=DataAcquisition.name)
gsm.registerUtility(component=DataVerification(), name=DataVerification.name)
gsm.registerUtility(component=Therapy(), name=Therapy.name)
#
| bluedynamics/activities.test.hospital | src/activities/test/hospital/executions.py | Python | lgpl-3.0 | 1,924 |
"""distutils.dep_util
Utility functions for simple, timestamp-based dependency of files
and groups of files; also, functions based entirely on such
timestamp dependency analysis."""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: dep_util.py,v 1.7 2004/11/10 22:23:14 loewis Exp $"
import os
from distutils.errors import DistutilsFileError
def newer (source, target):
"""Return true if 'source' exists and is more recently modified than
'target', or if 'source' exists and 'target' doesn't. Return false if
both exist and 'target' is the same age or younger than 'source'.
Raise DistutilsFileError if 'source' does not exist.
"""
if not os.path.exists(source):
raise DistutilsFileError, "file '%s' does not exist" % source
if not os.path.exists(target):
return 1
from stat import ST_MTIME
mtime1 = os.stat(source)[ST_MTIME]
mtime2 = os.stat(target)[ST_MTIME]
return mtime1 > mtime2
# newer ()
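# Example (illustrative only): rebuild a target when its source changed or the
# target is missing.
#
#   if newer('spam.c', 'spam.o'):
#       print("recompiling spam.o")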
def newer_pairwise (sources, targets):
"""Walk two filename lists in parallel, testing if each source is newer
than its corresponding target. Return a pair of lists (sources,
targets) where source is newer than target, according to the semantics
of 'newer()'.
"""
if len(sources) != len(targets):
raise ValueError, "'sources' and 'targets' must be same length"
# build a pair of lists (sources, targets) where source is newer
n_sources = []
n_targets = []
for i in range(len(sources)):
if newer(sources[i], targets[i]):
n_sources.append(sources[i])
n_targets.append(targets[i])
return (n_sources, n_targets)
# newer_pairwise ()
def newer_group (sources, target, missing='error'):
"""Return true if 'target' is out-of-date with respect to any file
listed in 'sources'. In other words, if 'target' exists and is newer
than every file in 'sources', return false; otherwise return true.
'missing' controls what we do when a source file is missing; the
default ("error") is to blow up with an OSError from inside 'stat()';
if it is "ignore", we silently drop any missing source files; if it is
"newer", any missing source files make us assume that 'target' is
out-of-date (this is handy in "dry-run" mode: it'll make you pretend to
carry out commands that wouldn't work because inputs are missing, but
that doesn't matter because you're not actually going to run the
commands).
"""
# If the target doesn't even exist, then it's definitely out-of-date.
if not os.path.exists(target):
return 1
# Otherwise we have to find out the hard way: if *any* source file
# is more recent than 'target', then 'target' is out-of-date and
# we can immediately return true. If we fall through to the end
# of the loop, then 'target' is up-to-date and we return false.
from stat import ST_MTIME
target_mtime = os.stat(target)[ST_MTIME]
for source in sources:
if not os.path.exists(source):
if missing == 'error': # blow up when we stat() the file
pass
elif missing == 'ignore': # missing source dropped from
continue # target's dependency list
elif missing == 'newer': # missing source means target is
return 1 # out-of-date
source_mtime = os.stat(source)[ST_MTIME]
if source_mtime > target_mtime:
return 1
else:
return 0
# newer_group ()
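# Example (illustrative only): relink an extension if any of its sources
# changed; with missing='newer', a deleted source also counts as out-of-date
# instead of raising from stat().
#
#   sources = ['ext.c', 'ext.h', 'helper.c']
#   if newer_group(sources, 'ext.so', missing='newer'):
#       print("relinking ext.so")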
| trivoldus28/pulsarch-verilog | tools/local/bas-release/bas,3.9-SunOS-i386/lib/python/lib/python2.4/distutils/dep_util.py | Python | gpl-2.0 | 3,573 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import pytz
import six
import unittest
from girder.api import rest
import girder.events
date = datetime.datetime.now()
class TestResource(object):
@rest.endpoint
def returnsSet(self, *args, **kwargs):
return {'key': {1, 2, 3}}
@rest.endpoint
def returnsDate(self, *args, **kwargs):
return {'key': date}
@rest.endpoint
def returnsInf(self, *args, **kwargs):
return {'value': float('inf')}
class RestUtilTestCase(unittest.TestCase):
"""
This performs unit-level testing of REST-related utilities.
"""
def testBoolParam(self):
resource = rest.Resource()
expect = {
'TRUE': True,
' true ': True,
'Yes': True,
'1': True,
'ON': True,
'false': False,
'False': False,
'OFF': False,
'': False,
' ': False,
False: False,
True: True
}
for input, output in six.viewitems(expect):
params = {
'some_key': input
}
self.assertEqual(resource.boolParam('some_key', params), output)
self.assertEqual(resource.boolParam('some_key', {}, default='x'), 'x')
def testGetApiUrl(self):
url = 'https://localhost/thing/api/v1/hello/world?foo=bar#test'
self.assertEqual(rest.getApiUrl(url), 'https://localhost/thing/api/v1')
parts = rest.getUrlParts(url)
self.assertEqual(parts.path, '/thing/api/v1/hello/world')
self.assertEqual(rest.getApiUrl(parts.path), '/thing/api/v1')
self.assertEqual(parts.port, None)
self.assertEqual(parts.hostname, 'localhost')
self.assertEqual(parts.query, 'foo=bar')
self.assertEqual(parts.fragment, 'test')
url = 'https://localhost/girder#users'
self.assertRaises(Exception, rest.getApiUrl, url=url)
def testCustomJsonEncoder(self):
resource = TestResource()
resp = resource.returnsSet().decode('utf8')
self.assertEqual(json.loads(resp), {'key': [1, 2, 3]})
resp = resource.returnsDate().decode('utf8')
self.assertEqual(json.loads(resp), {
'key': date.replace(tzinfo=pytz.UTC).isoformat()
})
# Returning infinity or NaN floats should raise a reasonable exception
regex = 'Out of range float values are not JSON compliant'
with six.assertRaisesRegex(self, ValueError, regex):
resp = resource.returnsInf()
def testCustomJsonEncoderEvent(self):
def _toString(event):
obj = event.info
if isinstance(obj, set):
event.addResponse(str(list(obj)))
with girder.events.bound('rest.json_encode', 'toString', _toString):
resource = TestResource()
resp = resource.returnsSet().decode('utf8')
self.assertEqual(json.loads(resp), {'key': '[1, 2, 3]'})
# Check we still get default encode for date
resp = resource.returnsDate().decode('utf8')
self.assertEqual(json.loads(resp), {
'key': date.replace(tzinfo=pytz.UTC).isoformat()
})
def testRequireParamsDictMode(self):
resource = rest.Resource()
resource.requireParams('hello', {'hello': 'world'})
resource.requireParams('hello', {'hello': None})
with six.assertRaisesRegex(self, rest.RestException, 'Parameter "hello" is required.'):
resource.requireParams(['hello'], {'foo': 'bar'})
with six.assertRaisesRegex(self, rest.RestException, 'Parameter "hello" is required.'):
resource.requireParams(['hello'], None)
def testSetContentDisposition(self):
with six.assertRaisesRegex(
self, rest.RestException,
'Error: Content-Disposition \(.*\) is not a recognized value.'):
rest.setContentDisposition('filename', 'unknown', False)
with six.assertRaisesRegex(
self, rest.RestException, 'Error: Content-Disposition filename is empty.'):
rest.setContentDisposition('', setHeader=False)
self.assertEqual(rest.setContentDisposition(
'filename', setHeader=False),
'attachment; filename="filename"')
self.assertEqual(rest.setContentDisposition(
'filename', 'inline', setHeader=False),
'inline; filename="filename"')
self.assertEqual(rest.setContentDisposition(
'filename', 'form-data; name="chunk"', setHeader=False),
'form-data; name="chunk"; filename="filename"')
self.assertEqual(rest.setContentDisposition(
'file "name"', setHeader=False),
'attachment; filename="file \\"name\\""')
self.assertEqual(rest.setContentDisposition(
'file\\name', setHeader=False),
'attachment; filename="file\\\\name"')
self.assertEqual(rest.setContentDisposition(
u'\u043e\u0431\u0440\u0430\u0437\u0435\u0446', setHeader=False),
'attachment; filename=""; filename*=UTF-8\'\''
'%D0%BE%D0%B1%D1%80%D0%B0%D0%B7%D0%B5%D1%86')
self.assertEqual(rest.setContentDisposition(
u'\U0001f603', setHeader=False),
'attachment; filename=""; filename*=UTF-8\'\'%F0%9F%98%83')
| adsorensen/girder | tests/cases/rest_util_test.py | Python | apache-2.0 | 6,155 |
# Copyright (c) 2014, German Neuroinformatics Node (G-Node)
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted under the terms of the BSD License. See
# LICENSE file in the root of the Project.
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from nixio.util.proxy_list import RefProxyList
class RefSourceProxyList(RefProxyList):
def __init__(self, obj):
super(RefSourceProxyList, self).__init__(
obj, "_source_count", "_get_source_by_id", "_get_source_by_pos",
"_remove_source_by_id", "_add_source_by_id"
)
_sources_doc = """
Getter for sources.
"""
def _get_sources(self):
if not hasattr(self, "_sources"):
setattr(self, "_sources", RefSourceProxyList(self))
return self._sources
class EntityWithSourcesMixin(object):
sources = property(_get_sources, None, None, _sources_doc)
| stoewer/nixpy | nixio/entity_with_sources.py | Python | bsd-3-clause | 990 |
# -*- coding: utf-8 -*-
"""Fetch hard drive temperature data from a hddtemp daemon
that runs on localhost and default port (7634)
contributed by `somospocos <https://github.com/somospocos>`_ - many thanks!
"""
import socket
import core.module
import core.widget
HOST = "localhost"
PORT = 7634
CHUNK_SIZE = 1024
RECORD_SIZE = 5
SEPARATOR = "|"
class Module(core.module.Module):
def __init__(self, config, theme):
super().__init__(config, theme, core.widget.Widget(self.hddtemps))
self.__hddtemps = self.__get_hddtemps()
def hddtemps(self, _):
return self.__hddtemps
def __fetch_data(self):
"""fetch data from hddtemp service"""
try:
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.connect((HOST, PORT))
data = ""
while True:
chunk = sock.recv(CHUNK_SIZE)
if chunk:
data += str(chunk)
else:
break
return data
except (AttributeError, socket.error) as e:
pass
@staticmethod
def __get_parts(data):
"""
split data using | separator and remove first item
(because the first item is empty)
"""
parts = data.split("|")[1:]
return parts
@staticmethod
def __partition_parts(parts):
"""
partition parts: one device record is five (5) items
"""
per_disk = [
parts[i : i + RECORD_SIZE] for i in range(len(parts))[::RECORD_SIZE]
]
return per_disk
@staticmethod
def __get_name_and_temp(device_record):
"""
get device name (without /dev part, to save space on bar)
and temperature (in °C) as tuple
"""
device_name = device_record[0].split("/")[-1]
device_temp = device_record[2]
return (device_name, device_temp)
@staticmethod
def __get_hddtemp(device_record):
name, temp = device_record
hddtemp = "{}+{}°C".format(name, temp)
return hddtemp
def __get_hddtemps(self):
data = self.__fetch_data()
if data is None:
return "n/a"
parts = self.__get_parts(data)
per_disk = self.__partition_parts(parts)
names_and_temps = [self.__get_name_and_temp(x) for x in per_disk]
hddtemps = [self.__get_hddtemp(x) for x in names_and_temps]
return SEPARATOR.join(hddtemps)
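    # Illustrative walk-through (the drive names and temperatures are made up):
    # the daemon answers with one pipe-separated record of RECORD_SIZE fields
    # per drive, e.g.
    #   "|/dev/sda|Samsung SSD 850|32|C||/dev/sdb|WDC WD20EZRX|41|C|"
    # __get_parts() drops the leading empty item, __partition_parts() groups the
    # rest five-by-five, and __get_hddtemps() renders it as "sda+32°C|sdb+41°C".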
def update(self):
self.__hddtemps = self.__get_hddtemps()
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
| tobi-wan-kenobi/bumblebee-status | bumblebee_status/modules/contrib/hddtemp.py | Python | mit | 2,667 |
from distutils.core import setup
from distutils.command.install_data import install_data
from distutils.command.install import INSTALL_SCHEMES
import os
import sys
class osx_install_data(install_data):
# On MacOS, the platform-specific lib dir is /System/Library/Framework/Python/.../
# which is wrong. Python 2.5 supplied with MacOS 10.5 has an Apple-specific fix
# for this in distutils.command.install_data#306. It fixes install_lib but not
# install_data, which is why we roll our own install_data class.
def finalize_options(self):
# By the time finalize_options is called, install.install_lib is set to the
# fixed directory, so we set the installdir to install_lib. The
# install_data class uses ('install_data', 'install_dir') instead.
self.set_undefined_options('install', ('install_lib', 'install_dir'))
install_data.finalize_options(self)
if sys.platform == "darwin":
cmdclasses = {'install_data': osx_install_data}
else:
cmdclasses = {'install_data': install_data}
def fullsplit(path, result=None):
"""
Split a pathname into components (the opposite of os.path.join) in a
platform-neutral way.
"""
if result is None:
result = []
head, tail = os.path.split(path)
if head == '':
return [tail] + result
if head == path:
return result
return fullsplit(head, [tail] + result)
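# Illustrative example (not in the original file; assumes a POSIX-style path):
#
#   fullsplit('django/contrib/admin')            -> ['django', 'contrib', 'admin']
#   '.'.join(fullsplit('django/contrib/admin'))  -> 'django.contrib.admin'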
# Tell distutils to put the data_files in platform-specific installation
# locations. See here for an explanation:
# http://groups.google.com/group/comp.lang.python/browse_thread/thread/35ec7b2fed36eaec/2105ee4d9e8042cb
for scheme in INSTALL_SCHEMES.values():
scheme['data'] = scheme['purelib']
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir != '':
os.chdir(root_dir)
django_dir = 'django'
for dirpath, dirnames, filenames in os.walk(django_dir):
# Ignore dirnames that start with '.'
for i, dirname in enumerate(dirnames):
if dirname.startswith('.'): del dirnames[i]
if '__init__.py' in filenames:
packages.append('.'.join(fullsplit(dirpath)))
elif filenames:
data_files.append([dirpath, [os.path.join(dirpath, f) for f in filenames]])
# Small hack for working with bdist_wininst.
# See http://mail.python.org/pipermail/distutils-sig/2004-August/004134.html
if len(sys.argv) > 1 and sys.argv[1] == 'bdist_wininst':
for file_info in data_files:
file_info[0] = '\\PURELIB\\%s' % file_info[0]
# Dynamically calculate the version based on django.VERSION.
version = __import__('django').get_version()
if u'SVN' in version:
version = ' '.join(version.split(' ')[:-1])
setup(
name = "Django",
version = version.replace(' ', '-'),
url = 'http://www.djangoproject.com/',
author = 'Django Software Foundation',
author_email = 'foundation@djangoproject.com',
description = 'A high-level Python Web framework that encourages rapid development and clean, pragmatic design.',
download_url = 'http://media.djangoproject.com/releases/1.2/Django-1.2-beta-1.tar.gz',
packages = packages,
cmdclass = cmdclasses,
data_files = data_files,
scripts = ['django/bin/django-admin.py'],
classifiers = ['Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Internet :: WWW/HTTP :: WSGI',
'Topic :: Software Development :: Libraries :: Application Frameworks',
'Topic :: Software Development :: Libraries :: Python Modules',
],
)
| t11e/django | setup.py | Python | bsd-3-clause | 4,079 |
import imageio
import os
try:
imageio.plugins.ffmpeg.download()
except:
print("Some cause an error, please execute is as root.")
finally:
os.remove("ffmpeg-ins.py")
| DcSoK/ImgurPlus | lib/ffmpeg-ins.py | Python | gpl-3.0 | 177 |
import os
import infra.basetest
GLXINFO_TIMEOUT = 120
class TestGlxinfo(infra.basetest.BRTest):
config = \
"""
BR2_x86_core2=y
BR2_TOOLCHAIN_EXTERNAL=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM=y
BR2_TOOLCHAIN_EXTERNAL_DOWNLOAD=y
BR2_TOOLCHAIN_EXTERNAL_URL="http://toolchains.bootlin.com/downloads/releases/toolchains/x86-core2/tarballs/x86-core2--glibc--bleeding-edge-2018.11-1.tar.bz2"
BR2_TOOLCHAIN_EXTERNAL_GCC_8=y
BR2_TOOLCHAIN_EXTERNAL_HEADERS_4_14=y
BR2_TOOLCHAIN_EXTERNAL_CXX=y
BR2_TOOLCHAIN_EXTERNAL_HAS_SSP=y
BR2_TOOLCHAIN_EXTERNAL_CUSTOM_GLIBC=y
BR2_LINUX_KERNEL=y
BR2_LINUX_KERNEL_CUSTOM_VERSION=y
BR2_LINUX_KERNEL_CUSTOM_VERSION_VALUE="4.16.7"
BR2_LINUX_KERNEL_USE_CUSTOM_CONFIG=y
BR2_LINUX_KERNEL_CUSTOM_CONFIG_FILE="board/qemu/x86/linux.config"
BR2_PACKAGE_MESA3D_DEMOS=y
BR2_PACKAGE_MESA3D=y
BR2_PACKAGE_MESA3D_GALLIUM_DRIVER_SWRAST=y
BR2_PACKAGE_MESA3D_OPENGL_GLX=y
BR2_PACKAGE_XORG7=y
BR2_PACKAGE_XSERVER_XORG_SERVER=y
BR2_TARGET_GENERIC_GETTY_PORT="ttyS0"
BR2_TARGET_ROOTFS_EXT2=y
# BR2_TARGET_ROOTFS_TAR is not set
BR2_ROOTFS_OVERLAY="{}"
""".format(
infra.filepath("tests/package/test_glxinfo/rootfs-overlay"))
def wait_for_xserver(self):
# xserver takes some time to start up
        # The test case fails here if, for some reason, xserver is not properly installed
_, _ = self.emulator.run('while [ ! -e /var/run/xorg.pid ]; do sleep 1; done', 120)
def login(self):
img = os.path.join(self.builddir, "images", "rootfs.ext2")
kern = os.path.join(self.builddir, "images", "bzImage")
        # glxinfo overallocates memory; the minimum that seemed to work was 512MB
self.emulator.boot(arch="i386",
kernel=kern,
kernel_cmdline=["root=/dev/vda console=ttyS0"],
options=["-M", "pc", "-m", "512", "-drive", "file={},if=virtio,format=raw".format(img)])
self.emulator.login()
def test_run(self):
self.login()
self.wait_for_xserver()
# The test case verifies that the xserver with GLX is working
cmd = "glxinfo -B -display :0"
output, exit_code = self.emulator.run(cmd, GLXINFO_TIMEOUT)
self.assertEqual(exit_code, 0)
for line in output:
self.assertNotIn("Error", line)
# Error case: "Error: couldn't find RGB GLX visual or fbconfig"
| glevand/buildroot--buildroot | support/testing/tests/package/test_glxinfo.py | Python | gpl-2.0 | 2,589 |
from django.urls import path
urlpatterns = [
path('stock/soap11', 'stock.web.views.dispatch11'),
path('stock/soap12', 'stock.web.views.dispatch12'),
path('ws/ops', 'stock.web.views.ops_dispatch'),
]
| soapteam/soapfish | examples/stock/urls.py | Python | bsd-3-clause | 212 |
from db import Db
from gen import Generator
from parse import Parser
from sql import Sql
from rnd import Rnd
import sys
import sqlite3
import codecs
SENTENCE_SEPARATOR = '.'
WORD_SEPARATOR = ' '
if __name__ == '__main__':
args = sys.argv
usage = 'Usage: %s (parse <name> <depth> <path to txt file>|gen <name> <count>)' % (args[0], )
if (len(args) < 3):
raise ValueError(usage)
mode = args[1]
name = args[2]
if mode == 'parse':
if (len(args) != 5):
raise ValueError(usage)
depth = int(args[3])
file_name = args[4]
db = Db(sqlite3.connect(name + '.db'), Sql())
db.setup(depth)
txt = codecs.open(file_name, 'r', 'utf-8').read()
Parser(name, db, SENTENCE_SEPARATOR, WORD_SEPARATOR).parse(txt)
elif mode == 'gen':
count = int(args[3])
db = Db(sqlite3.connect(name + '.db'), Sql())
generator = Generator(name, db, Rnd())
for i in range(0, count):
print(generator.generate(WORD_SEPARATOR))
else:
raise ValueError(usage)
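# Example invocations (illustrative; 'alice' and alice.txt are placeholder names):
#
#   python markov.py parse alice 3 alice.txt   # build alice.db with depth-3 chains
#   python markov.py gen alice 5               # print 5 generated sentences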
| codebox/markov-text | markov.py | Python | mit | 1,018 |
"""
This code is for illustration purposes only.
Use multi_agent.py for better performance and speed.
"""
import os
import numpy as np
import tensorflow as tf
import env
import a3c
import load_trace
from common_settings import CommonSettings
S_INFO = CommonSettings.S_INFO # bit_rate, buffer_size, next_chunk_size, bandwidth_measurement(throughput and time), chunk_til_video_end
S_LEN = CommonSettings.S_LEN # take how many frames in the past
A_DIM = CommonSettings.A_DIM
ACTOR_LR_RATE = 0.0001
CRITIC_LR_RATE = 0.001
TRAIN_SEQ_LEN = 100 # take as a train batch
MODEL_SAVE_INTERVAL = 100
# VIDEO_BIT_RATE = [300, 750, 1200, 1850, 2850, 4300] # Kbps
VIDEO_BIT_RATE = CommonSettings.VIDEO_BIT_RATE # Kbps
BUFFER_NORM_FACTOR = 10.0
CHUNK_TIL_VIDEO_END_CAP = 48.0
M_IN_K = 1000.0
REBUF_PENALTY = 4.3 # 1 sec rebuffering -> 3 Mbps
SMOOTH_PENALTY = 1
DEFAULT_QUALITY = 1 # default video quality without agent
RANDOM_SEED = 42
RAND_RANGE = 1000000
GRADIENT_BATCH_SIZE = 16
SUMMARY_DIR = './results'
LOG_FILE = './results/log'
# log in format of time_stamp bit_rate buffer_size rebuffer_time chunk_size download_time reward
NN_MODEL = None
def main():
np.random.seed(RANDOM_SEED)
assert len(VIDEO_BIT_RATE) == A_DIM
all_cooked_time, all_cooked_bw, _ = load_trace.load_trace()
if not os.path.exists(SUMMARY_DIR):
os.makedirs(SUMMARY_DIR)
net_env = env.Environment(all_cooked_time=all_cooked_time,
all_cooked_bw=all_cooked_bw)
with tf.Session() as sess, open(LOG_FILE, 'wb') as log_file:
actor = a3c.ActorNetwork(sess,
state_dim=[S_INFO, S_LEN], action_dim=A_DIM,
learning_rate=ACTOR_LR_RATE)
critic = a3c.CriticNetwork(sess,
state_dim=[S_INFO, S_LEN],
learning_rate=CRITIC_LR_RATE)
summary_ops, summary_vars = a3c.build_summaries()
sess.run(tf.global_variables_initializer())
writer = tf.summary.FileWriter(SUMMARY_DIR, sess.graph) # training monitor
saver = tf.train.Saver() # save neural net parameters
# restore neural net parameters
nn_model = NN_MODEL
if nn_model is not None: # nn_model is the path to file
saver.restore(sess, nn_model)
print("Model restored.")
epoch = 0
time_stamp = 0
last_bit_rate = DEFAULT_QUALITY
bit_rate = DEFAULT_QUALITY
action_vec = np.zeros(A_DIM)
action_vec[bit_rate] = 1
s_batch = [np.zeros((S_INFO, S_LEN))]
a_batch = [action_vec]
r_batch = []
entropy_record = []
actor_gradient_batch = []
critic_gradient_batch = []
while True: # serve video forever
# the action is from the last decision
# this is to make the framework similar to the real
delay, sleep_time, buffer_size, rebuf, \
video_chunk_size, next_video_chunk_sizes, \
end_of_video, video_chunk_remain = \
net_env.get_video_chunk(bit_rate)
time_stamp += delay # in ms
time_stamp += sleep_time # in ms
# reward is video quality - rebuffer penalty - smooth penalty
reward = VIDEO_BIT_RATE[bit_rate] / M_IN_K \
- REBUF_PENALTY * rebuf \
- SMOOTH_PENALTY * np.abs(VIDEO_BIT_RATE[bit_rate] -
VIDEO_BIT_RATE[last_bit_rate]) / M_IN_K
r_batch.append(reward)
last_bit_rate = bit_rate
# retrieve previous state
if len(s_batch) == 0:
state = [np.zeros((S_INFO, S_LEN))]
else:
state = np.array(s_batch[-1], copy=True)
# dequeue history record
state = np.roll(state, -1, axis=1)
# this should be S_INFO number of terms
state[0, -1] = VIDEO_BIT_RATE[bit_rate] / float(np.max(VIDEO_BIT_RATE)) # last quality
state[1, -1] = buffer_size / BUFFER_NORM_FACTOR # 10 sec
state[2, -1] = float(video_chunk_size) / float(delay) / M_IN_K # kilo byte / ms
state[3, -1] = float(delay) / M_IN_K / BUFFER_NORM_FACTOR # 10 sec
state[4, :A_DIM] = np.array(next_video_chunk_sizes) / M_IN_K / M_IN_K # mega byte
state[5, -1] = np.minimum(video_chunk_remain, CHUNK_TIL_VIDEO_END_CAP) / float(CHUNK_TIL_VIDEO_END_CAP)
action_prob = actor.predict(np.reshape(state, (1, S_INFO, S_LEN)))
action_cumsum = np.cumsum(action_prob)
bit_rate = (action_cumsum > np.random.randint(1, RAND_RANGE) / float(RAND_RANGE)).argmax()
# Note: we need to discretize the probability into 1/RAND_RANGE steps,
# because there is an intrinsic discrepancy in passing single state and batch states
entropy_record.append(a3c.compute_entropy(action_prob[0]))
# log time_stamp, bit_rate, buffer_size, reward
log_file.write((str(time_stamp) + '\t' +
str(VIDEO_BIT_RATE[bit_rate]) + '\t' +
str(buffer_size) + '\t' +
str(rebuf) + '\t' +
str(video_chunk_size) + '\t' +
str(delay) + '\t' +
str(reward) + '\n').encode())
log_file.flush()
if len(r_batch) >= TRAIN_SEQ_LEN or end_of_video: # do training once
actor_gradient, critic_gradient, td_batch = \
                    a3c.compute_gradients(s_batch=np.stack(s_batch[1:], axis=0),  # ignore the first chunk
a_batch=np.vstack(a_batch[1:]), # since we don't have the
r_batch=np.vstack(r_batch[1:]), # control over it
terminal=end_of_video, actor=actor, critic=critic)
td_loss = np.mean(td_batch)
actor_gradient_batch.append(actor_gradient)
critic_gradient_batch.append(critic_gradient)
print("====")
print("Epoch", epoch)
print("TD_loss", td_loss, "Avg_reward", np.mean(r_batch), "Avg_entropy", np.mean(entropy_record))
print("====")
summary_str = sess.run(summary_ops, feed_dict={
summary_vars[0]: td_loss,
summary_vars[1]: np.mean(r_batch),
summary_vars[2]: np.mean(entropy_record)
})
writer.add_summary(summary_str, epoch)
writer.flush()
entropy_record = []
if len(actor_gradient_batch) >= GRADIENT_BATCH_SIZE:
assert len(actor_gradient_batch) == len(critic_gradient_batch)
# assembled_actor_gradient = actor_gradient_batch[0]
# assembled_critic_gradient = critic_gradient_batch[0]
# assert len(actor_gradient_batch) == len(critic_gradient_batch)
# for i in xrange(len(actor_gradient_batch) - 1):
# for j in xrange(len(actor_gradient)):
# assembled_actor_gradient[j] += actor_gradient_batch[i][j]
# assembled_critic_gradient[j] += critic_gradient_batch[i][j]
# actor.apply_gradients(assembled_actor_gradient)
# critic.apply_gradients(assembled_critic_gradient)
for i in range(len(actor_gradient_batch)):
actor.apply_gradients(actor_gradient_batch[i])
critic.apply_gradients(critic_gradient_batch[i])
actor_gradient_batch = []
critic_gradient_batch = []
epoch += 1
if epoch % MODEL_SAVE_INTERVAL == 0:
# Save the neural net parameters to disk.
save_path = saver.save(sess, SUMMARY_DIR + "/nn_model_ep_" +
str(epoch) + ".ckpt")
print("Model saved in file: %s" % save_path)
del s_batch[:]
del a_batch[:]
del r_batch[:]
if end_of_video:
last_bit_rate = DEFAULT_QUALITY
bit_rate = DEFAULT_QUALITY # use the default action here
action_vec = np.zeros(A_DIM)
action_vec[bit_rate] = 1
s_batch.append(np.zeros((S_INFO, S_LEN)))
a_batch.append(action_vec)
else:
s_batch.append(state)
action_vec = np.zeros(A_DIM)
action_vec[bit_rate] = 1
a_batch.append(action_vec)
if __name__ == '__main__':
main()
| BlackPoint-CX/ABR | ABR/sim/agent.py | Python | mit | 8,958 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Functions to format lexc from omorfi data."""
# Author: Omorfi contributors <omorfi-devel@groups.google.com> 2015
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# functions for formatting the database data to lexc
from .settings import deriv_boundary, morph_boundary, newword_boundary, optional_hyphen, stub_boundary, word_boundary
from .string_manglers import lexc_escape
def format_copyright_lexc():
return """
! This automatically generated lexc data is originated from
! omorfi database.
! Copyright (c) 2014 Omorfi contributors
! This program is free software: you can redistribute it and/or modify
! it under the terms of the GNU General Public License as published by
! the Free Software Foundation, version 3 of the License
! This program is distributed in the hope that it will be useful,
! but WITHOUT ANY WARRANTY; without even the implied warranty of
! MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
! GNU General Public License for more details.
! You should have received a copy of the GNU General Public License
! along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# generics
def format_continuation_lexc_generic(anals, surf, cont):
surf = lexc_escape(surf)
return "%s:%s\t%s ; \n" % (surf.replace(optional_hyphen, newword_boundary),
surf, cont)
def format_wordmap_lexc_generic(wordmap):
wordmap['analysis'] = lexc_escape(wordmap['stub']) + stub_boundary
retvals = []
lex_stub = lexc_escape(wordmap['stub'])
retvals += ["%s:%s\t%s\t;" %
(wordmap['analysis'], lex_stub, wordmap['new_para'])]
return "\n".join(retvals)
def format_wordmap_lexc_labeled_segments(wordmap):
wordmap['analysis'] = lexc_escape(
wordmap['stub']) + "[UPOS=" + wordmap['upos'] + ']'
wordmap['analysis'] = wordmap['analysis'].replace(word_boundary, "[WB=+]")
wordmap['analysis'] = wordmap['analysis'].replace(
newword_boundary, "[WB=?]")
retvals = []
lex_stub = lexc_escape(wordmap['stub'])
retvals += ["%s:%s\t%s\t;" %
(wordmap['analysis'], lex_stub, wordmap['new_para'])]
return "\n".join(retvals)
def format_continuation_lexc_labeled_segments(anals, surf, cont):
surf = lexc_escape(surf)
# mostly suffixes: e.g.
# >i>ssa Pl|Ine -> |i|PL|ssa|INE
# >i -> |i|ACT|PAST|SG3
foo = surf
foo = foo.replace(morph_boundary, "[MB=LEFT]", 1)
foo = foo.replace(newword_boundary, "[WB=?]")
foo = foo.replace(word_boundary, "[WB=+]")
restanals = []
for anal in anals.split('|'):
if anal.startswith("D") and "{DB}" in foo:
foo = foo.replace(deriv_boundary, "[DB=" + anal + "]", 1)
elif "{MB}" in foo:
foo = foo.replace(morph_boundary, "[MB=" + anal + "]", 1)
else:
restanals.append(anal)
if restanals and len(restanals) > 0:
foo += "[TRAILS=→" + "][?=".join(restanals) + "]"
return "%s:%s\t%s ; \n" % (foo.replace(optional_hyphen, newword_boundary),
surf, cont)
| jiemakel/omorfi | src/python/omorfi/lexc_formatter.py | Python | gpl-3.0 | 3,736 |
#!/usr/bin/env python3
# -*- python -*-
"""
%prog SUBMODULE...
Hack to pipe submodules of Numpy through 2to3 and build them in-place
one-by-one.
Example usage:
python3 tools/py3tool.py testing distutils core
This will copy files to _py3k/numpy, add a dummy __init__.py and
version.py on the top level, and copy and 2to3 the files of the three
submodules.
When running py3tool again, only changed files are re-processed, which
makes the test-bugfix cycle faster.
"""
from optparse import OptionParser
import shutil
import os
import sys
import re
import subprocess
import fnmatch
if os.environ.get('USE_2TO3CACHE'):
import lib2to3cache
BASE = os.path.normpath(os.path.join(os.path.dirname(__file__), '..'))
TEMP = os.path.normpath(os.path.join(BASE, '_py3k'))
SCRIPT_2TO3 = os.path.join(BASE, 'tools', '2to3.py')
EXTRA_2TO3_FLAGS = {
'*/setup.py': '-x import',
#'numpy/core/code_generators/generate_umath.py': '-x import',
#'numpy/core/code_generators/generate_numpy_api.py': '-x import',
#'numpy/core/code_generators/generate_ufunc_api.py': '-x import',
#'numpy/core/defchararray.py': '-x unicode',
#'numpy/compat/py3k.py': '-x unicode',
#'numpy/ma/timer_comparison.py': 'skip',
#'numpy/distutils/system_info.py': '-x reduce',
#'numpy/f2py/auxfuncs.py': '-x reduce',
#'numpy/lib/arrayterator.py': '-x reduce',
#'numpy/lib/tests/test_arrayterator.py': '-x reduce',
#'numpy/ma/core.py': '-x reduce',
#'numpy/ma/tests/test_core.py': '-x reduce',
#'numpy/ma/tests/test_old_ma.py': '-x reduce',
#'numpy/ma/timer_comparison.py': '-x reduce',
#'numpy/oldnumeric/ma.py': '-x reduce',
}
def main():
p = OptionParser(usage=__doc__.strip())
p.add_option("--clean", "-c", action="store_true",
help="clean source directory")
options, args = p.parse_args()
if not args:
p.error('no submodules given')
else:
dirs = ['scipy/%s' % x for x in map(os.path.basename, args)]
# Prepare
if not os.path.isdir(TEMP):
os.makedirs(TEMP)
# Set up dummy files (for building only submodules)
dummy_files = {
'__init__.py': 'from scipy.version import version as __version__',
'version.py': 'version = "0.8.0.dev"'
}
for fn, content in dummy_files.items():
fn = os.path.join(TEMP, 'scipy', fn)
if not os.path.isfile(fn):
try:
os.makedirs(os.path.dirname(fn))
except OSError:
pass
f = open(fn, 'wb+')
f.write(content.encode('ascii'))
f.close()
# Environment
pp = [os.path.abspath(TEMP)]
def getenv():
env = dict(os.environ)
env.update({'PYTHONPATH': ':'.join(pp)})
return env
# Copy
for d in dirs:
src = os.path.join(BASE, d)
dst = os.path.join(TEMP, d)
# Run 2to3
sync_2to3(dst=dst,
src=src,
patchfile=os.path.join(TEMP, os.path.basename(d) + '.patch'),
clean=options.clean)
# Run setup.py, falling back to Pdb post-mortem on exceptions
setup_py = os.path.join(dst, 'setup.py')
if os.path.isfile(setup_py):
code = """\
import pdb, sys, traceback
p = pdb.Pdb()
try:
import __main__
__main__.__dict__.update({
"__name__": "__main__", "__file__": "setup.py",
"__builtins__": __builtins__})
fp = open("setup.py", "rb")
try:
exec(compile(fp.read(), "setup.py", 'exec'))
finally:
fp.close()
except SystemExit:
raise
except:
traceback.print_exc()
t = sys.exc_info()[2]
p.interaction(None, t)
"""
ret = subprocess.call([sys.executable, '-c', code,
'build_ext', '-i'],
cwd=dst,
env=getenv())
if ret != 0:
raise RuntimeError("Build failed.")
# Run nosetests
subprocess.call(['nosetests3', '-v', d], cwd=TEMP)
def custom_mangling(filename):
import_mangling = [
os.path.join('cluster', '__init__.py'),
os.path.join('cluster', 'hierarchy.py'),
os.path.join('cluster', 'vq.py'),
os.path.join('fftpack', 'basic.py'),
os.path.join('fftpack', 'pseudo_diffs.py'),
os.path.join('integrate', 'odepack.py'),
os.path.join('integrate', 'quadpack.py'),
os.path.join('integrate', 'ode.py'),
os.path.join('interpolate', 'fitpack.py'),
os.path.join('interpolate', 'fitpack2.py'),
os.path.join('interpolate', 'interpolate.py'),
os.path.join('interpolate', 'interpolate_wrapper.py'),
os.path.join('interpolate', 'ndgriddata.py'),
os.path.join('io', 'array_import.py'),
os.path.join('io', '__init__.py'),
os.path.join('io', 'matlab', 'miobase.py'),
os.path.join('io', 'matlab', 'mio4.py'),
os.path.join('io', 'matlab', 'mio5.py'),
os.path.join('io', 'matlab', 'mio5_params.py'),
os.path.join('linalg', 'basic.py'),
os.path.join('linalg', 'decomp.py'),
os.path.join('linalg', 'lapack.py'),
os.path.join('linalg', 'flinalg.py'),
os.path.join('linalg', 'iterative.py'),
os.path.join('lib', 'blas', '__init__.py'),
os.path.join('lib', 'lapack', '__init__.py'),
os.path.join('ndimage', 'filters.py'),
os.path.join('ndimage', 'fourier.py'),
os.path.join('ndimage', 'interpolation.py'),
os.path.join('ndimage', 'measurements.py'),
os.path.join('ndimage', 'morphology.py'),
os.path.join('optimize', 'minpack.py'),
os.path.join('optimize', 'zeros.py'),
os.path.join('optimize', 'lbfgsb.py'),
os.path.join('optimize', 'cobyla.py'),
os.path.join('optimize', 'slsqp.py'),
os.path.join('optimize', 'nnls.py'),
os.path.join('signal', '__init__.py'),
os.path.join('signal', 'bsplines.py'),
os.path.join('signal', 'signaltools.py'),
os.path.join('signal', 'fir_filter_design.py'),
os.path.join('special', '__init__.py'),
os.path.join('special', 'basic.py'),
os.path.join('special', 'orthogonal.py'),
os.path.join('spatial', '__init__.py'),
os.path.join('spatial', 'distance.py'),
os.path.join('sparse', 'linalg', 'isolve', 'iterative.py'),
os.path.join('sparse', 'linalg', 'dsolve', 'linsolve.py'),
os.path.join('sparse', 'linalg', 'dsolve', 'umfpack', 'umfpack.py'),
os.path.join('sparse', 'linalg', 'eigen', 'arpack', 'arpack.py'),
os.path.join('sparse', 'linalg', 'eigen', 'arpack', 'speigs.py'),
os.path.join('sparse', 'linalg', 'iterative', 'isolve', 'iterative.py'),
os.path.join('stats', 'stats.py'),
os.path.join('stats', 'distributions.py'),
os.path.join('stats', 'morestats.py'),
os.path.join('stats', 'kde.py'),
os.path.join('stats', 'mstats_basic.py'),
]
if any(filename.endswith(x) for x in import_mangling):
print(filename)
f = open(filename, 'r')
text = f.read()
f.close()
for mod in ['_vq', '_hierarchy_wrap', '_fftpack', 'convolve',
'_flinalg', 'fblas', 'flapack', 'cblas', 'clapack',
'calc_lwork', '_cephes', 'specfun', 'orthogonal_eval',
'lambertw', 'ckdtree', '_distance_wrap',
'_minpack', '_zeros', '_lbfgsb', '_cobyla', '_slsqp',
'_nnls',
'sigtools', 'spline',
'_fitpack', 'dfitpack', '_interpolate',
'_odepack', '_quadpack', 'vode', '_dop',
'vonmises_cython',
'futil', 'mvn',
'_nd_image',
'numpyio',
'_superlu', '_arpack', '_iterative', '_umfpack',
'interpnd',
'mio_utils', 'mio5_utils', 'streams'
]:
text = re.sub(r'^(\s*)import %s' % mod,
r'\1from . import %s' % mod,
text, flags=re.M)
text = re.sub(r'^(\s*)from %s import' % mod,
r'\1from .%s import' % mod,
text, flags=re.M)
#text = text.replace('from matrixlib', 'from .matrixlib')
f = open(filename, 'w')
f.write(text)
f.close()
def walk_sync(dir1, dir2, _seen=None):
if _seen is None:
seen = {}
else:
seen = _seen
if not dir1.endswith(os.path.sep):
dir1 = dir1 + os.path.sep
# Walk through stuff (which we haven't yet gone through) in dir1
for root, dirs, files in os.walk(dir1):
sub = root[len(dir1):]
if sub in seen:
dirs = [x for x in dirs if x not in seen[sub][0]]
files = [x for x in files if x not in seen[sub][1]]
seen[sub][0].extend(dirs)
seen[sub][1].extend(files)
else:
seen[sub] = (dirs, files)
if not dirs and not files:
continue
yield os.path.join(dir1, sub), os.path.join(dir2, sub), dirs, files
if _seen is None:
# Walk through stuff (which we haven't yet gone through) in dir2
for root2, root1, dirs, files in walk_sync(dir2, dir1, _seen=seen):
yield root1, root2, dirs, files
def sync_2to3(src, dst, patchfile=None, clean=False):
import lib2to3.main
from io import StringIO
to_convert = []
for src_dir, dst_dir, dirs, files in walk_sync(src, dst):
for fn in dirs + files:
src_fn = os.path.join(src_dir, fn)
dst_fn = os.path.join(dst_dir, fn)
# skip temporary etc. files
if fn.startswith('.#') or fn.endswith('~'):
continue
# remove non-existing
if os.path.exists(dst_fn) and not os.path.exists(src_fn):
if clean:
if os.path.isdir(dst_fn):
shutil.rmtree(dst_fn)
else:
os.unlink(dst_fn)
continue
# make directories
if os.path.isdir(src_fn):
if not os.path.isdir(dst_fn):
os.makedirs(dst_fn)
continue
dst_dir = os.path.dirname(dst_fn)
if os.path.isfile(dst_fn) and not os.path.isdir(dst_dir):
os.makedirs(dst_dir)
# don't replace up-to-date files
try:
if os.path.isfile(dst_fn) and \
os.stat(dst_fn).st_mtime >= os.stat(src_fn).st_mtime:
continue
except OSError:
pass
# copy file
shutil.copyfile(src_fn, dst_fn)
# add .py files to 2to3 list
if dst_fn.endswith('.py'):
to_convert.append((src_fn, dst_fn))
# run 2to3
flag_sets = {}
for fn, dst_fn in to_convert:
flag = ''
for pat, opt in EXTRA_2TO3_FLAGS.items():
if fnmatch.fnmatch(fn, pat):
flag = opt
break
flag_sets.setdefault(flag, []).append(dst_fn)
if patchfile:
p = open(patchfile, 'wb+')
else:
p = open(os.devnull, 'wb')
for flags, filenames in flag_sets.items():
if flags == 'skip':
continue
_old_stdout = sys.stdout
try:
sys.stdout = StringIO()
lib2to3.main.main("lib2to3.fixes", ['-w'] + flags.split()+filenames)
finally:
sys.stdout = _old_stdout
for fn, dst_fn in to_convert:
# perform custom mangling
custom_mangling(dst_fn)
p.close()
if __name__ == "__main__":
main()
| jasonmccampbell/scipy-refactor | tools/py3tool.py | Python | bsd-3-clause | 11,878 |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2011, 2012, 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Initialization and configuration for `flask_sqlalchemy`."""
from flask_registry import ModuleAutoDiscoveryRegistry, RegistryProxy
from flask_sqlalchemy import SQLAlchemy as FlaskSQLAlchemy
import sqlalchemy
import sqlalchemy.dialects.postgresql
from sqlalchemy import event, types
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.pool import Pool
from sqlalchemy_utils import JSONType
from invenio.ext.sqlalchemy.types import LegacyBigInteger, LegacyInteger, \
LegacyMediumInteger, LegacySmallInteger, LegacyTinyInteger
from .expressions import AsBINARY
from .types import GUID, MarshalBinary, PickleBinary
from .utils import get_model_type
def _include_sqlalchemy(obj, engine=None):
"""Init all required SQLAlchemy's types."""
# for module in sqlalchemy, sqlalchemy.orm:
# for key in module.__all__:
# if not hasattr(obj, key):
# setattr(obj, key,
# getattr(module, key))
if engine == 'mysql':
from sqlalchemy.dialects import mysql as engine_types
else:
from sqlalchemy import types as engine_types
# Length is provided to JSONType to ensure MySQL uses LONGTEXT instead
# of TEXT which only provides for 64kb storage compared to 4gb for
# LONGTEXT.
setattr(obj, 'JSON', JSONType(length=2 ** 32 - 2))
setattr(obj, 'Char', engine_types.CHAR)
try:
setattr(obj, 'TinyText', engine_types.TINYTEXT)
except:
setattr(obj, 'TinyText', engine_types.TEXT)
setattr(obj, 'hybrid_property', hybrid_property)
try:
setattr(obj, 'Double', engine_types.DOUBLE)
except:
setattr(obj, 'Double', engine_types.FLOAT)
setattr(obj, 'Binary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iLargeBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'iMediumBinary', sqlalchemy.types.LargeBinary)
setattr(obj, 'UUID', GUID)
setattr(obj, 'Integer', LegacyInteger)
setattr(obj, 'MediumInteger', LegacyMediumInteger)
setattr(obj, 'SmallInteger', LegacySmallInteger)
setattr(obj, 'TinyInteger', LegacyTinyInteger)
setattr(obj, 'BigInteger', LegacyBigInteger)
if engine == 'mysql':
from .engines import mysql as dummy_mysql # noqa
# module = invenio.sqlalchemyutils_mysql
# for key in module.__dict__:
# setattr(obj, key,
# getattr(module, key))
obj.AsBINARY = AsBINARY
obj.MarshalBinary = MarshalBinary
obj.PickleBinary = PickleBinary
# Overwrite :meth:`MutableDick.update` to detect changes.
from sqlalchemy.ext.mutable import MutableDict
def update_mutable_dict(self, *args, **kwargs):
super(MutableDict, self).update(*args, **kwargs)
self.changed()
MutableDict.update = update_mutable_dict
obj.MutableDict = MutableDict
# @compiles(types.Text, 'postgresql')
# @compiles(sqlalchemy.dialects.postgresql.TEXT, 'postgresql')
# def compile_text(element, compiler, **kw):
# """Redefine Text filed type for PostgreSQL."""
# return 'TEXT'
# @compiles(types.VARBINARY, 'postgresql')
# def compile_text(element, compiler, **kw):
# """Redefine VARBINARY filed type for PostgreSQL."""
# return 'BYTEA'
def autocommit_on_checkin(dbapi_con, con_record):
"""Call autocommit on raw mysql connection for fixing bug in MySQL 5.5."""
try:
dbapi_con.autocommit(True)
except:
pass
# FIXME
# from invenio.ext.logging import register_exception
# register_exception()
# Possibly register globally.
# event.listen(Pool, 'checkin', autocommit_on_checkin)
class SQLAlchemy(FlaskSQLAlchemy):
"""Database object."""
def init_app(self, app):
"""Init application."""
super(self.__class__, self).init_app(app)
engine = app.config.get('CFG_DATABASE_TYPE', 'mysql')
self.Model = get_model_type(self.Model)
if engine == 'mysql':
# Override MySQL parameters to force MyISAM engine
mysql_parameters = {'keep_existing': True,
'extend_existing': False,
'mysql_engine': 'MyISAM',
'mysql_charset': 'utf8'}
original_table = self.Table
def table_with_myisam(*args, **kwargs):
"""Use same MySQL parameters that are used for ORM models."""
new_kwargs = dict(mysql_parameters)
new_kwargs.update(kwargs)
return original_table(*args, **new_kwargs)
self.Table = table_with_myisam
self.Model.__table_args__ = mysql_parameters
_include_sqlalchemy(self, engine=engine)
def __getattr__(self, name):
"""
Called when the normal mechanism fails.
This is only called when the normal mechanism fails,
so in practice should never be called.
It is only provided to satisfy pylint that it is okay not to
raise E1101 errors in the client code.
:see http://stackoverflow.com/a/3515234/780928
"""
raise AttributeError("%r instance has no attribute %r" % (self, name))
def schemadiff(self, excludeTables=None):
"""Generate a schema diff."""
from migrate.versioning import schemadiff
return schemadiff \
.getDiffOfModelAgainstDatabase(self.metadata,
self.engine,
excludeTables=excludeTables)
def apply_driver_hacks(self, app, info, options):
"""Called before engine creation."""
# Don't forget to apply hacks defined on parent object.
super(self.__class__, self).apply_driver_hacks(app, info, options)
if info.drivername == 'mysql':
options.setdefault('execution_options', {
# Autocommit cause Exception in SQLAlchemy >= 0.9.
# @see http://docs.sqlalchemy.org/en/rel_0_9/
# core/connections.html#understanding-autocommit
# 'autocommit': True,
'use_unicode': False,
'charset': 'utf8mb4',
})
event.listen(Pool, 'checkin', autocommit_on_checkin)
db = SQLAlchemy()
"""
Provides access to :class:`~.SQLAlchemy` instance.
"""
models = RegistryProxy('models', ModuleAutoDiscoveryRegistry, 'models')
def setup_app(app):
"""Setup SQLAlchemy extension."""
if 'SQLALCHEMY_DATABASE_URI' not in app.config:
from sqlalchemy.engine.url import URL
cfg = app.config
app.config['SQLALCHEMY_DATABASE_URI'] = URL(
cfg.get('CFG_DATABASE_TYPE', 'mysql'),
username=cfg.get('CFG_DATABASE_USER'),
password=cfg.get('CFG_DATABASE_PASS'),
host=cfg.get('CFG_DATABASE_HOST'),
database=cfg.get('CFG_DATABASE_NAME'),
port=cfg.get('CFG_DATABASE_PORT'),
)
# Let's initialize database.
db.init_app(app)
return app
| Lilykos/invenio | invenio/ext/sqlalchemy/__init__.py | Python | gpl-2.0 | 7,906 |
class Solution:
def profitableSchemes(self, G: int, P: int, group: List[int], profit: List[int]) -> int:
dp = [[0]*(1 + G) for _ in range(1 + P)]
dp[0][0] = 1
MOD = 10**9 + 7
for g0, p0 in zip(group, profit):
for g in range(G, g0 - 1, -1):
for p in range(P, -1, -1):
dp[p][g] = (dp[p][g] + dp[max(p - p0, 0)][g - g0]) % MOD
return sum(dp[-1]) % MOD
| jiadaizhao/LeetCode | 0801-0900/0879-Profitable Schemes/0879-Profitable Schemes.py | Python | mit | 442 |
from tornado import ioloop, httpclient as hc, gen, log, escape
import time
from . import _compat as _
from .graphite import GraphiteRecord
from .utils import convert_to_format, parse_interval, parse_rule, HISTORICAL, HISTORICAL_TOD, interval_to_graphite
import math
import psycopg2
from collections import deque, defaultdict
from itertools import islice
from datetime import datetime
LOGGER = log.gen_log
METHODS = "average", "last_value", "sum"
LEVELS = {
'critical': 0,
'warning': 10,
'normal': 20,
}
class sliceable_deque(deque):
def __getitem__(self, index):
try:
return deque.__getitem__(self, index)
except TypeError:
return type(self)(islice(self, index.start, index.stop, index.step))
class AlertFabric(type):
""" Register alert's classes and produce an alert by source. """
alerts = {}
def __new__(mcs, name, bases, params):
source = params.get('source')
cls = super(AlertFabric, mcs).__new__(mcs, name, bases, params)
if source:
mcs.alerts[source] = cls
LOGGER.info('Register Alert: %s' % source)
return cls
def get(cls, reactor, source='graphite', **options):
acls = cls.alerts[source]
return acls(reactor, **options)
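    # Illustrative note (not in the original source): setting ``source`` on a
    # subclass is what registers it, so GraphiteAlert below ends up in
    # AlertFabric.alerts['graphite'] and a reactor can build it with e.g.
    #   BaseAlert.get(reactor, source='graphite', name='cpu',
    #                 query='app.cpu', rules=['critical: > 90'])
    # where the name, query and rule values are made-up examples.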
class BaseAlert(_.with_metaclass(AlertFabric)):
""" Abstract basic alert class. """
source = None
def __init__(self, reactor, **options):
self.reactor = reactor
self.options = options
self.client = hc.AsyncHTTPClient()
self.history_TOD_value = {}
self.recorded = False
self.pastHour = datetime.now().time().hour
self.historicValues = {}
self.first = True
try:
self.configure(**options)
except Exception as e:
raise ValueError("Invalid alert configuration: %s" % e)
self.waiting = False
self.state = {None: "normal", "waiting": "normal", "loading": "normal"}
self.history = defaultdict(lambda: sliceable_deque([], self.history_size))
LOGGER.info("Alert '%s': has inited" % self)
def __hash__(self):
return hash(self.name) ^ hash(self.source)
def __eq__(self, other):
return hash(self) == hash(other)
def __str__(self):
return "%s (%s)" % (self.name, self.interval)
def configure(self, name=None, rules=None, query=None, **options):
assert name, "Alert's name is invalid"
self.name = name
assert rules, "%s: Alert's rules is invalid" % name
self.rules = [parse_rule(rule) for rule in rules]
self.rules = list(sorted(self.rules, key=lambda r: LEVELS.get(r.get('level'), 99)))
assert query, "%s: Alert's query is invalid" % self.name
self.query = query
self.interval = interval_to_graphite(
options.get('interval', self.reactor.options['interval']))
interval = parse_interval(self.interval)
self.time_window = interval_to_graphite(
options.get('time_window', options.get('time_window', self.reactor.options['time_window'])))
self._format = options.get('format', self.reactor.options['format'])
self.request_timeout = options.get(
'request_timeout', self.reactor.options['request_timeout'])
self.history_size = options.get('history_size', self.reactor.options['history_size'])
self.history_size = parse_interval(self.history_size)
self.history_size = int(math.ceil(self.history_size / interval))
self.history_TOD_size = options.get('history_TOD_size', '1d')
self.history_TOD_size = int(parse_interval(self.history_TOD_size) / 86400000.0)
if self.reactor.options.get('debug'):
self.callback = ioloop.PeriodicCallback(self.load, 5000)
else:
self.callback = ioloop.PeriodicCallback(self.load, interval)
def convert(self, value):
return convert_to_format(value, self._format)
def reset(self):
""" Reset state to normal for all targets.
        It will repeat the notification if a metric is still failing.
"""
for target in self.state:
self.state[target] = "normal"
def start(self):
self.callback.start()
self.load()
return self
def stop(self):
self.callback.stop()
return self
def check(self, records):
"""
Called at an interval to check if any values have exceeded alert thresholds.
History is also recorded in a local table and History_TOD is stored in the database at the top of the hour.
"""
work = False
if datetime.now().time().hour == (self.pastHour+1)%24 and not self.recorded:
work = True
self.recorded = True
self.pastHour = (self.pastHour+1)%24
#DB call
elif datetime.now().time().hour == self.pastHour and self.recorded:
self.recorded = False
for value, target in records:
LOGGER.info("%s [%s]: %s", self.name, target, value)
if value is None:
self.notify('critical', value, target)
continue
if target not in self.historicValues:
self.historicValues[target] = (value, 1)
else:
self.historicValues[target] = (self.historicValues[target][0]+value, self.historicValues[target][1]+1)
if (self.first or work) and not value is None and target in self.historicValues:
self.first = False
conn = psycopg2.connect(self.reactor.options.get('database'))
cur = conn.cursor()
### Pull new history_TOD data by averaging database data ###
cur.execute("SELECT * FROM history where day >= date %s - integer \' %s \' AND day < date %s AND query LIKE %s AND hour LIKE %s;", (str(datetime.now().date()),self.history_TOD_size, str(datetime.now().date()), target, str(datetime.now().time().hour)))
lista = cur.fetchall()
count = 0
total = 0
for item in lista:
count += 1
total += float(item[1])
if count > 0:
total /= count
self.history_TOD_value[target] = total
else:
LOGGER.error("No history data for %s" % target)
conn.commit()
cur.close()
conn.close()
for rule in self.rules:
rvalue = self.get_value_for_rule(rule, target)
if rvalue is None:
continue
if rule['op'](value, rvalue):
self.notify(rule['level'], value, target, rule=rule)
break
else:
self.notify('normal', value, target, rule=rule)
# INSERT DAILY STUFF HERE #
if work and not value is None and target in self.historicValues:
conn = psycopg2.connect(self.reactor.options.get('database'))
cur = conn.cursor()
LOGGER.info("datebase call made");
### Insert Hourly Data into database ###
cur.execute("INSERT INTO history (query, value, day, hour) VALUES (%s, %s, %s, %s);", (target, self.historicValues[target][0]/self.historicValues[target][1] , str(datetime.now().date()), str(datetime.now().time().hour)))
### If the start of a day, insert average of past day's data into database ###
if datetime.now().time().hour == 0:
cur.execute("SELECT * FROM history WHERE day == date %s - integer \' 1 \'AND query LIKE %s;", (str(datetime.now().date()) ,target))
lista = cur.fetchall()
count = 0
total = 0
for item in lista:
count += 1
total += float(item[1])
if count > 0:
total /= count
cur.execute("INSERT INTO history (query, value, day, hour) VALUES (%s, %s, date %s - integer \' 1 \', %s);", (target, total , str(datetime.now().date()), 24))
### Commit Changes. Database calls done ###
conn.commit()
cur.close()
conn.close()
del self.historicValues[target]
#database call#
self.history[target].append(value)
def get_value_for_rule(self, rule, target):
rvalue = rule['value']
if rvalue == HISTORICAL:
history = self.history[target]
if len(history) < self.history_size:
return None
rvalue = sum(history) / len(history)
if rvalue == HISTORICAL_TOD:
try:
rvalue = self.history_TOD_value[target]
except KeyError:
LOGGER.error("KeyError for %s, No Historical Data" % target)
return None
rvalue = rule['mod'](rvalue)
return rvalue
def notify(self, level, value, target=None, ntype=None, rule=None):
""" Notify main reactor about event. """
# Did we see the event before?
if target in self.state and level == self.state[target]:
return False
# Do we see the event first time?
if target not in self.state and level == 'normal' \
and not self.reactor.options['send_initial']:
return False
self.state[target] = level
return self.reactor.notify(level, self, value, target=target, ntype=ntype, rule=rule)
def load(self):
raise NotImplementedError()
class GraphiteAlert(BaseAlert):
source = 'graphite'
def configure(self, **options):
super(GraphiteAlert, self).configure(**options)
self.method = options.get('method', self.reactor.options['method'])
assert self.method in METHODS, "Method is invalid"
self.auth_username = self.reactor.options.get('auth_username')
self.auth_password = self.reactor.options.get('auth_password')
query = escape.url_escape(self.query)
self.url = "%(base)s/render/?target=%(query)s&rawData=true&from=-%(time_window)s" % {
'base': self.reactor.options['graphite_url'], 'query': query,
'time_window': self.time_window}
LOGGER.debug('%s: url = %s' % (self.name, self.url))
@gen.coroutine
def load(self):
LOGGER.debug('%s: start checking: %s' % (self.name, self.query))
if self.waiting:
LOGGER.debug('process takes too much time')
# self.notify('warning', 'Process takes too much time', target='waiting', ntype='common')
else:
self.waiting = True
try:
response = yield self.client.fetch(self.url, auth_username=self.auth_username,
auth_password=self.auth_password,
request_timeout=self.request_timeout)
records = (GraphiteRecord(line.decode('utf-8')) for line in response.buffer)
data = [(None if record.empty else getattr(record, self.method), record.target) for record in records]
if len(data) == 0:
raise ValueError('No data')
self.check(data)
self.notify('normal', 'Metrics are loaded', target='loading', ntype='common')
except Exception as e:
self.notify('critical', 'Loading error: %s' % e, target='loading', ntype='common')
self.waiting = False
def get_graph_url(self, target, graphite_url=None):
query = escape.url_escape(target)
return "%(base)s/render/?target=%(query)s&from=-%(time_window)s" % {
'base': graphite_url or self.reactor.options['graphite_url'], 'query': query,
'time_window': self.time_window}
class URLAlert(BaseAlert):
source = 'url'
@gen.coroutine
def load(self):
LOGGER.debug('%s: start checking: %s' % (self.name, self.query))
if self.waiting:
self.notify('warning', 'Process takes too much time', target='waiting', ntype='common')
else:
self.waiting = True
try:
response = yield self.client.fetch(self.query,
method=self.options.get('method', 'GET'),
request_timeout=self.request_timeout)
self.check([(response.code, self.query)])
self.notify('normal', 'Metrics are loaded', target='loading')
except Exception as e:
self.notify('critical', str(e), target='loading')
self.waiting = False
| ViaSat/graphite-beacon | graphite_beacon/alerts.py | Python | mit | 12,919 |
#!/bin/python
# gofed-ng - Golang system
# Copyright (C) 2016 Fridolin Pokorny, fpokorny@redhat.com
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# ####################################################################
import sys
import os
def service_path2service_name(service_path):
basename = os.path.basename(service_path)
return basename[:-len('.py')]
if __name__ == "__main__":
sys.exit(1)
| gofed/gofed-ng | testsuite/helpers/utils.py | Python | gpl-3.0 | 1,073 |
#!/usr/bin/env python
# Copyright (C) 2015 Tadej Stajner <tadej@tdj.si>
# License: 3-clause BSD
from setuptools import setup
setup(name='autokit',
version='0.1',
description='autokit - machine learning for busy people',
author='Tadej Stajner',
author_email='tadej@tdj.si',
url='https://github.com/tadejs/autokit',
packages=['autokit'],
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Science/Research',
'Intended Audience :: Developers',
'Environment :: Console',
'License :: OSI Approved :: BSD License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: Microsoft :: Windows',
'Operating System :: POSIX',
'Operating System :: Unix',
'Programming Language :: Python',
'Topic :: Scientific/Engineering',
'Topic :: Software Development',
],
platforms=['Linux', 'OS-X', 'Windows'],
dependency_links = ['https://github.com/hyperopt/hyperopt-sklearn/tarball/master#egg=hyperopt-sklearn-0.0.1'],
install_requires = [
'numpy',
'scipy',
'scikit-learn',
'networkx',
'hyperopt'],
)
| aruneral01/autokit | setup.py | Python | mit | 1,216 |
"""Get details for a virtual server."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
from SoftLayer import utils
@click.command()
@click.argument('identifier')
@click.option('--passwords',
is_flag=True,
help='Show passwords (check over your shoulder!)')
@click.option('--price', is_flag=True, help='Show associated prices')
@environment.pass_env
def cli(env, identifier, passwords=False, price=False):
"""Get details for a virtual server."""
vsi = SoftLayer.VSManager(env.client)
table = formatting.KeyValueTable(['name', 'value'])
table.align['name'] = 'r'
table.align['value'] = 'l'
vs_id = helpers.resolve_id(vsi.resolve_ids, identifier, 'VS')
result = vsi.get_instance(vs_id)
result = utils.NestedDict(result)
table.add_row(['id', result['id']])
table.add_row(['guid', result['globalIdentifier']])
table.add_row(['hostname', result['hostname']])
table.add_row(['domain', result['domain']])
table.add_row(['fqdn', result['fullyQualifiedDomainName']])
table.add_row(['status', formatting.FormattedItem(
result['status']['keyName'] or formatting.blank(),
result['status']['name'] or formatting.blank()
)])
table.add_row(['state', formatting.FormattedItem(
utils.lookup(result, 'powerState', 'keyName'),
utils.lookup(result, 'powerState', 'name'),
)])
table.add_row(['active_transaction', formatting.active_txn(result)])
table.add_row(['datacenter',
result['datacenter']['name'] or formatting.blank()])
operating_system = utils.lookup(result,
'operatingSystem',
'softwareLicense',
'softwareDescription') or {}
table.add_row([
'os',
formatting.FormattedItem(
operating_system.get('version') or formatting.blank(),
operating_system.get('name') or formatting.blank()
)])
table.add_row(['os_version',
operating_system.get('version') or formatting.blank()])
table.add_row(['cores', result['maxCpu']])
table.add_row(['memory', formatting.mb_to_gb(result['maxMemory'])])
table.add_row(['public_ip',
result['primaryIpAddress'] or formatting.blank()])
table.add_row(['private_ip',
result['primaryBackendIpAddress'] or formatting.blank()])
table.add_row(['private_only', result['privateNetworkOnlyFlag']])
table.add_row(['private_cpu', result['dedicatedAccountHostOnlyFlag']])
table.add_row(['created', result['createDate']])
table.add_row(['modified', result['modifyDate']])
if utils.lookup(result, 'billingItem') != []:
table.add_row(['owner', formatting.FormattedItem(
utils.lookup(result, 'billingItem', 'orderItem',
'order', 'userRecord',
'username') or formatting.blank(),
)])
else:
table.add_row(['owner', formatting.blank()])
vlan_table = formatting.Table(['type', 'number', 'id'])
for vlan in result['networkVlans']:
vlan_table.add_row([
vlan['networkSpace'], vlan['vlanNumber'], vlan['id']])
table.add_row(['vlans', vlan_table])
if result.get('notes'):
table.add_row(['notes', result['notes']])
if price:
table.add_row(['price rate',
result['billingItem']['recurringFee']])
if passwords:
pass_table = formatting.Table(['username', 'password'])
for item in result['operatingSystem']['passwords']:
pass_table.add_row([item['username'], item['password']])
table.add_row(['users', pass_table])
table.add_row(['tags', formatting.tags(result['tagReferences'])])
# Test to see if this actually has a primary (public) ip address
try:
if not result['privateNetworkOnlyFlag']:
ptr_domains = env.client.call(
'Virtual_Guest', 'getReverseDomainRecords',
id=vs_id,
)
for ptr_domain in ptr_domains:
for ptr in ptr_domain['resourceRecords']:
table.add_row(['ptr', ptr['data']])
except SoftLayer.SoftLayerAPIError:
pass
env.fout(table)
| underscorephil/softlayer-python | SoftLayer/CLI/virt/detail.py | Python | mit | 4,441 |
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
from . import product_template
from . import product_variant
| OCA/product-variant | product_variant_inactive/models/__init__.py | Python | agpl-3.0 | 131 |
# Copyright 2014-2019 The ODL contributors
#
# This file is part of ODL.
#
# This Source Code Form is subject to the terms of the Mozilla Public License,
# v. 2.0. If a copy of the MPL was not distributed with this file, You can
# obtain one at https://mozilla.org/MPL/2.0/.
"""Testing utilities."""
from __future__ import absolute_import, division, print_function
import os
import sys
import warnings
from builtins import object
from contextlib import contextmanager
from time import time
import numpy as np
from future.moves.itertools import zip_longest
from odl.util.utility import is_string, run_from_ipython
__all__ = (
'dtype_ndigits',
'dtype_tol',
'all_equal',
'all_almost_equal',
'is_subdict',
'skip_if_no_pyfftw',
'skip_if_no_pywavelets',
'simple_fixture',
'noise_array',
'noise_element',
'noise_elements',
'fail_counter',
'timer',
'timeit',
'ProgressBar',
'ProgressRange',
'test',
'run_doctests',
'test_file',
)
def _ndigits(a, b, default=None):
"""Return number of expected correct digits comparing ``a`` and ``b``.
The returned number is the minimum `dtype_ndigits` of the two objects.
See Also
--------
dtype_ndigits
"""
dtype1 = getattr(a, 'dtype', object)
dtype2 = getattr(b, 'dtype', object)
return min(dtype_ndigits(dtype1, default), dtype_ndigits(dtype2, default))
def dtype_ndigits(dtype, default=None):
"""Return the number of correct digits expected for a given dtype.
This is intended as a somewhat generous default (relative) precision for
results of more or less stable computations.
Returned numbers:
- ``np.float16``: ``1``
- ``np.float32`` or ``np.complex64``: ``3``
- Others: ``default`` if given, otherwise ``5``
See Also
--------
dtype_tol : Same precision expressed as tolerance
"""
small_dtypes = [np.float32, np.complex64]
tiny_dtypes = [np.float16]
if dtype in tiny_dtypes:
return 1
elif dtype in small_dtypes:
return 3
else:
return default if default is not None else 5
def dtype_tol(dtype, default=None):
"""Return a tolerance for a given dtype.
This is intended as a somewhat generous default (relative) tolerance for
results of more or less stable computations.
Returned numbers:
- ``np.float16``: ``1e-1``
- ``np.float32`` or ``np.complex64``: ``1e-3``
- Others: ``default`` if given, otherwise ``1e-5``
See Also
--------
dtype_ndigits : Same tolerance expressed in number of digits.
"""
return 10 ** -dtype_ndigits(dtype, default)
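# Hedged illustration (example helper for documentation only, not used
# elsewhere in the module): dtype_ndigits and dtype_tol express the same
# expected precision, once as a digit count and once as a tolerance.
def _example_dtype_precision():
    assert dtype_ndigits(np.float16) == 1
    assert dtype_ndigits(np.float32) == 3
    assert dtype_tol(np.float32) == 10 ** -3
    assert dtype_tol(np.float64) == 10 ** -5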
def all_equal(iter1, iter2):
"""Return ``True`` if all elements in ``a`` and ``b`` are equal."""
# Direct comparison for scalars, tuples or lists
try:
if iter1 == iter2:
return True
except ValueError: # Raised by NumPy when comparing arrays
pass
# Special case for None
if iter1 is None and iter2 is None:
return True
# If one nested iterator is exhausted, go to direct comparison
try:
it1 = iter(iter1)
it2 = iter(iter2)
except TypeError:
try:
return iter1 == iter2
except ValueError: # Raised by NumPy when comparing arrays
return False
diff_length_sentinel = object()
# Compare element by element and return False if the sequences have
# different lengths
for [ip1, ip2] in zip_longest(it1, it2,
fillvalue=diff_length_sentinel):
# Verify that none of the lists has ended (then they are not the
# same size)
if ip1 is diff_length_sentinel or ip2 is diff_length_sentinel:
return False
if not all_equal(ip1, ip2):
return False
return True
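# Illustrative example (not called anywhere): all_equal compares nested
# sequences element by element and treats differing lengths as unequal.
def _example_all_equal():
    assert all_equal([1, [2, 3]], (1, (2, 3)))
    assert not all_equal([1, 2], [1, 2, 3])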
def all_almost_equal_array(v1, v2, ndigits):
return np.allclose(v1, v2,
rtol=10 ** -ndigits, atol=10 ** -ndigits,
equal_nan=True)
def all_almost_equal(iter1, iter2, ndigits=None):
"""Return ``True`` if all elements in ``a`` and ``b`` are almost equal."""
try:
if iter1 is iter2 or iter1 == iter2:
return True
except ValueError:
pass
if iter1 is None and iter2 is None:
return True
if hasattr(iter1, '__array__') and hasattr(iter2, '__array__'):
# Only get default ndigits if comparing arrays, need to keep `None`
# otherwise for recursive calls.
if ndigits is None:
ndigits = _ndigits(iter1, iter2, None)
return all_almost_equal_array(iter1, iter2, ndigits)
try:
it1 = iter(iter1)
it2 = iter(iter2)
except TypeError:
if ndigits is None:
ndigits = _ndigits(iter1, iter2, None)
return np.isclose(iter1, iter2,
atol=10 ** -ndigits, rtol=10 ** -ndigits,
equal_nan=True)
diff_length_sentinel = object()
for [ip1, ip2] in zip_longest(it1, it2,
fillvalue=diff_length_sentinel):
# Verify that none of the lists has ended (then they are not the
# same size)
if ip1 is diff_length_sentinel or ip2 is diff_length_sentinel:
return False
if not all_almost_equal(ip1, ip2, ndigits):
return False
return True
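# Illustrative example (not called anywhere): the comparison tolerance follows
# the dtypes involved, defaulting to roughly 5 significant digits.
def _example_all_almost_equal():
    assert all_almost_equal([1.0, 2.0], [1.0, 2.0 + 1e-9])
    assert not all_almost_equal([1.0, 2.0], [1.0, 2.1])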
def is_subdict(subdict, dictionary):
"""Return ``True`` if all items of ``subdict`` are in ``dictionary``."""
return all(item in dictionary.items() for item in subdict.items())
try:
import pytest
except ImportError:
def identity(*args, **kwargs):
if args and callable(args[0]):
return args[0]
else:
return identity
skip_if_no_pyfftw = identity
skip_if_no_pywavelets = identity
else:
# Mark decorators for test parameters
skip_if_no_pyfftw = pytest.mark.skipif(
'not odl.trafos.PYFFTW_AVAILABLE',
reason='pyFFTW not available',
)
skip_if_no_pywavelets = pytest.mark.skipif(
'not odl.trafos.PYWT_AVAILABLE',
reason='PyWavelets not available',
)
def simple_fixture(name, params, fmt=None):
"""Helper to create a pytest fixture using only name and params.
Parameters
----------
name : str
Name of the parameters used for the ``ids`` argument
to `pytest.fixture`.
params : sequence
Values to be taken as parameters in the fixture. They are
used as ``params`` argument to `_pytest.fixtures.fixture`.
Arguments wrapped in a ``pytest.skipif`` decorator are
unwrapped for the generation of the test IDs.
fmt : str, optional
Use this format string for the generation of the ``ids``.
For each value, the id string is generated as ::
fmt.format(name=name, value=value)
hence the format string must use ``{name}`` and ``{value}``.
Default format strings are:
- ``" {name}='{value}' "`` for string parameters,
- ``" {name}={value} "`` for other types.
"""
import _pytest
if fmt is None:
# Use some intelligence to make good format strings
fmt_str = " {name}='{value}' "
fmt_default = " {name}={value} "
ids = []
for p in params:
# TODO: other types of decorators?
if (
isinstance(p, _pytest.mark.MarkDecorator)
and p.name == 'skipif'
):
# Unwrap the wrapped object in the decorator
if is_string(p.args[1]):
ids.append(fmt_str.format(name=name, value=p.args[1]))
else:
ids.append(fmt_default.format(name=name, value=p.args[1]))
else:
if is_string(p):
ids.append(fmt_str.format(name=name, value=p))
else:
ids.append(fmt_default.format(name=name, value=p))
else:
# Use provided `fmt` for everything
ids = [fmt.format(name=name, value=p) for p in params]
wrapper = pytest.fixture(scope='module', ids=ids, params=params)
return wrapper(lambda request: request.param)
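# Hedged usage sketch (illustrative only; the names below are hypothetical):
#   padding = simple_fixture('padding', [0, 1, 2])
#   def test_padding(padding):
#       assert padding in (0, 1, 2)
# creates a module-scoped pytest fixture whose test ids read " padding=0 ",
# " padding=1 " and " padding=2 ".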
# Helpers to generate data
def noise_array(space):
"""Generate a white noise array that is compatible with ``space``.
The array contains white noise with standard deviation 1 in the case of
floating point dtypes and uniformly spaced values between -10 and 10 in
the case of integer dtypes.
For product spaces the method is called recursively for all sub-spaces.
Notes
-----
This method is intended for internal testing purposes. For more explicit
example elements see ``odl.phantoms`` and ``LinearSpaceElement.examples``.
Parameters
----------
space : `LinearSpace`
Space from which to derive the array data type and size.
Returns
-------
noise_array : `numpy.ndarray` element
Array with white noise such that ``space.element``'s can be created
from it.
Examples
--------
Create single noise array:
>>> space = odl.rn(3)
>>> array = noise_array(space)
See Also
--------
noise_element
noise_elements
odl.set.space.LinearSpace.examples : Examples of elements
typical to the space.
"""
from odl.space import ProductSpace
if isinstance(space, ProductSpace):
return np.array([noise_array(si) for si in space])
else:
if space.dtype == bool:
arr = np.random.randint(0, 2, size=space.shape, dtype=bool)
elif np.issubdtype(space.dtype, np.unsignedinteger):
arr = np.random.randint(0, 10, space.shape)
elif np.issubdtype(space.dtype, np.signedinteger):
arr = np.random.randint(-10, 10, space.shape)
elif np.issubdtype(space.dtype, np.floating):
arr = np.random.randn(*space.shape)
elif np.issubdtype(space.dtype, np.complexfloating):
arr = (
np.random.randn(*space.shape)
+ 1j * np.random.randn(*space.shape)
) / np.sqrt(2.0)
else:
raise ValueError('bad dtype {}'.format(space.dtype))
return arr.astype(space.dtype, copy=False)
def noise_element(space):
"""Create a white noise element in ``space``.
The element contains white noise with standard deviation 1 in the case of
floating point dtypes and uniformly spaced values between -10 and 10 in
the case of integer dtypes.
For product spaces the method is called recursively for all sub-spaces.
Notes
-----
This method is intended for internal testing purposes. For more explicit
example elements see ``odl.phantoms`` and ``LinearSpaceElement.examples``.
Parameters
----------
space : `LinearSpace`
Space in which to create an element. The
`odl.set.space.LinearSpace.element` method of the space needs to
accept input of `numpy.ndarray` type.
Returns
-------
noise_element : ``space`` element
Examples
--------
Create single noise element:
>>> space = odl.rn(3)
>>> vector = noise_element(space)
See Also
--------
noise_array
noise_elements
odl.set.space.LinearSpace.examples : Examples of elements typical
to the space.
"""
return space.element(noise_array(space))
def noise_elements(space, n=1):
"""Create a list of ``n`` noise arrays and elements in ``space``.
The arrays contain white noise with standard deviation 1 in the case of
floating point dtypes and uniformly spaced values between -10 and 10 in
the case of integer dtypes.
The returned elements have the same values as the arrays.
For product spaces the method is called recursively for all sub-spaces.
Notes
-----
This method is intended for internal testing purposes. For more explicit
example elements see ``odl.phantoms`` and ``LinearSpaceElement.examples``.
Parameters
----------
space : `LinearSpace`
Space in which to create an element. The
`odl.set.space.LinearSpace.element` method of the space needs to
accept input of `numpy.ndarray` type.
n : int, optional
Number of elements to create.
Returns
-------
arrays : `numpy.ndarray` or tuple of `numpy.ndarray`
A single array if ``n == 1``, otherwise a tuple of arrays.
elements : ``space`` element or tuple of ``space`` elements
A single element if ``n == 1``, otherwise a tuple of elements.
Examples
--------
Create single noise element:
>>> space = odl.rn(3)
>>> arr, vector = noise_elements(space)
Create multiple noise elements:
>>> [arr1, arr2], [vector1, vector2] = noise_elements(space, n=2)
See Also
--------
noise_array
noise_element
"""
arrs = tuple(noise_array(space) for _ in range(n))
# Make space elements from arrays
elems = tuple(space.element(arr.copy()) for arr in arrs)
if n == 1:
return tuple(arrs + elems)
else:
return arrs, elems
@contextmanager
def fail_counter(test_name, err_msg=None, logger=print):
"""Used to count the number of failures of something.
Usage::
with fail_counter("my_test") as counter:
# Do stuff
counter.fail()
When done, it prints ::
my_test
*** FAILED 1 TEST CASE(S) ***
"""
class _FailCounter(object):
def __init__(self):
self.num_failed = 0
self.fail_strings = []
def fail(self, string=None):
"""Add failure with reason as string."""
# TODO: possibly limit number of printed strings
self.num_failed += 1
if string is not None:
self.fail_strings.append(str(string))
try:
counter = _FailCounter()
yield counter
finally:
if counter.num_failed == 0:
logger('{:<70}: Completed all test cases.'.format(test_name))
else:
print(test_name)
for fail_string in counter.fail_strings:
print(fail_string)
if err_msg is not None:
print(err_msg)
print('*** FAILED {} TEST CASE(S) ***'.format(counter.num_failed))
@contextmanager
def timer(name=None):
"""A timer context manager.
Usage::
with timer('name'):
# Do stuff
Prints the time stuff took to execute.
"""
if name is None:
name = "Elapsed"
try:
tstart = time()
yield
finally:
time_str = '{:.3f}'.format(time() - tstart)
print('{:>30s} : {:>10s} '.format(name, time_str))
def timeit(arg):
"""A timer decorator.
Usage::
@timeit
def myfunction(...):
...
@timeit('info string')
def myfunction(...):
...
"""
if callable(arg):
def timed_function(*args, **kwargs):
with timer(str(arg)):
return arg(*args, **kwargs)
return timed_function
else:
def _timeit_helper(func):
def timed_function(*args, **kwargs):
with timer(arg):
return func(*args, **kwargs)
return timed_function
return _timeit_helper
class ProgressBar(object):
"""A simple command-line progress bar.
Usage:
>>> progress = ProgressBar('Reading data', 10)
\rReading data: [ ] Starting
>>> progress.update(4) #halfway, zero indexing
\rReading data: [############### ] 50.0%
Multi-indices, from slowest to fastest:
>>> progress = ProgressBar('Reading data', 10, 10)
\rReading data: [ ] Starting
>>> progress.update(9, 8)
\rReading data: [############################# ] 99.0%
Supports simply calling update, which moves the counter forward:
>>> progress = ProgressBar('Reading data', 10, 10)
\rReading data: [ ] Starting
>>> progress.update()
\rReading data: [ ] 1.0%
"""
def __init__(self, text='progress', *njobs):
"""Initialize a new instance."""
self.text = str(text)
if len(njobs) == 0:
raise ValueError('need to provide at least one job')
self.njobs = njobs
self.current_progress = 0.0
self.index = 0
self.done = False
self.start()
def start(self):
"""Print the initial bar."""
sys.stdout.write('\r{0}: [{1:30s}] Starting'.format(self.text,
' ' * 30))
sys.stdout.flush()
def update(self, *indices):
"""Update the bar according to ``indices``."""
if indices:
if len(indices) != len(self.njobs):
raise ValueError('number of indices not correct')
self.index = np.ravel_multi_index(indices, self.njobs) + 1
else:
self.index += 1
# Find progress as ratio between 0 and 1
# offset by 1 for zero indexing
progress = self.index / np.prod(self.njobs)
# Write a progressbar and percent
if progress < 1.0:
# Only update on 0.1% intervals
if progress > self.current_progress + 0.001:
sys.stdout.write('\r{0}: [{1:30s}] {2:4.1f}% '.format(
self.text, '#' * int(30 * progress), 100 * progress))
self.current_progress = progress
else: # Special message when done
if not self.done:
sys.stdout.write('\r{0}: [{1:30s}] Done \n'.format(
self.text, '#' * 30))
self.done = True
sys.stdout.flush()
class ProgressRange(object):
"""Simple range sequence with progress bar output"""
def __init__(self, text, n):
"""Initialize a new instance."""
self.current = 0
self.n = n
self.bar = ProgressBar(text, n)
def __iter__(self):
return self
def __next__(self):
if self.current < self.n:
val = self.current
self.current += 1
self.bar.update()
return val
else:
raise StopIteration()
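# Illustrative usage (not executed here because it writes to stdout):
#   for i in ProgressRange('Reading data', 10):
#       pass  # the bar advances once per iteration and finishes with "Done"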
def test(arguments=None):
"""Run ODL tests given by arguments."""
try:
import pytest
except ImportError:
raise ImportError(
'ODL tests cannot be run without `pytest` installed.\n'
'Run `$ pip install [--user] odl[testing]` in order to install '
'`pytest`.'
)
from .pytest_config import collect_ignore
this_dir = os.path.dirname(__file__)
odl_root = os.path.abspath(os.path.join(this_dir, os.pardir, os.pardir))
args = ['{root}/odl'.format(root=odl_root)]
ignores = ['--ignore={}'.format(file) for file in collect_ignore]
args.extend(ignores)
if arguments is not None:
args.extend(arguments)
pytest.main(args)
def run_doctests(skip_if=False, **kwargs):
"""Run all doctests in the current module.
This function calls ``doctest.testmod()``, by default with the options
``optionflags=doctest.NORMALIZE_WHITESPACE`` and
``extraglobs={'odl': odl, 'np': np}``. This can be changed with
keyword arguments.
Parameters
----------
skip_if : bool
For ``True``, skip the doctests in this module.
kwargs :
Extra keyword arguments passed on to the ``doctest.testmod``
function.
"""
from doctest import testmod, NORMALIZE_WHITESPACE, SKIP
from packaging.version import parse as parse_version
import odl
import numpy as np
optionflags = kwargs.pop('optionflags', NORMALIZE_WHITESPACE)
if skip_if:
optionflags |= SKIP
extraglobs = kwargs.pop('extraglobs', {'odl': odl, 'np': np})
if run_from_ipython():
try:
import spyder
except ImportError:
pass
else:
if parse_version(spyder.__version__) < parse_version('3.1.4'):
warnings.warn('A bug with IPython and Spyder < 3.1.4 '
'sometimes causes doctests to fail to run. '
'Please upgrade Spyder or use another '
'interpreter if the doctests do not work.',
RuntimeWarning)
testmod(optionflags=optionflags, extraglobs=extraglobs, **kwargs)
def test_file(file, args=None):
"""Run tests in file with proper default arguments."""
try:
import pytest
except ImportError:
raise ImportError('ODL tests cannot be run without `pytest` installed.'
'\nRun `$ pip install [--user] odl[testing]` in '
'order to install `pytest`.')
if args is None:
args = []
args.extend([str(file.replace('\\', '/')), '-v', '--capture=sys'])
pytest.main(args)
if __name__ == '__main__':
run_doctests()
| kohr-h/odl | odl/util/testutils.py | Python | mpl-2.0 | 21,151 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Daniele Simonetti
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
| OpenNingia/l5r-character-manager | l5rcm/models/advancements/__init__.py | Python | gpl-3.0 | 749 |
# -*- coding: utf-8 -*-
#
# data documentation build configuration file, created by
# sphinx-quickstart on Fri Apr 24 11:56:24 2015.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'data'
copyright = u'2015, Marc Brinkmann'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5.dev1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'datadoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'data.tex', u'data Documentation',
u'Marc Brinkmann', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'data', u'data Documentation',
[u'Marc Brinkmann'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'data', u'data Documentation',
u'Marc Brinkmann', 'data', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
# broken references in python docs
nitpick_ignore = [('py:func', 'file.read')]
| mbr/data | docs/conf.py | Python | mit | 7,972 |
# -*- coding: utf8 -*-
import mechanize
import datetime
from bs4 import BeautifulSoup
import urllib2
import cookielib
import re
import sqlite3
from dateutil.parser import parse
conn=sqlite3.connect('/home/top10.db')
conn.text_factory = lambda x: unicode(x, 'utf-8', 'ignore')
item = [[u'']*3 for x in xrange(50)]
index=0
now=datetime.datetime.now()
currentmonth=now.strftime("%m")
#1.go to site with login
cj = cookielib.CookieJar()
br = mechanize.Browser()
br.set_cookiejar(cj)
br.open('http://broadcamp.com/bbs/login.php')
br.select_form(nr=0)
br.form['mb_id'] = 'okwow123'
br.form['mb_password'] = '239184'
br.submit()
br.open('http://broadcamp.com/bbs/board.php?bo_table=d4')
#2.save scratch results
result=br.response().read()
#3.find max no
first_index= result.find('http://broadcamp.com/bbs/board.php?bo_table=d4&wr_id=')
max_no=result[first_index+53:first_index+57]
#print result[first_index:first_index+57]
#4.find 30 address,title,opendate
for x in range(50):
taddress=result[first_index:first_index+53]+str(int(max_no)-x)
br.open(taddress)
tresult=br.response().read()
title=tresult.split('<title>')[1].split('</title>')
temp=title[0].replace('> 사이트 추천등록 | 브로드캠프','')
item[index][1]=temp
ttresult=tresult.split('<div id=\"view_content\">')[1].split('<!--view_content-->')
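# Descriptive note: the patterns below normalise open dates written as
# '3월 5일', '3/5', '3/ 5', '3 /5' or '3월5일' into an 'M.D' string, which is
# then prefixed with the year and parsed with dateutil further down.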
match=re.search(r'(\d+월 +\d+일)',tresult)
if match:
mnd = match.group(1).replace('월','.').replace('일','')
item[index][2]=mnd.replace(' ','')
match=re.search(r'(\d+/+\d)',tresult)
if match:
mnd = match.group(1)
item[index][2]=mnd.replace(' ','').replace('/','.')
match=re.search(r'(\d+/ +\d)',tresult)
if match:
mnd = match.group(1)
item[index][2]=mnd.replace(' ','').replace('/','.')
match=re.search(r'(\d+ /+\d)',tresult)
if match:
mnd = match.group(1)
item[index][2]=mnd.replace(' ','').replace('/','.')
match=re.search(r'(\d+월+\d+일)',tresult)
if match:
mnd = match.group(1).replace('월','.').replace('일','')
item[index][2]=mnd.replace(' ','')
urls=re.findall('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', ttresult[0])
item[index][2]=now.strftime("%Y")+'.'+item[index][2]
if len(item[index][2])>8 or len(item[index][2])==5:
item[index][2]=''
else:
date_obj=parse(item[index][2])
item[index][2]=date_obj.strftime("%Y.%m.%d")
if (int((item[index][2])[5:7])-int(currentmonth))>=2:
item[index][2]=''
print item[index][2]
for y in range(len(urls)):
if urls[y].find("broadcamp") == -1 and urls[y].find("google") == -1 and urls[y].find("schema") == -1 and urls[y].find("miwit") == -1 and urls[y].find("-") == -1 and urls[y].find("<") == -1 and urls[y].find("image") == -1 and urls[y] != '' and len(urls[y])>10:
item[index][0]=urls[y]
#br.open(urls[y])
#tttresult=br.response().read()
break
conn.execute('insert into top10(address,name,date) values (?,?,?)',item[index])
index+=1
conn.commit()
| okwow123/freebaram | crawler/mech.py | Python | mit | 3,121 |
# Addons: "HitLog"
# ktulho <https://kr.cm/f/p/17624/>
import BigWorld
import ResMgr
import nations
from Avatar import PlayerAvatar
from DestructibleEntity import DestructibleEntity
from Vehicle import Vehicle
from VehicleEffects import DamageFromShotDecoder
from constants import ATTACK_REASON
from constants import ITEM_DEFS_PATH, ARENA_GUI_TYPE, VEHICLE_CLASSES
from gui.battle_control import avatar_getter
from helpers import dependency
from items import _xml
from skeletons.gui.battle_session import IBattleSessionProvider
from vehicle_systems.tankStructure import TankPartIndexes
import xvm_battle.python.battle as battle
import xvm_main.python.config as config
import xvm_main.python.userprefs as userprefs
from xfw.events import registerEvent
from xfw_actionscript.python import *
from xvm_main.python.logger import *
from xvm_main.python.stats import _stat
from xvm_main.python.xvm import l10n
import parser_addon
from xvm.damageLog import HIT_EFFECT_CODES, keyLower, ATTACK_REASONS, RATINGS, VEHICLE_CLASSES_SHORT, ConfigCache
BATTLE_TYPE = {ARENA_GUI_TYPE.UNKNOWN: "unknown",
ARENA_GUI_TYPE.RANDOM: "regular",
ARENA_GUI_TYPE.TRAINING: "training",
ARENA_GUI_TYPE.TUTORIAL: "tutorial",
ARENA_GUI_TYPE.CYBERSPORT: "cybersport",
ARENA_GUI_TYPE.EVENT_BATTLES: "event_battles",
ARENA_GUI_TYPE.RATED_SANDBOX: "rated_sandbox",
ARENA_GUI_TYPE.SANDBOX: "sandbox",
ARENA_GUI_TYPE.FALLOUT_CLASSIC: "fallout_classic",
ARENA_GUI_TYPE.FALLOUT_MULTITEAM: "fallout_multiteam",
ARENA_GUI_TYPE.SORTIE_2: "sortie_2",
ARENA_GUI_TYPE.FORT_BATTLE_2: "fort_battle_2",
ARENA_GUI_TYPE.RANKED: "ranked",
ARENA_GUI_TYPE.BOOTCAMP: "bootcamp",
ARENA_GUI_TYPE.EPIC_RANDOM: "epic_random",
ARENA_GUI_TYPE.EPIC_RANDOM_TRAINING: "epic_random_training",
ARENA_GUI_TYPE.EPIC_BATTLE: "epic_battle",
ARENA_GUI_TYPE.EPIC_TRAINING: "epic_battle"}
HIT_LOG = 'hitLog/'
FORMAT_HISTORY = 'formatHistory'
GROUP_HITS_PLAYER = 'groupHitsByPlayer'
SCROLL_LOG = 'scrollLog'
ADD_TO_END = 'addToEnd'
LINES = 'lines'
MOVE_IN_BATTLE = 'moveInBattle'
HIT_LOG_ENABLED = HIT_LOG + 'enabled'
SHOW_SELF_DAMAGE = HIT_LOG + 'showSelfDamage'
SHOW_ALLY_DAMAGE = HIT_LOG + 'showAllyDamage'
ON_HIT_LOG = 'ON_HIT_LOG'
PILLBOX = 'pillbox'
class HIT_LOG_SECTIONS(object):
LOG = HIT_LOG + 'log/'
ALT_LOG = HIT_LOG + 'logAlt/'
BACKGROUND = HIT_LOG + 'logBackground/'
ALT_BACKGROUND = HIT_LOG + 'logAltBackground/'
SECTIONS = (LOG, ALT_LOG, BACKGROUND, ALT_BACKGROUND)
_config = ConfigCache()
def parser(notParsedStr, macros):
if notParsedStr and macros:
return parser_addon.parser_addon(notParsedStr, macros)
return notParsedStr
def readColor(section, value, xvalue=None):
def getColor(c, v):
for i in c:
if i['value'] > v:
color = i['color']
return '#' + color[2:] if color[:2] == '0x' else color
return None
colors = _config.get('colors/' + section)
if value is not None and colors is not None:
return getColor(colors, value)
elif xvalue is not None:
colors_x = _config.get('colors/x')
return getColor(colors_x, xvalue)
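# Hedged example (the config values here are hypothetical, not taken from a
# real colors section): given colors/foo = [{'value': 20, 'color': '0xFF0000'},
# {'value': 101, 'color': '0x00FF00'}], readColor('foo', 15) yields '#FF0000'
# and readColor('foo', 50) yields '#00FF00', i.e. the first threshold strictly
# above the value wins; when value is None the x-rating fallback colours are used.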
class Macros(dict):
def __init__(self, *a, **kw):
dict.__init__(self, *a, **kw)
self.chooseRating = ''
def setChooseRating(self):
scale = config.networkServicesSettings.scale
name = config.networkServicesSettings.rating
r = '{}_{}'.format(scale, name)
if r in RATINGS:
self.chooseRating = RATINGS[r]['name']
else:
self.chooseRating = 'xwgr' if scale == 'xvm' else 'wgr'
def setCommonMacros(self):
value = g_dataHitLog.data
xwn8 = value.get('xwn8', None)
xwtr = value.get('xwtr', None)
xeff = value.get('xeff', None)
xwgr = value.get('xwgr', None)
self['vehicle'] = value['shortUserString']
self['name'] = value['name']
self['clannb'] = value['clanAbbrev']
self['clan'] = ''.join(['[', value['clanAbbrev'], ']']) if value['clanAbbrev'] else ''
self['level'] = value['level']
self['clanicon'] = value['clanicon']
self['squad-num'] = value['squadnum']
self['alive'] = 'al' if value['isAlive'] else None
self['splash-hit'] = 'splash' if value['splashHit'] else None
self['critical-hit'] = 'crit' if value['criticalHit'] else None
self['wn8'] = value.get('wn8', None)
self['xwn8'] = value.get('xwn8', None)
self['wtr'] = value.get('wtr', None)
self['xwtr'] = value.get('xwtr', None)
self['eff'] = value.get('eff', None)
self['xeff'] = value.get('xeff', None)
self['wgr'] = value.get('wgr', None)
self['xwgr'] = value.get('xwgr', None)
self['xte'] = value.get('xte', None)
self['r'] = '{{%s}}' % self.chooseRating
self['xr'] = '{{%s}}' % self.chooseRating if self.chooseRating[0] == 'x' else '{{x%s}}' % self.chooseRating
self['c:r'] = '{{c:%s}}' % self.chooseRating
self['c:xr'] = '{{c:%s}}' % self.chooseRating if self.chooseRating[0] == 'x' else '{{c:x%s}}' % self.chooseRating
self['c:wn8'] = readColor('wn8', value.get('wn8', None), xwn8)
self['c:xwn8'] = readColor('x', xwn8)
self['c:wtr'] = readColor('wtr', value.get('wtr', None), xwtr)
self['c:xwtr'] = readColor('x', xwtr)
self['c:eff'] = readColor('eff', value.get('eff', None), xeff)
self['c:xeff'] = readColor('x', xeff)
self['c:wgr'] = readColor('wgr', value.get('wgr', None), xwgr)
self['c:xwgr'] = readColor('x', xwgr)
self['c:xte'] = readColor('x', value.get('xte', None))
self['diff-masses'] = value.get('diff-masses', None)
self['nation'] = value.get('nation', None)
self['blownup'] = 'blownup' if value['blownup'] else None
self['vehiclename'] = value.get('attackerVehicleName', None)
self['battletype-key'] = value.get('battletype-key', ARENA_GUI_TYPE.UNKNOWN)
self['dmg-deviation'] = value['damageDeviation'] * 100 if value['damageDeviation'] is not None else None
class DataHitLog(object):
guiSessionProvider = dependency.descriptor(IBattleSessionProvider)
def __init__(self):
self.player = None
self.shells = {}
self.macros = Macros()
self.reset()
self.ammo = None
def reset(self):
self.shellType = None
self.playerVehicleID = None
self.vehHealth = {}
self.vehDead = []
self.shells.clear()
self.macros.clear()
self.totalDamage = 0
self.old_totalDamage = 0
self.isVehicle = True
self.entityNumber = None
self.vehicleID = None
self.intCD = None
self.splashHit = False
self.criticalHit = False
self.compName = 'unknown'
self.battletypeKey = 'unknown'
self.data = {
'damage': 0,
'dmgRatio': 0,
'attackReasonID': 0,
'blownup': False,
# 'hitEffect': None,
'costShell': 'unknown',
'shellKind': None,
'splashHit': False,
'criticalHit': False,
'isAlive': True,
'compName': None,
'attackedVehicleType': 'not_vehicle',
'shortUserString': None,
'level': None,
'nation': None,
'diff-masses': 0,
'name': None,
'clanAbbrev': None,
'clanicon': None,
'squadnum': None,
'teamDmg': 'unknown',
'damageDeviation': None,
'attackerVehicleName': '',
'battletype-key': 'unknown'
}
def updateLabels(self):
self.macros.setCommonMacros()
g_hitLogs.output()
self.splashHit = False
def setRatings(self):
if (_stat.resp is not None) and (self.data['name'] in _stat.resp['players']):
stats = _stat.resp['players'][self.data['name']]
self.data['wn8'] = stats.get('wn8', None)
self.data['xwn8'] = stats.get('xwn8', None)
self.data['wtr'] = stats.get('wtr', None)
self.data['xwtr'] = stats.get('xwtr', None)
self.data['eff'] = stats.get('e', None)
self.data['xeff'] = stats.get('xeff', None)
self.data['wgr'] = stats.get('wgr', None)
self.data['xwgr'] = stats.get('xwgr', None)
self.data['xte'] = stats.get('v').get('xte', None)
def getTeamDmg(self, vInfo):
if self.isVehicle:
if vInfo.team != self.player.team:
return 'enemy-dmg'
return 'player' if vInfo.player.name == self.player.name else 'ally-dmg'
return self.data['teamDmg']
def resetData(self):
self.data['attackedVehicleType'] = 'not_vehicle'
self.data['shortUserString'] = ''
self.data['attackerVehicleName'] = ''
self.data['level'] = None
self.data['nation'] = None
self.data['diff-masses'] = None
self.data['name'] = ''
self.data['clanAbbrev'] = ''
self.data['clanicon'] = None
self.data['squadnum'] = None
self.data['wn8'] = None
self.data['xwn8'] = None
self.data['wtr'] = None
self.data['xwtr'] = None
self.data['eff'] = None
self.data['xeff'] = None
self.data['wgr'] = None
self.data['xwgr'] = None
self.data['xte'] = None
self.data['teamDmg'] = 'unknown'
self.data['costShell'] = 'unknown'
self.data['shellKind'] = 'not_shell'
self.data['damageDeviation'] = None
def updateData(self):
maxHealth = self.vehHealth[self.vehicleID]['maxHealth'] if self.vehicleID in self.vehHealth else 0
self.data['dmgRatio'] = self.data['damage'] * 100 // maxHealth if maxHealth != 0 else 0
if self.vehicleID:
attacked = self.player.arena.vehicles.get(self.vehicleID)
if attacked is not None:
vehicleType = attacked['vehicleType']
self.data['name'] = attacked['name']
self.data['clanAbbrev'] = attacked['clanAbbrev']
if vehicleType:
_type = vehicleType.type
self.data['attackedVehicleType'] = list(_type.tags.intersection(VEHICLE_CLASSES))[0]
self.data['attackerVehicleName'] = vehicleType.name.replace(':', '-', 1) if vehicleType.name else ''
self.data['shortUserString'] = _type.shortUserString
self.data['level'] = vehicleType.level
self.data['nation'] = nations.NAMES[_type.customizationNationID]
if self.data['attackReasonID'] == 2:
self.data['diff-masses'] = (self.player.vehicleTypeDescriptor.physics['weight'] - vehicleType.physics['weight']) / 1000.0
self.setRatings()
elif not self.isVehicle:
self.data['shortUserString'] = l10n(PILLBOX).format(self.entityNumber)
self.compName = None
self.criticalHit = None
self.data['clanicon'] = _stat.getClanIcon(self.vehicleID)
arenaDP = self.guiSessionProvider.getArenaDP()
if arenaDP is not None:
vInfo = arenaDP.getVehicleInfo(vID=self.vehicleID)
self.data['squadnum'] = vInfo.squadIndex if vInfo.squadIndex != 0 else None
self.data['teamDmg'] = self.getTeamDmg(vInfo)
self.data['splashHit'] = self.splashHit
self.data['criticalHit'] = self.criticalHit
self.data['compName'] = self.compName
self.data['battletype-key'] = self.battletypeKey
self.updateLabels()
def loaded(self):
self.intCD = self.ammo.getCurrentShellCD()
def setParametersShot(self):
if self.intCD is not None:
_shells = self.shells[self.intCD]
self.data['shellKind'] = _shells['shellKind']
self.data['costShell'] = _shells['costShell']
def getDamageDeviation(self, newHealth):
result = None
if newHealth > 0 and self.intCD in self.shells:
_shells = self.shells[self.intCD]
result = (self.data['damage'] - _shells['shellDamage']) / float(_shells['shellDamage'])
if (_shells['shellKind'] in ['high_explosive', 'armor_piercing_he']) and (result < -0.25):
result = 0.0
return result
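# Illustrative note: the deviation is expressed relative to the shell's listed
# damage, e.g. a 300 HP hit from a shell listed at 240 HP gives
# (300 - 240) / 240.0 == 0.25; for HE-type shells a deviation below -0.25 is
# clamped to 0.0, and None is returned when the target was destroyed or the
# shell is unknown.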
def onHealthChanged(self, vehicle, newHealth, attackerID, attackReasonID, isVehicle=True):
self.resetData()
self.isVehicle = isVehicle
self.vehicleID = vehicle.id
self.data['isAlive'] = vehicle.isAlive()
if attackReasonID < 8:
self.data['attackReasonID'] = attackReasonID
elif attackReasonID in [9, 10, 13, 24]:
self.data['attackReasonID'] = 24
elif attackReasonID in [11, 14, 25]:
self.data['attackReasonID'] = 25
self.data['blownup'] = newHealth <= -5
newHealth = max(0, newHealth)
self.data['damage'] = (self.vehHealth[vehicle.id]['health'] - newHealth) if vehicle.id in self.vehHealth else (- newHealth)
if self.data['attackReasonID'] != 0:
self.criticalHit = False
self.splashHit = False
self.compName = None
else:
self.setParametersShot()
self.data['damageDeviation'] = self.getDamageDeviation(newHealth)
if not self.isVehicle:
self.entityNumber = vehicle.destructibleEntityID
self.data['teamDmg'] = 'ally-dmg' if vehicle.isPlayerTeam else 'enemy-dmg'
self.data['shortUserString'] = l10n(PILLBOX).format(self.entityNumber)
self.updateData()
def showDamageFromShot(self, vehicle, attackerID, points, effectsIndex, damageFactor):
maxComponentIdx = TankPartIndexes.ALL[-1]
wheelsConfig = vehicle.appearance.typeDescriptor.chassis.generalWheelsAnimatorConfig
if wheelsConfig:
maxComponentIdx += wheelsConfig.getWheelsCount()
maxHitEffectCode, decodedPoints, maxDamagedComponent = DamageFromShotDecoder.decodeHitPoints(points, vehicle.appearance.collisions, maxComponentIdx)
if decodedPoints:
compName = decodedPoints[0].componentName
self.compName = compName if compName[0] != 'W' else 'wheel'
else:
self.compName = 'unknown'
self.criticalHit = (maxHitEffectCode == 5)
def onEnterWorld(self, vehicle):
self.macros.setChooseRating()
self.player = BigWorld.player()
self.playerVehicleID = self.player.playerVehicleID
self.ammo = self.guiSessionProvider.shared.ammo
shots = vehicle.typeDescriptor.gun.shots
nation = nations.NAMES[vehicle.typeDescriptor.type.id[0]]
xmlPath = '%s%s%s%s' % (ITEM_DEFS_PATH, 'vehicles/', nation, '/components/shells.xml')
xmlCtx_s = (((None, '{}/{}'.format(xmlPath, n)), s) for n, s in ResMgr.openSection(xmlPath).items() if (n != 'icons') and (n != 'xmlns:xmlref'))
goldShells = [_xml.readInt(xmlCtx, s, 'id', 0, 65535) for xmlCtx, s in xmlCtx_s if s.readBool('improved', False)]
for shot in shots:
shell = shot.shell
intCD = shell.compactDescr
self.shells[intCD] = {}
self.shells[intCD]['shellKind'] = shell.kind.lower()
self.shells[intCD]['shellDamage'] = shell.damage[0]
self.shells[intCD]['costShell'] = 'gold-shell' if shell.id[1] in goldShells else 'silver-shell'
ResMgr.purge(xmlPath, True)
arena = avatar_getter.getArena()
self.battletypeKey = BATTLE_TYPE.get(arena.guiType, ARENA_GUI_TYPE.UNKNOWN)
def updateVehInfo(self, vehicle):
if vehicle.id not in self.vehHealth:
self.vehHealth[vehicle.id] = {}
self.vehHealth[vehicle.id]['health'] = int(vehicle.health)
self.vehHealth[vehicle.id]['maxHealth'] = int(vehicle.maxHealth) if isinstance(vehicle, DestructibleEntity) else vehicle.typeDescriptor.maxHealth
if not vehicle.isAlive() and vehicle.id not in self.vehDead:
self.vehDead.append(vehicle.id)
g_dataHitLog = DataHitLog()
class GroupHit(object):
def __init__(self, section):
self.section = section
self._listLog = []
self.numberTopLine = 0
self.players = {}
self.countLines = 0
self.maxCountLines = None
self.isAddToEnd = False
self.S_LINES = section + LINES
self.S_ADD_TO_END = section + ADD_TO_END
self.S_FORMAT_HISTORY = section + FORMAT_HISTORY
self.ATTACK_REASON_FIRE_ID = ATTACK_REASON.getIndex(ATTACK_REASON.FIRE)
self.ATTACK_REASON_RAM_ID = ATTACK_REASON.getIndex(ATTACK_REASON.RAM)
self.attackReasonID = 0
self.damage = 0
self.__damageRatio = 0
self.vehID = 0
self.__hitLogConfig = {}
def mouse_wheel(self, isScrollUp):
if isScrollUp:
if self.numberTopLine < len(self._listLog):
self.numberTopLine += 1
return True
else:
if self.numberTopLine > 0:
self.numberTopLine -= 1
return True
def removePlayer(self, vehID):
if vehID in self.players:
del self.players[vehID]
def sumDmg(self):
player = self.players[self.vehID]
player['dmg-player'] += self.damage
if self.attackReasonID not in player['dmg-kind-player']:
player['dmg-kind-player'].append(self.attackReasonID)
maxHealth = g_dataHitLog.vehHealth[self.vehID]['maxHealth'] if self.vehID in g_dataHitLog.vehHealth else 0
player['dmg-ratio-player'] = (player['dmg-player'] * 100 // maxHealth) if maxHealth != 0 else 0
player['dmg-ratio'] = (player['damage'] * 100 // maxHealth) if maxHealth != 0 else 0
def readyConfig(self):
if config.config_autoreload or not self.__hitLogConfig:
self.__hitLogConfig = {
'vehicleClass': keyLower(_config.get(self.section + 'vtype')),
'c_shell': keyLower(_config.get(self.section + 'c:costShell')),
'costShell': keyLower(_config.get(self.section + 'costShell')),
'c_dmg-kind': keyLower(_config.get(self.section + 'c:dmg-kind')),
'c_vehicleClass': keyLower(_config.get(self.section + 'c:vtype')),
'dmg-kind': keyLower(_config.get(self.section + 'dmg-kind')),
'dmg-kind-player': keyLower(_config.get(self.section + 'dmg-kind-player')),
'c_teamDmg': keyLower(_config.get(self.section + 'c:team-dmg')),
'teamDmg': keyLower(_config.get(self.section + 'team-dmg')),
'compNames': keyLower(_config.get(self.section + 'comp-name')),
'typeShell': keyLower(_config.get(self.section + 'type-shell')),
'c_typeShell': keyLower(_config.get(self.section + 'c:type-shell'))
}
return self.__hitLogConfig
def setParametrsHitLog(self):
self.countLines = len(self._listLog)
self.attackReasonID = g_dataHitLog.data['attackReasonID']
self.damage = g_dataHitLog.data['damage']
self.__damageRatio = g_dataHitLog.data['dmgRatio']
self.vehID = g_dataHitLog.vehicleID
try:
macro = {'battletype-key': g_dataHitLog.battletypeKey}
self.maxCountLines = int(parser(_config.get(self.S_LINES, 7), macro))
except TypeError:
self.maxCountLines = 7
self.isAddToEnd = _config.get(self.S_ADD_TO_END, False)
def udateMacros(self):
data = g_dataHitLog.macros
conf = self.readyConfig()
player = self.players[self.vehID]
value = g_dataHitLog.data
data['c:team-dmg'] = conf['c_teamDmg'].get(value['teamDmg'], '#FFFFFF')
data['team-dmg'] = conf['teamDmg'].get(value['teamDmg'], '')
data['vtype'] = conf['vehicleClass'].get(VEHICLE_CLASSES_SHORT[value['attackedVehicleType']], '')
data['c:costShell'] = conf['c_shell'].get(value['costShell'], None)
data['costShell'] = conf['costShell'].get(value['costShell'], None)
data['c:dmg-kind'] = conf['c_dmg-kind'][ATTACK_REASONS[value['attackReasonID']]]
data['dmg-kind'] = conf['dmg-kind'].get(ATTACK_REASONS[value['attackReasonID']], 'reason: %s' % value['attackReasonID'])
data['dmg-kind-player'] = ''.join([conf['dmg-kind-player'].get(ATTACK_REASONS[i], None) for i in player.get('dmg-kind-player', [])])
data['c:vtype'] = conf['c_vehicleClass'].get(VEHICLE_CLASSES_SHORT[value['attackedVehicleType']], '#CCCCCC')
data['comp-name'] = conf['compNames'].get(value['compName'], None)
data['type-shell'] = conf['typeShell'].get(value['shellKind'], 'not_shell')
data['type-shell-key'] = value['shellKind'] if value['shellKind'] is not None else 'not_shell'
data['c:type-shell'] = conf['c_typeShell'].get(value['shellKind'], None)
data['dmg'] = player['damage']
data['dmg-ratio'] = player['dmg-ratio']
data['n-player'] = player.get('n-player', 0)
data['dmg-player'] = player.get('dmg-player', 0)
data['dmg-ratio-player'] = player.get('dmg-ratio-player', 0)
data['c:dmg-ratio-player'] = readColor('dmg_ratio_player', player.get('dmg-ratio-player', None))
return data
def reset(self):
self.players.clear()
self._listLog[:] = []
self.numberTopLine = 0
self.countLines = 0
self.maxCountLines = None
def addAttackReasonID(self):
return {'damage': self.damage,
'time': BigWorld.time(),
'numberLine': self.countLines if self.isAddToEnd else -1}
def addPlayer(self):
return {'dmg-player': self.damage,
'dmg-ratio-player': self.__damageRatio,
'n-player': 1,
'damage': self.damage,
'dmg-ratio': self.__damageRatio,
'numberLine': 0,
'dmg-kind-player': [self.attackReasonID]}
class GroupHitByPlayer(GroupHit):
APPEND = 0
CHANGE = 1
INSERT = 2
def __init__(self, section):
super(GroupHitByPlayer, self).__init__(section)
self._listLogNumber = []
self.prevLineNumber = 0
def reset(self):
super(GroupHitByPlayer, self).reset()
self._listLogNumber[:] = []
def updateList(self, mode, numberLine=0):
macros = self.udateMacros()
formattedString = parser(_config.get(self.S_FORMAT_HISTORY, ''), macros)
if mode == self.APPEND:
self._listLog.append(formattedString)
self._listLogNumber.append('')
elif mode == self.INSERT:
self._listLog.insert(0, formattedString)
self._listLogNumber.insert(0, '')
else:
self._listLog[numberLine] = formattedString
def updateGroupFireRamming(self, vehicle):
if self.attackReasonID in [1, 2]:
if self.attackReasonID in vehicle and ((BigWorld.time() - vehicle[self.attackReasonID]['time']) < 1.0):
vehicle[self.attackReasonID]['damage'] += self.damage
vehicle[self.attackReasonID]['time'] = BigWorld.time()
vehicle['damage'] = vehicle[self.attackReasonID]['damage']
else:
vehicle[self.attackReasonID] = self.addAttackReasonID()
vehicle['n-player'] += 1
vehicle['damage'] = self.damage
else:
vehicle['n-player'] += 1
vehicle['damage'] = self.damage
def updatePlayers(self):
vehicle = self.players[self.vehID]
self.prevLineNumber = vehicle['numberLine']
self.updateGroupFireRamming(vehicle)
self.sumDmg()
if self.isAddToEnd:
if vehicle['numberLine'] == self.countLines - 1:
self.updateList(self.CHANGE, vehicle['numberLine'])
else:
self._listLog.pop(vehicle['numberLine'])
self._listLogNumber.pop(vehicle['numberLine'])
for v in self.players.itervalues():
if v['numberLine'] > vehicle['numberLine']:
v['numberLine'] -= 1
vehicle['numberLine'] = self.countLines - 1
self.updateList(self.APPEND)
else:
if vehicle['numberLine'] == 0:
self.updateList(self.CHANGE)
else:
self._listLog.pop(vehicle['numberLine'])
self._listLogNumber.pop(vehicle['numberLine'])
for v in self.players.itervalues():
if v['numberLine'] < vehicle['numberLine']:
v['numberLine'] += 1
vehicle['numberLine'] = 0
self.updateList(self.INSERT)
def addPlayers(self):
self.players[self.vehID] = self.addPlayer()
vehicle = self.players[self.vehID]
if self.attackReasonID in [1, 2]:
vehicle[self.attackReasonID] = self.addAttackReasonID()
if self.isAddToEnd:
if self.countLines >= self.maxCountLines:
self.numberTopLine += 1
vehicle['numberLine'] = self.countLines
self.updateList(self.APPEND)
else:
for v in self.players.itervalues():
v['numberLine'] += 1
vehicle['numberLine'] = 0
self.updateList(self.INSERT)
self.prevLineNumber = vehicle['numberLine']
def addLineNumber(self):
newLineNumber = self.players[self.vehID]['numberLine']
start, finish = (self.prevLineNumber, newLineNumber + 1) if self.prevLineNumber < newLineNumber else (newLineNumber, self.prevLineNumber + 1)
length = len(self._listLog)
for number in xrange(start, finish):
_number = number + 1 if self.isAddToEnd else length - number
self._listLogNumber[number] = parser(self._listLog[number], {'number': _number})
def getListLog(self):
self.setParametrsHitLog()
if self.maxCountLines <= 0:
return []
if self.vehID in self.players:
self.updatePlayers()
else:
self.addPlayers()
self.addLineNumber()
return self._listLogNumber
class GroupHitByFireRamming(GroupHit):
DIRECTION_UP = -1
DIRECTION_DOWN = 1
def __init__(self, section):
super(GroupHitByFireRamming, self).__init__(section)
self.isGroup = False
def shiftsLines(self, direction):
for v in self.players.itervalues():
if self.ATTACK_REASON_FIRE_ID in v:
v[self.ATTACK_REASON_FIRE_ID]['numberLine'] += direction
if self.ATTACK_REASON_RAM_ID in v:
v[self.ATTACK_REASON_RAM_ID]['numberLine'] += direction
def udateListLog(self):
macros = self.udateMacros()
if self.isGroup:
player = self.players[self.vehID]
lineNumber = player[self.attackReasonID]['numberLine']
macros['number'] = lineNumber + 1 if self.isAddToEnd else len(self._listLog) - lineNumber
formattedString = parser(_config.get(self.S_FORMAT_HISTORY, ''), macros)
self._listLog[lineNumber] = formattedString
elif self.isAddToEnd:
if self.maxCountLines <= self.countLines:
self.numberTopLine += 1
macros['number'] = self.countLines + 1
formattedString = parser(_config.get(self.S_FORMAT_HISTORY, ''), macros)
self._listLog.append(formattedString)
else:
self.shiftsLines(self.DIRECTION_DOWN)
macros['number'] = self.countLines + 1
formattedString = parser(_config.get(self.S_FORMAT_HISTORY, ''), macros)
self._listLog.insert(0, formattedString)
def updateAttackReasonID(self):
player = self.players[self.vehID]
if self.attackReasonID in player and ((BigWorld.time() - player[self.attackReasonID].get('time', 0)) < 1.0):
paramAttack = player[self.attackReasonID]
self.isGroup = True
paramAttack['damage'] += self.damage
paramAttack['time'] = BigWorld.time()
player['damage'] = paramAttack['damage']
else:
player[self.attackReasonID] = self.addAttackReasonID()
def updatePlayer(self):
self.isGroup = False
if self.vehID in self.players:
player = self.players[self.vehID]
if self.attackReasonID in [1, 2]:
self.updateAttackReasonID()
if not self.isGroup:
player['n-player'] += 1
player['damage'] = self.damage
self.sumDmg()
else:
self.players[self.vehID] = self.addPlayer()
if self.attackReasonID in [1, 2]:
self.players[self.vehID][self.attackReasonID] = self.addAttackReasonID()
def getListLog(self):
self.setParametrsHitLog()
if self.maxCountLines <= 0:
return []
self.updatePlayer()
self.udateListLog()
return self._listLog
class HitLog(object):
def __init__(self, section):
self.section = section
self.listLog = []
self.groupHitByPlayer = GroupHitByPlayer(section)
self.groupHitByFireRamming = GroupHitByFireRamming(section)
self.S_GROUP_HITS_PLAYER = section + GROUP_HITS_PLAYER
self.S_SCROLL_LOG = section + SCROLL_LOG
self.S_MOVE_IN_BATTLE = HIT_LOG_SECTIONS.LOG + MOVE_IN_BATTLE
self.DEFAULT_X = 320
self.DEFAULT_Y = 0
self.S_X = HIT_LOG_SECTIONS.LOG + 'x'
self.S_Y = HIT_LOG_SECTIONS.LOG + 'y'
self.x = 0
self.y = 0
def setPosition(self, battleType):
self._data = None
positon = {'x': _config.get(self.S_X, self.DEFAULT_X), 'y': _config.get(self.S_Y, self.DEFAULT_Y)}
if _config.get(self.S_MOVE_IN_BATTLE, False):
_data = userprefs.get(HIT_LOG_SECTIONS.LOG + '{}'.format(battleType), positon)
as_callback("hitLog_mouseDown", self.mouse_down)
as_callback("hitLog_mouseUp", self.mouse_up)
as_callback("hitLog_mouseMove", self.mouse_move)
else:
_data = positon
self.x = _data['x']
self.y = _data['y']
def savePosition(self, battleType):
if (None not in [self.x, self.y]) and _config.get(self.S_MOVE_IN_BATTLE, False):
userprefs.set(HIT_LOG_SECTIONS.LOG + '{}'.format(battleType), {'x': self.x, 'y': self.y})
def reset(self):
self.listLog[:] = []
self.groupHitByPlayer.reset()
self.groupHitByFireRamming.reset()
def mouse_wheel(self, isScrollUp):
if not _config.get(self.S_SCROLL_LOG, True):
return False
if _config.get(self.S_GROUP_HITS_PLAYER, True):
return self.groupHitByPlayer.mouse_wheel(isScrollUp)
else:
return self.groupHitByFireRamming.mouse_wheel(isScrollUp)
def getLog(self):
if _config.get(self.S_GROUP_HITS_PLAYER, True):
numberTopLine = self.groupHitByPlayer.numberTopLine
maxCountLines = self.groupHitByPlayer.maxCountLines
else:
numberTopLine = self.groupHitByFireRamming.numberTopLine
maxCountLines = self.groupHitByFireRamming.maxCountLines
return [] if maxCountLines is None else self.listLog[numberTopLine:maxCountLines + numberTopLine]
def mouse_down(self, _data):
if _data['buttonIdx'] == 0:
self._data = _data
def mouse_up(self, _data):
if _data['buttonIdx'] == 0:
self._data = None
def mouse_move(self, _data):
if self._data:
self.x += (_data['x'] - self._data['x'])
self.y += (_data['y'] - self._data['y'])
as_event(ON_HIT_LOG)
def updatePosition(self):
if (self.section == HIT_LOG_SECTIONS.LOG) or (self.section == HIT_LOG_SECTIONS.ALT_LOG):
if not _config.get(self.S_MOVE_IN_BATTLE, False):
self.x = parser(_config.get(self.S_X, self.DEFAULT_X), g_dataHitLog.macros)
self.y = parser(_config.get(self.S_Y, self.DEFAULT_Y), g_dataHitLog.macros)
def removePlayer(self, vehID):
self.groupHitByPlayer.removePlayer(vehID)
self.groupHitByFireRamming.removePlayer(vehID)
def output(self):
if _config.get(self.S_GROUP_HITS_PLAYER, True):
self.listLog = self.groupHitByPlayer.getListLog()
else:
self.listLog = self.groupHitByFireRamming.getListLog()
self.updatePosition()
if self.callEvent:
as_event(ON_HIT_LOG)
class HitLogs(object):
def __init__(self):
self.log = HitLog(HIT_LOG_SECTIONS.LOG)
self.logAlt = HitLog(HIT_LOG_SECTIONS.ALT_LOG)
self.logBg = HitLog(HIT_LOG_SECTIONS.BACKGROUND)
self.logAltBg = HitLog(HIT_LOG_SECTIONS.ALT_BACKGROUND)
self.logs = [self.log, self.logAlt, self.logBg, self.logAltBg]
self.isDownAlt = False
as_callback("hitLog_mouseWheel", self.mouse_wheel)
def mouse_wheel(self, _data):
isRefresh = False
isScrollUp = _data['delta'] < 0
for log in self.logs:
isRefresh = log.mouse_wheel(isScrollUp) or isRefresh
if isRefresh:
as_event(ON_HIT_LOG)
def setPosition(self, battleType):
self.log.setPosition(battleType)
def savePosition(self, battleType):
self.log.savePosition(battleType)
def removePlayerFromLogs(self, vehicleID):
for log in self.logs:
log.removePlayer(vehicleID)
def reset(self):
for log in self.logs:
log.reset()
def output(self):
self.log.callEvent = self.logBg.callEvent = not self.isDownAlt
self.logAlt.callEvent = self.logAltBg.callEvent = self.isDownAlt
for log in self.logs:
log.output()
if not g_dataHitLog.data['isAlive']:
log.removePlayer(g_dataHitLog.vehicleID)
def getListLog(self):
if self.isDownAlt:
listLog = self.logAlt.getLog()
else:
listLog = self.log.getLog()
return '\n'.join(listLog) if listLog else None
def getListLogBg(self):
if self.isDownAlt:
listLog = self.logAltBg.getLog()
else:
listLog = self.logBg.getLog()
return '\n'.join(listLog) if listLog else None
g_hitLogs = HitLogs()
@registerEvent(PlayerAvatar, '_PlayerAvatar__processVehicleAmmo')
def PlayerAvatar__processVehicleAmmo(self, vehicleID, compactDescr, quantity, quantityInClip, _, __):
if battle.isBattleTypeSupported and _config.get(HIT_LOG_ENABLED, True):
g_dataHitLog.loaded()
@registerEvent(DestructibleEntity, 'onEnterWorld')
def DestructibleEntity_onEnterWorld(self, prereqs):
if self.isAlive():
g_dataHitLog.updateVehInfo(self)
@registerEvent(DestructibleEntity, 'onHealthChanged')
def DestructibleEntity_onHealthChanged(self, newHealth, attackerID, attackReasonID, hitFlags):
destructibleEntityComponent = BigWorld.player().arena.componentSystem.destructibleEntityComponent
if _config.get(HIT_LOG_ENABLED, True) and battle.isBattleTypeSupported and (destructibleEntityComponent is not None):
if (g_dataHitLog.playerVehicleID == attackerID) and (self.id not in g_dataHitLog.vehDead):
if not self.isPlayerTeam or _config.get(SHOW_ALLY_DAMAGE, True):
g_dataHitLog.onHealthChanged(self, newHealth, attackerID, attackReasonID, False)
g_dataHitLog.updateVehInfo(self)
@registerEvent(Vehicle, 'showDamageFromShot')
def _Vehicle_showDamageFromShot(self, attackerID, points, effectsIndex, damageFactor):
if battle.isBattleTypeSupported and (g_dataHitLog.playerVehicleID == attackerID) and self.isAlive() and _config.get(HIT_LOG_ENABLED, True):
g_dataHitLog.showDamageFromShot(self, attackerID, points, effectsIndex, damageFactor)
@registerEvent(Vehicle, 'showDamageFromExplosion')
def _Vehicle_showDamageFromExplosion(self, attackerID, center, effectsIndex, damageFactor):
if battle.isBattleTypeSupported and (g_dataHitLog.playerVehicleID == attackerID) and self.isAlive() and _config.get(HIT_LOG_ENABLED, True):
g_dataHitLog.splashHit = True
g_dataHitLog.criticalHit = False
@registerEvent(PlayerAvatar, '_PlayerAvatar__onArenaVehicleKilled')
def __onArenaVehicleKilled(self, targetID, attackerID, equipmentID, reason):
if self.playerVehicleID != attackerID:
g_hitLogs.removePlayerFromLogs(targetID)
@registerEvent(Vehicle, 'onEnterWorld')
def _Vehicle_onEnterWorld(self, prereqs):
if _config.get(HIT_LOG_ENABLED, True) and battle.isBattleTypeSupported:
if self.id in g_dataHitLog.vehDead:
g_dataHitLog.vehDead.remove(self.id)
if self.isPlayerVehicle:
g_dataHitLog.onEnterWorld(self)
g_hitLogs.setPosition(g_dataHitLog.battletypeKey)
@registerEvent(Vehicle, 'startVisual')
def _Vehicle_startVisual(self):
if _config.get(HIT_LOG_ENABLED, True) and battle.isBattleTypeSupported:
g_dataHitLog.updateVehInfo(self)
@registerEvent(Vehicle, 'onHealthChanged')
def _Vehicle_onHealthChanged(self, newHealth, attackerID, attackReasonID):
if _config.get(HIT_LOG_ENABLED, True) and battle.isBattleTypeSupported:
if (g_dataHitLog.playerVehicleID == attackerID) and (self.id not in g_dataHitLog.vehDead or newHealth <= -5):
attacked = g_dataHitLog.player.arena.vehicles.get(self.id)
if (g_dataHitLog.player.team != attacked['team']) or _config.get(SHOW_ALLY_DAMAGE, True):
if (self.id != attackerID) or _config.get(SHOW_SELF_DAMAGE, True):
g_dataHitLog.onHealthChanged(self, newHealth, attackerID, attackReasonID)
else:
if (self.id == attackerID) and _config.get(SHOW_SELF_DAMAGE, True):
g_dataHitLog.onHealthChanged(self, newHealth, attackerID, attackReasonID)
g_dataHitLog.updateVehInfo(self)
@registerEvent(Vehicle, 'set_isCrewActive')
def set_isCrewActive(self, prev):
g_dataHitLog.updateVehInfo(self)
@registerEvent(PlayerAvatar, '_PlayerAvatar__destroyGUI')
def PlayerAvatar__destroyGUI(self):
if _config.get(HIT_LOG_ENABLED, True) and battle.isBattleTypeSupported:
g_hitLogs.savePosition(g_dataHitLog.battletypeKey)
g_hitLogs.reset()
g_dataHitLog.reset()
@registerEvent(PlayerAvatar, 'handleKey')
def PlayerAvatar_handleKey(self, isDown, key, mods):
if _config.get(HIT_LOG_ENABLED, True) and battle.isBattleTypeSupported:
hotkey = _config.get('hotkeys/hitLogAltMode')
if hotkey['enabled'] and (key == hotkey['keyCode']):
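            # 'onHold' hotkeys show the alternate log only while the key is held
            # down; otherwise each key press toggles the alternate log on or off.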
if isDown:
if hotkey['onHold']:
if not g_hitLogs.isDownAlt:
g_hitLogs.isDownAlt = True
as_event(ON_HIT_LOG)
else:
g_hitLogs.isDownAlt = not g_hitLogs.isDownAlt
as_event(ON_HIT_LOG)
else:
if hotkey['onHold']:
if g_hitLogs.isDownAlt:
g_hitLogs.isDownAlt = False
as_event(ON_HIT_LOG)
def hLog():
return g_hitLogs.getListLog()
def hLog_bg():
return g_hitLogs.getListLogBg()
def hLog_x():
return g_hitLogs.log.x
def hLog_y():
return g_hitLogs.log.y
| KnechtRootrechtCH/Mimir | content/Mods/XVM/XVM_Base/res_mods_content/configs/xvm/py_macro/xvm/hitLog.py | Python | mit | 39,872 |
import os, re, shutil
internalIp = os.environ['OPENSHIFT_DIY_IP']
runtimeDir = os.environ['OPENSHIFT_HOMEDIR'] + "/app-root/runtime"
repoDir = os.environ['OPENSHIFT_HOMEDIR'] + "/app-root/runtime/repo"
f = open(repoDir + '/misc/templates/httpd.conf.tpl', 'r')
conf = f.read().replace('{{OPENSHIFT_INTERNAL_IP}}', internalIp).replace('{{OPENSHIFT_REPO_DIR}}', repoDir).replace('{{OPENSHIFT_RUNTIME_DIR}}', runtimeDir)
f.close()
f = open(runtimeDir + '/srv/httpd/conf/httpd.conf', 'w')
f.write(conf)
f.close()
f = open(repoDir + '/misc/templates/php.ini.tpl', 'r')
conf = f.read().replace('{{OPENSHIFT_INTERNAL_IP}}', internalIp).replace('{{OPENSHIFT_REPO_DIR}}', repoDir).replace('{{OPENSHIFT_RUNTIME_DIR}}', runtimeDir)
f.close()
f = open(runtimeDir + '/srv/php/etc/apache2/php.ini', 'w')
f.write(conf)
f.close()
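
# The two read/substitute/write blocks above repeat the same pattern. A small
# helper along these lines could express it once; this sketch is not part of
# the original script and the function name is illustrative.
def render_template(src_path, dst_path, replacements):
    """Read src_path, apply {{KEY}} -> value substitutions, write dst_path."""
    with open(src_path, 'r') as handle:
        text = handle.read()
    for key, value in replacements.items():
        text = text.replace('{{%s}}' % key, value)
    with open(dst_path, 'w') as handle:
        handle.write(text)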
| dottobr83/openshift-php | misc/parse_templates.py | Python | mit | 818 |
# -*- coding: utf-8 -*-
from __future__ import print_function, division, absolute_import
import llvm
from numba import *
from numba import nodes
from numba.typesystem import is_obj, promote_to_native
from numba.codegen.codeutils import llvm_alloca, if_badval
from numba.codegen import debug
class ObjectCoercer(object):
"""
Object that knows how to convert to/from objects using Py_BuildValue
and PyArg_ParseTuple.
"""
# TODO: do all of this in a specializer
type_to_buildvalue_str = {
char: "b",
short: "h",
int_: "i",
long_: "l",
longlong: "L",
Py_ssize_t: "n",
npy_intp: "n", # ?
size_t: "n", # ?
uchar: "B",
ushort: "H",
uint: "I",
ulong: "k",
ulonglong: "K",
float_: "f",
double: "d",
complex128: "D",
object_: "O",
bool_: "b", # ?
c_string_type: "s",
char.pointer() : "s",
}
def __init__(self, translator):
self.context = translator.context
self.translator = translator
self.builder = translator.builder
self.llvm_module = self.builder.basic_block.function.module
sig, self.py_buildvalue = self.context.external_library.declare(
self.llvm_module, 'Py_BuildValue')
sig, self.pyarg_parsetuple = self.context.external_library.declare(
self.llvm_module, 'PyArg_ParseTuple')
sig, self.pyerr_clear = self.context.external_library.declare(
self.llvm_module, 'PyErr_Clear')
self.function_cache = translator.function_cache
self.NULL = self.translator.visit(nodes.NULL_obj)
def check_err(self, llvm_result, callback=None, cmp=llvm.core.ICMP_EQ,
pos_node=None):
"""
        Check for errors. If the result is NULL, an error should have been set.
Jumps to translator.error_label if an exception occurred.
"""
assert llvm_result.type.kind == llvm.core.TYPE_POINTER, llvm_result.type
int_result = self.translator.builder.ptrtoint(llvm_result,
llvm_types._intp)
NULL = llvm.core.Constant.int(int_result.type, 0)
if callback:
if_badval(self.translator, int_result, NULL,
callback=callback or default_callback, cmp=cmp)
else:
test = self.builder.icmp(cmp, int_result, NULL)
name = 'no_error'
if hasattr(pos_node, 'lineno'):
name = 'no_error_%s' % error.format_pos(pos_node).rstrip(": ")
bb = self.translator.append_basic_block(name)
self.builder.cbranch(test, self.translator.error_label, bb)
self.builder.position_at_end(bb)
return llvm_result
def check_err_int(self, llvm_result, badval):
llvm_badval = llvm.core.Constant.int(llvm_result.type, badval)
if_badval(self.translator, llvm_result, llvm_badval,
callback=lambda b, *args: b.branch(self.translator.error_label))
def _create_llvm_string(self, str):
return self.translator.visit(nodes.ConstNode(str, c_string_type))
def lstr(self, types, fmt=None):
"Get an llvm format string for the given types"
typestrs = []
result_types = []
for type in types:
if is_obj(type):
type = object_
elif type.is_int:
type = promote_to_native(type)
result_types.append(type)
typestrs.append(self.type_to_buildvalue_str[type])
str = "".join(typestrs)
if fmt is not None:
str = fmt % str
if debug.debug_conversion:
self.translator.puts("fmt: %s" % str)
result = self._create_llvm_string(str)
return result_types, result
def buildvalue(self, types, *largs, **kwds):
# The caller should check for errors using check_err or by wrapping
# its node in an ObjectTempNode
name = kwds.get('name', '')
fmt = kwds.get('fmt', None)
types, lstr = self.lstr(types, fmt)
largs = (lstr,) + largs
if debug.debug_conversion:
self.translator.puts("building... %s" % name)
# func_type = object_(*types).pointer()
# py_buildvalue = self.builder.bitcast(
# self.py_buildvalue, func_type.to_llvm(self.context))
py_buildvalue = self.py_buildvalue
result = self.builder.call(py_buildvalue, largs, name=name)
if debug.debug_conversion:
self.translator.puts("done building... %s" % name)
nodes.print_llvm(self.translator.env, object_, result)
self.translator.puts("--------------------------")
return result
def npy_intp_to_py_ssize_t(self, llvm_result, type):
# if type == minitypes.npy_intp:
# lpy_ssize_t = minitypes.Py_ssize_t.to_llvm(self.context)
# llvm_result = self.translator.caster.cast(llvm_result, lpy_ssize_t)
# type = minitypes.Py_ssize_t
return llvm_result, type
def py_ssize_t_to_npy_intp(self, llvm_result, type):
# if type == minitypes.npy_intp:
# lnpy_intp = minitypes.npy_intp.to_llvm(self.context)
# llvm_result = self.translator.caster.cast(llvm_result, lnpy_intp)
# type = minitypes.Py_ssize_t
return llvm_result, type
def convert_single_struct(self, llvm_result, type):
types = []
largs = []
for i, (field_name, field_type) in enumerate(type.fields):
types.extend((c_string_type, field_type))
largs.append(self._create_llvm_string(field_name))
struct_attr = self.builder.extract_value(llvm_result, i)
largs.append(struct_attr)
return self.buildvalue(types, *largs, name='struct', fmt="{%s}")
def convert_single(self, type, llvm_result, name=''):
"Generate code to convert an LLVM value to a Python object"
llvm_result, type = self.npy_intp_to_py_ssize_t(llvm_result, type)
if type.is_struct:
return self.convert_single_struct(llvm_result, type)
elif type.is_complex:
# We have a Py_complex value, construct a Py_complex * temporary
new_result = llvm_alloca(self.translator.lfunc, self.builder,
llvm_result.type, name='complex_temp')
self.builder.store(llvm_result, new_result)
llvm_result = new_result
return self.buildvalue([type], llvm_result, name=name)
def build_tuple(self, types, llvm_values):
"Build a tuple from a bunch of LLVM values"
assert len(types) == len(llvm_values)
        return self.buildvalue(types, *llvm_values, name='tuple', fmt="(%s)")
def build_list(self, types, llvm_values):
"Build a tuple from a bunch of LLVM values"
assert len(types) == len(llvm_values)
return self.buildvalue(types, *llvm_values, name='list', fmt="[%s]")
def build_dict(self, key_types, value_types, llvm_keys, llvm_values):
"Build a dict from a bunch of LLVM values"
types = []
largs = []
for k, v, llvm_key, llvm_value in zip(key_types, value_types,
llvm_keys, llvm_values):
types.append(k)
types.append(v)
largs.append(llvm_key)
largs.append(llvm_value)
return self.buildvalue(types, *largs, name='dict', fmt="{%s}")
def parse_tuple(self, lstr, llvm_tuple, types, name=''):
"Unpack a Python tuple into typed llvm variables"
lresults = []
for i, type in enumerate(types):
var = llvm_alloca(self.translator.lfunc, self.builder,
type.to_llvm(self.context),
name=name and "%s%d" % (name, i))
lresults.append(var)
largs = [llvm_tuple, lstr] + lresults
if debug.debug_conversion:
self.translator.puts("parsing tuple... %s" % (types,))
nodes.print_llvm(self.translator.env, object_, llvm_tuple)
parse_result = self.builder.call(self.pyarg_parsetuple, largs)
self.check_err_int(parse_result, 0)
# Some conversion functions don't reset the exception state...
# self.builder.call(self.pyerr_clear, [])
if debug.debug_conversion:
self.translator.puts("successfully parsed tuple...")
return [self.builder.load(result) for result in lresults]
def to_native(self, type, llvm_tuple, name=''):
"Generate code to convert a Python object to an LLVM value"
types, lstr = self.lstr([type])
lresult, = self.parse_tuple(lstr, llvm_tuple, [type], name=name)
return lresult
| shiquanwang/numba | numba/codegen/coerce.py | Python | bsd-2-clause | 8,887 |
# Generated by Django 2.2.19 on 2021-03-05 11:09
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('tickets', '0003_auto_20180313_0138'),
]
operations = [
migrations.AlterField(
model_name='question',
name='author',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
migrations.AlterField(
model_name='response',
name='author',
field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL),
),
]
| Inboxen/Inboxen | inboxen/tickets/migrations/0004_auto_20210305_1109.py | Python | agpl-3.0 | 754 |
#!/usr/bin/env python2
# MIT License
#
# Copyright (c) 2016 Zhiang Chen
'''
Receive the depth image from "depth_image", and crop the image by a 34x34 window.
Then publish the cropped image to "cropped_depth_image" with 10hz.
'''
from __future__ import print_function
import rospy
import roslib
import cv2
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
import matplotlib.pyplot as plt
import numpy as np
import sys
image_size = 34
lf_x = 17
lf_y = 73
rt_x = lf_x + image_size
rt_y = lf_y + image_size
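# With image_size = 34, the crop below keeps columns 17..50 and rows 73..106
# of the incoming depth image (window's upper-left corner at x=17, y=73).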
class image_converter:
def __init__(self):
'''Initialize ros publisher, subscriber'''
self.pub = rospy.Publisher('/cropped_depth_image',Image,queue_size=1)
self.sub = rospy.Subscriber('/depth_image',Image,self.callback,queue_size=1)
self.bridge = CvBridge()
rospy.loginfo("Initialized!")
def callback(self,data):
cv_image = self.bridge.imgmsg_to_cv2(data, desired_encoding="mono8")
cropped_image = cv_image[lf_y:rt_y,lf_x:rt_x].reshape((image_size, image_size))
ros_image = self.bridge.cv2_to_imgmsg(cropped_image, encoding="mono8")
self.pub.publish(ros_image)
if __name__ == '__main__':
rospy.init_node('depth2input',anonymous=True)
ic = image_converter()
try:
rospy.spin()
except KeyboardInterrupt:
print("Shutting down ROS node depth2input")
| ZhiangChen/deep_learning | auto_recognition/src/depth2input.py | Python | mit | 1,315 |
# -*- coding: utf-8 -*-
"""
Copyright (C) 2010 Dariusz Suchojad <dsuch at gefira.pl>
Licensed under LGPLv3, see LICENSE.txt for terms and conditions.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
# stdlib
import logging, time
from hashlib import sha1
from string import Template
from datetime import datetime, timedelta
# lxml
from lxml import etree
# secwall
from secwall.core import SecurityException
soap_date_time_format = "%Y-%m-%dT%H:%M:%S.%fZ"
soapenv_namespace = 'http://schemas.xmlsoap.org/soap/envelope/'
soap_body_path = '/soapenv:Envelope/soapenv:Body'
soap_body_xpath = etree.XPath(soap_body_path, namespaces={'soapenv':soapenv_namespace})
wsse_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd'
wsu_namespace = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd'
wss_namespaces = {'soapenv':soapenv_namespace, 'wsse':wsse_namespace, 'wsu':wsu_namespace}
wsse_password_type_text = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordText'
wsse_password_type_digest = 'http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-username-token-profile-1.0#PasswordDigest'
supported_wsse_password_types = (wsse_password_type_text, wsse_password_type_digest)
wsse_username_token_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken'
wsse_username_token_xpath = etree.XPath(wsse_username_token_path, namespaces=wss_namespaces)
wsse_username_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Username'
wsse_username_xpath = etree.XPath(wsse_username_path, namespaces=wss_namespaces)
wsse_password_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password'
wsse_password_xpath = etree.XPath(wsse_password_path, namespaces=wss_namespaces)
wsse_password_type_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Password/@Type'
wsse_password_type_xpath = etree.XPath(wsse_password_type_path, namespaces=wss_namespaces)
wsse_nonce_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsse:Nonce'
wsse_nonce_xpath = etree.XPath(wsse_nonce_path, namespaces=wss_namespaces)
wsu_username_created_path = '/soapenv:Envelope/soapenv:Header/wsse:Security/wsse:UsernameToken/wsu:Created'
wsu_username_created_xpath = etree.XPath(wsu_username_created_path, namespaces=wss_namespaces)
class WSSE(object):
""" Implements authentication using WS-Security.
"""
def _replace_username_token_elem(self, soap, old_elem, attr_name):
""" A utility function for replacing passwords and nonces with '***'
for the purpose of logging the messages without worrying of disclosing
any data known to be secret.
"""
old_elem = old_elem[0]
attr = old_elem.get(attr_name)
username_token = wsse_username_token_xpath(soap)
if not username_token:
self.error(expected_element=wsse_username_token_path)
username_token = username_token[0]
elem_idx = username_token.index(old_elem)
username_token.remove(old_elem)
new_elem = etree.Element(old_elem.tag)
new_elem.set(attr_name, attr)
new_elem.text = '***'
username_token.insert(elem_idx, new_elem)
return old_elem.text, attr
def _get_digest(self, password, nonce, created):
""" Returns the password's expected digest.
"""
nonce = nonce.decode('base64')
concat = nonce + created + password
h = sha1()
h.update(concat)
return str.encode(h.digest(), 'base64').rstrip('\n')
def error(self, description='', expected_element='', soap=None):
""" A utility function for exceptions in erronous situations. May be
subclassed if error reporting needs to be customized. The 'soap'
parameter is guaranteed to have WSSE password and token replaced
with '***' characters. Note that default implementation doesn't use
the 'soap' parameter however the subclasses are free to do so.
"""
msg = description
if expected_element:
if description:
msg += '. '
msg += 'Element [{0}] doesn\'t exist'.format(expected_element)
raise SecurityException(msg)
def check_nonce(self, wsse_nonce, now, nonce_freshness_time):
""" Checks whether the nonce has been already seen. Default implementation
lets all nonces in. More sophisticated subclasses may wish to override
this method and check the nonce against a cache of some sort.
"""
return False
def on_invalid_username(self, config, given, message):
""" Invoked when the expected and given usernames don't match.
"""
self.error('Invalid username or password')
def on_invalid_password(self, config, given_username, given_password, message):
""" Invoked when the expected and given passwords don't match.
"""
self.error('Invalid username or password')
def on_username_token_expired(self, config, elapsed, message):
""" Invoked when the username token has been found to be expired.
"""
self.error('UsernameToken has expired')
def on_nonce_non_unique(self, config, nonce, now, message):
""" Invoked when the nonce has been found not to be unique.
"""
self.error('Nonce [{0}] is not unique'.format(nonce))
def validate(self, soap, config):
# Shadow the password and a nonce before any processing, getting
# their values along the way.
wsse_password = wsse_password_xpath(soap)
if wsse_password:
wsse_password, wsse_password_type = self._replace_username_token_elem(soap, wsse_password, 'Type')
wsse_nonce = wsse_nonce_xpath(soap)
if wsse_nonce:
wsse_nonce, wsse_encoding_type = self._replace_username_token_elem(soap, wsse_nonce, 'EncodingType')
wsse_username = wsse_username_xpath(soap)
if not wsse_username:
self.error('No username sent', wsse_username_path, soap)
wsse_username = wsse_username[0].text
if config['wsse-pwd-username'] != wsse_username:
self.on_invalid_username(config, wsse_username, soap)
if not wsse_password_type:
self.error('No password type sent', wsse_password_type_path, soap)
        if wsse_password_type not in supported_wsse_password_types:
msg = 'Unsupported password type=[{0}], not in [{1}]'.format(wsse_password_type, supported_wsse_password_types)
self.error(msg, soap=soap)
now = datetime.utcnow()
if config['wsse-pwd-reject-empty-nonce-creation']:
wsu_username_created = wsu_username_created_xpath(soap)
if not all((wsse_nonce, wsu_username_created)):
self.error('Both nonce and creation timestamp must be given', soap=soap)
else:
if wsu_username_created:
wsu_username_created = wsu_username_created[0].text
# Check nonce freshness and report error if the UsernameToken is stale.
token_created = datetime.strptime(wsu_username_created, soap_date_time_format)
elapsed = (now - token_created)
if config['wsse-pwd-reject-stale-tokens'] and elapsed.seconds > config['wsse-pwd-reject-expiry-limit']:
self.on_username_token_expired(config, elapsed, soap)
if config.get('wsse-pwd-password-digest'):
expected_password = self._get_digest(config['wsse-pwd-password'],
wsse_nonce, wsu_username_created)
else:
expected_password = config.get('wsse-pwd-password')
if wsse_password != expected_password:
self.on_invalid_password(config, wsse_username, wsse_password, soap)
# Have we already seen such a nonce?
if self.check_nonce(wsse_nonce, now, config.get('wsse-pwd-nonce-freshness-time')):
self.on_nonce_non_unique(config, wsse_nonce, now, soap)
# All good, we let the client in.
return True, wsse_username
| dsuch/sec-wall | code/src/secwall/wsse.py | Python | gpl-3.0 | 8,308 |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
def norm_col_init(weights, std=1.0):
x = torch.randn(weights.size())
x *= std / torch.sqrt((x**2).sum(1, keepdim=True))
return x
def weights_init(m):
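    # Xavier/Glorot-style uniform initialisation: weights are drawn from
    # U(-w, w) with w = sqrt(6 / (fan_in + fan_out)); biases are zeroed.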
classname = m.__class__.__name__
if classname.find('Conv') != -1:
weight_shape = list(m.weight.data.size())
fan_in = np.prod(weight_shape[1:4])
fan_out = np.prod(weight_shape[2:4]) * weight_shape[0]
w_bound = np.sqrt(6. / (fan_in + fan_out))
m.weight.data.uniform_(-w_bound, w_bound)
m.bias.data.fill_(0)
elif classname.find('Linear') != -1:
weight_shape = list(m.weight.data.size())
fan_in = weight_shape[1]
fan_out = weight_shape[0]
w_bound = np.sqrt(6. / (fan_in + fan_out))
m.weight.data.uniform_(-w_bound, w_bound)
m.bias.data.fill_(0)
class A3Clstm(torch.nn.Module):
def __init__(self, num_inputs, action_space):
super(A3Clstm, self).__init__()
# convolutional neural networks
self.conv1 = nn.Conv2d(num_inputs, 32, 5, stride=1, padding=2)
self.maxp1 = nn.MaxPool2d(2, 2)
self.conv2 = nn.Conv2d(32, 32, 5, stride=1, padding=1)
self.maxp2 = nn.MaxPool2d(2, 2)
self.conv3 = nn.Conv2d(32, 64, 4, stride=1, padding=1)
self.maxp3 = nn.MaxPool2d(2, 2)
self.conv4 = nn.Conv2d(64, 64, 3, stride=1, padding=1)
self.maxp4 = nn.MaxPool2d(2, 2)
# LSTM Cells
self.lstm = nn.LSTMCell(1024, 512)
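        # 1024 = 64 channels * 4 * 4 spatial positions left after the four
        # conv+pool stages above; that flattened size assumes 80x80 input
        # frames (a different resolution would need a different LSTM input size).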
num_outputs = action_space.n
# The critic layer
self.critic_linear = nn.Linear(512, 1)
# The actor layer
self.actor_linear = nn.Linear(512, num_outputs)
self.apply(weights_init)
self.actor_linear.weight.data = norm_col_init(
self.actor_linear.weight.data, 0.01)
self.actor_linear.bias.data.fill_(0)
self.critic_linear.weight.data = norm_col_init(
self.critic_linear.weight.data, 1.0)
self.critic_linear.bias.data.fill_(0)
self.lstm.bias_ih.data.fill_(0)
self.lstm.bias_hh.data.fill_(0)
self.train()
# forward propagation
def forward(self, inputs):
inputs, (hx, cx) = inputs
x = F.relu(self.maxp1(self.conv1(inputs)))
x = F.relu(self.maxp2(self.conv2(x)))
x = F.relu(self.maxp3(self.conv3(x)))
x = F.relu(self.maxp4(self.conv4(x)))
x = x.view(x.size(0), -1)
hx, cx = self.lstm(x, (hx, cx))
x = hx
return self.critic_linear(x), self.actor_linear(x), (hx, cx)
| Nasdin/ReinforcementLearning-AtariGame | A3CModel.py | Python | bsd-3-clause | 2,624 |
import os
from optparse import make_option
from django.core.management.base import BaseCommand
from django.conf import settings
import sys
class Command(BaseCommand):
help = "Clear supervisor confs for the given environment"
args = ""
option_list = BaseCommand.option_list + (
make_option('--conf_location', help='Supervisor configuration file path', default=None),
)
def handle(self, *args, **options):
conf_dir = options['conf_location']
environment = settings.SERVER_ENVIRONMENT
if not os.path.exists(conf_dir):
sys.exit("[clear_supervisor_confs] Error: the path %s is not reachable by this process" % conf_dir)
files = os.listdir(conf_dir)
env_confs = filter(lambda x: x.startswith('%s_' % environment) and x.endswith('.conf'), files)
for c in env_confs:
os.remove(os.path.join(conf_dir, c))
print "\t[clear_supervisor_confs] Removed supervisor configuration file: %s" % c
| puttarajubr/commcare-hq | corehq/apps/hqadmin/management/commands/clear_supervisor_confs.py | Python | bsd-3-clause | 995 |
from __future__ import absolute_import
from django import forms
from django.contrib import messages
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.utils.translation import ugettext_lazy as _
from sentry.models import ApiKey, AuditLogEntryEvent
from sentry.web.forms.fields import OriginsField
from sentry.web.frontend.base import OrganizationView
class ApiKeyForm(forms.ModelForm):
allowed_origins = OriginsField(
label=_('Allowed Domains'),
required=False,
help_text=_('Separate multiple entries with a newline.')
)
class Meta:
model = ApiKey
fields = ('label', 'scopes')
class OrganizationApiKeySettingsView(OrganizationView):
required_scope = 'org:admin'
def handle(self, request, organization, key_id):
key = get_object_or_404(ApiKey, organization=organization, id=key_id)
form = ApiKeyForm(
request.POST or None,
instance=key,
initial={
'allowed_origins': key.allowed_origins,
},
)
if form.is_valid():
key.allowed_origins = '\n'.join(form.cleaned_data['allowed_origins'])
key.save()
self.create_audit_entry(
request,
organization=organization,
target_object=key.id,
event=AuditLogEntryEvent.APIKEY_EDIT,
data=key.get_audit_log_data(),
)
messages.add_message(
request,
messages.SUCCESS,
'Your settings were saved.',
)
return HttpResponseRedirect(request.path)
context = {
'key': key,
'form': form,
}
return self.respond('sentry/organization-api-key-settings.html', context)
| jean/sentry | src/sentry/web/frontend/organization_api_key_settings.py | Python | bsd-3-clause | 1,865 |
import OOMP
newPart = OOMP.oompItem(9118)
newPart.addTag("oompType", "LEDS")
newPart.addTag("oompSize", "05")
newPart.addTag("oompColor", "I9")
newPart.addTag("oompDesc", "STAN")
newPart.addTag("oompIndex", "01")
OOMP.parts.append(newPart)
| oomlout/oomlout-OOMP | old/OOMPpart_LEDS_05_I9_STAN_01.py | Python | cc0-1.0 | 242 |
from common import * #@UnusedWildImport
from gridRole import GridRole
properties = ArrayList()
class Rocket(GridRole) :
def onHalfInvaded(self, invader) :
self.deathEvent( "launch" )
invader.deathEvent( "launch" )
ExplosionBuilder(self.actor) \
.dependent().forever().follow().offset(0,20).projectilesPerTick(3) \
.zOrder(10).spread(-70,-110).distance(10).randomSpread().speed(1,2,0,0).fade(3).eventName("flame") \
.create()
# Boiler plate code - no need to change this
def getProperties(self):
return properties
# Boiler plate code - no need to change this
def getClassName(self):
return ClassName( Role, self.__module__ + ".py" )
| nickthecoder/itchy | resources/Cavern-Quest/scripts/rocket.py | Python | gpl-3.0 | 755 |
"""The Session is a wrapper around a Shotgun instance, proxying requests to
the server and applying additional logic on top of it. The Session instance is
designed to be used for a single task and then discarded, since it makes the
assumption that entity relationships do not change.
While not fully documented below, this object will proxy all attributes to the
underlying Shotgun instance, so you can treat this as you would a Shotgun
instance.
"""
from __future__ import with_statement, absolute_import
import errno
import functools
import itertools
import json
import logging
import os
import re
import threading
import urlparse
import warnings
from sgschema import Schema
from .entity import Entity
from .pool import ShotgunPool
from .utils import expand_braces, parse_isotime
log = logging.getLogger(__name__)
class EntityNotFoundWarning(UserWarning):
pass
class EntityNotFoundError(ValueError):
pass
def _asyncable(func):
"""Wrap a function, so that async=True will run it in a thread."""
@functools.wraps(func)
def _wrapped(self, *args, **kwargs):
if kwargs.pop('async', False):
return self._submit_concurrent(func, self, *args, **kwargs)
else:
return func(self, *args, **kwargs)
return _wrapped
def _assert_ownership(func):
"""Wrap a function that takes a list of entities, and make sure that we own them."""
@functools.wraps(func)
def _wrapped(self, entities, *args, **kwargs):
entities = list(entities)
for e in entities:
if isinstance(e, Entity):
if e.session is not self:
raise ValueError('Entity not from this session', e, self)
else:
raise TypeError('Non-Entity passed as entity', e)
return func(self, entities, *args, **kwargs)
return _wrapped
_recursion_sentinel = object()
class Session(object):
"""Shotgun wrapper.
:param shotgun: A Shotgun instance to wrap, or the name to be passed to
``shotgun_api3_registry.connect()`` in order to construct one.
If passed a name, the remaining args and kwargs will also be passed to the
api registry connector.
If passed a descendant of ``shotgun_api3.Shotgun`` (or one is constructed
via the registry), it will be wrapped in a :class:`~sgsession.pool.ShotgunPool` so that
it becomes thread-safe. Any other objects (e.g. mock servers) are used
unmodified.
If passed nothing, ``shotgun_api3_registry.connect`` will be called
the first time :attr:`shotgun` is accessed (which will happen on many
operations). To stop this behaviour, pass ``False``.
"""
#: Mapping of entity types to the field where their "parent" lives.
parent_fields = {
'Asset': 'project',
'Project': None,
'Sequence': 'project',
'Shot': 'sg_sequence',
'Task': 'entity',
'PublishEvent': 'sg_link',
'Version': 'entity',
}
#: Fields to always fetch for every entity.
important_fields_for_all = ['updated_at']
#: Fields to always fetch: maps entity type to a list of fields.
important_fields = {
'Asset': ['code', 'sg_asset_type'],
'HumanUser': ['firstname', 'lastname', 'email', 'login'],
'Project': ['name'],
'PublishEvent': ['code', 'sg_type', 'sg_version'],
'Sequence': ['code'],
'Shot': ['code'],
'Step': ['code', 'short_name', 'entity_type'],
'Task': ['step', 'content'],
'Version': ['code', 'sg_task'],
}
#: Links to always fetch: maps entity type to a mapping of field names to
#: a list of their potential entity types.
important_links = {
'Asset': {
'project': ['Project'],
},
'Sequence': {
'project': ['Project'],
},
'Shot': {
'project': ['Project'],
'sg_sequence': ['Sequence'],
},
'Task': {
'project': ['Project'],
'entity': ['Asset', 'Shot'],
'step': ['Step'],
},
'PublishEvent': {
'project': ['Project'],
'sg_link': ['Task'],
},
}
def __init__(self, shotgun=None, schema=None, *args, **kwargs):
# Lookup strings in the script registry.
if isinstance(shotgun, basestring):
import shotgun_api3_registry
shotgun = shotgun_api3_registry.connect(shotgun, *args, **kwargs)
# Wrap basic shotgun instances in our threader.
self._shotgun = ShotgunPool.wrap(shotgun)
self._shotgun_args = None if shotgun else args
self._shotgun_kwargs = None if shotgun else kwargs
self._schema = schema
self._cache = {}
self._thread_pool = None
@classmethod
def from_entity(cls, entity, *args, **kwargs):
if isinstance(entity, Entity) and entity.session:
return entity.session
else:
return cls(*args, **kwargs)
@property
def shotgun(self):
# Automatically generate Shotgun when we need one.
# We use False to track that there should be nothing set here.
if self._shotgun is None:
import shotgun_api3_registry
self._shotgun = ShotgunPool.wrap(shotgun_api3_registry.connect(
*self._shotgun_args, **self._shotgun_kwargs
))
return self._shotgun or None
@property
def schema(self):
        # Automatically load the schema when we need one.
# We use False to track that there should be nothing set here.
if self._schema is None:
# Wait on caching a schema here until there is a Shotgun.
shotgun = self.shotgun
if not shotgun:
return
try:
self._schema = Schema.from_cache(shotgun)
except ValueError:
self._schema = False
return self._schema or None
def __getattr__(self, name):
return getattr(self.shotgun, name)
def __reduce__(self):
# We assume that the shotgun and sgcache will automatically regenerate.
# Generally, the user should be very careful when pickling sessions.
shotgun = False if self._shotgun is False else None
schema = False if self._schema is False else None
return self.__class__, (shotgun, schema)
def merge(self, data, over=None, created_at=None, _depth=0, _memo=None):
"""Import data containing raw entities into the session.
This will effectively return a copy of any nested structure of lists,
tuples, and dicts, while converting any dicts which look like entities
into an :class:`.Entity`. The returned structure is a copy of the
original.
:param dict data: The raw fields to convert into an :class:`~sgsession.entity.Entity`.
:param bool over: Control for merge behaviour with existing data.
``True`` results in the new data taking precedence, and ``False``
the old data. The default of ``None`` will automatically decide
based on the ``updated_at`` field.
:return: The :class:`~sgsession.entity.Entity`. This will not be a new instance if the
entity was already in the session, but it will have all the newly
merged data in it.
"""
# Track down where we are getting string created_at from.
if created_at and isinstance(created_at, basestring):
# This can be a huge message...
log.error('string created_at (%r) given to Session.merge at depth %d; data to merge: %r' % (
created_at, _depth, data,
))
created_at = parse_isotime(created_at)
# Since we are dealing with recursive structures, we need to memoize
# the outputs by all of the inputs as we create them.
if _memo is None:
_memo = {}
id_ = id(data)
if id_ in _memo:
return _memo[id_]
_memo[id_] = _recursion_sentinel
obj = self._merge(data, over, created_at, _depth, _memo)
# If something fails at setting up a recursive object before returning,
# then we want to fail very hard.
if obj is _recursion_sentinel:
raise RuntimeError('un-memoized recursion')
_memo[id_] = obj
return obj
def _merge(self, data, over, created_at, depth, memo):
# No need to worry about resolving schema here, since Entity.__setitem__
# will ultimately do it.
# Pass through entities if they are owned by us.
if isinstance(data, Entity) and data.session is self:
return data
# Contents of lists and tuples should get merged.
if isinstance(data, list):
# Lists can be cyclic; memoize them.
memo[id(data)] = new = type(data)()
new.extend(self.merge(x, over, created_at, depth + 1, memo) for x in data)
return new
if isinstance(data, tuple):
return type(data)(self.merge(x, over, created_at, depth + 1, memo) for x in data)
if not isinstance(data, dict):
return data
# Non-entity dicts have all their values merged.
if not ('type' in data and 'id' in data):
memo[id(data)] = new = type(data)() # Setup recursion block.
new.update((k, self.merge(v, over, created_at)) for k, v in data.iteritems())
return new
# If it already exists, then merge this into the old one.
new = Entity(data['type'], data['id'], self)
key = new.cache_key
entity = self._cache.setdefault(new.cache_key, new)
memo[id(data)] = entity # Setup recursion block.
entity._update(data, over, created_at, depth + 1, memo)
return entity
def parse_user_input(self, spec, entity_types=None, fetch_project_from_page=False):
spec = spec.strip()
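        # Accepted forms (examples are illustrative): a JSON entity such as
        # '{"type": "Shot", "id": 1234}', a bare integer ID when entity_types
        # names exactly one type, Shotgun detail/page URLs, and compact specs
        # like 'Shot:1234' or 'shot_1234?code=AB_010'.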
# JSON.
if spec.startswith('{') and spec.endswith('}'):
raw = json.loads(spec)
if 'type' not in raw or 'id' not in raw:
raise ValueError('incomplete JSON entity', spec)
if not isinstance(raw['type'], basestring) or not isinstance(raw['id'], int):
raise ValueError('malformed JSON entity', spec)
return self.merge(raw)
# Accept integer IDs if we know we want a specific type.
if spec.isdigit():
if isinstance(entity_types, basestring):
entity_types = [entity_types]
if entity_types and len(entity_types) == 1:
return self.merge({'type': entity_types[0], 'id': int(spec)})
else:
raise ValueError('int-only spec without single entity_types', spec, entity_types)
# Shotgun detail URL.
m = re.match(r'^https?://\w+\.shotgunstudio\.com/detail/([A-Za-z]+)/(\d+)', spec)
if m:
return self.merge({'type': m.group(1), 'id': int(m.group(2))})
# Shotgun project overview URL.
m = re.match(r'^https?://\w+\.shotgunstudio\.com/page/\d+#([A-Z][A-Za-z]+)_(\d+)_', spec)
if m:
return self.merge({'type': m.group(1), 'id': int(m.group(2))})
# Shotgun page URL.
m = re.match(r'^https?://\w+\.shotgunstudio\.com/page/(\d+)$', spec)
if m:
if not fetch_project_from_page:
raise ValueError('page URL without fetch_project_from_page', spec)
page = self.get('Page', int(m.group(1)), ['project'])
if not page:
raise ValueError('Page entity not found for page URL', spec)
if page.get('project'):
return self.merge(page['project'])
raise ValueError('page URL has no project', spec)
# Direct entities. E.g. `shot:12345?code=whatever`
m = re.match(r'^([A-Za-z]{3,})[:_ -](\d+)(?:_|$|\?(\S*))', spec)
if m:
type_, id_, query = m.groups()
raw = {
'type': type_[0].upper() + type_[1:],
'id': int(id_),
}
if query:
for k, v in urlparse.parse_qsl(query, keep_blank_values=True):
raw.setdefault(k, v)
return self.merge(raw)
raise ValueError('could not parse entity spec', spec)
def _submit_concurrent(self, func, *args, **kwargs):
if not self._thread_pool:
from concurrent.futures import ThreadPoolExecutor
self._thread_pool = ThreadPoolExecutor(8)
return self._thread_pool.submit(func, *args, **kwargs)
@_asyncable
def create(self, type, data=None, return_fields=None, **kwargs):
"""Create an entity of the given type and data.
:return: The new :class:`~sgsession.entity.Entity`.
`See the Shotgun docs for more. <https://github.com/shotgunsoftware/python-api/wiki/Reference%3A-Methods#wiki-create>`_
"""
if data is not None and kwargs:
            # This isn't quite ideal, but it doesn't let the most confusing usage get through.
raise TypeError('provide only one of data or **kwargs')
data = self._minimize_entities(data if data is not None else kwargs)
if self.schema:
type = self.schema.resolve_one_entity(type)
data = self.schema.resolve_structure(data, type)
return_fields = self.schema.resolve_field(type, return_fields) if return_fields else []
return_fields = self._add_default_fields(type, return_fields)
return self.merge(self.shotgun.create(type, data, return_fields))
@_asyncable
def update(self, *args, **kwargs):
"""Update the given entity with the given fields.
.. todo:: Add this to the Entity.
`See the Shotgun docs for more. <https://github.com/shotgunsoftware/python-api/wiki/Reference%3A-Methods#wiki-update>`_
"""
# Grab the "type" or 1st argument.
if not (args or kwargs):
raise TypeError('no arguments')
type_ = kwargs.pop('type', None)
if type_ is None:
if not args:
raise TypeError('must provide "type" kwarg or positional type argument')
type_ = args[0]
args = args[1:]
# Figure out if we were given an Entity, or an entity type (string)
if isinstance(type_, Entity):
ids = [type_['id']]
type_ = type_['type']
do_batch = False
elif isinstance(type_, basestring):
ids = kwargs.pop('id', None) or args[0]
args = args[1:]
do_batch = not isinstance(ids, int)
ids = list(ids) if do_batch else [ids]
        elif isinstance(type_, (list, tuple)):
do_batch = True
entities = list(type_)
if not entities:
raise ValueError('entity sequence is empty')
sentinel = object()
non_entity = next((e for e in entities if not isinstance(e, Entity)), sentinel)
if non_entity is not sentinel:
raise ValueError('entity sequence contains non-Entity', non_entity)
type_ = entities[0]['type']
mismatched = next((e for e in entities if e['type'] != type_), None)
if mismatched is not None:
raise ValueError('mismatched entity types', type_, mismatched['type'])
ids = [e['id'] for e in entities]
else:
            raise TypeError('first argument must be an Entity, list of entities, or string (entity type)', type_)
data = {}
for arg in args:
data.update(arg)
data.update(kwargs)
if not data:
raise ValueError('no data provided')
data = self._minimize_entities(data)
if self.schema:
type_ = self.schema.resolve_one_entity(type_)
data = self.schema.resolve_structure(data, type_)
if do_batch:
return self.batch([{
'request_type': 'update',
'entity_type': type_,
'entity_id': id_,
'data': data,
} for id_ in ids])
else:
return self.merge(self.shotgun.update(type_, ids[0], data), over=True)
@_asyncable
def batch(self, requests):
"""Perform a series of requests in a transaction.
`See the Shotgun docs for more. <https://github.com/shotgunsoftware/python-api/wiki/Reference%3A-Methods#wiki-batch>`_
"""
requests = self._minimize_entities(requests)
if self.schema:
requests = self.schema.resolve_structure(requests)
return [self.merge(x, over=True) if isinstance(x, dict) else x for x in self.shotgun.batch(requests)]
def _add_default_fields(self, type_, fields):
fields = set(fields or ['id'])
# Add important fields for this type.
fields.update(self.important_fields_for_all)
fields.update(self.important_fields.get(type_, []))
# Add parent.
parent_field = self.parent_fields.get(type_)
if parent_field:
fields.add(parent_field)
# Add implied owners of deep-fields.
implied = set()
for field in fields:
parts = field.split('.')
for i in xrange(2, len(parts) + 1, 2):
implied.add('.'.join(parts[:i]) + '.id')
fields.update(implied)
# Add important deep-fields for requested type.
for local_field, link_types in self.important_links.get(type_, {}).iteritems():
fields.add(local_field)
for link_type in link_types:
remote_fields = self.important_fields.get(link_type, [])
remote_links = self.important_links.get(link_type, {})
for remote_field in itertools.chain(self.important_fields_for_all, remote_fields, remote_links.iterkeys()):
fields.add('%s.%s.%s' % (local_field, link_type, remote_field))
return sorted(fields)
def _minimize_entities(self, data):
if isinstance(data, dict):
# Attachments need to not be minimized, since they are often
# merged in with their own metadata. If we special cased merging
# them, then this could be a bit smarter and send only what is
            # necessary.
if data.get('type') == 'Attachment':
return data
if 'type' in data and 'id' in data:
return dict(type=data['type'], id=data['id'])
return dict((k, self._minimize_entities(v)) for k, v in data.iteritems())
if isinstance(data, (list, tuple)):
return [self._minimize_entities(x) for x in data]
return data
@_asyncable
def find(self, type_, filters, fields=None, *args, **kwargs):
"""Find entities.
:return: :class:`list` of found :class:`~sgsession.entity.Entity`.
`See the Shotgun docs for more. <https://github.com/shotgunsoftware/python-api/wiki/Reference%3A-Methods#wiki-find>`_
"""
merge = kwargs.pop('merge', True)
if self.schema:
type_ = self.schema.resolve_one_entity(type_)
if kwargs.pop('add_default_fields', True):
fields = self._add_default_fields(type_, fields)
# Expand braces in fields.
expanded_fields = set()
for field in fields:
expanded_fields.update(expand_braces(field))
fields = sorted(expanded_fields)
# Resolve names in fields.
if self.schema:
fields = self.schema.resolve_field(type_, fields) if fields else []
filters = self._minimize_entities(filters)
# Resolve names in filters.
if self.schema and isinstance(filters, (list, tuple)):
for i, old_filter in enumerate(filters):
filter_ = [self.schema.resolve_one_field(type_, old_filter[0])]
filter_.extend(old_filter[1:])
filters[i] = filter_
result = self.shotgun.find(type_, filters, fields, *args, **kwargs)
return [self.merge(x, over=True) for x in result] if merge else result
@_asyncable
def find_one(self, entity_type, filters, fields=None, order=None,
filter_operator=None, retired_only=False, **kwargs):
"""Find one entity.
:return: :class:`~sgsession.entity.Entity` or ``None``.
`See the Shotgun docs for more. <https://github.com/shotgunsoftware/python-api/wiki/Reference%3A-Methods#wiki-find_one>`_
"""
results = self.find(entity_type, filters, fields, order,
filter_operator, 1, retired_only, **kwargs)
if results:
return results[0]
return None
def find_iter(self, *args, **kwargs):
limit = kwargs.pop('limit', None) or None
per_page = kwargs.pop('per_page', limit or 500) # this is the default
async_count = kwargs.pop('async_count', 1)
kwargs['limit'] = per_page
kwargs['async'] = True
page = 1
futures = []
done = False
while not done:
# extract all complete results; we wait for the first one, but
# then take as many others as are already done
rows = futures.pop(0).result() if futures else None
while rows and futures and futures[0].done():
rows.extend(futures.pop(0).result())
# determine if we are done yet
if rows is not None:
# print 'got', len(rows)
# we hit the end of results
if not rows or len(rows) < per_page:
done = True
# we hit the total requested
if limit is not None:
limit -= len(rows)
if limit <= 0:
done = True
# queue up the next queries
while not done and len(futures) < async_count:
# print 'queing', page
kwargs['page'] = page
futures.append(self.find(*args, **kwargs))
page += 1
# yield results
if rows is not None:
for x in rows:
yield x
@_asyncable
def delete(self, entity, entity_id=None):
"""Delete one entity.
.. warning:: This session will **not** forget about the deleted entity,
and all links from other entities will remain intact.
`See the Shotgun docs for more. <https://github.com/shotgunsoftware/python-api/wiki/Reference%3A-Methods#wiki-delete>`_
"""
if not isinstance(entity, Entity):
if self.schema:
entity = self.schema.resolve_one_entity(entity)
if not entity_id:
raise ValueError('must provide entity_id')
entity = self.merge({'type': entity, 'id': entity_id})
res = self.shotgun.delete(entity['type'], entity['id'])
entity._exists = False
return res
@_asyncable
def get(self, type_, id_, fields=None, fetch=True):
"""Get one entity by type and ID.
:param str type_: The entity type to lookup.
:param int id_: The entity ID to lookup. Accepts ``list`` or ``tuple``
of IDs, and returns the same.
:param bool fetch: Request this entity from the server if not cached?
"""
# Handle multiple IDs.
if isinstance(id_, (list, tuple)):
return type(id_)(self.get(type_, x) for x in id_)
if self.schema:
type_ = self.schema.resolve_one_entity(type_)
try:
entity = self._cache[(type_, id_)]
except KeyError:
return self.find_one(type_, [('id', 'is', id_)], fields or [])
else:
if fetch and fields:
entity.fetch(fields)
return entity
def _fetch(self, entities, fields, force=False):
types = list(set(x['type'] for x in entities))
if len(types) > 1:
raise ValueError('can only fetch one type at once')
type_ = types[0]
ids_ = set()
for e in entities:
if force or any(f not in e for f in fields):
ids_.add(e['id'])
if ids_:
res = self.find(
type_,
[['id', 'in'] + list(ids_)],
fields,
)
missing = ids_.difference(e['id'] for e in res)
# Update _exists on the entities.
for e in entities:
e._exists = e['id'] not in missing
if missing:
raise EntityNotFoundError('%s %s not found' % (type_, ', '.join(map(str, sorted(missing)))))
@_assert_ownership
@_asyncable
def filter_exists(self, entities, check=True, force=False):
"""Return the subset of given entities which exist (non-retired).
:param list entities: An iterable of entities to check.
:param bool check: Should the server be consulted if we don't already know?
:param bool force: Should we always check the server?
        :returns set: The entities which exist, or which we aren't sure about.
This will handle multiple entity-types in multiple requests.
"""
if check:
by_type = {}
for x in entities:
by_type.setdefault(x['type'], set()).add(x)
for type_, sub_entities in by_type.iteritems():
if force or any(e._exists is None for e in sub_entities):
found = self.find(type_, [['id', 'in'] + list(e['id'] for e in sub_entities)])
found_ids = set(e['id'] for e in found)
for e in sub_entities:
e._exists = e['id'] in found_ids
return set(e for e in entities if (e._exists or e._exists is None))
@_assert_ownership
@_asyncable
def fetch(self, to_fetch, fields, force=False):
"""Fetch the named fields on the given entities.
:param list to_fetch: Entities to fetch fields for.
:param list fields: The names of fields to fetch on those entities.
:param bool force: Perform a request even if we already have this data?
        This will safely handle multiple entity types at the same time, and
by default will only make requests of the server if some of the data
does not already exist.
.. note:: This does not assert that all "important" fields exist. See
:meth:`fetch_core`.
"""
by_type = {}
for x in to_fetch:
by_type.setdefault(x['type'], set()).add(x)
for type_, entities in by_type.iteritems():
self._fetch(entities, fields, force=force)
@_assert_ownership
@_asyncable
def fetch_backrefs(self, to_fetch, backref_type, field):
"""Fetch requested backrefs on the given entities.
:param list to_fetch: Entities to get backrefs on.
:param str backref_type: The entity type to look for backrefs on.
:param str field: The name of the field to look for backrefs in.
::
# Find all tasks which refer to this shot.
>>> session.fetch_backrefs([shot], 'Task', 'entity')
"""
by_type = {}
for x in to_fetch:
by_type.setdefault(x['type'], set()).add(x)
for type_, entities in by_type.iteritems():
self.find(backref_type, [[field, 'is'] + [x.minimal for x in entities]])
@_assert_ownership
@_asyncable
def fetch_core(self, to_fetch):
"""Assert all "important" fields exist, and fetch them if they do not.
:param list to_fetch: The entities to get the core fields on.
This will populate all important fields, and important fields on linked
entities.
"""
by_type = {}
for x in to_fetch:
by_type.setdefault(x['type'], set()).add(x)
for type_, entities in by_type.iteritems():
self._fetch(entities, itertools.chain(
self.important_fields_for_all,
self.important_fields.get(type_) or (),
self.important_links.get(type_, {}).iterkeys(),
))
@_assert_ownership
@_asyncable
def fetch_heirarchy(self, to_fetch):
"""Populate the parents as far up as we can go, and return all involved.
With (new-ish) arbitrarily-deep-links on Shotgun, this method could be
        made quite a bit more efficient, since it should be able to request
        the entire hierarchy for any given type at once.
See :attr:`parent_fields`.
"""
all_nodes = set()
to_resolve = set()
loop_count = 0
while to_fetch or to_resolve:
# Just in case (because we have messed this up a few times before).
if loop_count > 20:
raise RuntimeError('likely infinite loop')
loop_count += 1
# Go as far up as we already have for the specified entities.
for entity in to_fetch:
all_nodes.add(entity)
while entity.parent(fetch=False):
entity = entity.parent()
all_nodes.add(entity)
if entity['type'] != 'Project':
to_resolve.add(entity)
# There is nothing new to fetch; bail!
if not to_resolve:
break
# Find the type that we have the most entities of, and remove them
# from the list to resolve.
by_type = {}
for x in to_resolve:
all_nodes.add(x)
by_type.setdefault(x['type'], set()).add(x)
type_, to_fetch = max(by_type.iteritems(), key=lambda x: len(x[1]))
to_resolve.difference_update(to_fetch)
# Fetch the parent names.
ids = [x['id'] for x in to_fetch]
parent_name = self.parent_fields[type_]
found = self.find(type_, [['id', 'in'] + ids], [parent_name])
# Make sure we actually get something back for the parent field.
no_parent = [e['id'] for e in found if not e.get(parent_name)]
if no_parent:
raise ValueError('%s %s %s no %s' % (
type_,
', '.join(str(id_) for id_ in sorted(no_parent)),
'have' if len(no_parent) > 1 else 'has',
parent_name,
))
# Track those which didn't come back from the API. Normally, this
# wouldn't happen, but can result from a race condition OR from
# an error on the server side (or a caching layer).
missing = to_fetch.difference(found)
if missing:
raise EntityNotFoundError('%s %s %s not exist' % (
type_,
                    ', '.join(str(id_) for id_ in sorted(e['id'] for e in missing)),
'do' if len(missing) > 1 else 'does',
))
return list(all_nodes)
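    # A usage sketch for ``fetch_heirarchy`` (hypothetical task entity):
    #
    #     nodes = session.fetch_heirarchy([task])
    #     # ``nodes`` contains the task plus every resolvable parent up to and
    #     # including its Project, with the parent links populated.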
_guessed_user_lock = threading.Lock()
@_asyncable
def guess_user(self, filter=('email', 'starts_with', '{login}@'), fields=(), fetch=True):
"""Guess Shotgun user from current login name.
        Looks for $SHOTGUN_USER_ID in your environment, then $SHOTGUN_SUDO_AS_LOGIN,
        and finally searches for a user whose email (by default) starts with the
        current login name.
:returns: ``dict`` of ``HumanUser``, or ``None``.
"""
with self._guessed_user_lock:
try:
user = self._guessed_user
except AttributeError:
user = self._guess_user(filter, fields, fetch)
if user:
Session._guessed_user = self.merge(user).as_dict()
else:
Session._guessed_user = None
if not user:
return
entity = self.merge(user)
if fields:
entity.fetch(fields)
return entity
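    # A usage sketch for ``guess_user`` (what is found depends on your
    # environment and Shotgun site):
    #
    #     user = session.guess_user(fields=['login'])
    #     if user:
    #         print user['login']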
def _guess_user(self, filter, fields, fetch):
# This envvar is used only for this purpose (at Western Post)
id_ = os.environ.get('SHOTGUN_USER_ID')
if id_:
return {'type': 'HumanUser', 'id': int(id_)}
if not fetch:
return
# This envvar is more general, and respected by shotgun_api3_registry.
login = os.environ.get('SHOTGUN_SUDO_AS_LOGIN')
if login:
return self.find_one('HumanUser', [
('login', 'is', login),
], fields or ())
# Finally, search for a user based on the current login.
try:
login = os.getlogin()
except OSError as e:
# this fails on the farm, so fall back onto the envvar
if e.errno != errno.ENOTTY:
raise
login = os.environ.get('USER')
filter_ = tuple(x.format(login=login) for x in filter)
return self.find_one('HumanUser', [filter_], fields)
| westernx/sgsession | sgsession/session.py | Python | bsd-3-clause | 33,714 |
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from __future__ import absolute_import
import re
import six
from six.moves import map # pylint: disable=redefined-builtin
from telemetry.timeline import chrome_trace_category_filter
TRACE_BUFFER_SIZE_IN_KB = 'trace_buffer_size_in_kb'
RECORD_MODE = 'record_mode'
RECORD_CONTINUOUSLY = 'record-continuously'
RECORD_UNTIL_FULL = 'record-until-full'
RECORD_AS_MUCH_AS_POSSIBLE = 'record-as-much-as-possible'
ECHO_TO_CONSOLE = 'trace-to-console'
RECORD_MODES = {
RECORD_UNTIL_FULL,
RECORD_CONTINUOUSLY,
RECORD_AS_MUCH_AS_POSSIBLE,
ECHO_TO_CONSOLE,
}
ENABLE_SYSTRACE_PARAM = 'enable_systrace'
UMA_HISTOGRAM_NAMES_PARAM = 'histogram_names'
def _ConvertStringToCamelCase(string):
"""Convert an underscore/hyphen-case string to its camel-case counterpart.
This function is the inverse of Chromium's ConvertFromCamelCase function
in src/content/browser/devtools/protocol/tracing_handler.cc.
"""
parts = re.split(r'[-_]', string)
return parts[0] + ''.join([p.title() for p in parts[1:]])
# TODO(crbug.com/971471): Don't do this anymore.
def _ConvertDictKeysToCamelCaseRecursively(data):
"""Recursively convert dictionary keys from underscore/hyphen- to camel-case.
This function is the inverse of Chromium's ConvertDictKeyStyle function
in src/content/browser/devtools/protocol/tracing_handler.cc.
"""
if isinstance(data, dict):
return {_ConvertStringToCamelCase(k):
_ConvertDictKeysToCamelCaseRecursively(v)
for k, v in six.iteritems(data)}
if isinstance(data, list):
return list(map(_ConvertDictKeysToCamelCaseRecursively, data))
return data
class ChromeTraceConfig(object):
"""Stores configuration options specific to the Chrome tracing agent.
This produces the trace config JSON string for tracing in Chrome.
Attributes:
record_mode: can be any mode in RECORD_MODES. This corresponds to
record modes in chrome.
category_filter: Object that specifies which tracing categories to trace.
"""
def __init__(self):
self._record_mode = RECORD_CONTINUOUSLY
self._category_filter = (
chrome_trace_category_filter.ChromeTraceCategoryFilter())
self._memory_dump_config = None
self._enable_systrace = False
self._uma_histogram_names = []
self._trace_buffer_size_in_kb = None
self._trace_format = None
@property
def trace_format(self):
return self._trace_format
def SetProtoTraceFormat(self):
self._trace_format = 'proto'
def SetJsonTraceFormat(self):
self._trace_format = 'json'
def SetLowOverheadFilter(self):
self._category_filter = (
chrome_trace_category_filter.CreateLowOverheadFilter())
def SetDefaultOverheadFilter(self):
self._category_filter = (
chrome_trace_category_filter.CreateDefaultOverheadFilter())
def SetDebugOverheadFilter(self):
self._category_filter = (
chrome_trace_category_filter.CreateDebugOverheadFilter())
@property
def category_filter(self):
return self._category_filter
@property
def enable_systrace(self):
return self._enable_systrace
def SetCategoryFilter(self, cf):
if isinstance(cf, chrome_trace_category_filter.ChromeTraceCategoryFilter):
self._category_filter = cf
else:
raise TypeError(
'Must pass SetCategoryFilter a ChromeTraceCategoryFilter instance')
def SetMemoryDumpConfig(self, dump_config):
"""Memory dump config stores the triggers for memory dumps."""
if isinstance(dump_config, MemoryDumpConfig):
self._memory_dump_config = dump_config
else:
raise TypeError(
'Must pass SetMemoryDumpConfig a MemoryDumpConfig instance')
def SetEnableSystrace(self):
self._enable_systrace = True
def SetTraceBufferSizeInKb(self, size):
self._trace_buffer_size_in_kb = size
def EnableUMAHistograms(self, *args):
for uma_histogram_name in args:
self._uma_histogram_names.append(uma_histogram_name)
def HasUMAHistograms(self):
return len(self._uma_histogram_names) != 0
@property
def record_mode(self):
return self._record_mode
@record_mode.setter
def record_mode(self, value):
assert value in RECORD_MODES
self._record_mode = value
def GetChromeTraceConfigForStartupTracing(self):
"""Map the config to a JSON string for startup tracing.
All keys in the returned dictionary use underscore-case (e.g.
'record_mode'). In addition, the 'record_mode' value uses hyphen-case
(e.g. 'record-until-full').
"""
result = {
RECORD_MODE: self._record_mode
}
result.update(self._category_filter.GetDictForChromeTracing())
if self._memory_dump_config:
result.update(self._memory_dump_config.GetDictForChromeTracing())
if self._enable_systrace:
result[ENABLE_SYSTRACE_PARAM] = True
if self._uma_histogram_names:
result[UMA_HISTOGRAM_NAMES_PARAM] = self._uma_histogram_names
if self._trace_buffer_size_in_kb:
result[TRACE_BUFFER_SIZE_IN_KB] = self._trace_buffer_size_in_kb
return result
def GetChromeTraceConfigForDevTools(self):
"""Map the config to a DevTools API config dictionary.
All keys in the returned dictionary use camel-case (e.g. 'recordMode').
In addition, the 'recordMode' value also uses camel-case (e.g.
'recordUntilFull'). This is to invert the camel-case ->
underscore/hyphen-delimited mapping performed in Chromium devtools.
"""
result = self.GetChromeTraceConfigForStartupTracing()
if result[RECORD_MODE]:
result[RECORD_MODE] = _ConvertStringToCamelCase(
result[RECORD_MODE])
if self._enable_systrace:
result.update({ENABLE_SYSTRACE_PARAM: True})
return _ConvertDictKeysToCamelCaseRecursively(result)
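# A usage sketch, using only methods defined in this module (the values are
# illustrative, not recommendations):
#
#     config = ChromeTraceConfig()
#     config.SetDefaultOverheadFilter()
#     config.record_mode = RECORD_UNTIL_FULL
#     config.SetTraceBufferSizeInKb(300000)
#     devtools_dict = config.GetChromeTraceConfigForDevTools()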
class MemoryDumpConfig(object):
"""Stores the triggers for memory dumps in ChromeTraceConfig."""
def __init__(self):
self._triggers = []
def AddTrigger(self, mode, periodic_interval_ms):
"""Adds a new trigger to config.
Args:
      mode: Memory dump level of detail string.
          Valid values are "background", "light" and "detailed".
      periodic_interval_ms: Dump time period in milliseconds.
"""
assert mode in ['background', 'light', 'detailed']
assert periodic_interval_ms > 0
self._triggers.append({'mode': mode,
'periodic_interval_ms': periodic_interval_ms})
def GetDictForChromeTracing(self):
"""Returns the dump config as dictionary for chrome tracing."""
# An empty trigger list would mean no periodic memory dumps.
return {'memory_dump_config': {'triggers': self._triggers}}
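  # A usage sketch tying the trigger config to ChromeTraceConfig (the interval
  # values are illustrative only):
  #
  #     dump_config = MemoryDumpConfig()
  #     dump_config.AddTrigger('light', 250)
  #     dump_config.AddTrigger('detailed', 10000)
  #     config.SetMemoryDumpConfig(dump_config)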
| catapult-project/catapult | telemetry/telemetry/timeline/chrome_trace_config.py | Python | bsd-3-clause | 6,778 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2020 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""A keyboard-driven, vim-like browser based on PyQt5."""
import os.path
__author__ = "Florian Bruhin"
__copyright__ = "Copyright 2014-2020 Florian Bruhin (The Compiler)"
__license__ = "GPL"
__maintainer__ = __author__
__email__ = "mail@qutebrowser.org"
__version__ = "1.14.0"
__version_info__ = tuple(int(part) for part in __version__.split('.'))
__description__ = "A keyboard-driven, vim-like browser based on PyQt5."
basedir = os.path.dirname(os.path.realpath(__file__))
| The-Compiler/qutebrowser | qutebrowser/__init__.py | Python | gpl-3.0 | 1,276 |
import numpy
#from MyLimiar import Limiar
from MatrizA import MatrizA  # needed below: execute() returns a MatrizA instance
class Limiarizador(object):
def __init__(self):
pass
def execute(self, matriz, limiar):
        # verify argument types
m = matriz.getM()
n = matriz.getTam()
l = numpy.zeros((n,n))
lim = limiar.next()
for index, value in numpy.ndenumerate(m):
if m[index] >= lim:
l[index] = 1
else:
l[index] = 0
return MatrizA([n, "array", l])
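# A usage sketch (``matriz`` and ``limiar`` are hypothetical objects providing
# getM()/getTam() and next() respectively, as used above):
#
#     limiarizador = Limiarizador()
#     binaria = limiarizador.execute(matriz, limiar)
#     # ``binaria`` wraps a 0/1 matrix: 1 where matriz >= limiar.next(), else 0.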
| Evnsan/sage_proj | modulo/Limiarizador.py | Python | gpl-2.0 | 519 |
import glob
def comports():
"""scan for available ports. return a list of device names."""
devices = glob.glob('/dev/tty.*')
return [(d, d, d) for d in devices]
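# A usage sketch (actual output depends on the machine; the device name below is
# made up):
#
#     >>> comports()
#     [('/dev/tty.usbserial-A100', '/dev/tty.usbserial-A100', '/dev/tty.usbserial-A100')]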
| IECS/MansOS | tools/lib/motelist_src/get_ports_mac.py | Python | mit | 180 |
import numpy as np
import os
def replace_value(line, pos, val):
    # Replace the whitespace-separated token at index `pos` with `val`.
    # The old token is located via its first occurrence in `line`, and the
    # replacement is padded with spaces so column alignment is kept when
    # `val` is shorter than the original token.
    old_val = line.split()[pos]
    index_start = line.find(old_val)
    index_end = index_start + len(old_val)
    len_diff = len(old_val) - len(val)
    return line[:index_start] + val + ' ' * len_diff + line[index_end:]
def generate_input_file(settings,input_filename,template_filename):
with open(template_filename, 'r') as template:
with open(input_filename, 'w') as file:
for line in template:
for var,val in settings.items():
if line.find(var) >= 0:
line = replace_value(line, 2, val)
file.write(line)
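# A usage sketch (file names, keys and the template layout are hypothetical; the
# code above replaces the third whitespace-separated token of any template line
# whose text contains a settings key):
#
#     settings = {'initial_state': 'updown', 'chi_max': '64'}
#     generate_input_file(settings, 'inputs/run_0.cfg', 'input_template.cfg')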
| DavidAce/DMRG | batch_scripts/generate_inputs/src/generate_inputs.py | Python | mit | 677 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# STDLIB
import inspect
import random
# THIRD PARTY
import pytest
import numpy as np
# LOCAL
import astropy.units as u
from astropy import cosmology
from astropy.cosmology import Cosmology, Planck18, realizations
from astropy.cosmology.core import _COSMOLOGY_CLASSES, Parameter
from astropy.cosmology.io.model import _CosmologyModel, from_model, to_model
from astropy.cosmology.parameters import available
from astropy.modeling import FittableModel
from astropy.modeling.models import Gaussian1D
from astropy.utils.compat.optional_deps import HAS_SCIPY
from .base import IOTestMixinBase, IOFormatTestBase
cosmo_instances = [getattr(realizations, name) for name in available]
cosmo_instances.append("TestToFromTable.setup.<locals>.CosmologyWithKwargs")
###############################################################################
class ToFromModelTestMixin(IOTestMixinBase):
"""
Tests for a Cosmology[To/From]Format with ``format="astropy.model"``.
This class will not be directly called by :mod:`pytest` since its name does
not begin with ``Test``. To activate the contained tests this class must
be inherited in a subclass. Subclasses must define a :func:`pytest.fixture`
``cosmo`` that returns/yields an instance of a |Cosmology|.
See ``TestCosmologyToFromFormat`` or ``TestCosmology`` for examples.
"""
@pytest.fixture
def method_name(self, cosmo):
# get methods, ignoring private and dunder
methods = {n for n in dir(cosmo)
if (callable(getattr(cosmo, n)) and not n.startswith("_"))}
# sieve out incompatible methods
for n in tuple(methods):
# remove non-introspectable methods
try:
sig = inspect.signature(getattr(cosmo, n))
except ValueError:
methods.discard(n)
continue
params = list(sig.parameters.keys())
# remove non redshift methods
if len(params) == 0 or not params[0].startswith("z"):
methods.discard(n)
continue
# dynamically detect ABC and optional dependencies
ERROR_SEIVE = (NotImplementedError, ValueError)
            # ABC can't introspect for good input
if not HAS_SCIPY:
ERROR_SEIVE = ERROR_SEIVE + (ModuleNotFoundError, )
args = np.arange(len(params)) + 1
try:
getattr(cosmo, n)(*args)
except ERROR_SEIVE:
methods.discard(n)
# TODO! pytest doesn't currently allow multiple yields (`cosmo`) so
# testing with 1 random method
# yield from methods
return random.choice(tuple(methods)) if methods else None
# ===============================================================
def test_fromformat_model_wrong_cls(self, from_format):
"""Test when Model is not the correct class."""
model = Gaussian1D(amplitude=10, mean=14)
with pytest.raises(AttributeError):
from_format(model)
def test_toformat_model_not_method(self, to_format):
"""Test when method is not a method."""
with pytest.raises(AttributeError):
to_format("astropy.model", method="this is definitely not a method.")
def test_toformat_model_not_callable(self, to_format):
"""Test when method is actually an attribute."""
with pytest.raises(ValueError):
to_format("astropy.model", method="name")
def test_toformat_model(self, cosmo, to_format, method_name):
"""Test cosmology -> astropy.model."""
if method_name is None: # no test if no method
return
model = to_format("astropy.model", method=method_name)
assert isinstance(model, _CosmologyModel)
# Parameters
expect = tuple([n for n in cosmo.__parameters__ if getattr(cosmo, n) is not None])
assert model.param_names == expect
# scalar result
args = np.arange(model.n_inputs) + 1
got = model.evaluate(*args)
expected = getattr(cosmo, method_name)(*args)
assert np.all(got == expected)
got = model(*args)
expected = getattr(cosmo, method_name)(*args)
assert np.all(got == expected)
# vector result
if "scalar" not in method_name:
args = (np.ones((model.n_inputs, 3)).T + np.arange(model.n_inputs)).T
got = model.evaluate(*args)
expected = getattr(cosmo, method_name)(*args)
assert np.all(got == expected)
got = model(*args)
expected = getattr(cosmo, method_name)(*args)
assert np.all(got == expected)
def test_tofromformat_model_instance(self, cosmo_cls, cosmo, method_name,
to_format, from_format):
"""Test cosmology -> astropy.model -> cosmology."""
if method_name is None: # no test if no method
return
# ------------
# To Model
# this also serves as a test of all added methods / attributes
# in _CosmologyModel.
model = to_format("astropy.model", method=method_name)
assert isinstance(model, _CosmologyModel)
assert model.cosmology_class is cosmo_cls
assert model.cosmology == cosmo
assert model.method_name == method_name
# ------------
# From Model
# it won't error if everything matches up
got = from_format(model, format="astropy.model")
assert got == cosmo
assert set(cosmo.meta.keys()).issubset(got.meta.keys())
# Note: model adds parameter attributes to the metadata
# also it auto-identifies 'format'
got = from_format(model)
assert got == cosmo
assert set(cosmo.meta.keys()).issubset(got.meta.keys())
def test_fromformat_model_subclass_partial_info(self):
"""
Test writing from an instance and reading from that class.
This works with missing information.
"""
pass # there's no partial information with a Model
class TestToFromModel(IOFormatTestBase, ToFromModelTestMixin):
"""Directly test ``to/from_model``."""
def setup_class(self):
self.functions = {"to": to_model, "from": from_model}
| saimn/astropy | astropy/cosmology/io/tests/test_model.py | Python | bsd-3-clause | 6,402 |
#!/usr/bin/python3 -O
# vim: fileencoding=utf-8
import os
import setuptools
import setuptools.command.install
# don't import: import * is unreliable and there is no need, since this is
# compile time and we have source files
def get_console_scripts():
for filename in os.listdir('./qubes/tools'):
basename, ext = os.path.splitext(os.path.basename(filename))
if basename == '__init__' or ext != '.py':
continue
yield basename.replace('_', '-'), 'qubes.tools.{}'.format(basename)
# create simple scripts that run much faster than "console entry points"
class CustomInstall(setuptools.command.install.install):
def run(self):
bin = os.path.join(self.root, "usr/bin")
try:
os.makedirs(bin)
except:
pass
for file, pkg in get_console_scripts():
path = os.path.join(bin, file)
with open(path, "w") as f:
f.write(
"""#!/usr/bin/python3
from {} import main
import sys
if __name__ == '__main__':
    sys.exit(main())
""".format(pkg))
os.chmod(path, 0o755)
setuptools.command.install.install.run(self)
if __name__ == '__main__':
setuptools.setup(
name='qubes',
version=open('version').read().strip(),
author='Invisible Things Lab',
author_email='woju@invisiblethingslab.com',
description='Qubes core package',
license='GPL2+',
url='https://www.qubes-os.org/',
packages=setuptools.find_packages(exclude=('core*', 'tests')),
cmdclass={
'install': CustomInstall,
},
entry_points={
'qubes.vm': [
'AppVM = qubes.vm.appvm:AppVM',
'TemplateVM = qubes.vm.templatevm:TemplateVM',
'StandaloneVM = qubes.vm.standalonevm:StandaloneVM',
'AdminVM = qubes.vm.adminvm:AdminVM',
'DispVM = qubes.vm.dispvm:DispVM',
],
'qubes.ext': [
'qubes.ext.admin = qubes.ext.admin:AdminExtension',
'qubes.ext.backup_restore = '
'qubes.ext.backup_restore:BackupRestoreExtension',
'qubes.ext.core_features = qubes.ext.core_features:CoreFeatures',
'qubes.ext.gui = qubes.ext.gui:GUI',
'qubes.ext.audio = qubes.ext.audio:AUDIO',
'qubes.ext.r3compatibility = qubes.ext.r3compatibility:R3Compatibility',
'qubes.ext.pci = qubes.ext.pci:PCIDeviceExtension',
'qubes.ext.block = qubes.ext.block:BlockDeviceExtension',
'qubes.ext.services = qubes.ext.services:ServicesExtension',
'qubes.ext.supported_features = qubes.ext.supported_features:SupportedFeaturesExtension',
'qubes.ext.windows = qubes.ext.windows:WindowsFeatures',
],
'qubes.devices': [
'pci = qubes.ext.pci:PCIDevice',
'block = qubes.ext.block:BlockDevice',
'testclass = qubes.tests.devices:TestDevice',
],
'qubes.storage': [
'file = qubes.storage.file:FilePool',
'file-reflink = qubes.storage.reflink:ReflinkPool',
'linux-kernel = qubes.storage.kernels:LinuxKernel',
'lvm_thin = qubes.storage.lvm:ThinPool',
'callback = qubes.storage.callback:CallbackPool',
],
'qubes.tests.storage': [
'test = qubes.tests.storage:TestPool',
'file = qubes.storage.file:FilePool',
'file-reflink = qubes.storage.reflink:ReflinkPool',
'linux-kernel = qubes.storage.kernels:LinuxKernel',
'lvm_thin = qubes.storage.lvm:ThinPool',
'callback = qubes.storage.callback:CallbackPool',
],
})
| QubesOS/qubes-core-admin | setup.py | Python | lgpl-2.1 | 3,863 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, unicode_literals
from logging import getLogger
import Queue
import time
import os
import hashlib
from ..watchdog import events
from ..watchdog.observers import Observer
from ..watchdog.utils.bricks import OrderedSetQueue
from .. import path_ops, variables as v, app
###############################################################################
LOG = getLogger('PLEX.playlists.common')
# These filesystem events are considered similar
SIMILAR_EVENTS = (events.EVENT_TYPE_CREATED, events.EVENT_TYPE_MODIFIED)
###############################################################################
class PlaylistError(Exception):
"""
The one main exception thrown if anything goes awry
"""
pass
class Playlist(object):
"""
Class representing a synced Playlist with info for both Kodi and Plex.
Attributes:
Plex:
plex_id: unicode
plex_name: unicode
plex_updatedat: unicode
Kodi:
kodi_path: unicode
kodi_filename: unicode
kodi_extension: unicode
kodi_type: unicode
kodi_hash: unicode
Testing for a Playlist() returns ONLY True if all the following attributes
are set; 2 playlists are only equal if all attributes are equal:
plex_id
plex_name
plex_updatedat
kodi_path
kodi_filename
kodi_type
kodi_hash
"""
def __init__(self):
# Plex
self.plex_id = None
self.plex_name = None
self.plex_updatedat = None
# Kodi
self._kodi_path = None
self.kodi_filename = None
self.kodi_extension = None
self.kodi_type = None
self.kodi_hash = None
def __unicode__(self):
return ("{{"
"'plex_id': {self.plex_id}, "
"'plex_name': '{self.plex_name}', "
"'kodi_type': '{self.kodi_type}', "
"'kodi_filename': '{self.kodi_filename}', "
"'kodi_path': '{self._kodi_path}', "
"'plex_updatedat': {self.plex_updatedat}, "
"'kodi_hash': '{self.kodi_hash}'"
"}}").format(self=self)
def __repr__(self):
return self.__unicode__().encode('utf-8')
def __str__(self):
return self.__repr__()
def __bool__(self):
return (self.plex_id and self.plex_updatedat and self.plex_name and
self._kodi_path and self.kodi_filename and self.kodi_type and
self.kodi_hash)
# Used for comparison of playlists
@property
def key(self):
return (self.plex_id, self.plex_updatedat, self.plex_name,
self._kodi_path, self.kodi_filename, self.kodi_type,
self.kodi_hash)
def __eq__(self, playlist):
return self.key == playlist.key
def __ne__(self, playlist):
return self.key != playlist.key
@property
def kodi_path(self):
return self._kodi_path
@kodi_path.setter
def kodi_path(self, path):
f = path_ops.path.basename(path)
try:
self.kodi_filename, self.kodi_extension = f.rsplit('.', 1)
except ValueError:
LOG.error('Trying to set invalid path: %s', path)
raise PlaylistError('Invalid path: %s' % path)
if path.startswith(v.PLAYLIST_PATH_VIDEO):
self.kodi_type = v.KODI_TYPE_VIDEO_PLAYLIST
elif path.startswith(v.PLAYLIST_PATH_MUSIC):
self.kodi_type = v.KODI_TYPE_AUDIO_PLAYLIST
else:
LOG.error('Playlist type not supported for %s', path)
raise PlaylistError('Playlist type not supported: %s' % path)
if not self.plex_name:
self.plex_name = self.kodi_filename
self._kodi_path = path
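# A usage sketch (hypothetical path; assumes v.PLAYLIST_PATH_VIDEO ends with a
# path separator):
#
#     playlist = Playlist()
#     playlist.plex_id = 12345
#     playlist.kodi_path = v.PLAYLIST_PATH_VIDEO + 'My Movies.xsp'
#     # Afterwards: kodi_filename == 'My Movies', kodi_extension == 'xsp',
#     # kodi_type == v.KODI_TYPE_VIDEO_PLAYLIST, plex_name == 'My Movies'.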
def kodi_playlist_hash(path):
"""
Returns a md5 hash [unicode] using os.stat() st_size and st_mtime for the
playlist located at path [unicode]
(size of file in bytes and time of most recent content modification)
There are probably way more efficient ways out there to do this
"""
stat = os.stat(path_ops.encode_path(path))
# stat.st_size is of type long; stat.st_mtime is of type float - hash both
m = hashlib.md5()
m.update(repr(stat.st_size))
m.update(repr(stat.st_mtime))
return m.hexdigest().decode('utf-8')
class PlaylistQueue(OrderedSetQueue):
"""
OrderedSetQueue that drops all directory events immediately
"""
def _put(self, item):
if item[0].is_directory:
self.unfinished_tasks -= 1
else:
# Can't use super as OrderedSetQueue is old style class
OrderedSetQueue._put(self, item)
class PlaylistObserver(Observer):
"""
PKC implementation, overriding the dispatcher. PKC will wait for the
duration timeout (in seconds) AFTER receiving a filesystem event. A new
("non-similar") event will reset the timer.
Creating and modifying will be regarded as equal.
"""
def __init__(self, *args, **kwargs):
super(PlaylistObserver, self).__init__(*args, **kwargs)
# Drop the same events that get into the queue even if there are other
# events in between these similar events. Ignore directory events
# completely
self._event_queue = PlaylistQueue()
@staticmethod
def _pkc_similar_events(event1, event2):
if event1 == event2:
return True
elif (event1.src_path == event2.src_path and
event1.event_type in SIMILAR_EVENTS and
event2.event_type in SIMILAR_EVENTS):
# Set created and modified events to equal
return True
return False
def _dispatch_iterator(self, event_queue, timeout):
"""
This iterator will block for timeout (seconds) until an event is
received or raise Queue.Empty.
"""
event, watch = event_queue.get(block=True, timeout=timeout)
event_queue.task_done()
start = time.time()
while time.time() - start < timeout:
try:
new_event, new_watch = event_queue.get(block=False)
except Queue.Empty:
app.APP.monitor.waitForAbort(0.2)
else:
event_queue.task_done()
start = time.time()
if self._pkc_similar_events(new_event, event):
continue
else:
yield event, watch
event, watch = new_event, new_watch
yield event, watch
def dispatch_events(self, event_queue, timeout):
for event, watch in self._dispatch_iterator(event_queue, timeout):
# This is copy-paste of original code
with self._lock:
# To allow unschedule/stop and safe removal of event handlers
# within event handlers itself, check if the handler is still
# registered after every dispatch.
for handler in list(self._handlers.get(watch, [])):
if handler in self._handlers.get(watch, []):
handler.dispatch(event)
| tomkat83/PlexKodiConnect | resources/lib/playlists/common.py | Python | gpl-2.0 | 7,211 |
#
# Copyright (c) 2008--2015 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
# Sends notification to search-server that it should update server index
#
import xmlrpclib
from spacewalk.common.rhnLog import log_error
class SearchNotify:
def __init__(self, host="127.0.0.1", port="2828"):
self.addr = "http://%s:%s" % (host, port)
def notify(self, indexName="server"):
try:
client = xmlrpclib.ServerProxy(self.addr)
result = client.admin.updateIndex(indexName)
except Exception, e:
log_error("Failed to notify search service located at %s to update %s indexes"
% (self.addr, indexName), e)
return False
return result
if __name__ == "__main__":
search = SearchNotify()
result = search.notify()
print "search.notify() = %s" % (result)
| xkollar/spacewalk | backend/server/rhnServer/search_notify.py | Python | gpl-2.0 | 1,394 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""xcom dag task indices
Revision ID: 8504051e801b
Revises: 4addfa1236f1
Create Date: 2016-11-29 08:13:03.253312
"""
from alembic import op
# revision identifiers, used by Alembic.
revision = '8504051e801b'
down_revision = '4addfa1236f1'
branch_labels = None
depends_on = None
def upgrade():
op.create_index('idx_xcom_dag_task_date', 'xcom',
['dag_id', 'task_id', 'execution_date'], unique=False)
def downgrade():
op.drop_index('idx_xcom_dag_task_date', table_name='xcom')
| spektom/incubator-airflow | airflow/migrations/versions/8504051e801b_xcom_dag_task_indices.py | Python | apache-2.0 | 1,297 |
# -*- coding: utf-8 -*-
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from __future__ import absolute_import
import flask_talisman
import flask_talisman.talisman
# TODO: we need to remove unsafe-inline
DEFAULT_CSP_POLICY = {
'default-src': '\'none\'',
'script-src': '\'self\' \'unsafe-inline\'',
'style-src': '\'self\' \'unsafe-inline\'',
'img-src': '\'self\'',
'connect-src': '\'self\'',
}
DEFAULT_CONFIG = dict(
force_https=True,
force_https_permanent=False,
force_file_save=False,
frame_options=flask_talisman.talisman.SAMEORIGIN,
frame_options_allow_from=None,
strict_transport_security=True,
strict_transport_security_preload=False,
strict_transport_security_max_age=flask_talisman.talisman.ONE_YEAR_IN_SECS,
strict_transport_security_include_subdomains=True,
content_security_policy=DEFAULT_CSP_POLICY,
content_security_policy_report_uri=None,
content_security_policy_report_only=False,
session_cookie_secure=True,
session_cookie_http_only=True,
)
security = flask_talisman.Talisman()
def init_app(app):
config = app.config.get('SECURITY', DEFAULT_CONFIG)
security.init_app(app, **config)
return security
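# A usage sketch (a minimal Flask app; when no SECURITY key is configured the
# DEFAULT_CONFIG above is used):
#
#     import flask
#     app = flask.Flask(__name__)
#     app.config['SECURITY'] = dict(DEFAULT_CONFIG, force_https=False)
#     init_app(app)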
| garbas/mozilla-releng-services | lib/backend_common/backend_common/security.py | Python | mpl-2.0 | 1,361 |
from collections import Counter
def checkio(text):
    # Keep only the letters, case-insensitively.
    letters = [ch for ch in text.lower() if ch.isalpha()]
    counts = Counter(letters)
    most = max(counts.values())
    # Of all letters sharing the maximal count, return the alphabetically first.
    return min(ch for ch, n in counts.items() if n == most)
if __name__ == '__main__':
# These "asserts" using only for self-checking and not necessary for auto-testing
assert checkio(u"Hello World!") == "l", "Hello test"
assert checkio(u"How do you do?") == "o", "O is most wanted"
assert checkio(u"One") == "e", "All letter only once."
assert checkio(u"Oops!") == "o", "Don't forget about lower case."
assert checkio(u"AAaooo!!!!") == "a", "Only letters."
assert checkio(u"abe") == "a", "The First."
print("Start the long test")
assert checkio(u"a" * 9000 + u"b" * 1000) == "a", "Long."
print("The local tests are done.")
| Dani4kor/Checkio | most-wanted-letter.py | Python | mit | 1,248 |
# -*- coding: utf-8 -*-
# Maestro Music Manager - https://github.com/maestromusic/maestro
# Copyright (C) 2009-2015 Martin Altmayer, Michael Helmling
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from PyQt5 import QtCore
translate = QtCore.QCoreApplication.translate
class Node:
"""(Abstract) base class for elements in a RootedTreeModel...that is almost everything in playlists,
browser etc.. Node implements the methods required by RootedTreeModel as well as many tree-structure
methods."""
parent = None
def hasContents(self):
"""Return whether this node has at least one child node."""
return len(self.getContents()) > 0
def getContents(self):
"""Return the list of contents."""
# This is a default implementation and does not mean that every node has a contents-attribute
return self.contents
def getContentsCount(self):
"""Return the number of contents (that is direct children) of this node."""
return len(self.getContents())
def setContents(self, contents):
"""Set the list of contents of this container to *contents*. Note that the list won't be copied but
in fact altered: the parents will be set to this node."""
assert isinstance(contents, list)
self.contents = contents
for element in self.contents:
element.parent = self
def insertContents(self, index, nodes):
"""Insert *nodes* at position *index* into this node's contents. As with setContents the list won't
be copied and the parents will be set to this node."""
for node in nodes:
node.parent = self
self.contents[index:index] = nodes
def isFile(self):
"""Return whether this node holds a file. Note that this is in general not the opposite of
isContainer as e.g. rootnodes are neither."""
return False
def isContainer(self):
"""Return whether this node holds a container. Note that this is in general not the opposite of
isFile as e.g. rootnodes are neither."""
return False
def getParents(self, includeSelf=False, excludeRootNode=False):
"""Returns a generator yielding all parents of this node in the current tree structure, from the
direct parent to the root-node.
If *includeSelf* is True, the node itself is yielded before its ancestors.
If *excludeRootNode* is True, nodes of type RootNode are not returned.
"""
if includeSelf:
yield self
parent = self.parent
while parent is not None and not (excludeRootNode and isinstance(parent, RootNode)):
yield parent
parent = parent.parent
def getRoot(self):
"""Return the root node in the tree `self` is part of, i.e., the most remote parent."""
node = self
while node.parent is not None:
node = node.parent
return node
def isAncestorOf(self, node):
"""Return whether this is an ancestor of *node*. A node is considered an ancestor of itself."""
return node == self or self in node.getParents()
def depth(self):
"""Return the depth of this node in the current tree structure. The root node has level 0."""
if self.parent is None:
return 0
else: return 1 + self.parent.depth()
def maxDepth(self):
"""Return the maximum depth of nodes below this node."""
if self.hasContents():
return 1 + max(node.maxDepth() for node in self.getContents())
else: return 0
def index(self, node):
"""Return the index of *node* in this node's contents or raise a ValueError if *node* is not found.
See also find."""
for i, n in enumerate(self.getContents()):
if n == node:
return i
raise ValueError("Node.index: Node {} is not contained in element {}.".format(node, self))
def find(self, node):
"""Return the index of *node* in this node's contents or -1 if *node* is not found. See also index."""
for i, n in enumerate(self.contents):
if n == node:
return i
return -1
def getAllNodes(self, skipSelf=False):
"""Generator which will return all nodes contained in this node or in children of it,
including the node itself unless *skipSelf* is ``True``.
The send-method of the returned generator may be used to decide whether the generator should
descend to the contents of the last node:
>>> generator = model.getAllNodes()
>>> descend = None # send must be invoked with None first
>>> try:
>>> while True:
>>> node = generator.send(descend)
>>> descend = ... # decide whether the generator should yield the contents of node
>>> # If descend is set to False, the generator will skip the contents and
>>> # continue with the next sibling of node
>>> except StopIteration: pass
See http://docs.python.org/3/reference/expressions.html#generator.send
"""
if not skipSelf:
descend = yield self
if descend is False: # Remember that yield usually returns None
return
for node in self.getContents():
generator = node.getAllNodes()
try:
descend = None # send must be called with None first
while True:
descend = yield generator.send(descend)
except StopIteration:
pass # continue to next node
def getAllFiles(self, reverse=False):
"""Generator which will return all files contained in this node or in children of it
(possibly including the node itself).
If *reverse* is True, files will be returned in reversed order.
"""
if self.isFile():
yield self
else:
for element in self.getContents() if not reverse else reversed(self.getContents()):
yield from element.getAllFiles(reverse)
def getAllContainers(self, contentsFirst=False, reverse=False):
"""Generator which will return all containers contained in this node or in children of it
(possibly including the node itself).
If *contentsFirst* is True, contents of a container will be returned prior to the container.
If *reverse* is True, containers will be returned in reversed order.
"""
assert self.getContents() is not None
if self.isContainer():
if contentsFirst == reverse: # both True or both False
yield self
for element in self.getContents() if not reverse else reversed(self.getContents()):
for container in element.getAllContainers(contentsFirst, reverse):
yield container
if contentsFirst != reverse:
yield self
def fileCount(self):
"""Return the number of files contained in this element or in descendants of it. If this node
is a file, return 1."""
if self.isFile():
return 1
else: return sum(node.fileCount() for node in self.getContents())
def offset(self):
"""Get the offset of this element in the current tree structure. For files the offset is defined as
the position of the file in the list of files in the whole tree (so e.g. for a playlist tree the
offset is the position of the file in the flat playlist). For containers the offset is defined as
the offset that a file at the container's location in the tree would have (if the container contains
at least one file, this is the offset of the first file among its descendants)."""
if self.parent is None:
return 0 # rootnode has offset 0
else:
# start with the parent's offset and add child.fileCount() for each child before this node
offset = self.parent.offset()
for child in self.parent.getContents():
if child == self:
return offset
else:
offset += child.fileCount()
raise ValueError('Node.getOffset: Node {} is not contained in its parent {}.'
.format(self, self.parent))
def fileAtOffset(self, offset, allowFileCount=False):
"""Return the file at the given *offset*. Note that *offset* is relative to this node, so only the
tree below this node will be searched.
Usually the inequality 0 <= *offset* < self.fileCount() must be valid. If *allowFileCount* is True,
*offset* may equal self.fileCount(). In that case None is returned, as the offset points behind all
files (that position is usually only interesting for insert operations).
"""
if not isinstance(offset, int):
raise TypeError("Offset must be an integer; I got: {}".format(offset))
if offset == 0 and self.isFile():
return self
else:
child, innerOffset = self.childAtOffset(offset)
if child is None: # offset == self.fileCount()
if allowFileCount:
return None
else: raise IndexError("Offset {} is out of bounds (equals fileCount)".format(offset))
if child.isFile():
return child
else: return child.fileAtOffset(innerOffset)
def childIndexAtOffset(self, offset):
"""Return a tuple: the index of the child C that contains the file F with the given offset (relative
to this element) and the offset of F relative to C ("inner offset").
For example: If this element is the rootnode of a playlist tree containing an album with 13 songs and
a second one with 12 songs, then getChildIndexAtOffset(17) will return (1, 3), since the 18th file in
the playlist (i.e. with offset 17), is contained in the second child (i.e with index 1) and it is the
4th song in that child (i.e. it has offset 3 relative to the album).
If *offset* points to the last position inside this node (in other words offset == self.fileCount()),
then (None, None) is returned.
"""
if offset < 0:
raise IndexError("Offset {} is out of bounds".format(offset))
cOffset = 0
for i in range(0, self.getContentsCount()):
fileCount = self.contents[i].fileCount()
if offset < cOffset + fileCount: # offset points to a file somewhere in self.contents[i]
return i, offset-cOffset
else: cOffset += fileCount
if offset == cOffset: # offset points to the end of the list of files below self
return None, None
raise IndexError("Offset {} is out of bounds".format(offset))
def childAtOffset(self, offset):
"""Return the child containing the file with the given (relative) offset, and the offset of that file
relative to the child. This is a convenience-method for
getContents()[getChildIndexAtOffset(offset)[0]]. Confer getChildIndexAtOffset.
If *offset* points to the last position inside this node (in other words offset == self.fileCount()),
then (None, None) is returned.
"""
index, innerOffset = self.childIndexAtOffset(offset)
if index is None:
return None, None
else: return self.getContents()[index], innerOffset
def firstLeaf(self, allowSelf=False):
"""Return the first leaf below this node (i.e. the node without children with the lowest offset). If
this node does not have children, return None or, if *allowSelf* is True, return the node itself.
"""
if self.hasContents():
return self.getContents()[0].firstLeaf(allowSelf=True)
else: return self if allowSelf else None
def lastLeaf(self, allowSelf=False):
"""Return the last leaf below this node (i.e. the node without children with the highest offset). If
this node does not have children, return None or, if *allowSelf* is True, return the node itself.
"""
if self.hasContents():
return self.getContents()[-1].lastLeaf(allowSelf=True)
else: return self if allowSelf else None
def nextLeaf(self):
"""Return the next leaf in the whole tree after the subtree below this node. E.g.:
A
B A.nextLeaf() == C.nextLeaf() == E
C
D
E
Return None if no such leaf exists.
"""
if self.parent is None:
return None
siblings = self.parent.getContents()
pos = siblings.index(self)
if pos < len(siblings) - 1:
return siblings[pos+1].firstLeaf(allowSelf=True)
else:
return self.parent.nextLeaf()
def wrapperString(self, includeSelf=False, strFunc=None):
"""Return a string that stores the tree structure below this node. If this string is submitted to
Level.createWrappers the same tree will be created again. There are some limitations though:
- the tree below this node must contain only Wrappers,
- to store Wrappers their id is used. Thus you cannot persistently store trees that contain
temporary elements (negative ids).
Both limitations can be circumvented specifying a custom *strFunc*: It must take a node and
return a string and is used to convert the node to a string. Strings returned by *strFunc* must not
contain the characters ',[]'.
"""
if includeSelf:
if strFunc is None and not isinstance(self, Wrapper):
raise ValueError('wrapperString: Tree must contain only Wrappers if *strFunc* is None')
selfString = str(self.element.id) if strFunc is None else strFunc(self)
if self.hasContents():
childrenString = ','.join(c.wrapperString(includeSelf=True, strFunc=strFunc)
for c in self.getContents())
if includeSelf:
return selfString+'['+childrenString+']'
else: return childrenString
else:
if includeSelf:
return selfString
else: return ''
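    # A sketch of the produced format (hypothetical element ids):
    #
    #     >>> wrapper.wrapperString(includeSelf=True)
    #     '12[13,14[15,16]]'
    #     # element 12 contains 13 and 14; 14 itself contains 15 and 16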
def printStructure(self, indent=''):
"""Debug method: print the tree below this node using indentation."""
print(indent + str(self))
for child in self.getContents():
child.printStructure(indent + ' ')
class RootNode(Node):
"""Rootnodes are used at the top of RootedTreeModel. They are not displayed within the GUI, but recursive
tree operations are much simpler if there is a single root node instead of a list of (visible) roots."""
def __init__(self, model):
self.contents = []
self.model = model
self.parent = None
def __repr__(self):
return 'RootNode[{} children]'.format(len(self.contents))
class Wrapper(Node):
"""A node that marks an element's location in a tree. On each level there is only one instance of each
element and this instance has a fixed list of contents and parents and corresponding positions. To use
elements in trees they must be wrapped by a wrapper which has only one parent and position and its own
list of contents. Usually both parent and contents contain wrappers again (and usually the elements of
those wrappers are an actual parent/contents of the element, but this is not obligatory).
Arguments:
*element*: the element instance wrapped by this wrapper,
*contents*: the list of contents of this wrapper. Usually these are other wrappers. Note that the
list won't be copied but the parents of the list entries will be adjusted.
If this wrapper wraps a file, this argument must be None. For containers it may be None,
in which case the wrapper will be initialized with an empty list.
*position*: the position of this wrapper. May be None.
*parent*: the parent (usually another wrapper or a rootnode)
"""
def __init__(self, element, *, contents=None, position=None, parent=None):
self.element = element
self.position = position
self.parent = parent
if element.isContainer():
if contents is not None:
self.setContents(contents)
else:
self.contents = []
else:
if contents is not None:
raise ValueError("contents must be None for a File-wrapper")
self.contents = None
def copy(self, contents=None, level=None):
"""Return a copy of this wrapper. Because a flat copy of the contents is not possible (parent
pointers would be wrong) all contents are copied recursively. Instead of this you can optionally
specify a list of contents that will be put into the copy regardless of the original's contents.
If *level* is not None, the copy will use elements from the given level instead of the original
elements (this is for example necessary when dropping elements from level to another).
"""
element = self.element if level is None else level.collect(self.element.id)
copy = Wrapper(element, contents=None, position=self.position, parent=self.parent)
if self.isContainer():
if contents is None:
copy.setContents([child.copy(level=level) for child in self.contents])
else: copy.setContents(contents)
return copy
def isFile(self):
return self.element.isFile()
def isContainer(self):
return self.element.isContainer()
def hasContents(self):
return self.element.isContainer() and len(self.contents) > 0
def getContentsCount(self):
return len(self.contents) if self.element.isContainer() else 0
def getContents(self):
return self.contents if self.element.isContainer() else []
def loadContents(self, recursive):
"""Ensure that this wrapper has exactly the contents of the underlying element.
If *recursive* is True, load the contents of all children in the same way.
"""
if self.element.isContainer():
if self.contents is None or len(self.contents) == 0:
self.element.level.collect(self.element.contents.ids)
self.setContents([Wrapper(self.element.level[id], position=pos)
for pos, id in self.element.contents.items()])
if recursive:
for child in self.contents:
child.loadContents(True)
def getTitle(self, prependPosition=False, usePath=True):
"""Return the title of the wrapped element. If *prependPosition* is True and this wrapper has a
position, prepend it to the title. See also Element.getTitle.
"""
title = self.element.getTitle(usePath)
if prependPosition and self.position is not None:
return "{} - {}".format(self.position, title)
else: return title
def getLength(self):
"""Return the length of this element, i.e. the sum of the lengths of all contents. Return None if
length can not be computed because not all contents have been loaded."""
if self.isFile():
return self.element.length
elif self.contents is not None:
lengths = [wrapper.getLength() for wrapper in self.contents]
if None not in lengths:
return sum(lengths)
return None
def getExtension(self):
"""Return the extension of all files in this container. Return None if they have different extension
or at least one of them does not have an extension."""
if self.isFile():
return self.element.url.extension
else:
extension = None
for wrapper in self.contents:
ext = wrapper.getExtension()
if ext is None:
return None
if extension is None:
extension = ext
elif extension != ext:
return None
return extension
def __repr__(self):
return "<W: {}>".format(self.getTitle())
# Note that no __eq__ method is defined for wrappers. Different wrapper instances really are different.
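# A construction sketch (hypothetical element object; in Maestro wrappers are
# usually created by the level/model code rather than by hand):
#
#     root = RootNode(model)
#     wrapper = Wrapper(container_element, position=1, parent=root)
#     wrapper.loadContents(recursive=True)
#     root.setContents([wrapper])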
class TextNode(Node):
"""A node that simply displays a piece of text."""
def __init__(self, text, wordWrap=False, parent=None):
self.text = text
self.wordWrap = wordWrap
self.parent = parent
def hasContents(self):
return False
@property
def contents(self):
return list()
def __str__(self):
return "<TextNode: {}>".format(self.text)
def toolTipText(self):
return self.text
| maestromusic/maestro | maestro/core/nodes.py | Python | gpl-3.0 | 21,992 |
from .test_cache import *
from .test_rest import *
from .test_templatetags import *
from .test_admin import *
from .test_settings import *
| joar/djedi-cms | djedi/tests/__init__.py | Python | bsd-3-clause | 139 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib import admin
from models import Project, TestSet, TestCase, TestStep, TestSetRun, TestCaseRun
admin.site.register(Project)
admin.site.register(TestSet)
admin.site.register(TestCase)
admin.site.register(TestStep)
admin.site.register(TestSetRun)
admin.site.register(TestCaseRun)
| Shibashisdas/testmanager-backend | api/admin.py | Python | mit | 363 |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.thread_list, name='thread-list'),
url(r'^add/$', views.thread_create, name='thread-create'),
url(r'^(?P<thread_pk>\d+)/$', views.thread_detail, name='thread-detail'),
url(r'^(?P<path_tags>[\w/]+)/$', views.thread_list, name='thread-list'),
]
| funkybob/django-dequorum | dequorum/urls.py | Python | mit | 346 |
"""
Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com>
This file is part of RockStor.
RockStor is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published
by the Free Software Foundation; either version 2 of the License,
or (at your option) any later version.
RockStor is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from smart_manager.models import (Task, TaskDefinition)
from smart_manager.serializers import TaskSerializer
from django.conf import settings
from django.db import transaction
import json
from advanced_sprobe import AdvancedSProbeView
from rest_framework.response import Response
from django.utils.timezone import utc
from datetime import datetime
from storageadmin.util import handle_exception
import logging
logger = logging.getLogger(__name__)
class TaskLogView(AdvancedSProbeView):
serializer_class = TaskSerializer
valid_tasks = ('snapshot', 'scrub',)
def get_queryset(self, *args, **kwargs):
if ('tid' in kwargs):
self.paginate_by = 0
try:
return Task.objects.get(id=kwargs['tid'])
except:
return []
if ('tdid' in kwargs):
td = TaskDefinition.objects.get(id=kwargs['tdid'])
return Task.objects.filter(task_def=td).order_by('-id')
return Task.objects.filter().order_by('-id')
def get_paginate_by(self, foo):
if (self.paginate_by is None):
return None
return settings.PAGINATION['page_size']
| kamal-gade/rockstor-core | src/rockstor/smart_manager/views/task_log.py | Python | gpl-3.0 | 1,865 |
from __future__ import absolute_import, unicode_literals
import functools
import logging
import os
import socket
import tornado.escape
import tornado.ioloop
import tornado.web
import tornado.websocket
import mopidy
from mopidy import core, models
from mopidy.internal import encoding, jsonrpc
logger = logging.getLogger(__name__)
def make_mopidy_app_factory(apps, statics):
def mopidy_app_factory(config, core):
return [
(r'/ws/?', WebSocketHandler, {
'core': core,
}),
(r'/rpc', JsonRpcHandler, {
'core': core,
}),
(r'/(.+)', StaticFileHandler, {
'path': os.path.join(os.path.dirname(__file__), 'data'),
}),
(r'/', ClientListHandler, {
'apps': apps,
'statics': statics,
}),
]
return mopidy_app_factory
def make_jsonrpc_wrapper(core_actor):
inspector = jsonrpc.JsonRpcInspector(
objects={
'core.get_uri_schemes': core.Core.get_uri_schemes,
'core.get_version': core.Core.get_version,
'core.history': core.HistoryController,
'core.library': core.LibraryController,
'core.mixer': core.MixerController,
'core.playback': core.PlaybackController,
'core.playlists': core.PlaylistsController,
'core.tracklist': core.TracklistController,
})
return jsonrpc.JsonRpcWrapper(
objects={
'core.describe': inspector.describe,
'core.get_uri_schemes': core_actor.get_uri_schemes,
'core.get_version': core_actor.get_version,
'core.history': core_actor.history,
'core.library': core_actor.library,
'core.mixer': core_actor.mixer,
'core.playback': core_actor.playback,
'core.playlists': core_actor.playlists,
'core.tracklist': core_actor.tracklist,
},
decoders=[models.model_json_decoder],
encoders=[models.ModelJSONEncoder]
)
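# A sketch of the wire format handled by this wrapper: plain JSON-RPC 2.0, both
# over the WebSocket and the HTTP /rpc endpoint (the id is chosen by the client):
#
#     -> {"jsonrpc": "2.0", "id": 1, "method": "core.playback.get_state"}
#     <- {"jsonrpc": "2.0", "id": 1, "result": "stopped"}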
def _send_broadcast(client, msg):
# We could check for client.ws_connection, but we don't really
# care why the broadcast failed, we just want the rest of them
# to succeed, so catch everything.
try:
client.write_message(msg)
except Exception as e:
error_msg = encoding.locale_decode(e)
logger.debug('Broadcast of WebSocket message to %s failed: %s',
client.request.remote_ip, error_msg)
# TODO: should this do the same cleanup as the on_message code?
class WebSocketHandler(tornado.websocket.WebSocketHandler):
# XXX This set is shared by all WebSocketHandler objects. This isn't
# optimal, but there's currently no use case for having more than one of
# these anyway.
clients = set()
@classmethod
def broadcast(cls, msg):
if hasattr(tornado.ioloop.IOLoop, 'current'):
loop = tornado.ioloop.IOLoop.current()
else:
loop = tornado.ioloop.IOLoop.instance() # Fallback for pre 3.0
# This can be called from outside the Tornado ioloop, so we need to
# safely cross the thread boundary by adding a callback to the loop.
for client in cls.clients:
# One callback per client to keep time we hold up the loop short
# NOTE: Pre 3.0 does not support *args or **kwargs...
loop.add_callback(functools.partial(_send_broadcast, client, msg))
def initialize(self, core):
self.jsonrpc = make_jsonrpc_wrapper(core)
def open(self):
if hasattr(self, 'set_nodelay'):
# New in Tornado 3.1
self.set_nodelay(True)
else:
self.stream.socket.setsockopt(
socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
self.clients.add(self)
logger.debug(
'New WebSocket connection from %s', self.request.remote_ip)
def on_close(self):
self.clients.discard(self)
logger.debug(
'Closed WebSocket connection from %s',
self.request.remote_ip)
def on_message(self, message):
if not message:
return
logger.debug(
'Received WebSocket message from %s: %r',
self.request.remote_ip, message)
try:
response = self.jsonrpc.handle_json(
tornado.escape.native_str(message))
if response and self.write_message(response):
logger.debug(
'Sent WebSocket message to %s: %r',
self.request.remote_ip, response)
except Exception as e:
error_msg = encoding.locale_decode(e)
logger.error('WebSocket request error: %s', error_msg)
if self.ws_connection:
# Tornado 3.2+ checks if self.ws_connection is None before
# using it, but not older versions.
self.close()
def check_origin(self, origin):
# Allow cross-origin WebSocket connections, like Tornado before 4.0
# defaulted to.
return True
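# Illustrative sketch (not part of the original module): other parts of the
# HTTP frontend can push core events to every connected client through the
# broadcast() classmethod above. The event payload below is a made-up example
# of the JSON text a client might receive.
def _example_broadcast():
    import json
    WebSocketHandler.broadcast(json.dumps({
        'event': 'playback_state_changed',  # assumed event name
        'old_state': 'stopped',
        'new_state': 'playing',
    }))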
def set_mopidy_headers(request_handler):
request_handler.set_header('Cache-Control', 'no-cache')
request_handler.set_header(
'X-Mopidy-Version', mopidy.__version__.encode('utf-8'))
class JsonRpcHandler(tornado.web.RequestHandler):
def initialize(self, core):
self.jsonrpc = make_jsonrpc_wrapper(core)
def head(self):
self.set_extra_headers()
self.finish()
def post(self):
data = self.request.body
if not data:
return
logger.debug(
'Received RPC message from %s: %r', self.request.remote_ip, data)
try:
self.set_extra_headers()
response = self.jsonrpc.handle_json(
tornado.escape.native_str(data))
if response and self.write(response):
logger.debug(
'Sent RPC message to %s: %r',
self.request.remote_ip, response)
except Exception as e:
logger.error('HTTP JSON-RPC request error: %s', e)
self.write_error(500)
def set_extra_headers(self):
set_mopidy_headers(self)
self.set_header('Accept', 'application/json')
        self.set_header('Content-Type', 'application/json; charset=utf-8')
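# Illustrative sketch (not part of the original module): how a client might
# call the JsonRpcHandler above over plain HTTP. The URL assumes Mopidy's
# default http/hostname and http/port settings and the usual /mopidy mount
# point; adjust to the actual configuration. Python 3 stdlib only.
def _example_rpc_call():
    import json
    from urllib.request import Request, urlopen

    payload = json.dumps({
        'jsonrpc': '2.0',
        'id': 1,
        'method': 'core.get_version',
    }).encode('utf-8')
    request = Request(
        'http://localhost:6680/mopidy/rpc',  # assumed default address
        data=payload,
        headers={'Content-Type': 'application/json'},
    )
    return json.loads(urlopen(request).read().decode('utf-8'))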
class ClientListHandler(tornado.web.RequestHandler):
def initialize(self, apps, statics):
self.apps = apps
self.statics = statics
def get_template_path(self):
return os.path.dirname(__file__)
def get(self):
set_mopidy_headers(self)
names = set()
for app in self.apps:
names.add(app['name'])
for static in self.statics:
names.add(static['name'])
names.discard('mopidy')
self.render('data/clients.html', apps=sorted(list(names)))
class StaticFileHandler(tornado.web.StaticFileHandler):
def set_extra_headers(self, path):
set_mopidy_headers(self)
class AddSlashHandler(tornado.web.RequestHandler):
@tornado.web.addslash
def prepare(self):
return super(AddSlashHandler, self).prepare()
| dbrgn/mopidy | mopidy/http/handlers.py | Python | apache-2.0 | 7,230 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.10 on 2016-10-26 20:07
from __future__ import unicode_literals
import os
import six
from django.conf import settings
from django.db import migrations
STACKS_DIRECTORY = os.path.join(settings.FILE_STORAGE_DIRECTORY, 'stacks')
def slug_to_id(apps, schema_migration):
"""
Forwards migration
"""
Stack = apps.get_model('stacks', 'Stack')
# Move the stacks/<id>-<slug> directories to just stacks/<id>
for stack in Stack.objects.all():
old_path = os.path.join(STACKS_DIRECTORY, '{}-{}'.format(stack.id, stack.slug))
new_path = os.path.join(STACKS_DIRECTORY, six.text_type(stack.id))
if os.path.exists(old_path):
os.rename(old_path, new_path)
def id_to_slug(apps, schema_migration):
"""
Reverse migration
"""
Stack = apps.get_model('stacks', 'Stack')
# Move the stacks/<id> directories to stacks/<id>-<slug>
for stack in Stack.objects.all():
old_path = os.path.join(STACKS_DIRECTORY, '{}-{}'.format(stack.id, stack.slug))
new_path = os.path.join(STACKS_DIRECTORY, six.text_type(stack.id))
if os.path.exists(new_path):
os.rename(new_path, old_path)
class Migration(migrations.Migration):
dependencies = [
('stacks', '0007_0_8_0_migrations'),
]
operations = [
migrations.RunPython(slug_to_id, id_to_slug),
]
| clarkperkins/stackdio | stackdio/api/stacks/migrations/0008_0_8_0_migrations.py | Python | apache-2.0 | 1,417 |
#!/usr/bin/env python
#
# Script to simulate colonial expansion of the galaxy
import sys
import getopt
import time
import pygame
from collections import defaultdict
from coloniser import Coloniser
from liner import Liner
from galaxy import Galaxy
from config import galaxywidth, galaxyheight, screensize, maxships
import bobj
black = 0, 0, 0
white = 255, 255, 255
##########################################################################
def usage():
sys.stderr.write("Usage: %s\n" % sys.argv[0])
##########################################################################
class Game(bobj.BaseObj):
def __init__(self):
pygame.init()
self.screen = pygame.display.set_mode(screensize)
self.galaxy = Galaxy(galaxywidth, galaxyheight)
self.homeplanet = self.galaxy.findHomePlanet()
self.galaxy.homeplanet = self.homeplanet
self.ships = defaultdict(list)
self.liners = 0
self.colonisers = 0
self.abandoned = 0
self.year = 0
######################################################################
def diaspora(self):
for shptype in self.ships:
for shp in self.ships[shptype][:]:
if shp.currplanet != shp.destination:
shp.move()
else:
if shp.destination.plantype == 'gasgiant':
dest = shp.determine_destination()
if not dest:
self.abandoned += shp.cargo
self.ships[shptype].remove(shp)
else:
if not shp.destination.settledate:
shp.destination.settledate = self.year
shp.unload()
self.ships[shptype].remove(shp)
# Any populous planet can spin off liners
for plnt in self.galaxy.terrestrials:
self.buildShip(plnt, Liner)
######################################################################
def buildShip(self, plnt, shipklass):
sk = shipklass.__name__
if len(self.ships[sk]) >= maxships[sk]:
return
s = shipklass(startplanet=plnt, galaxy=self.galaxy)
if not s.doSpawn():
return
dest = s.determine_destination()
if not dest:
return
s.load(s.cargosize)
self.ships[sk].append(s)
plnt.launches[sk] += 1
return s
######################################################################
def endOfYear(self):
populated = 0
popcap = 0
totpop = 0
for plnt in self.galaxy.terrestrials:
if plnt.population > 0:
plnt.maxdist = int(
min((self.year - plnt.settledate) / 20, 50) +
min((self.year - plnt.settledate) / 40, 50) +
min((self.year - plnt.settledate) / 80, 50) +
min((plnt.population / 1E9), 20) +
(self.year - plnt.settledate) / 200)
totpop += plnt.population
populated += 1
if plnt.homeplanet:
plnt.population += int(plnt.population * 0.001)
else:
plnt.population += int(plnt.population * 0.003)
plnt.population = min(plnt.popcapacity, plnt.population)
# Very populous planets can generate colonisers
self.buildShip(plnt, Coloniser)
if plnt.popcapacity > 0:
popcap += 1
if populated == popcap: # 100% colonised
time.sleep(30)
self.year += 1
######################################################################
def drawText(self, stsys):
surf = self.screen
populated = 0
popcap = 0
totpop = 0
colpop = 0
homepop = 0
for plnt in self.galaxy.terrestrials:
if plnt.population > 0:
populated += 1
totpop += plnt.population
if not plnt.homeplanet:
colpop += plnt.population
else:
homepop = plnt.population
if plnt.popcapacity > 0:
popcap += 1
font = pygame.font.Font(None, 20)
toprint = [
[
"Year: %d" % self.year,
],
[
"Colonised: %d/%d" % (populated, popcap),
"%0.2f%%" % (100.0 * populated / popcap),
],
[
"Pop: %s" % self.humanise(totpop),
"Home %s" % self.humanise(homepop),
"Col: %s" % self.humanise(colpop),
"Dead: %s" % self.humanise(self.abandoned),
],
[
"Liners: %d" % len(self.ships['Liner']),
"Colonisers: %d" % len(self.ships['Coloniser']),
]
]
count = 1
for tp in toprint:
text = font.render(" ".join(tp), 1, white)
textpos = text.get_rect(centerx=surf.get_width() / 2, centery=count*20)
surf.blit(text, textpos)
count += 1
if stsys:
for s in stsys.stars():
count = 5
st = "Star %s" % s.stardesc
text = font.render(st, 1, white)
textpos = text.get_rect(left=0, centery=count*20)
surf.blit(text, textpos)
count += 1
for p in s.planets():
st = "Orbit %d %s " % (p.orbit, p.plantype)
if p.popcapacity:
st += "Pop: %s/%s (%s) %s" % (self.humanise(p.population), self.humanise(p.popcapacity), p.settledate, p.launchstr())
text = font.render(st, 1, white)
textpos = text.get_rect(left=0, centery=count*20)
count += 1
surf.blit(text, textpos)
if p.maxdist > 5:
pygame.draw.circle(surf, white, abs(p.location), p.maxdist, 1)
######################################################################
def plot(self, stsys):
self.screen.fill(black)
for ss in self.galaxy.starsystems():
ss.Plot(self.screen)
for shptyp in self.ships:
for shp in self.ships[shptyp]:
shp.Plot(self.screen)
self.drawText(stsys)
pygame.display.flip()
##########################################################################
def main():
game = Game()
stsys = None
try:
while(1):
game.endOfYear()
for i in range(12):
for event in pygame.event.get():
if event.type == pygame.QUIT:
raise KeyboardInterrupt
elif event.type == pygame.MOUSEBUTTONDOWN:
stsys = game.galaxy.click(event.pos)
game.diaspora()
game.plot(stsys)
except KeyboardInterrupt:
return
##########################################################################
if __name__ == "__main__":
try:
opts, args = getopt.getopt(sys.argv[1:], "v", [])
except getopt.GetoptError, err:
sys.stderr.write("Error: %s\n" % str(err))
usage()
sys.exit(1)
for o, a in opts:
if o == "-v":
verbose = 1
main()
# EOF
| dwagon/Exodus | exodus.py | Python | gpl-3.0 | 7,490 |
from typing import Dict, Optional
from great_expectations.core.expectation_configuration import ExpectationConfiguration
from great_expectations.execution_engine import ExecutionEngine
from great_expectations.expectations.expectation import (
InvalidExpectationConfigurationError,
TableExpectation,
)
from great_expectations.expectations.util import render_evaluation_parameter_string
from great_expectations.render.renderer.renderer import renderer
from great_expectations.render.types import RenderedStringTemplateContent
from great_expectations.render.util import substitute_none_for_missing
class ExpectTableColumnCountToEqual(TableExpectation):
"""Expect the number of columns to equal a value.
expect_table_column_count_to_equal is a :func:`expectation \
<great_expectations.validator.validator.Validator.expectation>`, not a
``column_map_expectation`` or ``column_aggregate_expectation``.
Args:
value (int): \
The expected number of columns.
Other Parameters:
result_format (string or None): \
Which output mode to use: `BOOLEAN_ONLY`, `BASIC`, `COMPLETE`, or `SUMMARY`.
For more detail, see :ref:`result_format <result_format>`.
include_config (boolean): \
If True, then include the expectation config as part of the result object. \
For more detail, see :ref:`include_config`.
catch_exceptions (boolean or None): \
If True, then catch exceptions and include them as part of the result object. \
For more detail, see :ref:`catch_exceptions`.
meta (dict or None): \
A JSON-serializable dictionary (nesting allowed) that will be included in the output without \
modification. For more detail, see :ref:`meta`.
Returns:
An ExpectationSuiteValidationResult
Exact fields vary depending on the values passed to :ref:`result_format <result_format>` and
:ref:`include_config`, :ref:`catch_exceptions`, and :ref:`meta`.
See Also:
expect_table_column_count_to_be_between
"""
library_metadata = {
"maturity": "production",
"package": "great_expectations",
"tags": ["core expectation", "table expectation"],
"contributors": [
"@great_expectations",
],
"requirements": [],
}
metric_dependencies = ("table.column_count",)
success_keys = ("value",)
default_kwarg_values = {
"value": None,
"result_format": "BASIC",
"include_config": True,
"catch_exceptions": False,
"meta": None,
}
args_keys = ("value",)
""" A Metric Decorator for the Column Count"""
def validate_configuration(
self, configuration: Optional[ExpectationConfiguration]
) -> bool:
"""
Validates that a configuration has been set, and sets a configuration if it has yet to be set. Ensures that
necessary configuration arguments have been provided for the validation of the expectation.
Args:
configuration (OPTIONAL[ExpectationConfiguration]): \
An optional Expectation Configuration entry that will be used to configure the expectation
Returns:
True if the configuration has been validated successfully. Otherwise, raises an exception
"""
# Setting up a configuration
super().validate_configuration(configuration)
# Ensuring that a proper value has been provided
try:
assert (
"value" in configuration.kwargs
), "An expected column count must be provided"
            assert isinstance(
                configuration.kwargs["value"], (int, dict)
            ), "Provided value must be an integer or an evaluation parameter dict"
if isinstance(configuration.kwargs["value"], dict):
assert (
"$PARAMETER" in configuration.kwargs["value"]
), 'Evaluation Parameter dict for value kwarg must have "$PARAMETER" key.'
except AssertionError as e:
raise InvalidExpectationConfigurationError(str(e))
return True
@classmethod
def _atomic_prescriptive_template(
cls,
configuration=None,
result=None,
language=None,
runtime_configuration=None,
**kwargs,
):
runtime_configuration = runtime_configuration or {}
include_column_name = runtime_configuration.get("include_column_name", True)
include_column_name = (
include_column_name if include_column_name is not None else True
)
styling = runtime_configuration.get("styling")
params = substitute_none_for_missing(configuration.kwargs, ["value"])
template_str = "Must have exactly $value columns."
params_with_json_schema = {
"value": {"schema": {"type": "number"}, "value": params.get("value")},
}
return (template_str, params_with_json_schema, styling)
@classmethod
@renderer(renderer_type="renderer.prescriptive")
@render_evaluation_parameter_string
def _prescriptive_renderer(
cls,
configuration=None,
result=None,
language=None,
runtime_configuration=None,
**kwargs,
):
runtime_configuration = runtime_configuration or {}
include_column_name = runtime_configuration.get("include_column_name", True)
include_column_name = (
include_column_name if include_column_name is not None else True
)
styling = runtime_configuration.get("styling")
params = substitute_none_for_missing(configuration.kwargs, ["value"])
template_str = "Must have exactly $value columns."
return [
RenderedStringTemplateContent(
**{
"content_block_type": "string_template",
"string_template": {
"template": template_str,
"params": params,
"styling": styling,
},
}
)
]
def _validate(
self,
configuration: ExpectationConfiguration,
metrics: Dict,
runtime_configuration: dict = None,
execution_engine: ExecutionEngine = None,
):
expected_column_count = configuration.kwargs.get("value")
actual_column_count = metrics.get("table.column_count")
return {
"success": actual_column_count == expected_column_count,
"result": {"observed_value": actual_column_count},
}
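# Illustrative sketch (not part of the original module): one common way this
# expectation is invoked. The validator method name is generated from the
# expectation class name; the pandas DataFrame below is a made-up example and
# assumes the classic ge.from_pandas() entry point is available.
def _example_usage():
    import pandas as pd
    import great_expectations as ge

    df = ge.from_pandas(pd.DataFrame({"a": [1, 2], "b": [3, 4], "c": [5, 6]}))
    result = df.expect_table_column_count_to_equal(value=3)
    return result.success  # True for this 3-column frame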
| great-expectations/great_expectations | great_expectations/expectations/core/expect_table_column_count_to_equal.py | Python | apache-2.0 | 6,651 |
import argparse
from util import operations
from os import path
import h5py, re
def get_arguments():
parser = argparse.ArgumentParser(description='Prepare data for training')
parser.add_argument('data', type=str, help='Path to a HDF5 file containing \'smiles\', \'classes\' and \'partition\'')
parser.add_argument('--radius', type=int, default=2, help='Radius for the circular fingerprint')
    # argparse's type=bool is a common pitfall (any non-empty string, including
    # "False", is truthy), so expose this option as a plain flag instead.
    parser.add_argument('--random', action='store_true',
                        help='If the positions in the fingerprint should be '
                             'assigned randomly or based on similarity')
return parser.parse_args()
args = get_arguments()
data_path = path.abspath(args.data)  # avoid shadowing the imported os.path module
directory = data_path[:data_path.rfind('/')]
name = data_path[data_path.rfind('/') + 1:data_path.rfind('.')]
#ids = []
#source_hdf5 = h5py.File(args.data, 'r')
#regex = re.compile('[0-9]+-classes')
#for data_set in source_hdf5.keys():
# data_set = str(data_set)
# if regex.match(data_set):
# ids.append(data_set[:-8])
#source_hdf5.close()
#for ident in ids:
operations.prepare_data(directory, name, args.radius, args.random)
| patrick-winter-knime/deep-learning-on-molecules | lsfp/prepare_multiple_data.py | Python | gpl-3.0 | 1,134 |
"""Higher level child and data watching API's.
:Maintainer: Ben Bangert <ben@groovie.org>
:Status: Production
.. note::
:ref:`DataWatch` and :ref:`ChildrenWatch` may only handle a single
function, attempts to associate a single instance with multiple functions
will result in an exception being thrown.
"""
import logging
import time
import warnings
from functools import partial, wraps
from kazoo.retry import KazooRetry
from kazoo.exceptions import (
ConnectionClosedError,
NoNodeError,
KazooException
)
from kazoo.protocol.states import KazooState
log = logging.getLogger(__name__)
_STOP_WATCHING = object()
def _ignore_closed(func):
@wraps(func)
def wrapper(*args, **kwargs):
try:
return func(*args, **kwargs)
except ConnectionClosedError:
pass
return wrapper
class DataWatch(object):
"""Watches a node for data updates and calls the specified
function each time it changes
    The function will also be called the very first time it's
    registered to get the data.
Returning `False` from the registered function will disable future
data change calls. If the client connection is closed (using the
close command), the DataWatch will no longer get updates.
If the function supplied takes three arguments, then the third one
will be a :class:`~kazoo.protocol.states.WatchedEvent`. It will
only be set if the change to the data occurs as a result of the
server notifying the watch that there has been a change. Events
like reconnection or the first call will not include an event.
If the node does not exist, then the function will be called with
``None`` for all values.
.. tip::
Because :class:`DataWatch` can watch nodes that don't exist, it
can be used alternatively as a higher-level Exists watcher that
survives reconnections and session loss.
Example with client:
.. code-block:: python
@client.DataWatch('/path/to/watch')
def my_func(data, stat):
print("Data is %s" % data)
print("Version is %s" % stat.version)
# Above function is called immediately and prints
# Or if you want the event object
@client.DataWatch('/path/to/watch')
def my_func(data, stat, event):
print("Data is %s" % data)
print("Version is %s" % stat.version)
print("Event is %s" % event)
.. versionchanged:: 1.2
DataWatch now ignores additional arguments that were previously
passed to it and warns that they are no longer respected.
"""
def __init__(self, client, path, func=None, *args, **kwargs):
"""Create a data watcher for a path
:param client: A zookeeper client.
:type client: :class:`~kazoo.client.KazooClient`
:param path: The path to watch for data changes on.
:type path: str
:param func: Function to call initially and every time the
node changes. `func` will be called with a
tuple, the value of the node and a
:class:`~kazoo.client.ZnodeStat` instance.
:type func: callable
"""
self._client = client
self._path = path
self._func = func
self._stopped = False
self._run_lock = client.handler.lock_object()
self._version = None
self._retry = KazooRetry(max_tries=None,
sleep_func=client.handler.sleep_func)
self._include_event = None
self._ever_called = False
self._used = False
if args or kwargs:
warnings.warn('Passing additional arguments to DataWatch is'
' deprecated. ignore_missing_node is now assumed '
' to be True by default, and the event will be '
' sent if the function can handle receiving it',
DeprecationWarning, stacklevel=2)
# Register our session listener if we're going to resume
# across session losses
if func is not None:
self._used = True
self._client.add_listener(self._session_watcher)
self._get_data()
def __call__(self, func):
"""Callable version for use as a decorator
:param func: Function to call initially and every time the
data changes. `func` will be called with a
tuple, the value of the node and a
:class:`~kazoo.client.ZnodeStat` instance.
:type func: callable
"""
if self._used:
raise KazooException(
"A function has already been associated with this "
"DataWatch instance.")
self._func = func
self._used = True
self._client.add_listener(self._session_watcher)
self._get_data()
return func
def _log_func_exception(self, data, stat, event=None):
try:
            # Mark that the callback has now been invoked at least once;
            # the nested try below only passes the event through if the
            # callback accepts a third argument.
if not self._ever_called:
self._ever_called = True
try:
result = self._func(data, stat, event)
except TypeError:
result = self._func(data, stat)
if result is False:
self._stopped = True
self._func = None
self._client.remove_listener(self._session_watcher)
except Exception as exc:
log.exception(exc)
raise
@_ignore_closed
def _get_data(self, event=None):
# Ensure this runs one at a time, possible because the session
# watcher may trigger a run
with self._run_lock:
if self._stopped:
return
initial_version = self._version
try:
data, stat = self._retry(self._client.get,
self._path, self._watcher)
except NoNodeError:
data = None
# This will set 'stat' to None if the node does not yet
# exist.
stat = self._retry(self._client.exists, self._path,
self._watcher)
if stat:
self._client.handler.spawn(self._get_data)
return
# No node data, clear out version
if stat is None:
self._version = None
else:
self._version = stat.mzxid
# Call our function if its the first time ever, or if the
# version has changed
if initial_version != self._version or not self._ever_called:
self._log_func_exception(data, stat, event)
def _watcher(self, event):
self._get_data(event=event)
def _set_watch(self, state):
with self._run_lock:
self._watch_established = state
def _session_watcher(self, state):
if state == KazooState.CONNECTED:
self._client.handler.spawn(self._get_data)
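# Illustrative sketch (not part of the original module): DataWatch can also be
# used without the decorator form shown in the docstring. The ZooKeeper
# address below is an assumption.
def _example_data_watch():
    from kazoo.client import KazooClient

    client = KazooClient(hosts='127.0.0.1:2181')  # assumed local ZooKeeper
    client.start()

    def printer(data, stat):
        version = stat.version if stat else None
        print("Version: %s, data: %r" % (version, data))

    # printer() is called immediately and again on every change
    return DataWatch(client, '/path/to/watch', func=printer)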
class ChildrenWatch(object):
"""Watches a node for children updates and calls the specified
function each time it changes
    The function will also be called the very first time it's
    registered to get children.
Returning `False` from the registered function will disable future
children change calls. If the client connection is closed (using
the close command), the ChildrenWatch will no longer get updates.
    If send_event=True in __init__, then the function will always be
    called with a second parameter, ``event``. Upon initial call or when
    recovering a lost session the ``event`` is always ``None``.
    Otherwise it's a :class:`~kazoo.protocol.states.WatchedEvent`
instance.
Example with client:
.. code-block:: python
@client.ChildrenWatch('/path/to/watch')
def my_func(children):
print "Children are %s" % children
# Above function is called immediately and prints children
"""
def __init__(self, client, path, func=None,
allow_session_lost=True, send_event=False):
"""Create a children watcher for a path
:param client: A zookeeper client.
:type client: :class:`~kazoo.client.KazooClient`
:param path: The path to watch for children on.
:type path: str
:param func: Function to call initially and every time the
children change. `func` will be called with a
single argument, the list of children.
:type func: callable
:param allow_session_lost: Whether the watch should be
re-registered if the zookeeper
session is lost.
:type allow_session_lost: bool
:type send_event: bool
:param send_event: Whether the function should be passed the
event sent by ZooKeeper or None upon
initialization (see class documentation)
The path must already exist for the children watcher to
run.
"""
self._client = client
self._path = path
self._func = func
self._send_event = send_event
self._stopped = False
self._watch_established = False
self._allow_session_lost = allow_session_lost
self._run_lock = client.handler.lock_object()
self._prior_children = None
self._used = False
# Register our session listener if we're going to resume
# across session losses
if func is not None:
self._used = True
if allow_session_lost:
self._client.add_listener(self._session_watcher)
self._get_children()
def __call__(self, func):
"""Callable version for use as a decorator
:param func: Function to call initially and every time the
children change. `func` will be called with a
single argument, the list of children.
:type func: callable
"""
if self._used:
raise KazooException(
"A function has already been associated with this "
"ChildrenWatch instance.")
self._func = func
self._used = True
if self._allow_session_lost:
self._client.add_listener(self._session_watcher)
self._get_children()
return func
@_ignore_closed
def _get_children(self, event=None):
with self._run_lock: # Ensure this runs one at a time
if self._stopped:
return
try:
children = self._client.retry(self._client.get_children,
self._path, self._watcher)
except NoNodeError:
self._stopped = True
return
if not self._watch_established:
self._watch_established = True
if self._prior_children is not None and \
self._prior_children == children:
return
self._prior_children = children
try:
if self._send_event:
result = self._func(children, event)
else:
result = self._func(children)
if result is False:
self._stopped = True
self._func = None
except Exception as exc:
log.exception(exc)
raise
def _watcher(self, event):
if event.type != "NONE":
self._get_children(event)
def _session_watcher(self, state):
if state in (KazooState.LOST, KazooState.SUSPENDED):
self._watch_established = False
elif (state == KazooState.CONNECTED and
not self._watch_established and not self._stopped):
self._client.handler.spawn(self._get_children)
class PatientChildrenWatch(object):
"""Patient Children Watch that returns values after the children
of a node don't change for a period of time
A separate watcher for the children of a node, that ignores
changes within a boundary time and sets the result only when the
boundary time has elapsed with no children changes.
Example::
watcher = PatientChildrenWatch(client, '/some/path',
time_boundary=5)
async_object = watcher.start()
# Blocks until the children have not changed for time boundary
# (5 in this case) seconds, returns children list and an
# async_result that will be set if the children change in the
# future
children, child_async = async_object.get()
.. note::
This Watch is different from :class:`DataWatch` and
:class:`ChildrenWatch` as it only returns once, does not take
a function that is called, and provides an
:class:`~kazoo.interfaces.IAsyncResult` object that can be
checked to see if the children have changed later.
"""
def __init__(self, client, path, time_boundary=30):
self.client = client
self.path = path
self.children = []
self.time_boundary = time_boundary
self.children_changed = client.handler.event_object()
def start(self):
"""Begin the watching process asynchronously
:returns: An :class:`~kazoo.interfaces.IAsyncResult` instance
that will be set when no change has occurred to the
children for time boundary seconds.
"""
self.asy = asy = self.client.handler.async_result()
self.client.handler.spawn(self._inner_start)
return asy
def _inner_start(self):
try:
while True:
async_result = self.client.handler.async_result()
self.children = self.client.retry(
self.client.get_children, self.path,
partial(self._children_watcher, async_result))
self.client.handler.sleep_func(self.time_boundary)
if self.children_changed.is_set():
self.children_changed.clear()
else:
break
self.asy.set((self.children, async_result))
except Exception as exc:
self.asy.set_exception(exc)
    def _children_watcher(self, async_result, event):
        self.children_changed.set()
        async_result.set(time.time())
| bsanders/kazoo | kazoo/recipe/watchers.py | Python | apache-2.0 | 14,705 |
from robotpy_ext.common_drivers.navx._impl import AHRSProtocol
def test_decoding():
data = [0]*4
AHRSProtocol.encodeBinaryUint16(42, data, 0)
assert AHRSProtocol.decodeBinaryUint16(data, 0) == 42
AHRSProtocol.encodeBinaryUint16(40000, data, 0)
assert AHRSProtocol.decodeBinaryUint16(data, 0) == 40000
AHRSProtocol.encodeBinaryInt16(-42, data, 0)
assert AHRSProtocol.decodeBinaryInt16(data, 0) == -42
AHRSProtocol.encodeBinaryInt16(42, data, 0)
assert AHRSProtocol.decodeBinaryInt16(data, 0) == 42
AHRSProtocol.encodeProtocolSignedThousandthsFloat(32.0, data, 0)
assert abs(AHRSProtocol.decodeProtocolSignedThousandthsFloat(data, 0) - 32.0) < 0.001 | Twinters007/robotpy-wpilib-utilities | tests/test_navx.py | Python | bsd-3-clause | 719 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkPeeringsOperations(object):
"""VirtualNetworkPeeringsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2016_09_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkPeering"
"""Gets the specified virtual network peering.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the virtual network peering.
:type virtual_network_peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkPeering, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2016_09_01.models.VirtualNetworkPeering
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkPeering"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(virtual_network_peering_parameters, 'VirtualNetworkPeering')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
virtual_network_peering_name, # type: str
virtual_network_peering_parameters, # type: "_models.VirtualNetworkPeering"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkPeering"]
"""Creates or updates a peering in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param virtual_network_peering_name: The name of the peering.
:type virtual_network_peering_name: str
:param virtual_network_peering_parameters: Parameters supplied to the create or update virtual
network peering operation.
:type virtual_network_peering_parameters: ~azure.mgmt.network.v2016_09_01.models.VirtualNetworkPeering
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkPeering or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2016_09_01.models.VirtualNetworkPeering]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeering"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
virtual_network_peering_name=virtual_network_peering_name,
virtual_network_peering_parameters=virtual_network_peering_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeering', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'virtualNetworkPeeringName': self._serialize.url("virtual_network_peering_name", virtual_network_peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings/{virtualNetworkPeeringName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
virtual_network_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkPeeringListResult"]
"""Gets all virtual network peerings in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkPeeringListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2016_09_01.models.VirtualNetworkPeeringListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkPeeringListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2016-09-01"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkPeeringListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/virtualNetworkPeerings'} # type: ignore
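# Illustrative sketch (not part of the generated code): these operations are
# normally reached through a NetworkManagementClient rather than by
# instantiating the class above directly. The credential type, subscription id
# and resource names below are placeholders/assumptions.
def _example_list_peerings():
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.network import NetworkManagementClient

    client = NetworkManagementClient(
        credential=DefaultAzureCredential(),
        subscription_id="00000000-0000-0000-0000-000000000000",  # placeholder
    )
    return [
        peering.name
        for peering in client.virtual_network_peerings.list(
            resource_group_name="example-rg",     # placeholder
            virtual_network_name="example-vnet",  # placeholder
        )
    ]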
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2016_09_01/operations/_virtual_network_peerings_operations.py | Python | mit | 22,805 |
#!/usr/bin/env python
'''
Run this script to update all the copyright headers of files
that were changed this year.
For example:
// Copyright (c) 2009-2012 The Presidentielcoin Core developers
it will change it to
// Copyright (c) 2009-2015 The Presidentielcoin Core developers
'''
import os
import time
import re
year = time.gmtime()[0]
CMD_GIT_DATE = 'git log --format=%%ad --date=short -1 %s | cut -d"-" -f 1'
CMD_REGEX= "perl -pi -e 's/(20\d\d)(?:-20\d\d)? The Presidentielcoin/$1-%s The Presidentielcoin/' %s"
REGEX_CURRENT= re.compile("%s The Presidentielcoin" % year)
CMD_LIST_FILES= "find %s | grep %s"
FOLDERS = ["./qa", "./src"]
EXTENSIONS = [".cpp",".h", ".py"]
def get_git_date(file_path):
r = os.popen(CMD_GIT_DATE % file_path)
for l in r:
# Result is one line, so just return
return l.replace("\n","")
return ""
n=1
for folder in FOLDERS:
for extension in EXTENSIONS:
for file_path in os.popen(CMD_LIST_FILES % (folder, extension)):
file_path = os.getcwd() + file_path[1:-1]
if file_path.endswith(extension):
git_date = get_git_date(file_path)
if str(year) == git_date:
# Only update if current year is not found
if REGEX_CURRENT.search(open(file_path, "r").read()) is None:
print n,"Last git edit", git_date, "-", file_path
os.popen(CMD_REGEX % (year,file_path))
n = n + 1
| presidentielcoin/presidentielcoin | contrib/devtools/fix-copyright-headers.py | Python | mit | 1,400 |
# -*- coding: iso-8859-1 -*-
#
# Bicycle Repair Man - the Python Refactoring Browser
# Copyright (C) 2001-2006 Phil Dawes <phil@phildawes.net>
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
# A some of this code is take from Pythius -
# Copyright (GPL) 2001 Jurgen Hermann <jh@web.de>
import os
def containsAny(str, set):
""" Check whether 'str' contains ANY of the chars in 'set'
"""
return 1 in [c in str for c in set]
def getPathOfModuleOrPackage(dotted_name, pathlist):
""" Get the filesystem path for a module or a package.
Return the file system path to a file for a module,
and to a directory for a package. Return None if
the name is not found, or is a builtin or extension module.
"""
import imp
# split off top-most name
parts = dotted_name.split('.', 1)
if len(parts) > 1:
# we have a dotted path, import top-level package
try:
file, pathname, description = imp.find_module(parts[0], pathlist)
if file: file.close()
except ImportError:
return None
# check if it's indeed a package
if description[2] == imp.PKG_DIRECTORY:
# recursively handle the remaining name parts
pathname = getPathOfModuleOrPackage(parts[1], [pathname])
else:
pathname = None
else:
# plain name
try:
file, pathname, description = imp.find_module(dotted_name, pathlist)
if file: file.close()
            if description[2] not in [imp.PY_SOURCE, imp.PKG_DIRECTORY]:
pathname = None
except ImportError:
pathname = None
return pathname
def getFilesForName(name):
""" Get a list of module files for a filename, a module or package name,
or a directory.
"""
import imp
if not os.path.exists(name):
# check for glob chars
if containsAny(name, "*?[]"):
import glob
files = glob.glob(name)
list = []
for file in files:
list.extend(getFilesForName(file))
return list
# try to find module or package
name = getPathOfModuleOrPackage(name,[])
if not name:
return[]
if os.path.isdir(name):
# find all python files in directory
list = []
os.path.walk(name, _visit_pyfiles, list)
return list
elif os.path.exists(name) and not name.startswith("."):
# a single file
return [name]
return []
def _visit_pyfiles(list, dirname, names):
""" Helper for getFilesForName().
"""
# get extension for python source files
if not globals().has_key('_py_ext'):
import imp
global _py_ext
        _py_ext = [triple[0] for triple in imp.get_suffixes() if triple[2] == imp.PY_SOURCE][0]
# don't recurse into CVS or Subversion directories
    if 'CVS' in names:
        names.remove('CVS')
    if '.svn' in names:
        names.remove('.svn')
names_copy = [] + names
for n in names_copy:
        if os.path.isdir(os.path.join(dirname, n)) and \
not os.path.exists(os.path.join(dirname, n, "__init__.py")):
names.remove(n)
# add all *.py files to list
list.extend(
[os.path.join(dirname, file)
for file in names
if os.path.splitext(file)[1] == _py_ext and not file.startswith(".")])
# returns the directory which holds the first package of the package
# hierarchy under which 'filename' belongs
def getRootDirectory(filename):
if os.path.isdir(filename):
dir = filename
else:
dir = os.path.dirname(filename)
while dir != "" and \
os.path.exists(os.path.join(dir, "__init__.py")):
dir = os.path.dirname(dir)
return dir
# Returns the topmost package directory name of the package hierarchy
# under which 'filename' belongs
# **** NOT THE SAME AS THE ROOT DIRECTORY OF THE PACKAGE ***
def getPackageBaseDirectory(filename):
if os.path.isdir(filename):
dir = filename
else:
dir = os.path.dirname(filename)
if not os.path.exists(os.path.join(dir, "__init__.py")):
# parent dir is not a package
return dir
while dir != "" and \
os.path.exists(os.path.join(os.path.dirname(dir), "__init__.py")):
dir = os.path.dirname(dir)
return dir
def filenameToModulePath(fname):
directoriesPreceedingRoot = getRootDirectory(fname)
import os
# strip off directories preceeding root package directory
if directoriesPreceedingRoot != "" and directoriesPreceedingRoot != ".":
mpath = fname.replace(directoriesPreceedingRoot, "")
else:
if fname.startswith("."+os.sep): # if starts with './', lob it off
fname = fname[len("."+os.sep):]
mpath = fname
if(mpath[0] == os.path.normpath("/")):
mpath = mpath[1:]
mpath, ext = os.path.splitext(mpath)
mpath = mpath.replace(os.path.normpath("/"), ".")
return mpath
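# NOTE (added comment): the definition below rebinds filenameToModulePath and
# shadows the version directly above, so only this second implementation is
# visible to importers of the module.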
def filenameToModulePath(filename):
filename = os.path.abspath(filename)
package = ""
dot = ""
    dir, modname = os.path.split(filename)
    while dir != "" and \
os.path.exists(os.path.join(dir, "__init__.py")):
dir, dirname = os.path.split(dir)
package = dirname+dot+package
dot = "."
return package + dot + modname[:-3]
| lebauce/artub | bike/parsing/pathutils.py | Python | gpl-2.0 | 5,507 |
#
# Copyright 2002-2008 Zuza Software Foundation
#
# This file is part of translate.
#
# translate is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# translate is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>.
"""
Classes that hold units of .oo files (oounit) or entire files (oofile).
These are specific .oo files for localisation exported by OpenOffice.org - SDF
format (previously knows as GSI files).
.. There used to be an overview of the format here
http://l10n.openoffice.org/L10N_Framework/Intermediate_file_format.html
The behaviour in terms of escaping is explained in detail in the programming
comments.
"""
# FIXME: add simple test which reads in a file and writes it out again
import os
import re
import warnings
from io import BytesIO
from translate.misc import quote, wStringIO
# File normalisation
normalfilenamechars = (
b"/#.0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
)
class normalizechar(dict):
def __init__(self, normalchars):
self.normalchars = {}
for char in normalchars:
self.normalchars[ord(char)] = char
def __getitem__(self, key):
return self.normalchars.get(key, "_")
normalizetable = normalizechar(normalfilenamechars.decode("ascii"))
def normalizefilename(filename):
"""converts any non-alphanumeric (standard roman) characters to _"""
return filename.translate(normalizetable)
def makekey(ookey, long_keys):
"""converts an oo key tuple into a unique identifier
:param ookey: an oo key
:type ookey: tuple
:param long_keys: Use long keys
:type long_keys: Boolean
:rtype: str
:return: unique ascii identifier
"""
project, sourcefile, resourcetype, groupid, localid, platform = ookey
sourcefile = sourcefile.replace("\\", "/")
if long_keys:
sourcebase = "/".join((project, sourcefile))
else:
sourceparts = sourcefile.split("/")
sourcebase = "".join(sourceparts[-1:])
if len(groupid) == 0 or len(localid) == 0:
fullid = groupid + localid
else:
fullid = groupid + "." + localid
if resourcetype:
fullid = fullid + "." + resourcetype
key = f"{sourcebase}#{fullid}"
return normalizefilename(key)
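# Worked example (added for illustration, values are made up): with
# long_keys=False,
#   makekey(("svx", "source\\dialog\\sample.src", "string",
#            "RID_SAMPLE", "BTN_OK", ""), False)
# builds sourcebase "sample.src" and fullid "RID_SAMPLE.BTN_OK.string",
# so the returned key is "sample.src#RID_SAMPLE.BTN_OK.string"
# (normalizefilename() maps any other character to "_", which leaves this
# particular key unchanged).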
# These are functions that deal with escaping and unescaping of the text fields
# of the SDF file. These should only be applied to the text column.
# The fields quickhelptext and title are assumed to carry no escaping.
#
# The escaping of all strings except those coming from .xhp (helpcontent2)
# sourcefiles work as follows:
# (newline) -> \n
# (carriage return) -> \r
# (tab) -> \t
# Backslash characters (\) and single quotes (') are not consistently escaped,
# and are therefore left as they are.
#
# For strings coming from .xhp (helpcontent2) sourcefiles the following
# characters are escaped inside XML tags only:
# < -> \< when used with lowercase tagnames (with some exceptions)
# > -> \> when used with lowercase tagnames (with some exceptions)
# " -> \" around XML properties
# The following is consistently escaped in .xhp strings (not only in XML tags):
# \ -> \\
def escape_text(text):
"""Escapes SDF text to be suitable for unit consumption."""
return text.replace("\n", "\\n").replace("\t", "\\t").replace("\r", "\\r")
def unescape_text(text):
"""Unescapes SDF text to be suitable for unit consumption."""
return (
text.replace("\\\\", "\a")
.replace("\\n", "\n")
.replace("\\t", "\t")
.replace("\\r", "\r")
.replace("\a", "\\\\")
)
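# Illustrative round trip for the two helpers above (hedged example):
#   escape_text("one\ntwo\tthree")     == "one\\ntwo\\tthree"
#   unescape_text("one\\ntwo\\tthree") == "one\ntwo\tthree"
# unescape_text first swaps doubled backslashes for "\a" so escaped backslashes
# survive while \n, \t and \r are expanded, then restores them at the end.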
helptagre = re.compile(r"""<[/]??[a-z_\-]+?(?:| +[a-z]+?=".*?") *[/]??>""")
def escape_help_text(text):
"""Escapes the help text as it would be in an SDF file.
<, >, " are only escaped in <[[:lower:]]> tags. Some HTML tags make it in in
lowercase so those are dealt with. Some OpenOffice.org help tags are not
escaped.
"""
text = text.replace("\\", "\\\\")
for tag in helptagre.findall(text):
escapethistag = False
for escape_tag in [
"ahelp",
"link",
"item",
"emph",
"defaultinline",
"switchinline",
"caseinline",
"variable",
"bookmark_value",
"image",
"embedvar",
"alt",
]:
if tag.startswith("<%s" % escape_tag) or tag == "</%s>" % escape_tag:
escapethistag = True
if tag in ["<br/>", "<help-id-missing/>"]:
escapethistag = True
if escapethistag:
escaped_tag = ("\\<" + tag[1:-1] + "\\>").replace('"', '\\"')
text = text.replace(tag, escaped_tag)
return text
def unescape_help_text(text):
"""Unescapes normal text to be suitable for writing to the SDF file."""
return (
text.replace(r"\<", "<")
.replace(r"\>", ">")
.replace(r"\"", '"')
.replace(r"\\", "\\")
)
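# Illustrative example for the help (.xhp) escaping above (hypothetical string):
#   escape_help_text('<emph select="all"/> and <b>bold</b>')
# escapes the known OpenOffice.org tag to '\\<emph select=\\"all\\"/\\>' but
# leaves the unrecognised <b>...</b> tags untouched; unescape_help_text()
# reverses the transformation.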
class ooline:
"""this represents one line, one translation in an .oo file"""
def __init__(self, parts=None):
"""construct an ooline from its parts"""
if parts is None:
(
self.project,
self.sourcefile,
self.dummy,
self.resourcetype,
self.groupid,
self.localid,
self.helpid,
self.platform,
self.width,
self.languageid,
self.text,
self.helptext,
self.quickhelptext,
self.title,
self.timestamp,
) = [""] * 15
else:
self.setparts(parts)
def setparts(self, parts):
"""create a line from its tab-delimited parts"""
if len(parts) != 15:
warnings.warn(
"oo line contains %d parts, it should contain 15: %r"
% (len(parts), parts)
)
newparts = list(parts)
if len(newparts) < 15:
newparts = newparts + [""] * (15 - len(newparts))
else:
newparts = newparts[:15]
parts = tuple(newparts)
(
self.project,
self.sourcefile,
self.dummy,
self.resourcetype,
self.groupid,
self.localid,
self.helpid,
self.platform,
self.width,
self.languageid,
self._text,
self.helptext,
self.quickhelptext,
self.title,
self.timestamp,
) = parts
def getparts(self):
"""return a list of parts in this line"""
return (
self.project,
self.sourcefile,
self.dummy,
self.resourcetype,
self.groupid,
self.localid,
self.helpid,
self.platform,
self.width,
self.languageid,
self._text,
self.helptext,
self.quickhelptext,
self.title,
self.timestamp,
)
def gettext(self):
"""Obtains the text column and handle escaping."""
if self.sourcefile.endswith(".xhp"):
return unescape_help_text(self._text)
else:
return unescape_text(self._text)
def settext(self, text):
"""Sets the text column and handle escaping."""
if self.sourcefile.endswith(".xhp"):
self._text = escape_help_text(text)
else:
self._text = escape_text(text)
text = property(gettext, settext)
def __str__(self):
"""convert to a string."""
return self.getoutput()
def getoutput(self):
"""return a line in tab-delimited form"""
parts = self.getparts()
return "\t".join(parts)
def getkey(self):
"""get the key that identifies the resource"""
return (
self.project,
self.sourcefile,
self.resourcetype,
self.groupid,
self.localid,
self.platform,
)
class oounit:
"""this represents a number of translations of a resource"""
def __init__(self):
"""construct the oounit"""
self.languages = {}
self.lines = []
def addline(self, line):
"""add a line to the oounit"""
self.languages[line.languageid] = line
self.lines.append(line)
def __str__(self):
"""convert to a string."""
return self.getoutput()
def getoutput(self, skip_source=False, fallback_lang=None):
"""return the lines in tab-delimited form"""
if skip_source:
lines = self.lines[1:]
if not lines:
# Untranslated, so let's do fall-back: (bug 1883)
new_line = ooline(self.lines[0].getparts())
new_line.languageid = fallback_lang
lines = [new_line]
else:
lines = self.lines
return "\r\n".join(str(line) for line in lines)
class oofile:
"""this represents an entire .oo file"""
UnitClass = oounit
encoding = "utf-8"
def __init__(self, input=None):
"""constructs the oofile"""
self.oolines = []
self.units = []
self.ookeys = {}
self.filename = ""
self.languages = []
if input is not None:
self.parse(input)
def addline(self, thisline):
"""adds a parsed line to the file"""
key = thisline.getkey()
element = self.ookeys.get(key)
if element is None:
element = self.UnitClass()
self.units.append(element)
self.ookeys[key] = element
element.addline(thisline)
self.oolines.append(thisline)
if thisline.languageid not in self.languages:
self.languages.append(thisline.languageid)
def parse(self, input):
"""parses lines and adds them to the file"""
if not self.filename:
self.filename = getattr(input, "name", "")
if hasattr(input, "read"):
src = input.read()
input.close()
else:
src = input
src = src.decode(self.encoding)
for line in src.split("\n"):
line = quote.rstripeol(line)
if not line:
continue
parts = line.split("\t")
thisline = ooline(parts)
self.addline(thisline)
def __bytes__(self):
out = BytesIO()
self.serialize(out)
return out.getvalue()
def serialize(self, out, skip_source=False, fallback_lang=None):
"""convert to a string. double check that unicode is handled"""
out.write(self.getoutput(skip_source, fallback_lang).encode(self.encoding))
def getoutput(self, skip_source=False, fallback_lang=None):
"""converts all the lines back to tab-delimited form"""
lines = []
for oe in self.units:
if len(oe.lines) > 2:
warnings.warn(
"contains %d lines (should be 2 at most): languages %r"
% (len(oe.lines), oe.languages)
)
oekeys = [line.getkey() for line in oe.lines]
warnings.warn(
"contains %d lines (should be 2 at most): keys %r"
% (len(oe.lines), oekeys)
)
oeline = oe.getoutput(skip_source, fallback_lang) + "\r\n"
lines.append(oeline)
return "".join(lines)
class oomultifile:
"""this takes a huge GSI file and represents it as multiple smaller
files...
"""
encoding = "utf-8"
def __init__(self, filename, mode=None, multifilestyle="single"):
"""initialises oomultifile from a seekable inputfile or writable outputfile"""
self.filename = filename
if mode is None:
if os.path.exists(filename):
mode = "r"
else:
mode = "w"
self.mode = mode
self.multifilestyle = multifilestyle
self.multifilename = os.path.splitext(filename)[0]
self.multifile = open(filename, mode)
self.subfilelines = {}
if mode.startswith("r"):
self.createsubfileindex()
def createsubfileindex(self):
"""reads in all the lines and works out the subfiles"""
linenum = 0
for line in self.multifile:
subfile = self.getsubfilename(line)
if subfile not in self.subfilelines:
self.subfilelines[subfile] = []
self.subfilelines[subfile].append(linenum)
linenum += 1
def getsubfilename(self, line):
"""looks up the subfile name for the line"""
if line.count("\t") < 2:
raise ValueError("invalid tab-delimited line: %r" % line)
lineparts = line.split("\t", 2)
module, filename = lineparts[0], lineparts[1]
if self.multifilestyle == "onefile":
ooname = self.multifilename
elif self.multifilestyle == "toplevel":
ooname = module
else:
filename = filename.replace("\\", "/")
fileparts = [module] + filename.split("/")
ooname = os.path.join(*fileparts[:-1])
return ooname + os.extsep + "oo"
def listsubfiles(self):
"""returns a list of subfiles in the file"""
return self.subfilelines.keys()
def __iter__(self):
"""iterates through the subfile names"""
yield from self.listsubfiles()
def __contains__(self, pathname):
"""checks if this pathname is a valid subfile"""
return pathname in self.subfilelines
def getsubfilesrc(self, subfile):
"""returns the list of lines matching the subfile"""
lines = []
requiredlines = dict.fromkeys(self.subfilelines[subfile])
linenum = 0
self.multifile.seek(0)
for line in self.multifile:
if linenum in requiredlines:
lines.append(line)
linenum += 1
return "".join(lines)
def openinputfile(self, subfile):
"""returns a pseudo-file object for the given subfile"""
subfilesrc = self.getsubfilesrc(subfile)
inputfile = BytesIO(subfilesrc.encode())
inputfile.filename = subfile
return inputfile
def openoutputfile(self, subfile):
"""returns a pseudo-file object for the given subfile"""
def onclose(contents):
if isinstance(contents, bytes):
contents = contents.decode(self.encoding)
self.multifile.write(contents)
self.multifile.flush()
outputfile = wStringIO.CatchStringOutput(onclose)
outputfile.filename = subfile
return outputfile
def getoofile(self, subfile):
"""returns an oofile built up from the given subfile's lines"""
subfilesrc = self.getsubfilesrc(subfile)
oosubfile = oofile()
oosubfile.filename = subfile
oosubfile.parse(subfilesrc)
return oosubfile
| nijel/translate | translate/storage/oo.py | Python | gpl-2.0 | 15,689 |
from rest_framework.permissions import BasePermission
from kolibri.core.auth.permissions.general import _user_is_admin_for_own_facility
class NetworkLocationPermissions(BasePermission):
"""
A user can access NetworkLocation objects if:
1. User can manage content (to get import/export peers)
2. User is a facility admin (to be able to sync facility with peer)
"""
def has_permission(self, request, view):
return request.user.can_manage_content or _user_is_admin_for_own_facility(
request.user
)
def has_object_permission(self, request, view, obj):
# Don't pass `obj` because locations don't have facilities attached to them
return request.user.can_manage_content or _user_is_admin_for_own_facility(
request.user
)
| indirectlylit/kolibri | kolibri/core/discovery/permissions.py | Python | mit | 811 |
# Copyright (c) 2013 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from neutron.common import constants
from neutron.common import rpc_compat
from neutron.common import topics
from neutron.common import utils
from neutron import manager
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class DhcpAgentNotifyAPI(rpc_compat.RpcProxy):
"""API for plugin to notify DHCP agent."""
BASE_RPC_API_VERSION = '1.0'
# It seems dhcp agent does not support bulk operation
VALID_RESOURCES = ['network', 'subnet', 'port']
VALID_METHOD_NAMES = ['network.create.end',
'network.update.end',
'network.delete.end',
'subnet.create.end',
'subnet.update.end',
'subnet.delete.end',
'port.create.end',
'port.update.end',
'port.delete.end']
def __init__(self, topic=topics.DHCP_AGENT, plugin=None):
super(DhcpAgentNotifyAPI, self).__init__(
topic=topic, default_version=self.BASE_RPC_API_VERSION)
self._plugin = plugin
@property
def plugin(self):
if self._plugin is None:
self._plugin = manager.NeutronManager.get_plugin()
return self._plugin
def _schedule_network(self, context, network, existing_agents):
"""Schedule the network to new agents
:return: all agents associated with the network
"""
new_agents = self.plugin.schedule_network(context, network) or []
if new_agents:
for agent in new_agents:
self._cast_message(
context, 'network_create_end',
{'network': {'id': network['id']}}, agent['host'])
elif not existing_agents:
LOG.warn(_('Unable to schedule network %s: no agents available; '
'will retry on subsequent port creation events.'),
network['id'])
return new_agents + existing_agents
def _get_enabled_agents(self, context, network, agents, method, payload):
"""Get the list of agents whose admin_state is UP."""
network_id = network['id']
enabled_agents = [x for x in agents if x.admin_state_up]
active_agents = [x for x in agents if x.is_active]
len_enabled_agents = len(enabled_agents)
len_active_agents = len(active_agents)
if len_active_agents < len_enabled_agents:
LOG.warn(_("Only %(active)d of %(total)d DHCP agents associated "
"with network '%(net_id)s' are marked as active, so "
" notifications may be sent to inactive agents.")
% {'active': len_active_agents,
'total': len_enabled_agents,
'net_id': network_id})
if not enabled_agents:
num_ports = self.plugin.get_ports_count(
context, {'network_id': [network_id]})
notification_required = (
num_ports > 0 and len(network['subnets']) >= 1)
if notification_required:
LOG.error(_("Will not send event %(method)s for network "
"%(net_id)s: no agent available. Payload: "
"%(payload)s")
% {'method': method,
'net_id': network_id,
'payload': payload})
return enabled_agents
def _notify_agents(self, context, method, payload, network_id):
"""Notify all the agents that are hosting the network."""
# fanout is required as we do not know who is "listening"
no_agents = not utils.is_extension_supported(
self.plugin, constants.DHCP_AGENT_SCHEDULER_EXT_ALIAS)
fanout_required = method == 'network_delete_end' or no_agents
# we do nothing on network creation because we want to give the
# admin the chance to associate an agent to the network manually
cast_required = method != 'network_create_end'
if fanout_required:
self._fanout_message(context, method, payload)
elif cast_required:
admin_ctx = (context if context.is_admin else context.elevated())
network = self.plugin.get_network(admin_ctx, network_id)
agents = self.plugin.get_dhcp_agents_hosting_networks(
context, [network_id])
# schedule the network first, if needed
schedule_required = method == 'port_create_end'
if schedule_required:
agents = self._schedule_network(admin_ctx, network, agents)
enabled_agents = self._get_enabled_agents(
context, network, agents, method, payload)
for agent in enabled_agents:
self._cast_message(
context, method, payload, agent.host, agent.topic)
def _cast_message(self, context, method, payload, host,
topic=topics.DHCP_AGENT):
"""Cast the payload to the dhcp agent running on the host."""
self.cast(
context, self.make_msg(method,
payload=payload),
topic='%s.%s' % (topic, host))
def _fanout_message(self, context, method, payload):
"""Fanout the payload to all dhcp agents."""
self.fanout_cast(
context, self.make_msg(method,
payload=payload),
topic=topics.DHCP_AGENT)
def network_removed_from_agent(self, context, network_id, host):
self._cast_message(context, 'network_delete_end',
{'network_id': network_id}, host)
def network_added_to_agent(self, context, network_id, host):
self._cast_message(context, 'network_create_end',
{'network': {'id': network_id}}, host)
def agent_updated(self, context, admin_state_up, host):
self._cast_message(context, 'agent_updated',
{'admin_state_up': admin_state_up}, host)
def notify(self, context, data, method_name):
# data is {'key' : 'value'} with only one key
if method_name not in self.VALID_METHOD_NAMES:
return
obj_type = data.keys()[0]
if obj_type not in self.VALID_RESOURCES:
return
obj_value = data[obj_type]
network_id = None
if obj_type == 'network' and 'id' in obj_value:
network_id = obj_value['id']
elif obj_type in ['port', 'subnet'] and 'network_id' in obj_value:
network_id = obj_value['network_id']
if not network_id:
return
method_name = method_name.replace(".", "_")
if method_name.endswith("_delete_end"):
if 'id' in obj_value:
self._notify_agents(context, method_name,
{obj_type + '_id': obj_value['id']},
network_id)
else:
self._notify_agents(context, method_name, data, network_id)
| subramani95/neutron | neutron/api/rpc/agentnotifiers/dhcp_rpc_agent_api.py | Python | apache-2.0 | 7,721 |
# EXAMPLE 1: Polymorphism in Python with a function:
# ==============================================================================
# We create two classes: Bear and Dog, both can make a distinct sound.
# We then make two instances and call their action using the same method.
class Bear(object):
def sound(self):
print 'Groarrr'
class Dog(object):
def sound(self):
print 'Woof woof!'
def makeSound(animalType):
animalType.sound()
bear_obj = Bear()
dog_obj = Dog()
makeSound(bear_obj)
makeSound(dog_obj)
print
# EXAMPLE 2: Polymorphism with abstract class (most commonly used):
# ==============================================================================
class Document:
def __init__(self, name):
self.name = name
def show(self):
raise NotImplementedError('Subclass must implement abstract method')
class Pdf(Document):
def show(self):
        return 'Show pdf contents!'
class Word(Document):
def show(self):
        return 'Show word contents!'
documents = [Pdf('Documents1'), Pdf('Documents2'), Word('Documents3')]
for document in documents:
print document.name + ': ' + document.show()
print
# EXAMPLE 3: Polymorphism with abstract class (most commonly used):
# ==============================================================================
class Car:
def __init__(self, name):
self.name = name
def drive(self):
raise NotImplementedError('Subclass must implement abstract method')
def stop(self):
raise NotImplementedError('Subclass must implement abstract method')
class Sportscar(Car):
def drive(self):
return 'Sportscar driving!'
def stop(self):
        return 'Sportscar braking!'
class Truck(Car):
def drive(self):
return 'Truck driving slowly because heavily loaded.'
def stop(self):
        return 'Truck braking!'
cars = [Truck('Bananatruck'), Truck('Orangetruck'), Sportscar('Z3')]
for car in cars:
print car.name + ': ' + car.drive() | rolandovillca/python_introduction_basic | oo/polymorphism.py | Python | mit | 2,016 |
from requests_lib import enable_requests
| dieseldev/diesel | diesel/util/patches/__init__.py | Python | bsd-3-clause | 41 |
# This file is part of Invenio.
# Copyright (C) 2008, 2009, 2010, 2011, 2012 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Invenio WebJournal Administrator Interface."""
__revision__ = "$Id$"
__lastupdated__ = """$Date$"""
import invenio.legacy.webjournal.adminlib as wjn
from invenio.modules.access.engine import acc_authorize_action
from invenio.legacy.webpage import page, error_page
from invenio.config import CFG_SITE_URL, CFG_SITE_LANG, CFG_SITE_NAME
from invenio.legacy.webuser import getUid, page_not_authorized
from invenio.base.i18n import wash_language, gettext_set_language
from invenio.utils.url import wash_url_argument
from invenio.ext.logging import register_exception
from invenio.legacy.webjournal.config import \
InvenioWebJournalNoJournalOnServerError, \
InvenioWebJournalNoNameError, \
InvenioWebJournalNoCurrentIssueError, \
InvenioWebJournalIssueNumberBadlyFormedError, \
InvenioWebJournalJournalIdNotFoundDBError
from invenio.legacy.webjournal.washer import \
wash_journal_name, \
wash_issue_number
def index(req, ln=CFG_SITE_LANG, journal_name=None, action=""):
"""
Main administration page.
Lists the journals, and offers options to edit them, delete them
or add new journals
"""
navtrail_previous_links = wjn.getnavtrail()
ln = wash_language(ln)
_ = gettext_set_language(ln)
try:
uid = getUid(req)
except:
return error_page('Error', req)
try:
journal_name = wash_journal_name(ln, journal_name)
action = wash_url_argument(action, 'str')
except InvenioWebJournalNoJournalOnServerError as e:
# Ok, no journal. Let the admin add one...
pass
except InvenioWebJournalNoNameError as e:
register_exception(req=req)
return e.user_box()
if action in ['delete', 'askDelete']:
# To perform these, one must be authorized
auth = acc_authorize_action(getUid(req), 'cfgwebjournal',
name=journal_name, with_editor_rights='yes')
else:
auth = acc_authorize_action(getUid(req), 'cfgwebjournal')
if auth[0] == 0:
return page(title=_('WebJournal Admin'),
body=wjn.perform_index(ln=ln,
journal_name=journal_name,
action=action,
uid=getUid(req)),
uid=uid,
language=ln,
req=req,
navtrail = navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
def administrate(req, journal_name, ln=CFG_SITE_LANG):
"""
Shows the settings of a journal
"""
navtrail_previous_links = wjn.getnavtrail(' > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py">WebJournal Admin</a>' % CFG_SITE_URL)
ln = wash_language(ln)
_ = gettext_set_language(ln)
try:
uid = getUid(req)
except:
return error_page('Error', req)
try:
journal_name = wash_journal_name(ln, journal_name)
except InvenioWebJournalNoJournalOnServerError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoNameError as e:
register_exception(req=req)
return e.user_box()
auth = acc_authorize_action(getUid(req), 'cfgwebjournal',
name="%s" % journal_name)
if auth[0] == 0:
as_editor = acc_authorize_action(getUid(req), 'cfgwebjournal',
name="%s" % journal_name,
with_editor_rights='yes')[0] == 0
return page(title=_('Administrate %(journal_name)s' % {'journal_name':journal_name}),
body=wjn.perform_administrate(ln=ln, journal_name=journal_name,
as_editor=as_editor),
uid=uid,
language=ln,
req=req,
navtrail = navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
def feature_record(req, journal_name="", recid="", img_url="", ln=CFG_SITE_LANG, action=""):
"""
Interface to feature a record. Will be saved in a flat file.
"""
navtrail_previous_links = wjn.getnavtrail(' > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py">WebJournal Admin</a> > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py/administrate?journal_name=%s">%s</a>' % (CFG_SITE_URL, CFG_SITE_URL, journal_name, journal_name))
ln = wash_language(ln)
_ = gettext_set_language(ln)
try:
uid = getUid(req)
except:
return error_page('Error', req)
try:
journal_name = wash_journal_name(ln, journal_name)
except InvenioWebJournalNoJournalOnServerError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoNameError as e:
register_exception(req=req)
return e.user_box()
auth = acc_authorize_action(getUid(req), 'cfgwebjournal',
name="%s" % journal_name,
with_editor_rights='yes')
if auth[0] == 0:
return page(title=_("Feature a record"),
body=wjn.perform_feature_record(ln=ln,
journal_name=journal_name,
recid=recid,
img_url=img_url,
action=action),
uid=uid,
language=ln,
req=req,
navtrail = navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
def alert(req, journal_name="", ln=CFG_SITE_LANG, sent="False", plainText=u"",
htmlMail="", recipients="", subject="", issue="", force="False"):
"""
Sends an email alert, in HTML/PlainText or only PlainText to a mailing
list to alert for new journal releases.
"""
navtrail_previous_links = wjn.getnavtrail(' > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py">WebJournal Admin</a> > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py/administrate?journal_name=%s">%s</a>' % (CFG_SITE_URL, CFG_SITE_URL, journal_name, journal_name))
ln = wash_language(ln)
_ = gettext_set_language(ln)
try:
uid = getUid(req)
except:
return error_page('Error', req)
try:
journal_name = wash_journal_name(ln, journal_name)
issue = wash_issue_number(ln,
journal_name,
issue)
plain_text = wash_url_argument(plainText, 'str')
html_mail = wash_url_argument(htmlMail, 'str')
recipients = wash_url_argument(recipients, 'str')
subject = wash_url_argument(subject, 'str')
sent = wash_url_argument(sent, 'str')
force = wash_url_argument(force, 'str')
except InvenioWebJournalNoJournalOnServerError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoNameError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoCurrentIssueError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalIssueNumberBadlyFormedError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalJournalIdNotFoundDBError as e:
register_exception(req=req)
return e.user_box()
auth = acc_authorize_action(getUid(req), 'cfgwebjournal',
name="%s" % journal_name,
with_editor_rights='yes')
if auth[0] == 0:
return page(title=_("Email Alert System"),
body=wjn.perform_request_alert(journal_name=journal_name,
issue=issue,
ln=ln,
sent=sent,
plain_text=plain_text,
subject=subject,
recipients=recipients,
html_mail=html_mail,
force=force),
uid=uid,
language=ln,
req=req,
navtrail = navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
def regenerate(req, journal_name="", issue="", ln=CFG_SITE_LANG,
confirmed_p="", publish_draft_articles_p=""):
"""
Clears the cache for the given issue.
"""
navtrail_previous_links = wjn.getnavtrail(' > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py">WebJournal Admin</a> > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py/administrate?journal_name=%s">%s</a>' % (CFG_SITE_URL, CFG_SITE_URL, journal_name, journal_name))
ln = wash_language(ln)
_ = gettext_set_language(ln)
try:
uid = getUid(req)
except:
return error_page('Error', req)
try:
journal_name = wash_journal_name(ln, journal_name)
issue_number = wash_issue_number(ln, journal_name,
issue)
confirmed_p = wash_url_argument(confirmed_p, 'str') == "confirmed"
publish_draft_articles_p = wash_url_argument(publish_draft_articles_p, 'str') == "move"
except InvenioWebJournalNoJournalOnServerError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoNameError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoCurrentIssueError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalIssueNumberBadlyFormedError as e:
register_exception(req=req)
return e.user_box()
auth = acc_authorize_action(getUid(req), 'cfgwebjournal',
name="%s" % journal_name)
if auth[0] == 0:
return page(title=confirmed_p and _("Issue regenerated") or _("Regenerate Issue"),
body=wjn.perform_regenerate_issue(ln=ln,
journal_name=journal_name,
issue=issue,
confirmed_p=confirmed_p,
publish_draft_articles_p=publish_draft_articles_p),
uid=uid,
language=ln,
req=req,
navtrail = navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
def issue_control(req, journal_name="", issue=[],
ln=CFG_SITE_LANG, action="cfg"):
"""
Page that allows full control over creating, backtracing, adding to,
removing from issues.
"""
navtrail_previous_links = wjn.getnavtrail(' > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py">WebJournal Admin</a> > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py/administrate?journal_name=%s">%s</a>' % (CFG_SITE_URL, CFG_SITE_URL, journal_name, journal_name))
ln = wash_language(ln)
_ = gettext_set_language(ln)
try:
uid = getUid(req)
except:
return error_page('Error', req)
try:
journal_name = wash_journal_name(ln, journal_name)
action = wash_url_argument(action, 'str')
issue = wash_url_argument(issue, 'list')
issues = [wash_issue_number(ln,journal_name, _issue) \
for _issue in issue \
if _issue != "ww/YYYY"]
except InvenioWebJournalNoJournalOnServerError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoNameError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalNoCurrentIssueError as e:
register_exception(req=req)
return e.user_box()
except InvenioWebJournalIssueNumberBadlyFormedError as e:
register_exception(req=req)
return e.user_box()
auth = acc_authorize_action(getUid(req), 'cfgwebjournal',
name="%s" % journal_name,
with_editor_rights='yes')
if auth[0] == 0:
return page(title=_("Publishing Interface"),
body=wjn.perform_request_issue_control(journal_name=journal_name,
issues=issues,
ln=ln,
action=action),
uid=uid,
language=ln,
req=req,
navtrail = navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
def configure(req, journal_name=None, ln=CFG_SITE_LANG, xml_config=u'', action='edit'):
"""
Let admins configure the journal settings
"""
ln = wash_language(ln)
_ = gettext_set_language(ln)
if journal_name is None:
navtrail_previous_links = wjn.getnavtrail(' > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py">WebJournal Admin</a>' % CFG_SITE_URL)
else:
navtrail_previous_links = wjn.getnavtrail(' > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py">WebJournal Admin</a> > <a class="navtrail" href="%s/admin/webjournal/webjournaladmin.py/administrate?journal_name=%s">%s</a>' % (CFG_SITE_URL, CFG_SITE_URL, journal_name, journal_name))
if action in ['add', 'addDone']:
page_title = _('Add Journal')
else:
page_title = _("Edit Settings")
try:
uid = getUid(req)
except:
return error_page('Error', req)
try:
journal_name = wash_journal_name(ln, journal_name, guess=False)
xml_config = wash_url_argument(xml_config, 'str')
action = wash_url_argument(action, 'str')
except InvenioWebJournalNoJournalOnServerError as e:
# Ok, no journal. Let the admin add one...
pass
except InvenioWebJournalNoNameError as e:
register_exception(req=req)
return e.user_box()
auth = acc_authorize_action(getUid(req), 'cfgwebjournal',
name="%s" % journal_name,
with_editor_rights='yes')
if auth[0] == 0:
return page(title=page_title,
body=wjn.perform_request_configure(journal_name=journal_name,
ln=ln,
xml_config=xml_config,
action=action),
uid=uid,
language=ln,
req=req,
navtrail = navtrail_previous_links,
lastupdated=__lastupdated__)
else:
return page_not_authorized(req=req, text=auth[1], navtrail=navtrail_previous_links)
| Lilykos/invenio | invenio/legacy/webjournal/web/admin/webjournaladmin.py | Python | gpl-2.0 | 16,753 |
import unittest
import transaction
from pyramid import testing
from .models import DBSession
class TestMyViewSuccessCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
from sqlalchemy import create_engine
engine = create_engine('sqlite://')
from .models import (
Base,
MyModel,
)
DBSession.configure(bind=engine)
Base.metadata.create_all(engine)
with transaction.manager:
model = MyModel(name='one', value=55)
DBSession.add(model)
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_passing_view(self):
from .views import my_view
request = testing.DummyRequest()
info = my_view(request)
self.assertEqual(info['one'].name, 'one')
self.assertEqual(info['project'], 'shishosan')
class TestMyViewFailureCondition(unittest.TestCase):
def setUp(self):
self.config = testing.setUp()
from sqlalchemy import create_engine
engine = create_engine('sqlite://')
from .models import (
Base,
MyModel,
)
DBSession.configure(bind=engine)
def tearDown(self):
DBSession.remove()
testing.tearDown()
def test_failing_view(self):
from .views import my_view
request = testing.DummyRequest()
info = my_view(request)
self.assertEqual(info.status_int, 500) | kiritantan/ShishoSan | shishosan/tests.py | Python | mit | 1,500 |
import time
# bind the formatted timestamp to a new name so the time module is not shadowed
now = time.strftime('%Y-%m-%d %H:%M:%S')
print(type(now))
| xiaoyongaa/ALL | python基础2周/1.py | Python | apache-2.0 | 70 |
# -*- coding: utf-8 -*-
#
from .common import *
class StopTest(TestCase):
def test_block(self):
root = Node.add_root()
node1 = root.add_child(content_object=StopInterpretation())
node2 = variable_assign_value(parent=root)
root = Node.objects.get(id=root.id)
context = Context()
root.interpret(context)
self.assertIsInstance(context.get_variable(VariableDefinition.objects.get(name='A')), Variable.Undefined)
def test_nesting(self):
root = Node.add_root()
node1 = root.add_child()
node1.add_child(content_object=StopInterpretation())
root = Node.objects.get(id=root.id)
node2 = root.add_child()
variable_assign_value(parent=node2)
root = Node.objects.get(id=root.id)
context = Context()
root.interpret(context)
self.assertIsInstance(context.get_variable(VariableDefinition.objects.get(name='A')), Variable.Undefined)
| vlfedotov/django-business-logic | tests/test_stop.py | Python | mit | 965 |
# coding: utf-8
from re import match
from os.path import dirname, exists, join
from datetime import datetime
import functools
from collections import namedtuple
import re
class ChangeLogException(Exception):
pass
class OnlyLocalException(Exception):
pass
def real_repo_only(method):
@functools.wraps(method)
def wrapped(self, *args, **kwargs):
if not self.has_repo():
raise OnlyLocalException()
return method(self, *args, **kwargs)
return wrapped
class GitInfo(object):
_git_info = None
repo_path = dirname(dirname(__file__))
dump_path = join(dirname(__file__), 'git_info.pickled')
@classmethod
def has_repo(cls):
return exists(join(cls.repo_path, '.git'))
@classmethod
def get_repo(cls):
import git
return git.Repo(cls.repo_path)
def __init__(self):
repo = self.get_repo()
self.full_hash = repo.head.commit.sha
self.root = repo.working_dir
try:
try:
self.branch_name = repo.active_branch.name
except TypeError as e:
self.branch_name = [b for b in repo.remotes[0].refs if b.commit == repo.head.commit][0].name
except Exception:
self.branch_name = '[detached]' #repo.head.commit.sha[0:7]
# print(self.branch_name)
self.history = [self.full_hash] + [p.sha for p in repo.head.commit.iter_parents()]
# self.short_hash = local('git rev-parse --short HEAD', capture=True).stdout
if self.branch_name == 'master':
self.tag = repo.git.describe(match='v*.*.*.*')
# local('git describe --match "v*.*.*.*"', capture=True).stdout
matcher = match(r'^(v((\d+)\.(\d+)\.(\d+))\.(\d+))(-.*)?$', self.tag)
if matcher is None:
                raise ChangeLogException('invalid tag: %s' % self.tag)
self.short_tag = matcher.group(1)
self.nice_short_version = matcher.group(2)
else:
self.tag = None
self.short_tag = None
self.nice_short_version = self.short_hash
self.timestamp = datetime.now().strftime("%d/%m/%y %H:%M:%S")
self._changelog = []
@property
def short_hash(self):
return self.full_hash[0:7]
@property
def nice_version(self):
return self.branch_name + '-' + self.nice_short_version
def make_tag_nice(self, tag):
if tag == 'HEAD':
            return 'dostępne po aktualizacji'  # Polish: "available after the update"
from django.conf import settings
return re.sub(settings.SURROUND_CHANGELOG_PATTERN, settings.SURROUND_CHANGELOG_NICE_VERSION, tag)
def filter_changelog(self, mode):
batches = []
issue_matcher = re.compile(r'^' + '|'.join(mode[1]) + '-')
for b in self.changelog:
entries = []
for e in b.entries:
if e.category in mode[0]:
entries.append(ChangeLogEntry(e.category, list(filter(lambda i: issue_matcher.match(i.key), e.issues)), e.text))
batches.append(ChangeLogBatch(b.tag, b.nice_name, entries))
return batches
@property
def changelog(self):
if not self._changelog:
from django.conf import settings
for number in settings.SURROUND_CHANGELOG_BATCHES:
try:
self._changelog.append(self.changelog_between_versions(settings.SURROUND_CHANGELOG_PATTERN, self.make_tag_nice, number))
except ChangeLogException:
pass
return self._changelog
def dump(self):
import pickle
force_fetch = self.changelog
with open(self.dump_path, 'w') as f:
pickle.dump(self, f)
@real_repo_only
def get_commit(self, sha):
commits = [c for c in self.get_repo().iter_commits('--all') if c.sha == sha]
return commits[0] if commits else None
@real_repo_only
def changelog_between_commits(self, begin, end):
import subprocess
out = subprocess.check_output(['git', 'diff', '--diff-filter=AM', '--unified=0', '%s..%s' % (begin, end), '--', join(self.repo_path, 'changelog.txt')]).split('\n')
changes = []
for add in out[5:]:
m = CHANGELOG_ENTRY_REGEX.match(add)
if m:
changes.append(ChangeLogEntry(m.group('category'), map(ChangeLogIssue, m.group('issues').split(',')), m.group('text')))
return changes
@real_repo_only
def get_sorted_tags(self, pattern):
"""
        pattern - e.g. for production: r'^v(?P<cmp0>\d+)\.(?P<cmp1>\d+)\.(?P<cmp2>\d+)\.0'
        groups named cmp0, cmp1, ... are converted to integers and tags are compared using the resulting tuple; unnamed or differently named groups are ignored
"""
matcher = re.compile(pattern)
tags = []
for tag in self.get_repo().git.tag().split('\n'):
m = matcher.match(tag)
if m:
numbers = []
for k, v in m.groupdict().items():
if k.startswith('cmp'):
numbers.append((int(k[3:]), int(v)))
numbers.sort()
tags.append((tag, numbers))
return map(lambda t: t[0], sorted(tags, key=lambda t: t[1], reverse=True))
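        # Illustrative example (hypothetical tags): with a pattern like
        # r'^v(?P<cmp0>\d+)\.(?P<cmp1>\d+)\.(?P<cmp2>\d+)\.0', the tags
        # ["v2.9.0.0", "v2.10.3.0"] come back as ["v2.10.3.0", "v2.9.0.0"]
        # because (2, 10, 3) > (2, 9, 0) when compared as integer tuples.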
@real_repo_only
def changelog_between_versions(self, pattern, converter=lambda t: t, start=0, length=1):
tags = self.get_sorted_tags(pattern)
tags.insert(0, 'HEAD')
try:
tag = tags[start]
return ChangeLogBatch(tag, converter(tag), self.changelog_between_commits(tags[start + length], tags[start]))
except IndexError:
raise ChangeLogException('invalid changelog batch number')
@classmethod
def get(cls):
if cls._git_info is None:
if cls.has_repo():
cls._git_info = cls()
else:
repo = None
import pickle
with open(cls.dump_path, 'r') as f:
cls._git_info = pickle.load(f)
return cls._git_info
CHANGELOG_ENTRY_REGEX = re.compile(r'^\+(?P<category>\w+)\ (?P<issues>(\w+-\d+(,?))+)\ (?P<text>.*)$')
class ChangeLogIssue():
def __init__(self, key):
self.key = key
def get_absolute_url(self):
return "https://jira.man.poznan.pl/jira/browse/" + self.key
ChangeLogEntry = namedtuple('ChangeLogEntry', ['category', 'issues', 'text'])
ChangeLogBatch = namedtuple('ChangeLogBatch', ['tag', 'nice_name', 'entries'])
| sniegu/django-surround | surround/common/git_info.py | Python | mit | 6,513 |
import statsmodels.api as sm
from matplotlib import pyplot as plt
data = sm.datasets.longley.load()
data.exog = sm.add_constant(data.exog)
mod_fit = sm.OLS(data.endog, data.exog).fit()
res = mod_fit.resid # residuals
fig = sm.qqplot(res)
plt.show()
#qqplot of the residuals against quantiles of t-distribution with 4 degrees of freedom:
import scipy.stats as stats
fig = sm.qqplot(res, stats.t, distargs=(4,))
plt.show()
#qqplot against same as above, but with mean 3 and std 10:
fig = sm.qqplot(res, stats.t, distargs=(4,), loc=3, scale=10)
plt.show()
#Automatically determine parameters for t distribution including the loc and scale:
fig = sm.qqplot(res, stats.t, fit=True, line='45')
plt.show()
| UpSea/midProjects | BasicOperations/08_Statsmodels/01_Statsmodels_01_OLS03.py | Python | mit | 706 |
import hashlib
from django.conf import settings
from django.utils import importlib
from django.utils.datastructures import SortedDict
from django.utils.encoding import smart_str
from django.core.exceptions import ImproperlyConfigured
from django.utils.crypto import (
pbkdf2, constant_time_compare, get_random_string)
from django.utils.translation import ugettext_noop as _
UNUSABLE_PASSWORD = '!' # This will never be a valid encoded hash
HASHERS = None # lazily loaded from PASSWORD_HASHERS
PREFERRED_HASHER = None # defaults to first item in PASSWORD_HASHERS
def is_password_usable(encoded):
return (encoded is not None and encoded != UNUSABLE_PASSWORD)
def check_password(password, encoded, setter=None, preferred='default'):
"""
Returns a boolean of whether the raw password matches the three
part encoded digest.
If setter is specified, it'll be called when you need to
regenerate the password.
"""
if not password or not is_password_usable(encoded):
return False
preferred = get_hasher(preferred)
raw_password = password
password = smart_str(password)
encoded = smart_str(encoded)
# Ancient versions of Django created plain MD5 passwords and accepted
# MD5 passwords with an empty salt.
if ((len(encoded) == 32 and '$' not in encoded) or
(len(encoded) == 37 and encoded.startswith('md5$$'))):
hasher = get_hasher('unsalted_md5')
# Ancient versions of Django accepted SHA1 passwords with an empty salt.
elif len(encoded) == 46 and encoded.startswith('sha1$$'):
hasher = get_hasher('unsalted_sha1')
else:
algorithm = encoded.split('$', 1)[0]
hasher = get_hasher(algorithm)
must_update = hasher.algorithm != preferred.algorithm
is_correct = hasher.verify(password, encoded)
if setter and is_correct and must_update:
setter(raw_password)
return is_correct
def make_password(password, salt=None, hasher='default'):
"""
Turn a plain-text password into a hash for database storage
Same as encode() but generates a new random salt. If
password is None or blank then UNUSABLE_PASSWORD will be
returned which disallows logins.
"""
if not password:
return UNUSABLE_PASSWORD
hasher = get_hasher(hasher)
password = smart_str(password)
if not salt:
salt = hasher.salt()
salt = smart_str(salt)
return hasher.encode(password, salt)
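# Illustrative usage sketch for the two helpers above (assumes Django settings
# are configured; the values shown are examples, not real hashes):
#   encoded = make_password('s3cret')   # e.g. 'pbkdf2_sha256$10000$<salt>$<hash>'
#   check_password('s3cret', encoded)   # True
#   check_password('wrong', encoded)    # False
# When check_password() is given a `setter` callable and the stored hash was
# made with a non-preferred hasher, the setter is called with the raw password
# so the hash can be re-encoded with the preferred algorithm.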
def load_hashers(password_hashers=None):
global HASHERS
global PREFERRED_HASHER
hashers = []
if not password_hashers:
password_hashers = settings.PASSWORD_HASHERS
for backend in password_hashers:
try:
mod_path, cls_name = backend.rsplit('.', 1)
mod = importlib.import_module(mod_path)
hasher_cls = getattr(mod, cls_name)
except (AttributeError, ImportError, ValueError):
raise ImproperlyConfigured("hasher not found: %s" % backend)
hasher = hasher_cls()
if not getattr(hasher, 'algorithm'):
raise ImproperlyConfigured("hasher doesn't specify an "
"algorithm name: %s" % backend)
hashers.append(hasher)
HASHERS = dict([(hasher.algorithm, hasher) for hasher in hashers])
PREFERRED_HASHER = hashers[0]
def get_hasher(algorithm='default'):
"""
Returns an instance of a loaded password hasher.
If algorithm is 'default', the default hasher will be returned.
This function will also lazy import hashers specified in your
settings file if needed.
"""
if hasattr(algorithm, 'algorithm'):
return algorithm
elif algorithm == 'default':
if PREFERRED_HASHER is None:
load_hashers()
return PREFERRED_HASHER
else:
if HASHERS is None:
load_hashers()
if algorithm not in HASHERS:
raise ValueError("Unknown password hashing algorithm '%s'. "
"Did you specify it in the PASSWORD_HASHERS "
"setting?" % algorithm)
return HASHERS[algorithm]
def mask_hash(hash, show=6, char="*"):
"""
Returns the given hash, with only the first ``show`` number shown. The
rest are masked with ``char`` for security reasons.
"""
masked = hash[:show]
masked += char * len(hash[show:])
return masked
class BasePasswordHasher(object):
"""
Abstract base class for password hashers
When creating your own hasher, you need to override algorithm,
verify(), encode() and safe_summary().
PasswordHasher objects are immutable.
"""
algorithm = None
library = None
def _load_library(self):
if self.library is not None:
if isinstance(self.library, (tuple, list)):
name, mod_path = self.library
else:
name = mod_path = self.library
try:
module = importlib.import_module(mod_path)
except ImportError:
raise ValueError("Couldn't load %s password algorithm "
"library" % name)
return module
raise ValueError("Hasher '%s' doesn't specify a library attribute" %
self.__class__)
def salt(self):
"""
Generates a cryptographically secure nonce salt in ascii
"""
return get_random_string()
def verify(self, password, encoded):
"""
Checks if the given password is correct
"""
raise NotImplementedError()
def encode(self, password, salt):
"""
Creates an encoded database value
The result is normally formatted as "algorithm$salt$hash" and
must be fewer than 128 characters.
"""
raise NotImplementedError()
def safe_summary(self, encoded):
"""
Returns a summary of safe values
The result is a dictionary and will be used where the password field
must be displayed to construct a safe representation of the password.
"""
raise NotImplementedError()
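# Illustrative wiring of a custom hasher (hypothetical project names; a sketch,
# not this module's configuration):
#   # settings.py
#   PASSWORD_HASHERS = (
#       'myproject.hashers.MyCustomHasher',                    # first entry is preferred
#       'django.contrib.auth.hashers.PBKDF2PasswordHasher',
#   )
# load_hashers() imports each dotted path, instantiates it, and keeps the first
# entry as PREFERRED_HASHER; the remaining entries are still available so that
# existing hashes keep verifying.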
class PBKDF2PasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the PBKDF2 algorithm (recommended)
Configured to use PBKDF2 + HMAC + SHA256 with 10000 iterations.
The result is a 64 byte binary string. Iterations may be changed
safely but you must rename the algorithm if you change SHA256.
"""
algorithm = "pbkdf2_sha256"
iterations = 10000
digest = hashlib.sha256
def encode(self, password, salt, iterations=None):
assert password
assert salt and '$' not in salt
if not iterations:
iterations = self.iterations
hash = pbkdf2(password, salt, iterations, digest=self.digest)
hash = hash.encode('base64').strip()
return "%s$%d$%s$%s" % (self.algorithm, iterations, salt, hash)
def verify(self, password, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt, int(iterations))
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, iterations, salt, hash = encoded.split('$', 3)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('iterations'), iterations),
(_('salt'), mask_hash(salt)),
(_('hash'), mask_hash(hash)),
])
class PBKDF2SHA1PasswordHasher(PBKDF2PasswordHasher):
"""
Alternate PBKDF2 hasher which uses SHA1, the default PRF
recommended by PKCS #5. This is compatible with other
implementations of PBKDF2, such as openssl's
PKCS5_PBKDF2_HMAC_SHA1().
"""
algorithm = "pbkdf2_sha1"
digest = hashlib.sha1
class BCryptPasswordHasher(BasePasswordHasher):
"""
Secure password hashing using the bcrypt algorithm (recommended)
This is considered by many to be the most secure algorithm but you
must first install the py-bcrypt library. Please be warned that
this library depends on native C code and might cause portability
issues.
"""
algorithm = "bcrypt"
library = ("py-bcrypt", "bcrypt")
rounds = 12
def salt(self):
bcrypt = self._load_library()
return bcrypt.gensalt(self.rounds)
def encode(self, password, salt):
bcrypt = self._load_library()
data = bcrypt.hashpw(password, salt)
return "%s$%s" % (self.algorithm, data)
def verify(self, password, encoded):
algorithm, data = encoded.split('$', 1)
assert algorithm == self.algorithm
bcrypt = self._load_library()
return constant_time_compare(data, bcrypt.hashpw(password, data))
def safe_summary(self, encoded):
algorithm, empty, algostr, work_factor, data = encoded.split('$', 4)
assert algorithm == self.algorithm
salt, checksum = data[:22], data[22:]
return SortedDict([
(_('algorithm'), algorithm),
(_('work factor'), work_factor),
(_('salt'), mask_hash(salt)),
(_('checksum'), mask_hash(checksum)),
])
class SHA1PasswordHasher(BasePasswordHasher):
"""
The SHA1 password hashing algorithm (not recommended)
"""
algorithm = "sha1"
def encode(self, password, salt):
assert password
assert salt and '$' not in salt
hash = hashlib.sha1(salt + password).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class MD5PasswordHasher(BasePasswordHasher):
"""
The Salted MD5 password hashing algorithm (not recommended)
"""
algorithm = "md5"
def encode(self, password, salt):
assert password
assert salt and '$' not in salt
hash = hashlib.md5(salt + password).hexdigest()
return "%s$%s$%s" % (self.algorithm, salt, hash)
def verify(self, password, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
encoded_2 = self.encode(password, salt)
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
algorithm, salt, hash = encoded.split('$', 2)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('salt'), mask_hash(salt, show=2)),
(_('hash'), mask_hash(hash)),
])
class UnsaltedSHA1PasswordHasher(BasePasswordHasher):
"""
Very insecure algorithm that you should *never* use; stores SHA1 hashes
with an empty salt.
This class is implemented because Django used to accept such password
hashes. Some older Django installs still have these values lingering
around so we need to handle and upgrade them properly.
"""
algorithm = "unsalted_sha1"
def salt(self):
return ''
def encode(self, password, salt):
assert salt == ''
hash = hashlib.sha1(password).hexdigest()
return 'sha1$$%s' % hash
def verify(self, password, encoded):
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
assert encoded.startswith('sha1$$')
hash = encoded[6:]
return SortedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(hash)),
])
class UnsaltedMD5PasswordHasher(BasePasswordHasher):
"""
Incredibly insecure algorithm that you should *never* use; stores unsalted
MD5 hashes without the algorithm prefix, also accepts MD5 hashes with an
empty salt.
This class is implemented because Django used to store passwords this way
and to accept such password hashes. Some older Django installs still have
these values lingering around so we need to handle and upgrade them
properly.
"""
algorithm = "unsalted_md5"
def salt(self):
return ''
def encode(self, password, salt):
assert salt == ''
return hashlib.md5(password).hexdigest()
def verify(self, password, encoded):
if len(encoded) == 37 and encoded.startswith('md5$$'):
encoded = encoded[5:]
encoded_2 = self.encode(password, '')
return constant_time_compare(encoded, encoded_2)
def safe_summary(self, encoded):
return SortedDict([
(_('algorithm'), self.algorithm),
(_('hash'), mask_hash(encoded, show=3)),
])
class CryptPasswordHasher(BasePasswordHasher):
"""
Password hashing using UNIX crypt (not recommended)
The crypt module is not supported on all platforms.
"""
algorithm = "crypt"
library = "crypt"
def salt(self):
return get_random_string(2)
def encode(self, password, salt):
crypt = self._load_library()
assert len(salt) == 2
data = crypt.crypt(password, salt)
# we don't need to store the salt, but Django used to do this
return "%s$%s$%s" % (self.algorithm, '', data)
def verify(self, password, encoded):
crypt = self._load_library()
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
return constant_time_compare(data, crypt.crypt(password, data))
def safe_summary(self, encoded):
algorithm, salt, data = encoded.split('$', 2)
assert algorithm == self.algorithm
return SortedDict([
(_('algorithm'), algorithm),
(_('salt'), salt),
(_('hash'), mask_hash(data, show=3)),
])
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/contrib/auth/hashers.py | Python | bsd-3-clause | 14,277 |
# -*- coding: utf-8 -*-
# Third Party Stuff
from django.db.models import signals
def signals_switch():
pre_save = signals.pre_save.receivers
post_save = signals.post_save.receivers
def disconnect():
signals.pre_save.receivers = []
signals.post_save.receivers = []
def reconnect():
signals.pre_save.receivers = pre_save
signals.post_save.receivers = post_save
return disconnect, reconnect
disconnect_signals, reconnect_signals = signals_switch()
| DESHRAJ/wye | tests/utils.py | Python | mit | 504 |
#!/usr/bin/python
# Watermark each page in a PDF document
import sys
import shutil  # needed for the plain copy in main() when no watermark files are given
import getopt
import math
from Quartz.CoreGraphics import *
from Quartz.ImageIO import *
def drawWatermark(ctx, image, xOffset, yOffset, angle, scale, opacity):
if image:
imageWidth = CGImageGetWidth(image)
imageHeight = CGImageGetHeight(image)
imageBox = CGRectMake(0, 0, imageWidth, imageHeight)
CGContextSaveGState(ctx)
CGContextSetAlpha(ctx, opacity)
CGContextTranslateCTM(ctx, xOffset, yOffset)
CGContextScaleCTM(ctx, scale, scale)
CGContextTranslateCTM(ctx, imageWidth / 2, imageHeight / 2)
CGContextRotateCTM(ctx, angle * math.pi / 180)
CGContextTranslateCTM(ctx, -imageWidth / 2, -imageHeight / 2)
CGContextDrawImage(ctx, imageBox, image)
CGContextRestoreGState(ctx)
def createImage(imagePath):
image = None
# provider = CGDataProviderCreateWithFilename(imagePath) # FIXED: replaced by the following CGDataProviderCreateWithURL()
url = CFURLCreateFromFileSystemRepresentation(kCFAllocatorDefault, imagePath, len(imagePath), False)
provider = CGDataProviderCreateWithURL(url)
if provider:
imageSrc = CGImageSourceCreateWithDataProvider(provider, None)
if imageSrc:
image = CGImageSourceCreateImageAtIndex(imageSrc, 0, None)
if not image:
print "Cannot import the image from file %s" % imagePath
return image
def watermark(inputFile, watermarkFiles, outputFile, under, xOffset, yOffset, angle, scale, opacity, verbose):
images = map(createImage, watermarkFiles)
ctx = CGPDFContextCreateWithURL(CFURLCreateFromFileSystemRepresentation(kCFAllocatorDefault, outputFile, len(outputFile), False), None, None)
if ctx:
pdf = CGPDFDocumentCreateWithURL(CFURLCreateFromFileSystemRepresentation(kCFAllocatorDefault, inputFile, len(inputFile), False))
if pdf:
for i in range(1, CGPDFDocumentGetNumberOfPages(pdf) + 1):
image = images[i % len(images) - 1]
page = CGPDFDocumentGetPage(pdf, i)
if page:
mediaBox = CGPDFPageGetBoxRect(page, kCGPDFMediaBox)
if CGRectIsEmpty(mediaBox):
mediaBox = None
CGContextBeginPage(ctx, mediaBox)
if under:
drawWatermark(ctx, image, xOffset, yOffset, angle, scale, opacity)
CGContextDrawPDFPage(ctx, page)
if not under:
drawWatermark(ctx, image, xOffset, yOffset, angle, scale, opacity)
CGContextEndPage(ctx)
del pdf
CGPDFContextClose(ctx)
del ctx
def main(argv):
verbose = False
readFilename = None
writeFilename = None
under = False
xOffset = 0.0 # FIXED: changed to float value
yOffset = 0.0 # FIXED: changed to float value
angle = 0.0 # FIXED: changed to float value
scale = 1.0 # FIXED: added
opacity = 1.0
# Parse the command line options
try:
options, args = getopt.getopt(argv, "vutx:y:a:p:s:i:o:", ["verbose", "under", "over", "xOffset=", "yOffset=", "angle=", "opacity=", "scale=", "input=", "output=", ])
except getopt.GetoptError:
usage()
sys.exit(2)
for option, arg in options:
print option, arg
if option in ("-i", "--input") :
if verbose:
print "Reading pages from %s." % (arg)
readFilename = arg
elif option in ("-o", "--output") :
if verbose:
print "Setting %s as the output." % (arg)
writeFilename = arg
elif option in ("-v", "--verbose") :
print "Verbose mode enabled."
verbose = True
elif option in ("-u", "--under"):
print "watermark under PDF"
under = True
elif option in ("-t", "--over"): # FIXED: changed to "-t" from "t"
print "watermark over PDF"
under = False
elif option in ("-x", "--xOffset"):
xOffset = float(arg)
elif option in ("-y", "--yOffset"):
yOffset = float(arg)
elif option in ("-a", "--angle"):
angle = -float(arg)
elif option in ("-s", "--scale"):
scale = float(arg)
elif option in ("-p", "--opacity"):
opacity = float(arg)
else:
print "Unknown argument: %s" % (option)
if (len(args) > 0):
watermark(readFilename, args, writeFilename, under, xOffset, yOffset, angle, scale, opacity, verbose);
else:
shutil.copyfile(readFilename, writeFilename);
def usage():
print "Usage: watermark --input <file> --output <file> <watermark files>..."
if __name__ == "__main__":
print sys.argv
main(sys.argv[1:]) | jamescooper/automater | src/tool.py | Python | apache-2.0 | 5,070 |
from config.api2_0_config import *
from on_http_api2_0 import ApiApi as WorkflowApi
from on_http_api2_0 import rest
from modules.logger import Log
from datetime import datetime
from proboscis.asserts import *
from proboscis import SkipTest
from proboscis import test
from json import dumps, loads
import json
LOG = Log(__name__)
@test(groups=['workflowTasks_api2.tests'])
class WorkflowTasksTests(object):
def __init__(self):
self.__client = config.api_client
self.__workflows = None
self.workflowTaskDict ={
"friendlyName": "fn_1",
"injectableName": "in_1",
"implementsTask": "im_1",
"options": {},
"properties": {}
}
@test(groups=['workflowTasks_api2.tests', 'api2_workflowTasks_get'])
def test_workflowTasks__get(self):
""" Testing GET:/worflows/tasks"""
WorkflowApi().workflows_get_all_tasks()
assert_equal(200,self.__client.last_response.status)
assert_not_equal(0, len(json.loads(self.__client.last_response.data)), message='Workflow tasks list was empty!')
@test(groups=['workflowTasks_library_put'], depends_on_groups=['workflowTasks_library_get'])
def test_workflowTasks_put(self):
""" Testing PUT:/workflowTasks """
#Get the number of workflowTasks before we add one
WorkflowApi().workflows_get_all_tasks()
workflowTasksBefore = len(json.loads(self.__client.last_response.data))
#Making sure that there is no workflowTask with the same name from previous test runs
rawj= json.loads(self.__client.last_response.data)
listLen =len(json.loads(self.__client.last_response.data))
inList = False
for i, val in enumerate (rawj):
if ( self.workflowTaskDict['friendlyName'] == str (rawj[i].get('friendlyName')) or inList ):
inList = True
fnameList = str (rawj[i].get('friendlyName')).split('_')
if len(fnameList) > 1:
suffix= int (fnameList[1]) + 1
self.workflowTaskDict['friendlyName']= fnameList[0]+ '_' + str(suffix)
inameList = str (rawj[i].get('injectableName')).split('_')
self.workflowTaskDict['injectableName']= inameList[0]+ '_' + str(suffix)
#adding a workflow task
LOG.info ("Adding workflow task : " + str(self.workflowTaskDict))
WorkflowApi().workflows_put_task(body=self.workflowTaskDict)
resp= self.__client.last_response
assert_equal(201,resp.status)
        #Getting the number of workflow tasks after we added one
WorkflowApi().workflows_get_all_tasks()
workflowTasksAfter = len(json.loads(self.__client.last_response.data))
resp= self.__client.last_response
assert_equal(200,resp.status, message=resp.reason)
        #Validating that the workflow task has been added
assert_equal(workflowTasksAfter,workflowTasksBefore+1)
#Validating the content is as expected
rawj= json.loads(self.__client.last_response.data)
listLen =len(json.loads(self.__client.last_response.data))
readWorkflowTask= rawj[len(rawj)-1]
readFriendlyName= readWorkflowTask.get('friendlyName')
readInjectableName = readWorkflowTask.get('injectableName')
assert_equal(readFriendlyName,self.workflowTaskDict.get('friendlyName'))
assert_equal(readInjectableName,self.workflowTaskDict.get('injectableName'))
| DavidjohnBlodgett/RackHD | test/tests/api/v2_0/workflowTasks_tests.py | Python | apache-2.0 | 3,496 |
import unittest
from testlib import testutil, PygrTestProgram
from pygr import sequence
class Sequence_Test(unittest.TestCase):
'basic sequence class tests'
def setUp(self):
self.seq = sequence.Sequence('atttgactatgctccag', 'foo')
def test_length(self):
"Sequence lenght"
assert len(self.seq) == 17
def test_slice(self):
"Sequence slice"
assert str(self.seq[5:10]) == 'actat'
def test_slicerc(self):
"Sequence slice then reverse complement"
assert str(-(self.seq[5:10])) == 'atagt'
def test_rcslice(self):
"Sequence reverse complement then slice"
assert str((-self.seq)[5:10]) == 'gcata'
def test_truncate(self):
"Sequence truncate"
assert str(self.seq[-202020202:5]) == 'atttg'
assert self.seq[-202020202:5] == self.seq[0:5]
assert self.seq[-2020202:] == self.seq
assert str(self.seq[-202020202:-5]) == 'atttgactatgc'
assert str(self.seq[-5:2029]) == 'tccag'
assert str(self.seq[-5:]) == 'tccag'
try:
self.seq[999:10000]
raise ValueError('failed to trap out of bounds slice')
except IndexError:
pass
try:
self.seq[-10000:-3000]
raise ValueError('failed to trap out of bounds slice')
except IndexError:
pass
try:
self.seq[1000:]
raise ValueError('failed to trap out of bounds slice')
except IndexError:
pass
def test_rctruncate(self):
"Sequence reverse complement truncate"
seq= -self.seq
assert str(seq[-202020202:5]) == 'ctgga'
assert seq[-202020202:5] == seq[0:5]
assert seq[-2020202:] == seq
assert str(seq[-202020202:-5]) == 'ctggagcatagt'
assert str(seq[-5:2029]) == 'caaat'
assert str(seq[-5:]) == 'caaat'
try:
seq[999:10000]
raise ValueError('failed to trap out of bounds slice')
except IndexError:
pass
try:
seq[-10000:-3000]
raise ValueError('failed to trap out of bounds slice')
except IndexError:
pass
try:
seq[1000:]
raise ValueError('failed to trap out of bounds slice')
except IndexError:
pass
def test_join(self):
"Sequence join"
assert str(self.seq[5:15] * self.seq[8:]) == 'atgctcc'
def test_rcjoin(self):
"Sequence reverse complement join"
assert str((-(self.seq[5:10])) * ((-self.seq)[5:10])) == 'ata'
def test_seqtype(self):
"Sequence lenght"
assert self.seq.seqtype() == sequence.DNA_SEQTYPE
assert sequence.Sequence('auuugacuaugcuccag', 'foo').seqtype() == \
sequence.RNA_SEQTYPE
assert sequence.Sequence('kqwestvvarphal', 'foo').seqtype() == \
sequence.PROTEIN_SEQTYPE
# @CTB
'''
#from pygrdata_test import PygrSwissprotBase
class Blast_Test(PygrSwissprotBase):
'test basic blast functionality'
@skip_errors(OSError, KeyError)
def setup(self):
PygrSwissprotBase.setup(self)
import pygr.Data
self.sp = pygr.Data.Bio.Seq.Swissprot.sp42()
import os
blastIndexPath = os.path.join(os.path.dirname(self.sp.filepath),
'wikiwacky')
self.sp.formatdb(blastIndexPath)
def blast(self):
hbb = self.sp['HBB1_TORMA']
hits = self.sp.blast(hbb)
edges = hits[hbb].edges(maxgap=1, maxinsert=1,
minAlignSize=14,pIdentityMin=0.5)
for t in edges:
assert len(t[0])>=14, 'result shorter than minAlignSize!'
result = [(t[0], t[1], t[2].pIdentity()) for t in edges]
store = PygrDataTextFile(os.path.join('results', 'seqdb1.pickle'))
correct = store['hbb blast 1']
assert approximate_cmp(result, correct, .0001) == 0, 'blast results should match'
result = [(t[0], t[1], t[2].pIdentity()) for t in hits[hbb].generateSeqEnds()]
correct = store['hbb blast 2']
assert approximate_cmp(result, correct, .0001) == 0, 'blast results should match'
trypsin = self.sp['PRCA_ANASP']
try:
hits[trypsin]
raise ValueError('failed to catch bad alignment query')
except KeyError:
pass
class Blast_reindex_untest(Blast_Test):
'test building blast indexes under a different name'
@skip_errors(OSError, KeyError)
def setup(self):
PygrSwissprotBase.setup(self)
import pygr.Data
self.sp = pygr.Data.Bio.Seq.Swissprot.sp42()
import os
blastIndexPath = os.path.join(os.path.dirname(self.sp.filepath), 'wikiwacky')
self.sp.formatdb()
#self.sp.formatdb(blastIndexPath) # FORCE IT TO STORE INDEX WITH DIFFERENT NAME
#print 'blastIndexPath is', self.sp.blastIndexPath
'''
if __name__ == '__main__':
PygrTestProgram(verbosity=2)
| cjlee112/pygr | tests/sequence_test.py | Python | bsd-3-clause | 5,048 |
from django.db import models
from geodata.models import Country, City, Region
class IndicatorTopic(models.Model):
id = models.AutoField(primary_key=True)
name = models.CharField(max_length=255)
source_note = models.TextField(null=True)
class LendingType(models.Model):
id = models.CharField(max_length=10, primary_key=True)
name = models.CharField(max_length=255)
class IncomeLevel(models.Model):
id = models.CharField(max_length=10, primary_key=True)
name = models.CharField(max_length=255)
class IndicatorSource(models.Model):
id = models.AutoField(primary_key=True)
name = models.TextField()
class Indicator(models.Model):
id = models.CharField(max_length=50, primary_key=True)
description = models.TextField(null=True, blank=True)
friendly_label = models.CharField(max_length=255, null=True, blank=True)
type_data = models.CharField(max_length=255, null=True, blank=True)
# parent = models.ForeignKey()
    #selection_type is used for e.g. table 14: type of fuel
selection_type = models.CharField(max_length=255, null=True, blank=True)
    #deprivation_type is used for e.g. table 14: urban, non-slum household, one shelter deprivation
deprivation_type = models.CharField(max_length=255, null=True, blank=True)
source = models.ForeignKey(IndicatorSource, null=True, blank=True)
topic = models.ForeignKey(IndicatorTopic, null=True, blank=True)
def __unicode__(self):
return self.friendly_label
class IndicatorData(models.Model):
indicator = models.ForeignKey(Indicator)
country = models.ForeignKey(Country, null=True)
city = models.ForeignKey(City, null=True)
region = models.ForeignKey(Region, null=True)
value = models.FloatField(null=True, blank=True)
year = models.IntegerField(max_length=5)
class Meta:
verbose_name_plural = "indicator data"
| schlos/OIPA-V2.1 | OIPA/indicator/models.py | Python | agpl-3.0 | 1,876 |
"""Config flow for Kodi integration."""
import logging
from pykodi import CannotConnectError, InvalidAuthError, Kodi, get_kodi_connection
import voluptuous as vol
from homeassistant import config_entries, core, exceptions
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_SSL,
CONF_TIMEOUT,
CONF_USERNAME,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.typing import DiscoveryInfoType, Optional
from .const import (
CONF_WS_PORT,
DEFAULT_PORT,
DEFAULT_SSL,
DEFAULT_TIMEOUT,
DEFAULT_WS_PORT,
)
from .const import DOMAIN # pylint:disable=unused-import
_LOGGER = logging.getLogger(__name__)
async def validate_http(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect over HTTP."""
host = data[CONF_HOST]
port = data[CONF_PORT]
username = data.get(CONF_USERNAME)
password = data.get(CONF_PASSWORD)
ssl = data.get(CONF_SSL)
session = async_get_clientsession(hass)
_LOGGER.debug("Connecting to %s:%s over HTTP", host, port)
khc = get_kodi_connection(
host, port, None, username, password, ssl, session=session
)
kodi = Kodi(khc)
try:
await kodi.ping()
except CannotConnectError as error:
raise CannotConnect from error
except InvalidAuthError as error:
raise InvalidAuth from error
async def validate_ws(hass: core.HomeAssistant, data):
"""Validate the user input allows us to connect over WS."""
ws_port = data.get(CONF_WS_PORT)
if not ws_port:
return
host = data[CONF_HOST]
port = data[CONF_PORT]
username = data.get(CONF_USERNAME)
password = data.get(CONF_PASSWORD)
ssl = data.get(CONF_SSL)
session = async_get_clientsession(hass)
_LOGGER.debug("Connecting to %s:%s over WebSocket", host, ws_port)
kwc = get_kodi_connection(
host, port, ws_port, username, password, ssl, session=session
)
try:
await kwc.connect()
if not kwc.connected:
_LOGGER.warning("Cannot connect to %s:%s over WebSocket", host, ws_port)
raise WSCannotConnect()
kodi = Kodi(kwc)
await kodi.ping()
except CannotConnectError as error:
raise WSCannotConnect from error
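# For reference, both validators above consume the same plain dict that
# _get_data() builds further below; a minimal sketch (the values are
# illustrative only, not defaults of this integration):
#
#   data = {
#       CONF_HOST: "192.168.1.50",
#       CONF_PORT: 8080,
#       CONF_WS_PORT: 9090,
#       CONF_USERNAME: "kodi",
#       CONF_PASSWORD: "secret",
#       CONF_SSL: False,
#   }
#
# validate_http() only reads the HTTP-related keys, while validate_ws()
# returns early when CONF_WS_PORT is falsy.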
class KodiConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Kodi."""
VERSION = 1
CONNECTION_CLASS = config_entries.CONN_CLASS_LOCAL_POLL
def __init__(self):
"""Initialize flow."""
self._host: Optional[str] = None
self._port: Optional[int] = DEFAULT_PORT
self._ws_port: Optional[int] = DEFAULT_WS_PORT
self._name: Optional[str] = None
self._username: Optional[str] = None
self._password: Optional[str] = None
self._ssl: Optional[bool] = DEFAULT_SSL
self._discovery_name: Optional[str] = None
async def async_step_zeroconf(self, discovery_info: DiscoveryInfoType):
"""Handle zeroconf discovery."""
self._host = discovery_info["host"]
self._port = int(discovery_info["port"])
self._name = discovery_info["hostname"][: -len(".local.")]
uuid = discovery_info["properties"].get("uuid")
if not uuid:
return self.async_abort(reason="no_uuid")
self._discovery_name = discovery_info["name"]
await self.async_set_unique_id(uuid)
self._abort_if_unique_id_configured(
updates={
CONF_HOST: self._host,
CONF_PORT: self._port,
CONF_NAME: self._name,
}
)
# pylint: disable=no-member # https://github.com/PyCQA/pylint/issues/3167
self.context.update({"title_placeholders": {CONF_NAME: self._name}})
try:
await validate_http(self.hass, self._get_data())
await validate_ws(self.hass, self._get_data())
except InvalidAuth:
return await self.async_step_credentials()
except WSCannotConnect:
return await self.async_step_ws_port()
except CannotConnect:
return self.async_abort(reason="cannot_connect")
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
return self.async_abort(reason="unknown")
return await self.async_step_discovery_confirm()
async def async_step_discovery_confirm(self, user_input=None):
"""Handle user-confirmation of discovered node."""
if user_input is None:
return self.async_show_form(
step_id="discovery_confirm",
description_placeholders={"name": self._name},
)
return self._create_entry()
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
if user_input is not None:
self._host = user_input[CONF_HOST]
self._port = user_input[CONF_PORT]
self._ssl = user_input[CONF_SSL]
try:
await validate_http(self.hass, self._get_data())
await validate_ws(self.hass, self._get_data())
except InvalidAuth:
return await self.async_step_credentials()
except WSCannotConnect:
return await self.async_step_ws_port()
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self._create_entry()
return self._show_user_form(errors)
async def async_step_credentials(self, user_input=None):
"""Handle username and password input."""
errors = {}
if user_input is not None:
self._username = user_input.get(CONF_USERNAME)
self._password = user_input.get(CONF_PASSWORD)
try:
await validate_http(self.hass, self._get_data())
await validate_ws(self.hass, self._get_data())
except InvalidAuth:
errors["base"] = "invalid_auth"
except WSCannotConnect:
return await self.async_step_ws_port()
except CannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self._create_entry()
return self._show_credentials_form(errors)
async def async_step_ws_port(self, user_input=None):
"""Handle websocket port of discovered node."""
errors = {}
if user_input is not None:
self._ws_port = user_input.get(CONF_WS_PORT)
# optional ints return 0 rather than None when empty
if self._ws_port == 0:
self._ws_port = None
try:
await validate_ws(self.hass, self._get_data())
except WSCannotConnect:
errors["base"] = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self._create_entry()
return self._show_ws_port_form(errors)
async def async_step_import(self, data):
"""Handle import from YAML."""
reason = None
try:
await validate_http(self.hass, data)
await validate_ws(self.hass, data)
except InvalidAuth:
_LOGGER.exception("Invalid Kodi credentials")
reason = "invalid_auth"
except CannotConnect:
_LOGGER.exception("Cannot connect to Kodi")
reason = "cannot_connect"
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Unexpected exception")
reason = "unknown"
else:
return self.async_create_entry(title=data[CONF_NAME], data=data)
return self.async_abort(reason=reason)
@callback
def _show_credentials_form(self, errors=None):
schema = vol.Schema(
{
vol.Optional(
CONF_USERNAME, description={"suggested_value": self._username}
): str,
vol.Optional(
CONF_PASSWORD, description={"suggested_value": self._password}
): str,
}
)
return self.async_show_form(
step_id="credentials", data_schema=schema, errors=errors or {}
)
@callback
def _show_user_form(self, errors=None):
default_port = self._port or DEFAULT_PORT
default_ssl = self._ssl or DEFAULT_SSL
schema = vol.Schema(
{
vol.Required(CONF_HOST, default=self._host): str,
vol.Required(CONF_PORT, default=default_port): int,
vol.Required(CONF_SSL, default=default_ssl): bool,
}
)
return self.async_show_form(
step_id="user", data_schema=schema, errors=errors or {}
)
@callback
def _show_ws_port_form(self, errors=None):
suggestion = self._ws_port or DEFAULT_WS_PORT
schema = vol.Schema(
{
vol.Optional(
CONF_WS_PORT, description={"suggested_value": suggestion}
): int
}
)
return self.async_show_form(
step_id="ws_port", data_schema=schema, errors=errors or {}
)
@callback
def _create_entry(self):
return self.async_create_entry(
title=self._name or self._host,
data=self._get_data(),
)
@callback
def _get_data(self):
data = {
CONF_NAME: self._name,
CONF_HOST: self._host,
CONF_PORT: self._port,
CONF_WS_PORT: self._ws_port,
CONF_USERNAME: self._username,
CONF_PASSWORD: self._password,
CONF_SSL: self._ssl,
CONF_TIMEOUT: DEFAULT_TIMEOUT,
}
return data
class CannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(exceptions.HomeAssistantError):
"""Error to indicate there is invalid auth."""
class WSCannotConnect(exceptions.HomeAssistantError):
"""Error to indicate we cannot connect to websocket."""
| tboyce021/home-assistant | homeassistant/components/kodi/config_flow.py | Python | apache-2.0 | 10,723 |
"""Grep dialog for Find in Files functionality.
Inherits from SearchDialogBase for GUI and uses searchengine
to prepare search pattern.
"""
import fnmatch
import os
import sys
from tkinter import StringVar, BooleanVar
from tkinter.ttk import Checkbutton
from idlelib.searchbase import SearchDialogBase
from idlelib import searchengine
# Importing OutputWindow here fails due to import loop
# EditorWindow -> GrepDialog -> OutputWindow -> EditorWindow
def grep(text, io=None, flist=None):
"""Create or find singleton GrepDialog instance.
Args:
text: Text widget that contains the selected text for
default search phrase.
io: iomenu.IOBinding instance with default path to search.
flist: filelist.FileList instance for OutputWindow parent.
"""
root = text._root()
engine = searchengine.get(root)
if not hasattr(engine, "_grepdialog"):
engine._grepdialog = GrepDialog(root, engine, flist)
dialog = engine._grepdialog
searchphrase = text.get("sel.first", "sel.last")
dialog.open(text, searchphrase, io)
class GrepDialog(SearchDialogBase):
"Dialog for searching multiple files."
title = "Find in Files Dialog"
icon = "Grep"
needwrapbutton = 0
def __init__(self, root, engine, flist):
"""Create search dialog for searching for a phrase in the file system.
Uses SearchDialogBase as the basis for the GUI and a
searchengine instance to prepare the search.
Attributes:
globvar: Value of Text Entry widget for path to search.
recvar: Boolean value of Checkbutton widget
for traversing through subdirectories.
"""
SearchDialogBase.__init__(self, root, engine)
self.flist = flist
self.globvar = StringVar(root)
self.recvar = BooleanVar(root)
def open(self, text, searchphrase, io=None):
"Make dialog visible on top of others and ready to use."
SearchDialogBase.open(self, text, searchphrase)
if io:
path = io.filename or ""
else:
path = ""
dir, base = os.path.split(path)
head, tail = os.path.splitext(base)
if not tail:
tail = ".py"
self.globvar.set(os.path.join(dir, "*" + tail))
def create_entries(self):
"Create base entry widgets and add widget for search path."
SearchDialogBase.create_entries(self)
self.globent = self.make_entry("In files:", self.globvar)[0]
def create_other_buttons(self):
"Add check button to recurse down subdirectories."
btn = Checkbutton(
self.make_frame()[0], variable=self.recvar,
text="Recurse down subdirectories")
btn.pack(side="top", fill="both")
def create_command_buttons(self):
"Create base command buttons and add button for search."
SearchDialogBase.create_command_buttons(self)
self.make_button("Search Files", self.default_command, 1)
def default_command(self, event=None):
"""Grep for search pattern in file path. The default command is bound
to <Return>.
If entry values are populated, set OutputWindow as stdout
and perform search. The search dialog is closed automatically
when the search begins.
"""
prog = self.engine.getprog()
if not prog:
return
path = self.globvar.get()
if not path:
self.top.bell()
return
from idlelib.outwin import OutputWindow # leave here!
save = sys.stdout
try:
sys.stdout = OutputWindow(self.flist)
self.grep_it(prog, path)
finally:
sys.stdout = save
def grep_it(self, prog, path):
"""Search for prog within the lines of the files in path.
        For each file in the path directory, open the file and
search each line for the matching pattern. If the pattern is
found, write the file and line information to stdout (which
is an OutputWindow).
"""
dir, base = os.path.split(path)
list = self.findfiles(dir, base, self.recvar.get())
list.sort()
self.close()
pat = self.engine.getpat()
print(f"Searching {pat!r} in {path} ...")
hits = 0
try:
for fn in list:
try:
with open(fn, errors='replace') as f:
for lineno, line in enumerate(f, 1):
if line[-1:] == '\n':
line = line[:-1]
if prog.search(line):
sys.stdout.write(f"{fn}: {lineno}: {line}\n")
hits += 1
except OSError as msg:
print(msg)
print(f"Hits found: {hits}\n(Hint: right-click to open locations.)"
if hits else "No hits.")
except AttributeError:
# Tk window has been closed, OutputWindow.text = None,
# so in OW.write, OW.text.insert fails.
pass
def findfiles(self, dir, base, rec):
"""Return list of files in the dir that match the base pattern.
If rec is True, recursively iterate through subdirectories.
"""
try:
names = os.listdir(dir or os.curdir)
except OSError as msg:
print(msg)
return []
list = []
subdirs = []
for name in names:
fn = os.path.join(dir, name)
if os.path.isdir(fn):
subdirs.append(fn)
else:
if fnmatch.fnmatch(name, base):
list.append(fn)
if rec:
for subdir in subdirs:
list.extend(self.findfiles(subdir, base, rec))
return list
def _grep_dialog(parent): # htest #
from tkinter import Toplevel, Text, SEL, END
from tkinter.ttk import Button
from idlelib.pyshell import PyShellFileList
top = Toplevel(parent)
top.title("Test GrepDialog")
x, y = map(int, parent.geometry().split('+')[1:])
top.geometry(f"+{x}+{y + 175}")
flist = PyShellFileList(top)
text = Text(top, height=5)
text.pack()
def show_grep_dialog():
text.tag_add(SEL, "1.0", END)
grep(text, flist=flist)
text.tag_remove(SEL, "1.0", END)
button = Button(top, text="Show GrepDialog", command=show_grep_dialog)
button.pack()
if __name__ == "__main__":
import unittest
unittest.main('idlelib.idle_test.test_grep', verbosity=2, exit=False)
from idlelib.idle_test.htest import run
run(_grep_dialog)
| Microsoft/PTVS | Python/Product/Miniconda/Miniconda3-x64/Lib/idlelib/grep.py | Python | apache-2.0 | 6,741 |
# -*- coding: utf-8 -*-
# © 2015-2016 Antiun Ingeniería S.L. - Pedro M. Baeza
# © 2015 AvanzOSC - Ainara Galdona
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
from openerp import models, fields, api, exceptions, _
PRORRATE_TAX_LINE_MAPPING = {
29: 28,
33: 32,
35: 34,
37: 36,
39: 38,
41: 40,
}
class L10nEsAeatMod303Report(models.Model):
_inherit = 'l10n.es.aeat.mod303.report'
@api.multi
@api.depends('tax_lines', 'tax_lines.amount', 'casilla_44')
def _compute_total_deducir(self):
super(L10nEsAeatMod303Report, self)._compute_total_deducir()
for report in self:
report.total_deducir += report.casilla_44
casilla_44 = fields.Float(
string="[44] Regularización de la prorrata", default=0,
states={'done': [('readonly', True)]},
help="Regularizacion por aplicación del porcentaje definitivo de "
"prorrata.")
vat_prorrate_type = fields.Selection(
[('none', 'None'),
('general', 'General prorrate'), ],
# ('special', 'Special prorrate')],
readonly=True, states={'draft': [('readonly', False)]},
string="VAT prorrate type", default='none', required=True)
vat_prorrate_percent = fields.Float(
string="VAT prorrate percentage", default=100,
readonly=True, states={'draft': [('readonly', False)]})
@api.constrains('vat_prorrate_percent')
def check_vat_prorrate_percent(self):
if self.vat_prorrate_percent < 0 or self.vat_prorrate_percent > 100:
raise exceptions.Warning(
_('VAT prorrate percent must be between 0 and 100'))
@api.multi
def calculate(self):
res = super(L10nEsAeatMod303Report, self).calculate()
for report in self:
report.casilla_44 = 0
if (report.vat_prorrate_type != 'general' or
report.period_type not in ('4T', '12')):
continue
# Get prorrate from previous declarations
min_date = min(report.periods.mapped('date_start'))
prev_reports = report._get_previous_fiscalyear_reports(min_date)
if any(x.state == 'draft' for x in prev_reports):
raise exceptions.Warning(
_("There's at least one previous report in draft state. "
"Please confirm it before making this one."))
for prev_report in prev_reports:
diff_perc = (report.vat_prorrate_percent -
prev_report.vat_prorrate_percent)
if diff_perc:
report.casilla_44 += (
diff_perc * prev_report.total_deducir /
prev_report.vat_prorrate_percent)
return res
@api.multi
def _prepare_tax_line_vals(self, map_line):
res = super(L10nEsAeatMod303Report, self)._prepare_tax_line_vals(
map_line)
if (self.vat_prorrate_type == 'general' and
map_line.field_number in PRORRATE_TAX_LINE_MAPPING.keys()):
res['amount'] *= self.vat_prorrate_percent / 100
return res
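    # Illustrative example (figures are made up): with a general prorrate of
    # 60%, a deductible VAT amount of 100.0 computed for field 29 is stored as
    # 100.0 * 60 / 100 = 60.0, while fields not listed in
    # PRORRATE_TAX_LINE_MAPPING keep their full amount.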
@api.multi
def _process_tax_line_regularization(self, tax_lines):
"""Añadir la parte no deducida de la base como gasto repartido
proporcionalmente entre las cuentas de las líneas de gasto existentes.
"""
all_lines = []
for tax_line in tax_lines:
# We need to treat each tax_line independently
lines = super(L10nEsAeatMod303Report,
self)._process_tax_line_regularization(tax_line)
all_lines += lines
if (self.vat_prorrate_type != 'general' or
tax_line.field_number not in
PRORRATE_TAX_LINE_MAPPING.keys()):
continue
factor = (100 - self.vat_prorrate_percent) / 100
base_tax_line = self.tax_lines.filtered(
lambda x: x.field_number == PRORRATE_TAX_LINE_MAPPING[
tax_line.field_number])
if not base_tax_line.move_lines:
continue
prorrate_debit = sum(x['debit'] for x in lines)
prorrate_credit = sum(x['credit'] for x in lines)
prec = self.env['decimal.precision'].precision_get('Account')
total_prorrate = round(
(prorrate_debit - prorrate_credit) * factor, prec)
account_groups = self.env['account.move.line'].read_group(
[('id', 'in', base_tax_line.move_lines.ids)],
['tax_amount', 'account_id', 'account_analytic_id'],
['account_id', 'account_analytic_id'])
total_balance = sum(x['tax_amount'] for x in account_groups)
extra_lines = []
amount_factor = abs(total_prorrate) / abs(total_balance)
for account_group in account_groups:
analytic_groups = self.env['account.move.line'].read_group(
account_group['__domain'],
['tax_amount', 'analytic_account_id'],
['analytic_account_id'])
for analytic_group in analytic_groups:
balance = analytic_group['tax_amount'] * amount_factor
move_line_vals = {
'name': account_group['account_id'][1],
'account_id': account_group['account_id'][0],
'debit': round(balance, prec) if balance > 0 else 0,
'credit': round(-balance, prec) if balance < 0 else 0,
}
if analytic_group['analytic_account_id']:
move_line_vals['analytic_account_id'] = (
analytic_group['analytic_account_id'])[0]
extra_lines.append(move_line_vals)
            # Add/subtract any rounding inaccuracy, starting from the first line
extra_lines = self._prorrate_diff_distribution(
total_prorrate, extra_lines)
all_lines += extra_lines
return all_lines
def _prorrate_diff_distribution(self, prorrate, extra_lines):
count = len(extra_lines)
if not count:
            # If there are no lines, there is nothing to distribute over
return extra_lines
prec = self.env['decimal.precision'].precision_get('Account')
extra_debit = sum(x['debit'] for x in extra_lines)
extra_credit = sum(x['credit'] for x in extra_lines)
extra = extra_debit - extra_credit
diff = round(((-1) * prorrate) - extra, prec)
if prorrate > 0:
column = 'credit'
diff = (-1) * diff
else:
column = 'debit'
n = 0
step = 1. / (10 ** prec)
if diff < 0:
step = (-1) * step
while abs(diff) > 0:
            # Nudge line amounts step by step until the total matches the prorrate
line = extra_lines[n]
next_value = round(line[column] + step, prec)
if line[column] and next_value:
line[column] = next_value
diff = round(diff - step, prec)
n += 1
if n >= count:
# Wrap to first line when last line reached
n = 0
return extra_lines
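    # Rough worked example of the distribution above (made-up figures, 2-digit
    # precision): the target total for extra_lines is (-1) * prorrate, and any
    # gap between that target and the current debit/credit sum is closed one
    # rounding step (0.01) at a time, touching only lines that already carry an
    # amount in the adjusted column and wrapping over the list. A gap of 0.03
    # over two eligible lines ends up as +0.02 on the first and +0.01 on the
    # second.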
@api.multi
def _prepare_regularization_extra_move_lines(self):
lines = super(L10nEsAeatMod303Report,
self)._prepare_regularization_extra_move_lines()
if self.casilla_44:
account_number = '6391%' if self.casilla_44 > 0 else '6341%'
lines.append({
'name': _('Regularización prorrata IVA'),
'account_id': self.env['account.account'].search(
[('code', 'like', account_number),
('company_id', '=', self.company_id.id),
('type', '!=', 'view')], limit=1).id,
'debit': -self.casilla_44 if self.casilla_44 < 0 else 0.0,
'credit': self.casilla_44 if self.casilla_44 > 0 else 0.0,
})
return lines
| RamonGuiuGou/l10n-spain | l10n_es_aeat_vat_prorrate/models/mod303.py | Python | agpl-3.0 | 8,146 |
# coding: utf-8
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import pytest
import asyncio
from datetime import datetime, timedelta
from azure.core.exceptions import HttpResponseError
from azure.core.pipeline.transport import AioHttpTransport
from multidict import CIMultiDict, CIMultiDictProxy
from azure.storage.blob import BlobType, BlobBlock, BlobSasPermissions, generate_blob_sas, ContainerEncryptionScope, \
generate_container_sas, ContainerSasPermissions, generate_account_sas, ResourceTypes, AccountSasPermissions
from azure.storage.blob.aio import BlobServiceClient
from settings.testcase import BlobPreparer
from devtools_testutils.storage.aio import AsyncStorageTestCase
# ------------------------------------------------------------------------------
# The encryption scopes are pre-created using the management plane tool ArmClient,
# so we can use them directly in the tests.
TEST_ENCRYPTION_KEY_SCOPE = "antjoscope1"
TEST_CONTAINER_ENCRYPTION_KEY_SCOPE = ContainerEncryptionScope(
default_encryption_scope="containerscope")
TEST_CONTAINER_ENCRYPTION_KEY_SCOPE_DENY_OVERRIDE = {
"default_encryption_scope": "containerscope",
"prevent_encryption_scope_override": True
}
TEST_SAS_ENCRYPTION_SCOPE = "testscope1"
TEST_SAS_ENCRYPTION_SCOPE_2 = "testscope2"
# ------------------------------------------------------------------------------
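# Note: the constants above cover three ways of selecting a scope: an
# encryption_scope passed per call, a container default scope, and the ses
# field baked into a SAS token. As the SAS tests below show, a per-call
# encryption_scope that differs from the ses in the SAS is rejected by the
# service, while a matching value succeeds.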
class AiohttpTestTransport(AioHttpTransport):
"""Workaround to vcrpy bug: https://github.com/kevin1024/vcrpy/pull/461
"""
async def send(self, request, **config):
response = await super(AiohttpTestTransport, self).send(request, **config)
if not isinstance(response.headers, CIMultiDictProxy):
response.headers = CIMultiDictProxy(CIMultiDict(response.internal_response.headers))
response.content_type = response.headers.get("content-type")
return response
class StorageCPKAsyncTest(AsyncStorageTestCase):
async def _setup(self, bsc):
self.config = bsc._config
self.byte_data = self.get_random_bytes(64 * 1024)
self.container_name = self.get_resource_name('utcontainer')
if self.is_live:
try:
await bsc.create_container(self.container_name)
except:
pass
def _teardown(self, bsc):
if self.is_live:
loop = asyncio.get_event_loop()
try:
loop.run_until_complete(bsc.delete_container(self.container_name))
except:
pass
return super(StorageCPKAsyncTest, self).tearDown()
# --Helpers-----------------------------------------------------------------
def _get_blob_reference(self):
return self.get_resource_name("cpk")
async def _create_block_blob(self, bsc, blob_name=None, data=None, encryption_scope=None, max_concurrency=1, overwrite=False):
blob_name = blob_name if blob_name else self._get_blob_reference()
blob_client = bsc.get_blob_client(self.container_name, blob_name)
data = data if data else b''
resp = await blob_client.upload_blob(data, encryption_scope=encryption_scope, max_concurrency=max_concurrency, overwrite=overwrite)
return blob_client, resp
async def _create_append_blob(self, bsc, encryption_scope=None):
blob_name = self._get_blob_reference()
blob = bsc.get_blob_client(
self.container_name,
blob_name)
await blob.create_append_blob(encryption_scope=encryption_scope)
return blob
async def _create_page_blob(self, bsc, encryption_scope=None):
blob_name = self._get_blob_reference()
blob = bsc.get_blob_client(
self.container_name,
blob_name)
await blob.create_page_blob(1024 * 1024, encryption_scope=encryption_scope)
return blob
# -- Test cases for APIs supporting CPK ----------------------------------------------
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_put_block_and_put_block_list(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
self.container_name = self.get_resource_name('utcontainer')
blob_client, _ = await self._create_block_blob(bsc)
await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
await blob_client.stage_block('2', b'BBB', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
await blob_client.stage_block('3', b'CCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
put_block_list_resp = await blob_client.commit_block_list(block_list,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(put_block_list_resp['etag'])
self.assertIsNotNone(put_block_list_resp['last_modified'])
self.assertTrue(put_block_list_resp['request_server_encrypted'])
self.assertEqual(put_block_list_resp['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), b'AAABBBCCC')
self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.live_test_only
@BlobPreparer()
async def test_put_block_and_put_block_list_with_blob_sas(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
await self._setup(bsc)
blob_name = self._get_blob_reference()
token1 = generate_blob_sas(
storage_account_name,
self.container_name,
blob_name,
account_key=storage_account_key,
permission=BlobSasPermissions(read=True, write=True, delete=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_SAS_ENCRYPTION_SCOPE,
)
blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\
.get_blob_client(self.container_name, blob_name)
await blob_client.stage_block('1', b'AAA')
await blob_client.stage_block('2', b'BBB')
await blob_client.stage_block('3', b'CCC')
# Act
block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2'), BlobBlock(block_id='3')]
put_block_list_resp = await blob_client.commit_block_list(block_list)
# Assert
self.assertIsNotNone(put_block_list_resp['etag'])
self.assertIsNotNone(put_block_list_resp['last_modified'])
self.assertTrue(put_block_list_resp['request_server_encrypted'])
self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
content = await blob.readall()
# Assert content was retrieved with the cpk
self.assertEqual(content, b'AAABBBCCC')
self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
self._teardown(bsc)
@pytest.mark.live_test_only
@BlobPreparer()
async def test_put_block_and_put_block_list_with_blob_sas_fails(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
await self._setup(bsc)
blob_name = self._get_blob_reference()
token1 = generate_blob_sas(
storage_account_name,
self.container_name,
blob_name,
account_key=storage_account_key,
permission=BlobSasPermissions(read=True, write=True, delete=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_SAS_ENCRYPTION_SCOPE,
)
blob_client = BlobServiceClient(self.account_url(storage_account_name, "blob"), token1)\
.get_blob_client(self.container_name, blob_name)
        # when the ses in the SAS and the encryption_scope argument are both set with DIFFERENT values, the call throws an exception
with self.assertRaises(HttpResponseError):
await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # when both are set with the SAME value, the call succeeds
await blob_client.stage_block('1', b'AAA', encryption_scope=TEST_SAS_ENCRYPTION_SCOPE)
# Act
block_list = [BlobBlock(block_id='1')]
        # when the ses in the SAS and the encryption_scope argument are both set with DIFFERENT values, the call throws an exception
with self.assertRaises(HttpResponseError):
await blob_client.commit_block_list(block_list, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
        # when both are set with the SAME value, the call succeeds
put_block_list_resp = await blob_client.commit_block_list(block_list, encryption_scope=TEST_SAS_ENCRYPTION_SCOPE)
# Assert
self.assertIsNotNone(put_block_list_resp['etag'])
self.assertIsNotNone(put_block_list_resp['last_modified'])
self.assertTrue(put_block_list_resp['request_server_encrypted'])
self.assertEqual(put_block_list_resp['encryption_scope'], TEST_SAS_ENCRYPTION_SCOPE)
# generate a sas with a different encryption scope
token2 = generate_blob_sas(
storage_account_name,
self.container_name,
blob_name,
account_key=storage_account_key,
permission=BlobSasPermissions(read=True, write=True, delete=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE,
)
blob_client_diff_encryption_scope_sas = BlobServiceClient(self.account_url(storage_account_name, "blob"), token2)\
.get_blob_client(self.container_name, blob_name)
        # the blob can be downloaded successfully regardless of which encryption scope was actually used on it:
        # the scope on the blob is TEST_SAS_ENCRYPTION_SCOPE while the ses in the SAS token is TEST_ENCRYPTION_KEY_SCOPE,
        # yet the download still succeeds
blob = await blob_client_diff_encryption_scope_sas.download_blob()
content = await blob.readall()
# Assert content was retrieved with the cpk
self.assertEqual(content, b'AAA')
self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
self._teardown(bsc)
@pytest.mark.live_test_only
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_create_block_blob_with_chunks(self, storage_account_name, storage_account_key):
# parallel operation
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
# to force the in-memory chunks to be used
self.config.use_byte_buffer = True
# Act
# create_blob_from_bytes forces the in-memory chunks to be used
blob_client, upload_response = await self._create_block_blob(bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE,
max_concurrency=2)
# Assert
self.assertIsNotNone(upload_response['etag'])
self.assertIsNotNone(upload_response['last_modified'])
self.assertTrue(upload_response['request_server_encrypted'])
self.assertEqual(upload_response['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data)
self.assertEqual(blob.properties.etag, upload_response['etag'])
self.assertEqual(blob.properties.last_modified, upload_response['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.live_test_only
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_create_block_blob_with_sub_streams(self, storage_account_name, storage_account_key):
        # due to a problem with the recording framework, this can only run live
# Act
# create_blob_from_bytes forces the in-memory chunks to be used
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
retry_total=0,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
# to force the in-memory chunks to be used
self.config.use_byte_buffer = True
blob_client, upload_response = await self._create_block_blob(bsc, data=self.byte_data, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE,
max_concurrency=2)
# Assert
self.assertIsNotNone(upload_response['etag'])
self.assertIsNotNone(upload_response['last_modified'])
self.assertTrue(upload_response['request_server_encrypted'])
self.assertEqual(upload_response['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data)
self.assertEqual(blob.properties.etag, upload_response['etag'])
self.assertEqual(blob.properties.last_modified, upload_response['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_create_block_blob_with_single_chunk(self, storage_account_name, storage_account_key):
# Act
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
data = b'AAABBBCCC'
# create_blob_from_bytes forces the in-memory chunks to be used
blob_client, upload_response = await self._create_block_blob(bsc, data=data, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(upload_response['etag'])
self.assertIsNotNone(upload_response['last_modified'])
self.assertTrue(upload_response['request_server_encrypted'])
self.assertEqual(upload_response['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), data)
self.assertEqual(blob.properties.etag, upload_response['etag'])
self.assertEqual(blob.properties.last_modified, upload_response['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_put_block_from_url_and_commit(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
# create source blob and get source blob url
source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # make sure chunked upload is used, so the request can be recorded
source_blob_client, _ = await self._create_block_blob(bsc, blob_name=source_blob_name, data=self.byte_data)
source_blob_sas = generate_blob_sas(
source_blob_client.account_name,
source_blob_client.container_name,
source_blob_client.blob_name,
snapshot=source_blob_client.snapshot,
account_key=source_blob_client.credential.account_key,
permission=BlobSasPermissions(read=True),
expiry=datetime.utcnow() + timedelta(hours=1)
)
source_blob_url = source_blob_client.url + "?" + source_blob_sas
# create destination blob
self.config.use_byte_buffer = False
destination_blob_client, _ = await self._create_block_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act part 1: make put block from url calls
await destination_blob_client.stage_block_from_url(block_id=1, source_url=source_blob_url,
source_offset=0, source_length=4 * 1024,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
await destination_blob_client.stage_block_from_url(block_id=2, source_url=source_blob_url,
source_offset=4 * 1024, source_length=4 * 1024,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert blocks
committed, uncommitted = await destination_blob_client.get_block_list('all')
self.assertEqual(len(uncommitted), 2)
self.assertEqual(len(committed), 0)
# commit the blocks without cpk should fail
block_list = [BlobBlock(block_id='1'), BlobBlock(block_id='2')]
with self.assertRaises(HttpResponseError):
await destination_blob_client.commit_block_list(block_list)
# Act commit the blocks with cpk should succeed
put_block_list_resp = await destination_blob_client.commit_block_list(block_list,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(put_block_list_resp['etag'])
self.assertIsNotNone(put_block_list_resp['last_modified'])
self.assertTrue(put_block_list_resp['request_server_encrypted'])
self.assertEqual(put_block_list_resp['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await destination_blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data[0: 8 * 1024])
self.assertEqual(blob.properties.etag, put_block_list_resp['etag'])
self.assertEqual(blob.properties.last_modified, put_block_list_resp['last_modified'])
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.live_test_only
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_append_block(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
blob_client = await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
for content in [b'AAA', b'BBB', b'CCC']:
append_blob_prop = await blob_client.append_block(content, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(append_blob_prop['etag'])
self.assertIsNotNone(append_blob_prop['last_modified'])
self.assertTrue(append_blob_prop['request_server_encrypted'])
self.assertEqual(append_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), b'AAABBBCCC')
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_append_block_from_url(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
source_blob_name = self.get_resource_name("sourceblob")
self.config.use_byte_buffer = True # chunk upload
source_blob_client, _ = await self._create_block_blob(bsc, blob_name=source_blob_name, data=self.byte_data)
source_blob_sas = generate_blob_sas(
source_blob_client.account_name,
source_blob_client.container_name,
source_blob_client.blob_name,
snapshot=source_blob_client.snapshot,
account_key=source_blob_client.credential.account_key,
permission=BlobSasPermissions(read=True),
expiry=datetime.utcnow() + timedelta(hours=1)
)
source_blob_url = source_blob_client.url + "?" + source_blob_sas
self.config.use_byte_buffer = False
destination_blob_client = await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
append_blob_prop = await destination_blob_client.append_block_from_url(source_blob_url,
source_offset=0,
source_length=4 * 1024,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(append_blob_prop['etag'])
self.assertIsNotNone(append_blob_prop['last_modified'])
self.assertTrue(append_blob_prop['request_server_encrypted'])
self.assertEqual(append_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await destination_blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data[0: 4 * 1024])
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_create_append_blob_with_chunks(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
blob_client = await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
append_blob_prop = await blob_client.upload_blob(self.byte_data,
blob_type=BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(append_blob_prop['etag'])
self.assertIsNotNone(append_blob_prop['last_modified'])
self.assertTrue(append_blob_prop['request_server_encrypted'])
self.assertEqual(append_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data)
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_update_page(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
blob_client = await self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
page_blob_prop = await blob_client.upload_page(self.byte_data,
offset=0,
length=len(self.byte_data),
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(page_blob_prop['etag'])
self.assertIsNotNone(page_blob_prop['last_modified'])
self.assertTrue(page_blob_prop['request_server_encrypted'])
self.assertEqual(page_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob(offset=0,
length=len(self.byte_data))
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data)
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_update_page_from_url(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
source_blob_name = self.get_resource_name("sourceblob")
        self.config.use_byte_buffer = True  # make sure chunked upload is used, so the request can be recorded
source_blob_client, _ = await self._create_block_blob(bsc, blob_name=source_blob_name, data=self.byte_data)
source_blob_sas = generate_blob_sas(
source_blob_client.account_name,
source_blob_client.container_name,
source_blob_client.blob_name,
snapshot=source_blob_client.snapshot,
account_key=source_blob_client.credential.account_key,
permission=BlobSasPermissions(read=True),
expiry=datetime.utcnow() + timedelta(hours=1)
)
source_blob_url = source_blob_client.url + "?" + source_blob_sas
self.config.use_byte_buffer = False
blob_client = await self._create_page_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
page_blob_prop = await blob_client.upload_pages_from_url(source_blob_url,
offset=0,
length=len(self.byte_data),
source_offset=0,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(page_blob_prop['etag'])
self.assertIsNotNone(page_blob_prop['last_modified'])
self.assertTrue(page_blob_prop['request_server_encrypted'])
self.assertEqual(page_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob(offset=0,
length=len(self.byte_data))
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data)
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.live_test_only
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_create_page_blob_with_chunks(self, storage_account_name, storage_account_key):
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
# Act
blob_client = bsc.get_blob_client(self.container_name, self._get_blob_reference())
page_blob_prop = await blob_client.upload_blob(self.byte_data,
blob_type=BlobType.PageBlob,
max_concurrency=2,
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(page_blob_prop['etag'])
self.assertIsNotNone(page_blob_prop['last_modified'])
self.assertTrue(page_blob_prop['request_server_encrypted'])
self.assertEqual(page_blob_prop['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
# Act get the blob content
blob = await blob_client.download_blob()
# Assert content was retrieved with the cpk
self.assertEqual(await blob.readall(), self.byte_data)
self.assertEqual(blob.properties.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_get_set_blob_metadata(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
blob_client, _ = await self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act
blob_props = await blob_client.get_blob_properties()
# Assert
self.assertTrue(blob_props.server_encrypted)
self.assertEqual(blob_props.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
# Act set blob properties
metadata = {'hello': 'world', 'number': '42', 'up': 'upval'}
with self.assertRaises(HttpResponseError):
await blob_client.set_blob_metadata(
metadata=metadata,
)
await blob_client.set_blob_metadata(metadata=metadata, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
blob_props = await blob_client.get_blob_properties()
md = blob_props.metadata
self.assertEqual(3, len(md))
self.assertEqual(md['hello'], 'world')
self.assertEqual(md['number'], '42')
self.assertEqual(md['up'], 'upval')
self.assertFalse('Up' in md)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_snapshot_blob(self, storage_account_name, storage_account_key):
# Arrange
# test chunking functionality by reducing the size of each chunk,
# otherwise the tests would take too long to execute
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
storage_account_key,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024,
transport=AiohttpTestTransport(connection_data_block_size=1024))
await self._setup(bsc)
blob_client, _ = await self._create_block_blob(bsc, data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Act without cpk should not work
with self.assertRaises(HttpResponseError):
await blob_client.create_snapshot()
# Act with cpk should work
blob_snapshot = await blob_client.create_snapshot(encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Assert
self.assertIsNotNone(blob_snapshot)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_list_blobs(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
await self._setup(bsc)
blob_client, _ = await self._create_block_blob(bsc, blob_name="blockblob", data=b'AAABBBCCC', encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
await self._create_append_blob(bsc, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
container_client = bsc.get_container_client(self.container_name)
generator = container_client.list_blobs(include="metadata")
async for blob in generator:
self.assertIsNotNone(blob)
# Assert: every listed blob has encryption_scope
self.assertEqual(blob.encryption_scope, TEST_ENCRYPTION_KEY_SCOPE)
self._teardown(bsc)
@pytest.mark.live_test_only
@BlobPreparer()
async def test_list_blobs_using_container_encryption_scope_sas(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
await self._setup(bsc)
token = generate_container_sas(
storage_account_name,
self.container_name,
storage_account_key,
permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_SAS_ENCRYPTION_SCOPE
)
bsc_with_sas_credential = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=token,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
# blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE
blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True)
await self._create_append_blob(bsc_with_sas_credential)
# generate a token with TEST_ENCRYPTION_KEY_SCOPE
token2 = generate_container_sas(
storage_account_name,
self.container_name,
storage_account_key,
permission=ContainerSasPermissions(read=True, write=True, list=True, delete=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_ENCRYPTION_KEY_SCOPE
)
bsc_with_diff_sas_credential = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=token2,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
container_client = bsc_with_diff_sas_credential.get_container_client(self.container_name)
        # The ses field in the SAS token used to list blobs differs from the
        # encryption scope used when creating the blobs; listing should still succeed
generator = container_client.list_blobs(include="metadata")
async for blob in generator:
self.assertIsNotNone(blob)
# Assert: every listed blob has encryption_scope
# and the encryption scope is the same as the one on blob creation
self.assertEqual(blob.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
self._teardown(bsc)
@pytest.mark.live_test_only
@BlobPreparer()
async def test_copy_with_account_encryption_scope_sas(self, storage_account_name, storage_account_key):
# Arrange
sas_token = generate_account_sas(
storage_account_name,
account_key=storage_account_key,
resource_types=ResourceTypes(object=True, container=True),
permission=AccountSasPermissions(read=True, write=True, delete=True, list=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_SAS_ENCRYPTION_SCOPE_2
)
bsc_with_sas_credential = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=sas_token,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
await self._setup(bsc_with_sas_credential)
# blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE_2
blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True)
#
sas_token2 = generate_account_sas(
storage_account_name,
account_key=storage_account_key,
resource_types=ResourceTypes(object=True, container=True),
permission=AccountSasPermissions(read=True, write=True, delete=True, list=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_SAS_ENCRYPTION_SCOPE
)
bsc_with_account_key_credential = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=sas_token2,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
copied_blob = self.get_resource_name('copiedblob')
copied_blob_client = bsc_with_account_key_credential.get_blob_client(self.container_name, copied_blob)
# TODO: to confirm with Sean/Heidi ses in SAS cannot be set for async copy.
# The test failed for async copy (without requires_sync=True)
await copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True)
props = await copied_blob_client.get_blob_properties()
self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
self._teardown(bsc_with_sas_credential)
@pytest.mark.live_test_only
@BlobPreparer()
    async def test_copy_blob_from_url_with_encryption_scope(self, storage_account_name, storage_account_key):
# Arrange
# create sas for source blob
sas_token = generate_account_sas(
storage_account_name,
account_key=storage_account_key,
resource_types=ResourceTypes(object=True, container=True),
permission=AccountSasPermissions(read=True, write=True, delete=True, list=True),
expiry=datetime.utcnow() + timedelta(hours=1),
)
bsc_with_sas_credential = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=sas_token,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
await self._setup(bsc_with_sas_credential)
blob_client, _ = await self._create_block_blob(bsc_with_sas_credential, blob_name="blockblob", data=b'AAABBBCCC', overwrite=True)
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
copied_blob = self.get_resource_name('copiedblob')
copied_blob_client = bsc.get_blob_client(self.container_name, copied_blob)
await copied_blob_client.start_copy_from_url(blob_client.url, requires_sync=True,
encryption_scope=TEST_SAS_ENCRYPTION_SCOPE)
props = await copied_blob_client.get_blob_properties()
self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
self._teardown(bsc_with_sas_credential)
@pytest.mark.live_test_only
@BlobPreparer()
async def test_copy_with_user_delegation_encryption_scope_sas(self, storage_account_name, storage_account_key):
# Arrange
# to get user delegation key
oauth_token_credential = self.generate_oauth_token()
service_client = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=oauth_token_credential,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
user_delegation_key = await service_client.get_user_delegation_key(datetime.utcnow(),
datetime.utcnow() + timedelta(hours=1))
await self._setup(service_client)
blob_name = self.get_resource_name('blob')
sas_token = generate_blob_sas(
storage_account_name,
self.container_name,
blob_name,
account_key=user_delegation_key,
permission=BlobSasPermissions(read=True, write=True, create=True, delete=True),
expiry=datetime.utcnow() + timedelta(hours=1),
encryption_scope=TEST_SAS_ENCRYPTION_SCOPE
)
bsc_with_delegation_sas = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=sas_token,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
# blob is encrypted using TEST_SAS_ENCRYPTION_SCOPE
blob_client, _ = await self._create_block_blob(bsc_with_delegation_sas, blob_name=blob_name, data=b'AAABBBCCC', overwrite=True)
props = await blob_client.get_blob_properties()
self.assertEqual(props.encryption_scope, TEST_SAS_ENCRYPTION_SCOPE)
self._teardown(service_client)
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_create_container_with_default_cpk_n(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
container_client = await bsc.create_container(
'asynccpkcontainer',
container_encryption_scope=TEST_CONTAINER_ENCRYPTION_KEY_SCOPE)
container_props = await container_client.get_container_properties()
self.assertEqual(
container_props.encryption_scope.default_encryption_scope,
TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
self.assertEqual(container_props.encryption_scope.prevent_encryption_scope_override, False)
async for container in bsc.list_containers(name_starts_with='asynccpkcontainer'):
            self.assertEqual(
                container.encryption_scope.default_encryption_scope,
                TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
            self.assertEqual(container.encryption_scope.prevent_encryption_scope_override, False)
blob_client = container_client.get_blob_client("appendblob")
        # providing an encryption scope when uploading the blob
resp = await blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
# Use the provided encryption scope on the blob
self.assertEqual(resp['encryption_scope'], TEST_ENCRYPTION_KEY_SCOPE)
await container_client.delete_container()
@pytest.mark.playback_test_only
@BlobPreparer()
@AsyncStorageTestCase.await_prepared_test
async def test_create_container_with_default_cpk_n_deny_override(self, storage_account_name, storage_account_key):
# Arrange
bsc = BlobServiceClient(
self.account_url(storage_account_name, "blob"),
credential=storage_account_key,
connection_data_block_size=1024,
max_single_put_size=1024,
min_large_block_upload_threshold=1024,
max_block_size=1024,
max_page_size=1024)
container_client = await bsc.create_container(
'asyncdenyoverridecpkcontainer',
container_encryption_scope=TEST_CONTAINER_ENCRYPTION_KEY_SCOPE_DENY_OVERRIDE
)
container_props = await container_client.get_container_properties()
self.assertEqual(
container_props.encryption_scope.default_encryption_scope,
TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
self.assertEqual(container_props.encryption_scope.prevent_encryption_scope_override, True)
async for container in bsc.list_containers(name_starts_with='asyncdenyoverridecpkcontainer'):
            self.assertEqual(
                container.encryption_scope.default_encryption_scope,
                TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
            self.assertEqual(container.encryption_scope.prevent_encryption_scope_override, True)
blob_client = container_client.get_blob_client("appendblob")
# It's not allowed to set encryption scope on the blob when the container denies encryption scope override.
with self.assertRaises(HttpResponseError):
await blob_client.upload_blob(b'aaaa', BlobType.AppendBlob, encryption_scope=TEST_ENCRYPTION_KEY_SCOPE)
resp = await blob_client.upload_blob(b'aaaa', BlobType.AppendBlob)
self.assertEqual(resp['encryption_scope'], TEST_CONTAINER_ENCRYPTION_KEY_SCOPE.default_encryption_scope)
await container_client.delete_container()
| Azure/azure-sdk-for-python | sdk/storage/azure-storage-blob/tests/test_cpk_n_async.py | Python | mit | 51,849 |
"""
Platform for the Daikin AC.
For more details about this component, please refer to the documentation
https://home-assistant.io/components/daikin/
"""
import logging
from datetime import timedelta
from socket import timeout
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.components.discovery import SERVICE_DAIKIN
from homeassistant.const import (
CONF_HOSTS, CONF_ICON, CONF_MONITORED_CONDITIONS, CONF_NAME, CONF_TYPE
)
from homeassistant.helpers import discovery
from homeassistant.helpers.discovery import load_platform
from homeassistant.util import Throttle
REQUIREMENTS = ['pydaikin==0.4']
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'daikin'
HTTP_RESOURCES = ['aircon/get_sensor_info', 'aircon/get_control_info']
ATTR_TARGET_TEMPERATURE = 'target_temperature'
ATTR_INSIDE_TEMPERATURE = 'inside_temperature'
ATTR_OUTSIDE_TEMPERATURE = 'outside_temperature'
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=60)
COMPONENT_TYPES = ['climate', 'sensor']
SENSOR_TYPE_TEMPERATURE = 'temperature'
SENSOR_TYPES = {
ATTR_INSIDE_TEMPERATURE: {
CONF_NAME: 'Inside Temperature',
CONF_ICON: 'mdi:thermometer',
CONF_TYPE: SENSOR_TYPE_TEMPERATURE
},
ATTR_OUTSIDE_TEMPERATURE: {
CONF_NAME: 'Outside Temperature',
CONF_ICON: 'mdi:thermometer',
CONF_TYPE: SENSOR_TYPE_TEMPERATURE
}
}
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Optional(
CONF_HOSTS, default=[]
): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(
CONF_MONITORED_CONDITIONS,
default=list(SENSOR_TYPES.keys())
): vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)])
})
}, extra=vol.ALLOW_EXTRA)
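# Hedged example of a configuration.yaml entry accepted by CONFIG_SCHEMA above;
# the host address is a placeholder, and both keys are optional
# (monitored_conditions defaults to every key in SENSOR_TYPES):
#
#     daikin:
#       hosts:
#         - 192.168.1.30
#       monitored_conditions:
#         - inside_temperature
#         - outside_temperature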
def setup(hass, config):
"""Establish connection with Daikin."""
def discovery_dispatch(service, discovery_info):
"""Dispatcher for Daikin discovery events."""
host = discovery_info.get('ip')
if daikin_api_setup(hass, host) is None:
return
for component in COMPONENT_TYPES:
load_platform(hass, component, DOMAIN, discovery_info,
config)
discovery.listen(hass, SERVICE_DAIKIN, discovery_dispatch)
for host in config.get(DOMAIN, {}).get(CONF_HOSTS, []):
if daikin_api_setup(hass, host) is None:
continue
discovery_info = {
'ip': host,
CONF_MONITORED_CONDITIONS:
config[DOMAIN][CONF_MONITORED_CONDITIONS]
}
load_platform(hass, 'sensor', DOMAIN, discovery_info, config)
return True
def daikin_api_setup(hass, host, name=None):
"""Create a Daikin instance only once."""
if DOMAIN not in hass.data:
hass.data[DOMAIN] = {}
api = hass.data[DOMAIN].get(host)
if api is None:
from pydaikin import appliance
try:
device = appliance.Appliance(host)
except timeout:
_LOGGER.error("Connection to Daikin could not be established")
return False
if name is None:
name = device.values['name']
api = DaikinApi(device, name)
return api
class DaikinApi:
"""Keep the Daikin instance in one place and centralize the update."""
def __init__(self, device, name):
"""Initialize the Daikin Handle."""
self.device = device
self.name = name
self.ip_address = device.ip
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self, **kwargs):
"""Pull the latest data from Daikin."""
try:
for resource in HTTP_RESOURCES:
self.device.values.update(
self.device.get_resource(resource)
)
except timeout:
_LOGGER.warning(
"Connection failed for %s", self.ip_address
)
| persandstrom/home-assistant | homeassistant/components/daikin.py | Python | apache-2.0 | 3,885 |
from __future__ import absolute_import, print_function, division
import operator
from petl.compat import OrderedDict
from petl.util import RowContainer, hybridrows, expr, rowgroupby
from petl.transform.sorts import sort
def fieldmap(table, mappings=None, failonerror=False, errorvalue=None):
"""
Transform a table, mapping fields arbitrarily between input and output. E.g.::
>>> from petl import fieldmap, look
>>> look(table1)
+------+----------+-------+----------+----------+
| 'id' | 'sex' | 'age' | 'height' | 'weight' |
+======+==========+=======+==========+==========+
| 1 | 'male' | 16 | 1.45 | 62.0 |
+------+----------+-------+----------+----------+
| 2 | 'female' | 19 | 1.34 | 55.4 |
+------+----------+-------+----------+----------+
| 3 | 'female' | 17 | 1.78 | 74.4 |
+------+----------+-------+----------+----------+
| 4 | 'male' | 21 | 1.33 | 45.2 |
+------+----------+-------+----------+----------+
| 5 | '-' | 25 | 1.65 | 51.9 |
+------+----------+-------+----------+----------+
>>> from collections import OrderedDict
>>> mappings = OrderedDict()
>>> # rename a field
... mappings['subject_id'] = 'id'
>>> # translate a field
... mappings['gender'] = 'sex', {'male': 'M', 'female': 'F'}
>>> # apply a calculation to a field
... mappings['age_months'] = 'age', lambda v: v * 12
>>> # apply a calculation to a combination of fields
... mappings['bmi'] = lambda rec: rec['weight'] / rec['height']**2
>>> # transform and inspect the output
... table2 = fieldmap(table1, mappings)
>>> look(table2)
+--------------+----------+--------------+--------------------+
| 'subject_id' | 'gender' | 'age_months' | 'bmi' |
+==============+==========+==============+====================+
| 1 | 'M' | 192 | 29.48870392390012 |
+--------------+----------+--------------+--------------------+
| 2 | 'F' | 228 | 30.8531967030519 |
+--------------+----------+--------------+--------------------+
| 3 | 'F' | 204 | 23.481883600555488 |
+--------------+----------+--------------+--------------------+
| 4 | 'M' | 252 | 25.55260331279326 |
+--------------+----------+--------------+--------------------+
| 5 | '-' | 300 | 19.0633608815427 |
+--------------+----------+--------------+--------------------+
>>> # field mappings can also be added and/or updated after the table is created
... # via the suffix notation
... table3 = fieldmap(table1)
>>> table3['subject_id'] = 'id'
>>> table3['gender'] = 'sex', {'male': 'M', 'female': 'F'}
>>> table3['age_months'] = 'age', lambda v: v * 12
>>> # use an expression string this time
... table3['bmi'] = '{weight} / {height}**2'
>>> look(table3)
+--------------+----------+--------------+--------------------+
| 'subject_id' | 'gender' | 'age_months' | 'bmi' |
+==============+==========+==============+====================+
| 1 | 'M' | 192 | 29.48870392390012 |
+--------------+----------+--------------+--------------------+
| 2 | 'F' | 228 | 30.8531967030519 |
+--------------+----------+--------------+--------------------+
| 3 | 'F' | 204 | 23.481883600555488 |
+--------------+----------+--------------+--------------------+
| 4 | 'M' | 252 | 25.55260331279326 |
+--------------+----------+--------------+--------------------+
| 5 | '-' | 300 | 19.0633608815427 |
+--------------+----------+--------------+--------------------+
Note also that the mapping value can be an expression string, which will be
converted to a lambda function via :func:`expr`.
"""
return FieldMapView(table, mappings=mappings, failonerror=failonerror,
errorvalue=errorvalue)
class FieldMapView(RowContainer):
def __init__(self, source, mappings=None, failonerror=False, errorvalue=None):
self.source = source
if mappings is None:
self.mappings = OrderedDict()
else:
self.mappings = mappings
self.failonerror = failonerror
self.errorvalue = errorvalue
def __setitem__(self, key, value):
self.mappings[key] = value
def __iter__(self):
return iterfieldmap(self.source, self.mappings, self.failonerror, self.errorvalue)
def iterfieldmap(source, mappings, failonerror, errorvalue):
it = iter(source)
flds = it.next()
outflds = mappings.keys()
yield tuple(outflds)
mapfuns = dict()
for outfld, m in mappings.items():
if m in flds:
mapfuns[outfld] = operator.itemgetter(m)
elif isinstance(m, int) and m < len(flds):
mapfuns[outfld] = operator.itemgetter(m)
elif isinstance(m, basestring):
mapfuns[outfld] = expr(m)
elif callable(m):
mapfuns[outfld] = m
elif isinstance(m, (tuple, list)) and len(m) == 2:
srcfld = m[0]
fm = m[1]
if callable(fm):
mapfuns[outfld] = composefun(fm, srcfld)
elif isinstance(fm, dict):
mapfuns[outfld] = composedict(fm, srcfld)
else:
raise Exception('expected callable or dict') # TODO better error
else:
raise Exception('invalid mapping', outfld, m) # TODO better error
for row in hybridrows(flds, it):
try:
# use list comprehension if possible
outrow = [mapfuns[outfld](row) for outfld in outflds]
except:
# fall back to doing it one field at a time
outrow = list()
for outfld in outflds:
try:
val = mapfuns[outfld](row)
except:
if failonerror:
raise
else:
val = errorvalue
outrow.append(val)
yield tuple(outrow)
def composefun(f, srcfld):
def g(rec):
return f(rec[srcfld])
return g
def composedict(d, srcfld):
def g(rec):
k = rec[srcfld]
if k in d:
return d[k]
else:
return k
return g
def rowmap(table, rowmapper, fields, failonerror=False, missing=None):
"""
Transform rows via an arbitrary function. E.g.::
>>> from petl import rowmap, look
>>> look(table1)
+------+----------+-------+----------+----------+
| 'id' | 'sex' | 'age' | 'height' | 'weight' |
+======+==========+=======+==========+==========+
| 1 | 'male' | 16 | 1.45 | 62.0 |
+------+----------+-------+----------+----------+
| 2 | 'female' | 19 | 1.34 | 55.4 |
+------+----------+-------+----------+----------+
| 3 | 'female' | 17 | 1.78 | 74.4 |
+------+----------+-------+----------+----------+
| 4 | 'male' | 21 | 1.33 | 45.2 |
+------+----------+-------+----------+----------+
| 5 | '-' | 25 | 1.65 | 51.9 |
+------+----------+-------+----------+----------+
>>> def rowmapper(row):
... transmf = {'male': 'M', 'female': 'F'}
... return [row[0],
... transmf[row[1]] if row[1] in transmf else row[1],
... row[2] * 12,
... row[4] / row[3] ** 2]
...
>>> table2 = rowmap(table1, rowmapper, fields=['subject_id', 'gender', 'age_months', 'bmi'])
>>> look(table2)
+--------------+----------+--------------+--------------------+
| 'subject_id' | 'gender' | 'age_months' | 'bmi' |
+==============+==========+==============+====================+
| 1 | 'M' | 192 | 29.48870392390012 |
+--------------+----------+--------------+--------------------+
| 2 | 'F' | 228 | 30.8531967030519 |
+--------------+----------+--------------+--------------------+
| 3 | 'F' | 204 | 23.481883600555488 |
+--------------+----------+--------------+--------------------+
| 4 | 'M' | 252 | 25.55260331279326 |
+--------------+----------+--------------+--------------------+
| 5 | '-' | 300 | 19.0633608815427 |
+--------------+----------+--------------+--------------------+
The `rowmapper` function should return a single row (list or tuple).
.. versionchanged:: 0.9
Hybrid row objects supporting data value access by either position or by
field name are now passed to the `rowmapper` function.
"""
return RowMapView(table, rowmapper, fields, failonerror=failonerror,
missing=missing)
class RowMapView(RowContainer):
def __init__(self, source, rowmapper, fields, failonerror=False, missing=None):
self.source = source
self.rowmapper = rowmapper
self.fields = fields
self.failonerror = failonerror
self.missing = missing
def __iter__(self):
return iterrowmap(self.source, self.rowmapper, self.fields, self.failonerror,
self.missing)
def iterrowmap(source, rowmapper, fields, failonerror, missing):
it = iter(source)
srcflds = it.next()
yield tuple(fields)
for row in hybridrows(srcflds, it, missing):
try:
outrow = rowmapper(row)
yield tuple(outrow)
except:
if failonerror:
raise
def recordmap(table, recmapper, fields, failonerror=False):
"""
Transform records via an arbitrary function.
.. deprecated:: 0.9
        Use :func:`rowmap` instead.
"""
return rowmap(table, recmapper, fields, failonerror=failonerror)
def rowmapmany(table, rowgenerator, fields, failonerror=False, missing=None):
"""
Map each input row to any number of output rows via an arbitrary function.
E.g.::
>>> from petl import rowmapmany, look
>>> look(table1)
+------+----------+-------+----------+----------+
| 'id' | 'sex' | 'age' | 'height' | 'weight' |
+======+==========+=======+==========+==========+
| 1 | 'male' | 16 | 1.45 | 62.0 |
+------+----------+-------+----------+----------+
| 2 | 'female' | 19 | 1.34 | 55.4 |
+------+----------+-------+----------+----------+
| 3 | '-' | 17 | 1.78 | 74.4 |
+------+----------+-------+----------+----------+
| 4 | 'male' | 21 | 1.33 | |
+------+----------+-------+----------+----------+
>>> def rowgenerator(row):
... transmf = {'male': 'M', 'female': 'F'}
... yield [row[0], 'gender', transmf[row[1]] if row[1] in transmf else row[1]]
... yield [row[0], 'age_months', row[2] * 12]
... yield [row[0], 'bmi', row[4] / row[3] ** 2]
...
>>> table2 = rowmapmany(table1, rowgenerator, fields=['subject_id', 'variable', 'value'])
>>> look(table2)
+--------------+--------------+--------------------+
| 'subject_id' | 'variable' | 'value' |
+==============+==============+====================+
| 1 | 'gender' | 'M' |
+--------------+--------------+--------------------+
| 1 | 'age_months' | 192 |
+--------------+--------------+--------------------+
| 1 | 'bmi' | 29.48870392390012 |
+--------------+--------------+--------------------+
| 2 | 'gender' | 'F' |
+--------------+--------------+--------------------+
| 2 | 'age_months' | 228 |
+--------------+--------------+--------------------+
| 2 | 'bmi' | 30.8531967030519 |
+--------------+--------------+--------------------+
| 3 | 'gender' | '-' |
+--------------+--------------+--------------------+
| 3 | 'age_months' | 204 |
+--------------+--------------+--------------------+
| 3 | 'bmi' | 23.481883600555488 |
+--------------+--------------+--------------------+
| 4 | 'gender' | 'M' |
+--------------+--------------+--------------------+
The `rowgenerator` function should yield zero or more rows (lists or tuples).
See also the :func:`melt` function.
.. versionchanged:: 0.9
Hybrid row objects supporting data value access by either position or by
field name are now passed to the `rowgenerator` function.
"""
return RowMapManyView(table, rowgenerator, fields, failonerror=failonerror,
missing=missing)
class RowMapManyView(RowContainer):
def __init__(self, source, rowgenerator, fields, failonerror=False, missing=None):
self.source = source
self.rowgenerator = rowgenerator
self.fields = fields
self.failonerror = failonerror
self.missing = missing
def __iter__(self):
return iterrowmapmany(self.source, self.rowgenerator, self.fields,
self.failonerror, self.missing)
def iterrowmapmany(source, rowgenerator, fields, failonerror, missing):
it = iter(source)
srcflds = it.next()
yield tuple(fields)
for row in hybridrows(srcflds, it, missing):
try:
for outrow in rowgenerator(row):
yield tuple(outrow)
except:
if failonerror:
raise
def recordmapmany(table, rowgenerator, fields, failonerror=False):
"""
Map each input row (as a record) to any number of output rows via an
arbitrary function.
.. deprecated:: 0.9
Use :func:`rowmapmany` instead.
"""
return rowmapmany(table, rowgenerator, fields, failonerror=failonerror)
def rowgroupmap(table, key, mapper, fields=None, missing=None, presorted=False,
buffersize=None, tempdir=None, cache=True):
"""
Group rows under the given key then apply `mapper` to yield zero or more
output rows for each input group of rows.
.. versionadded:: 0.12
"""
return RowGroupMapView(table, key, mapper, fields=fields,
presorted=presorted,
buffersize=buffersize, tempdir=tempdir, cache=cache)
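# Minimal usage sketch for rowgroupmap(); the table, field names and the
# `sumrows` helper are invented for illustration only:
#
#     def sumrows(key, rows):
#         yield [key, sum(row[1] for row in rows)]
#
#     table2 = rowgroupmap(table1, 'id', sumrows, fields=['id', 'total'])
#
# Each group of input rows sharing the same value of 'id' is passed to the
# mapper, which may yield zero or more output rows per group.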
class RowGroupMapView(RowContainer):
def __init__(self, source, key, mapper, fields=None,
presorted=False, buffersize=None, tempdir=None, cache=True):
if presorted:
self.source = source
else:
self.source = sort(source, key, buffersize=buffersize,
tempdir=tempdir, cache=cache)
self.key = key
self.fields = fields
self.mapper = mapper
def __iter__(self):
return iterrowgroupmap(self.source, self.key, self.mapper, self.fields)
def iterrowgroupmap(source, key, mapper, fields):
yield tuple(fields)
for key, rows in rowgroupby(source, key):
for row in mapper(key, rows):
yield row
| rs/petl | src/petl/transform/maps.py | Python | mit | 16,043 |
#!/bin/false
# This file is part of Espruino, a JavaScript interpreter for Microcontrollers
#
# Copyright (C) 2013 Gordon Williams <gw@pur3.co.uk>
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# ----------------------------------------------------------------------------------------
# This file contains information for a specific board - the available pins, and where LEDs,
# Buttons, and other in-built peripherals are. It is used to build documentation as well
# as various source and header files for Espruino.
# ----------------------------------------------------------------------------------------
import pinutils;
info = {
'name' : "DPT-Board",
'default_console' : "EV_USBSERIAL",
'binary_name' : 'espruino_%v_dpt_board',
};
chip = {
'part' : "DPTBOARD",
'family' : "LINUX",
'package' : "",
'ram' : -1,
'flash' : -1,
'speed' : -1,
'usart' : 1,
'spi' : 1,
'i2c' : 1,
'adc' : 0,
'dac' : 0,
};
# left-right, or top-bottom order
devices = {
};
def get_pins():
pins = pinutils.generate_pins(0,27)
return pins
| AlexanderBrevig/Espruino | boards/DPTBOARD.py | Python | mpl-2.0 | 1,205 |
import os
import shutil
from django.contrib.sites.models import Site
from django.conf import settings
from .models import SiteResources, SitePeople
def resources_for_site():
return SiteResources.objects.get(site=Site.objects.get_current()).resources.all()
def users_for_site():
return SitePeople.objects.get(site=Site.objects.get_current()).people.all()
def sed(filename, change_dict):
""" Update file replacing key with value in provided dictionary """
f = open(filename, 'r')
data = f.read()
f.close()
for key, val in change_dict.items():
data = data.replace(key, val)
f = open(filename, 'w')
f.write(data)
f.close()
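# Illustrative call (hypothetical path and placeholder values), replacing every
# occurrence of each dictionary key with its value in the named file:
#
#     sed('/tmp/site1/conf/nginx', {'$DOMAIN': 'example.org', '$PORTNUM': '8001'})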
def dump_model(model, filename):
from django.core import serializers
data = serializers.serialize("json", model.objects.all(), indent=4)
f = open(filename, "w")
f.write(data)
f.close()
def add_site(name, domain):
""" Add a site to database, create directory tree """
# get latest SITE id
sites = Site.objects.all()
used_ids = [v[0] for v in sites.values_list()]
site_id = max(used_ids) + 1
# current settings is one of the sites
project_dir = os.path.realpath(os.path.join(settings.SITE_ROOT, '../'))
site_dir = os.path.join(project_dir, 'site%s' % site_id)
site_template = os.path.join(os.path.dirname(__file__), 'site_template')
shutil.copytree(site_template, site_dir)
# update configuration and settings files
change_dict = {
'$SITE_ID': str(site_id),
'$SITE_NAME': name,
'$DOMAIN': domain,
'$SITE_ROOT': site_dir,
'$SERVE_PATH': settings.SERVE_PATH,
'$PORTNUM': '8%s' % str(site_id).zfill(3),
'$GEOSERVER_URL': settings.GEOSERVER_URL,
'$PROJECT_NAME': os.path.basename(os.path.dirname(settings.PROJECT_ROOT)),
}
sed(os.path.join(site_dir, 'conf/gunicorn'), change_dict)
sed(os.path.join(site_dir, 'conf/nginx'), change_dict)
sed(os.path.join(site_dir, 'settings.py'), change_dict)
sed(os.path.join(site_dir, 'local_settings_template.py'), change_dict)
sed(os.path.join(site_dir, 'wsgi.py'), change_dict)
# add site to database
site = Site(id=site_id, name=name, domain=domain)
site.save()
dump_model(Site, os.path.join(project_dir, 'sites.json'))
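# Example invocation (name and domain are made up); this copies site_template
# into a new site<N> directory, rewrites its config files and registers the
# new Site row:
#
#     add_site('Lagos GeoNode', 'lagos.example.org')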
| davekennewell/geonode | geonode/contrib/geosites/utils.py | Python | gpl-3.0 | 2,308 |
from __future__ import print_function
import lldb
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class ScopedEnumType(TestBase):
mydir = TestBase.compute_mydir(__file__)
@skipIf(dwarf_version=['<', '4'])
def test(self):
self.build()
self.main_source = "main.cpp"
self.main_source_spec = lldb.SBFileSpec(self.main_source)
(target, process, thread, bkpt) = lldbutil.run_to_source_breakpoint(self,
'// Set break point at this line.', self.main_source_spec)
frame = thread.GetFrameAtIndex(0)
self.expect("expr f == Foo::FooBar",
substrs=['(bool) $0 = true'])
value = frame.EvaluateExpression("f == Foo::FooBar")
self.assertTrue(value.IsValid())
self.assertTrue(value.GetError().Success())
self.assertEqual(value.GetValueAsUnsigned(), 1)
value = frame.EvaluateExpression("b == BarBar")
self.assertTrue(value.IsValid())
self.assertTrue(value.GetError().Success())
self.assertEqual(value.GetValueAsUnsigned(), 1)
## b is not a Foo
value = frame.EvaluateExpression("b == Foo::FooBar")
self.assertTrue(value.IsValid())
self.assertFalse(value.GetError().Success())
## integral is not implicitly convertible to a scoped enum
value = frame.EvaluateExpression("1 == Foo::FooBar")
self.assertTrue(value.IsValid())
self.assertFalse(value.GetError().Success())
| llvm-mirror/lldb | packages/Python/lldbsuite/test/commands/expression/scoped_enums/TestScopedEnumType.py | Python | apache-2.0 | 1,570 |
from twisted.trial import unittest
from twisted.python.failure import Failure
from deluge.httpdownloader import download_file
from deluge.log import setupLogger
from email.utils import formatdate
class DownloadFileTestCase(unittest.TestCase):
def setUp(self):
setupLogger("warning", "log_file")
def tearDown(self):
pass
def assertContains(self, filename, contents):
f = open(filename)
try:
self.assertEqual(f.read(), contents)
except Exception, e:
self.fail(e)
finally:
f.close()
return filename
def failIfContains(self, filename, contents):
f = open(filename)
try:
self.failIfEqual(f.read(), contents)
except Exception, e:
self.fail(e)
finally:
f.close()
return filename
def test_download(self):
d = download_file("http://deluge-torrent.org", "index.html")
d.addCallback(self.assertEqual, "index.html")
return d
def test_download_without_required_cookies(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=cookie"
d = download_file(url, "none")
d.addCallback(self.fail)
d.addErrback(self.assertIsInstance, Failure)
return d
def test_download_with_required_cookies(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=cookie"
cookie = { "cookie" : "password=deluge" }
d = download_file(url, "monster", headers=cookie)
d.addCallback(self.assertEqual, "monster")
d.addCallback(self.assertContains, "COOKIE MONSTER!")
return d
def test_download_with_rename(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=rename&filename=renamed"
d = download_file(url, "original")
d.addCallback(self.assertEqual, "renamed")
d.addCallback(self.assertContains, "This file should be called renamed")
return d
def test_download_with_rename_fail(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=rename&filename=renamed"
d = download_file(url, "original")
d.addCallback(self.assertEqual, "original")
d.addCallback(self.assertContains, "This file should be called renamed")
return d
def test_download_with_rename_sanitised(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=rename&filename=/etc/passwd"
d = download_file(url, "original")
d.addCallback(self.assertEqual, "passwd")
d.addCallback(self.assertContains, "This file should be called /etc/passwd")
return d
def test_download_with_rename_prevented(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=rename&filename=spam"
d = download_file(url, "forced", force_filename=True)
d.addCallback(self.assertEqual, "forced")
d.addCallback(self.assertContains, "This file should be called spam")
return d
def test_download_with_gzip_encoding(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=gzip&msg=success"
d = download_file(url, "gzip_encoded")
d.addCallback(self.assertContains, "success")
return d
def test_download_with_gzip_encoding_disabled(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=gzip&msg=fail"
d = download_file(url, "gzip_encoded", allow_compression=False)
d.addCallback(self.failIfContains, "fail")
return d
def test_page_redirect(self):
url = "http://deluge-torrent.org/httpdownloader.php?test=redirect"
d = download_file(url, "none")
d.addCallback(self.fail)
d.addErrback(self.assertIsInstance, Failure)
return d
def test_page_not_found(self):
d = download_file("http://does.not.exist", "none")
d.addCallback(self.fail)
d.addErrback(self.assertIsInstance, Failure)
return d
def test_page_not_modified(self):
headers = { 'If-Modified-Since' : formatdate(usegmt=True) }
d = download_file("http://deluge-torrent.org", "index.html", headers=headers)
d.addCallback(self.fail)
d.addErrback(self.assertIsInstance, Failure)
return d
| laanwj/deluge | tests/test_httpdownloader.py | Python | gpl-3.0 | 4,280 |
#
# This file is part of opsd.
#
# opsd is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# opsd is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with opsd. If not, see <http://www.gnu.org/licenses/>.
"""Interface to allow the dome controller to operate an Astrohaven dome via domed"""
from warwick.observatory.dome import (
CommandStatus as DomeCommandStatus,
DomeShutterStatus,
DomeHeartbeatStatus)
from warwick.observatory.operations.constants import DomeStatus
from warwick.observatory.common import daemons, validation
CONFIG_SCHEMA = {
'type': 'object',
'additionalProperties': ['module'],
'required': [
'daemon', 'movement_timeout', 'heartbeat_timeout'
],
'properties': {
'daemon': {
'type': 'string',
'daemon_name': True
},
'movement_timeout': {
'type': 'number',
'minimum': 0
},
'heartbeat_timeout': {
'type': 'number',
'minimum': 0
}
}
}
def validate_config(config_json):
return validation.validation_errors(config_json, CONFIG_SCHEMA, {
'daemon_name': validation.daemon_name_validator,
})
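# A config fragment that would satisfy CONFIG_SCHEMA above; the daemon name and
# timeout values are placeholders for illustration:
#
#     {
#         "daemon": "onemetre_dome",
#         "movement_timeout": 120,
#         "heartbeat_timeout": 119
#     }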
class DomeInterface:
"""Interface to allow the dome controller to operate an Astrohaven dome via domed"""
def __init__(self, dome_config_json):
self._daemon = getattr(daemons, dome_config_json['daemon'])
# Communications timeout when opening or closing the dome (takes up to ~80 seconds for the onemetre dome)
self._movement_timeout = dome_config_json['movement_timeout']
# Timeout period (seconds) for the dome controller
# The dome heartbeat is pinged once per LOOP_DELAY when the dome is under
# automatic control and is fully open or fully closed. This timeout should
# be large enough to account for the time it takes to open and close the dome
self._heartbeat_timeout = dome_config_json['heartbeat_timeout']
def query_status(self):
with self._daemon.connect() as dome:
status = dome.status()
if status['heartbeat_status'] in [DomeHeartbeatStatus.TrippedClosing,
DomeHeartbeatStatus.TrippedIdle]:
return DomeStatus.Timeout
if status['shutter_a'] == DomeShutterStatus.Closed and \
status['shutter_b'] == DomeShutterStatus.Closed:
return DomeStatus.Closed
if status['shutter_a'] in [DomeShutterStatus.Opening, DomeShutterStatus.Closing] or \
status['shutter_b'] in [DomeShutterStatus.Opening, DomeShutterStatus.Closing]:
return DomeStatus.Moving
return DomeStatus.Open
def ping_heartbeat(self):
print('dome: sending heartbeat ping')
with self._daemon.connect() as dome:
ret = dome.set_heartbeat_timer(self._heartbeat_timeout)
return ret == DomeCommandStatus.Succeeded
def disable_heartbeat(self):
print('dome: disabling heartbeat')
with self._daemon.connect() as dome:
            # a timer value of 0 is assumed here to disable the heartbeat in domed
            ret = dome.set_heartbeat_timer(0)
return ret == DomeCommandStatus.Succeeded
def close(self):
print('dome: sending heartbeat ping before closing')
with self._daemon.connect() as dome:
dome.set_heartbeat_timer(self._heartbeat_timeout)
print('dome: closing')
with self._daemon.connect(timeout=self._movement_timeout) as dome:
ret = dome.close_shutters('ba')
return ret == DomeCommandStatus.Succeeded
def open(self):
print('dome: sending heartbeat ping before opening')
with self._daemon.connect() as dome:
dome.set_heartbeat_timer(self._heartbeat_timeout)
print('dome: opening')
with self._daemon.connect(timeout=self._movement_timeout) as dome:
ret = dome.open_shutters('ab')
return ret == DomeCommandStatus.Succeeded
| warwick-one-metre/opsd | warwick/observatory/operations/dome/astrohaven/__init__.py | Python | gpl-3.0 | 4,416 |
"""
the functions
get_record_csv
get_court_id
are intended to be the closest interface points with the front end.
each has comments on its function prior to its definition and a series of demonstrative example calls afterwards
get_record_csv will require some changes to work with a db - it has been written to hopefully make this easy
get_court_id will eventually need a change to its file path on line 134
"""
import csv
import json
import os
import itertools
#this is a helper function intended to be called by another function making a data call that produces a (pseudo) list of dictionaries
def match_in_list(requiredFields,pseudoList):
matches=[]
for citationDict in pseudoList:
#default is no match
match=0
for key in requiredFields.keys():
if requiredFields[key]==citationDict[key]:
match=1
else:
match=0
if match==1:
matches.append(citationDict)
if len(matches)>0:
return matches
else:
return "No match was found!"
#this function is an example of a type intended to be called from the front end
#this function will be superseded by a similar one which makes a database call
def get_record_csv(requiredFields,targetData):
if type(requiredFields)!=dict:
return 'The required fields (the first function argument) must be in dictionary format'
    #targetData must be 'citations', 'violations' or 'Warrants'
if targetData!='citations' and targetData!='violations' and targetData!='Warrants':
return 'The targeted dataset (the second function argument) is invalid'
#going to need to do something different with the path
with open('C:\Users\Alexander\Documents\GitHub\injustice_dropper\data\\'+targetData+'.csv', 'rb') as citations:
reader = csv.DictReader(citations)
return match_in_list(requiredFields,reader)
#this is a demonstrative test of a front end function
testPositive={'court_address': '7150 Natural Bridge Road', 'first_name': 'Kathleen'}
testNegative={'court_address': 'ass road', 'first_name': 'assface'}
print(get_record_csv(testPositive,'citations'))
print(get_record_csv(testNegative,'citations'))
testPositive={'violation_number': '682690971-01', 'violation_description': 'Improper Passing'}
testNegative={'violation_number': '12345', 'violation_description': 'dookie'}
print(get_record_csv(testPositive,'violations'))
print(get_record_csv(testNegative,'violations'))
testPositive={'Defendant': 'AARON, ANDRE L', 'ZIP Code': '63103'}
testNegative={'Defendant': 'AARON, BOOTY L', 'ZIP Code': '99999'}
print(get_record_csv(testPositive,'Warrants'))
print(get_record_csv(testNegative,'Warrants'))
#this is a helper function not intended to be called by the front end
#this function formats inputs for point_in_poly
def polygon_reformat(polyListCoords):
return [(t[0],t[1]) for t in polyListCoords]
#polygon vs multi polygon key is in the same layer as coordinates
#this function deals with some issues related to non simply connected multipolygons
#this is a helper function not intended to be called by the front end
def polygon_inclusion_resolver(x,y,geometryDict):
if geometryDict['type']=='Polygon':
cleanedList=[polygon_reformat(geometryDict['coordinates'][0])]
if geometryDict['type']=='MultiPolygon':
cleanedList=[]
for polygon in geometryDict['coordinates'][0]:
cleanedList.append(polygon_reformat(polygon))
#count how many polygons the point is contained in from the list
containedCount=0
for i,polygonList in enumerate(cleanedList):
if point_in_poly(x,y,polygonList)=="IN":
containedCount+=1
if containedCount%2==1:
return "IN"
else:
return "OUT"
#poly is a list of (x,y) tuples
#this is a helper function not intended to be called by the front end
def point_in_poly(x,y,poly):
# check if point is a vertex
if (x,y) in poly: return "IN"
# check if point is on a boundary
for i in range(len(poly)):
p1 = None
p2 = None
if i==0:
p1 = poly[0]
p2 = poly[1]
else:
p1 = poly[i-1]
p2 = poly[i]
if p1[1] == p2[1] and p1[1] == y and x > min(p1[0], p2[0]) and x < max(p1[0], p2[0]):
return "IN"
n = len(poly)
inside = False
p1x,p1y = poly[0]
for i in range(n+1):
p2x,p2y = poly[i % n]
if y > min(p1y,p2y):
if y <= max(p1y,p2y):
if x <= max(p1x,p2x):
if p1y != p2y:
xints = (y-p1y)*(p2x-p1x)/(p2y-p1y)+p1x
if p1x == p2x or x <= xints:
inside = not inside
p1x,p1y = p2x,p2y
if inside: return "IN"
else: return "OUT"
"""
example key structure to get to the actual list of coordinates
test['features'][0]['geometry']['coordinates'][0]
example key structure to get to court properties
test['features'][0]['properties']
the 0 is just to pull the first record in the list as an example
"""
#this function takes latitude and longitude coords and returns a dictionary mapping court id to full court data
#this function is intended to be called from the front end
def get_court_id(lat,long):
json_data=open(os.getcwd() + '/data/courts.geojson.txt').read()
rawData = json.loads(json_data)
courtIdDict={}
for courtRecord in rawData['features']:
#the code needs lat and long to be flipped because it was written in the middle of the night
if polygon_inclusion_resolver(long,lat,courtRecord['geometry'])=="IN":
courtIdDict[courtRecord['properties']['court_id']]=courtRecord['properties']
return courtIdDict
#this is a demonstrative test of a front end function
#on the boundary of florissant and unincorpated county
print(get_court_id(38.8086707727844,-90.2860498354983))
#just unincorporated county
print(get_court_id(38.80867077279,-90.2860498354983))
#the vinita terrace courthouse
print(get_court_id(38.685607,-90.329282))
#in the indian ocean somewhere, produces an empty dict
print(get_court_id(0,0))
def get_analytics_raw(courtName):
#get the raw court data from geojson
json_data=open(os.getcwd() + '/data/courts.geojson.txt').read()
rawData = json.loads(json_data)
#a big list of all possible court data with summary data
courtKeys={}
for courtRecord in rawData['features']:
for key in courtRecord['properties']:
#record data for the specific court when appropriate
if courtName.lower()==courtRecord['properties']['court_name'].lower():
specificCourt=courtRecord['properties']
if key not in courtKeys:
courtKeys[key]={'total':0,'sum':0,'type':type(courtRecord['properties'][key]),'masterList':[]}
courtKeys[key]['masterList'].append(courtRecord['properties'][key])
try:
floatValue=float(courtRecord['properties'][key])
courtKeys[key]['total']+=1
courtKeys[key]['sum']+=floatValue
except ValueError:
'do nothing'
comparisons={}
for key in specificCourt:
try:
if float(courtKeys[key]['sum'])!=0 and float(courtKeys[key]['total'])!=0:
comparisons[key]={
'userValue':specificCourt[key],
'average':float(courtKeys[key]['sum'])/courtKeys[key]['total'],
'percentDiff':(float(specificCourt[key])-float(courtKeys[key]['sum'])/courtKeys[key]['total'])/(float(courtKeys[key]['sum'])/courtKeys[key]['total'])
}
else:
comparisons[key]={
'userValue':specificCourt[key],
'average':0,
'percentDiff':0
}
except ValueError:
'do nothing'
return {'comparisons':comparisons,'allData':courtKeys}
print(get_analytics_raw('country club hills')['comparisons'])
def name_input_permuter(nameString):
tallWords=[]
for preWord in nameString.split():
for word in preWord.split(','):
tallWords.append(word)
finalOutput={'firstLast':[],'libraryStyle':[]}
for t in itertools.permutations(tallWords,3):
finalOutput['firstLast'].append((t[0],t[1]))
finalOutput['libraryStyle'].append(t[0]+','+t[1]+' '+t[2][0])
return finalOutput
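# Illustrative output (the name is arbitrary); each 3-word permutation yields a
# (first, last) pair and a "Last,First M" style string:
#
#     name_input_permuter('John A Smith')
#     # -> {'firstLast': [('John', 'A'), ('John', 'Smith'), ...],
#     #     'libraryStyle': ['John,A S', 'John,Smith A', ...]}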
| xHeliotrope/injustice_dropper | pull_from_dataset.py | Python | mit | 8,630 |
#!/usr/bin/env python
# Encoding: UTF-8
"""Part of qdex: a Pokédex using PySide and veekun's pokedex library.
Tools handling custom YAML tags
"""
# Not to be confused with the PyYaml library
from __future__ import absolute_import
import yaml
from forrin.translator import TranslatableString
translatableStringTag = u'tag:encukou.cz,2011:forrin/_'
try:
BaseDumper = yaml.CSafeDumper
BaseLoader = yaml.CSafeLoader
except AttributeError:
BaseDumper = yaml.SafeDumper
BaseLoader = yaml.SafeLoader
class Dumper(BaseDumper):
"""Custom YAML dumper"""
pass
def representTranslatableString(dumper, data):
"""Represent a forrin TranslatableString"""
representation = {}
if data.context:
representation['context'] = data.context
if data.comment:
representation['comment'] = data.comment
if data.plural:
representation['plural'] = data.plural
if representation:
representation['message'] = data.message
return dumper.represent_mapping(translatableStringTag, representation)
else:
return dumper.represent_scalar(translatableStringTag, data.message)
Dumper.add_representer(TranslatableString, representTranslatableString)
def dump(data, stream=None):
"""As in yaml.load, but use our own dialect"""
return yaml.dump(data, stream, Dumper, encoding='utf-8', indent=4)
class Loader(BaseLoader):
"""Custom YAML loader"""
pass
def constructTranslatableString(loader, node):
"""Construct a TranslatableString from a YAML node"""
try:
message = loader.construct_scalar(node)
except yaml.constructor.ConstructorError:
return TranslatableString(**loader.construct_mapping(node))
else:
return TranslatableString(message)
Loader.add_constructor(translatableStringTag, constructTranslatableString)
def load(stream):
"""As in yaml.load, but resolve forrin _ tags"""
return yaml.load(stream, Loader)
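# Sketch of YAML that this loader understands (key names and strings are made
# up; only the forrin tag is significant):
#
#     name: !<tag:encukou.cz,2011:forrin/_> Thunder Stone
#     label: !<tag:encukou.cz,2011:forrin/_>
#         message: Thunder Stone
#         context: item-name
#
# The scalar form becomes TranslatableString('Thunder Stone'); the mapping form
# passes its keys to TranslatableString as keyword arguments.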
def extractMessages(fileobj, keywords, commentTags, options):
"""Extract Babel messages out of a YAML file"""
currentArgs = None
currentKey = None
for event in yaml.parse(fileobj):
if isinstance(event, yaml.events.MappingStartEvent):
if event.tag == translatableStringTag:
currentArgs = {}
elif isinstance(event, yaml.events.MappingEndEvent) and currentArgs:
try:
message = currentArgs['context'] + '|' + currentArgs['message']
except KeyError:
message = currentArgs['message']
try:
comments = [currentArgs['comment']]
except KeyError:
comments = []
yield event.start_mark.line, '_', message, comments
currentArgs = None
elif isinstance(event, yaml.events.ScalarEvent):
if currentArgs is not None:
if currentKey is None:
currentKey = event.value
else:
currentArgs[currentKey] = event.value
currentKey = None
elif event.tag == translatableStringTag:
yield event.start_mark.line, '_', event.value, []
| encukou/qdex | qdex/yaml.py | Python | mit | 3,188 |
"""Support for LCN devices."""
import asyncio
import logging
import pypck
from homeassistant import config_entries
from homeassistant.const import (
CONF_IP_ADDRESS,
CONF_NAME,
CONF_PASSWORD,
CONF_PORT,
CONF_RESOURCE,
CONF_USERNAME,
)
from homeassistant.helpers import entity_registry as er
from homeassistant.helpers.entity import Entity
from .const import CONF_DIM_MODE, CONF_SK_NUM_TRIES, CONNECTION, DOMAIN
from .helpers import generate_unique_id, import_lcn_config
from .schemas import CONFIG_SCHEMA # noqa: F401
from .services import SERVICES
PLATFORMS = ["binary_sensor", "climate", "cover", "light", "scene", "sensor", "switch"]
_LOGGER = logging.getLogger(__name__)
async def async_setup(hass, config):
"""Set up the LCN component."""
if DOMAIN not in config:
return True
# initialize a config_flow for all LCN configurations read from
# configuration.yaml
config_entries_data = import_lcn_config(config[DOMAIN])
for config_entry_data in config_entries_data:
hass.async_create_task(
hass.config_entries.flow.async_init(
DOMAIN,
context={"source": config_entries.SOURCE_IMPORT},
data=config_entry_data,
)
)
return True
async def async_setup_entry(hass, config_entry):
"""Set up a connection to PCHK host from a config entry."""
hass.data.setdefault(DOMAIN, {})
if config_entry.entry_id in hass.data[DOMAIN]:
return False
settings = {
"SK_NUM_TRIES": config_entry.data[CONF_SK_NUM_TRIES],
"DIM_MODE": pypck.lcn_defs.OutputPortDimMode[config_entry.data[CONF_DIM_MODE]],
}
# connect to PCHK
lcn_connection = pypck.connection.PchkConnectionManager(
config_entry.data[CONF_IP_ADDRESS],
config_entry.data[CONF_PORT],
config_entry.data[CONF_USERNAME],
config_entry.data[CONF_PASSWORD],
settings=settings,
connection_id=config_entry.entry_id,
)
try:
# establish connection to PCHK server
await lcn_connection.async_connect(timeout=15)
except pypck.connection.PchkAuthenticationError:
_LOGGER.warning('Authentication on PCHK "%s" failed', config_entry.title)
return False
except pypck.connection.PchkLicenseError:
_LOGGER.warning(
'Maximum number of connections on PCHK "%s" was '
"reached. An additional license key is required",
config_entry.title,
)
return False
except TimeoutError:
_LOGGER.warning('Connection to PCHK "%s" failed', config_entry.title)
return False
_LOGGER.debug('LCN connected to "%s"', config_entry.title)
hass.data[DOMAIN][config_entry.entry_id] = {
CONNECTION: lcn_connection,
}
# remove orphans from entity registry which are in ConfigEntry but were removed
# from configuration.yaml
if config_entry.source == config_entries.SOURCE_IMPORT:
entity_registry = await er.async_get_registry(hass)
entity_registry.async_clear_config_entry(config_entry.entry_id)
# forward config_entry to components
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
# register service calls
for service_name, service in SERVICES:
if not hass.services.has_service(DOMAIN, service_name):
hass.services.async_register(
DOMAIN, service_name, service(hass).async_call_service, service.schema
)
return True
async def async_unload_entry(hass, config_entry):
"""Close connection to PCHK host represented by config_entry."""
# forward unloading to platforms
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok and config_entry.entry_id in hass.data[DOMAIN]:
host = hass.data[DOMAIN].pop(config_entry.entry_id)
await host[CONNECTION].async_close()
# unregister service calls
if unload_ok and not hass.data[DOMAIN]: # check if this is the last entry to unload
for service_name, _ in SERVICES:
hass.services.async_remove(DOMAIN, service_name)
return unload_ok
class LcnEntity(Entity):
"""Parent class for all entities associated with the LCN component."""
def __init__(self, config, entry_id, device_connection):
"""Initialize the LCN device."""
self.config = config
self.entry_id = entry_id
self.device_connection = device_connection
self._unregister_for_inputs = None
self._name = config[CONF_NAME]
@property
def unique_id(self):
"""Return a unique ID."""
unique_device_id = generate_unique_id(
(
self.device_connection.seg_id,
self.device_connection.addr_id,
self.device_connection.is_group,
)
)
return f"{self.entry_id}-{unique_device_id}-{self.config[CONF_RESOURCE]}"
@property
def should_poll(self):
"""Lcn device entity pushes its state to HA."""
return False
async def async_added_to_hass(self):
"""Run when entity about to be added to hass."""
if not self.device_connection.is_group:
self._unregister_for_inputs = self.device_connection.register_for_inputs(
self.input_received
)
async def async_will_remove_from_hass(self):
"""Run when entity will be removed from hass."""
if self._unregister_for_inputs is not None:
self._unregister_for_inputs()
@property
def name(self):
"""Return the name of the device."""
return self._name
def input_received(self, input_obj):
"""Set state/value when LCN input object (command) is received."""
| w1ll1am23/home-assistant | homeassistant/components/lcn/__init__.py | Python | apache-2.0 | 6,029 |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Elements are non-text bits living in blips like images, gadgets etc.
This module defines the Element class and the derived classes.
"""
import logging
import sys
import util
class Element(object):
"""Elements are non-text content within a document.
  These are generally abstracted from the Robot. Although a Robot can query
  the properties of an element, it can only interact with the specific types
  that the element represents.
  Properties of elements are both accessible directly (image.url) and through
  the properties dictionary (image.properties['url']). In general, robots
  should not instantiate Element directly, but should rely on the derived
  classes.
"""
def __init__(self, element_type, **properties):
"""Initializes self with the specified type and any properties.
Args:
element_type: string typed member of ELEMENT_TYPE
properties: either a dictionary of initial properties, or a dictionary
with just one member properties that is itself a dictionary of
properties. This allows us to both use
e = Element(atype, prop1=val1, prop2=prop2...)
and
e = Element(atype, properties={prop1:val1, prop2:prop2..})
"""
#TODO: don't use setattr
if len(properties) == 1 and 'properties' in properties:
properties = properties['properties']
self.type = element_type
    # As long as the operation_queue of an element is None, it is
    # unattached. After an element is acquired by a blip, the blip
    # will set the operation_queue to make sure all changes to the
    # element are properly sent to the server.
self._operation_queue = None
for key, val in properties.items():
setattr(self, key, val)
@classmethod
def from_json(cls, json):
"""Class method to instantiate an Element based on a json string."""
etype = json['type']
props = json['properties'].copy()
element_class = ALL.get(etype)
if not element_class:
# Unknown type. Server could be newer than we are
return Element(element_type=etype, properties=props)
return element_class.from_props(props)
def get(self, key, default=None):
"""Standard get interface."""
return getattr(self, key, default)
def serialize(self):
"""Custom serializer for Elements.
    Elements need their non-standard attributes returned in a dict named
    'properties'.
"""
props = {}
data = {}
for attr in dir(self):
if attr.startswith('_'):
continue
val = getattr(self, attr)
if val is None or callable(val):
continue
val = util.serialize(val)
if attr == 'type':
data[attr] = val
else:
props[attr] = val
data['properties'] = util.serialize(props)
return data
class Input(Element):
"""A single-line input element."""
type = 'INPUT'
def __init__(self, name, value='', label=''):
super(Input, self).__init__(Input.type,
name=name,
value=value,
default_value=value,
label=label)
@classmethod
def from_props(cls, props):
return Input(name=props['name'], value=props['value'], label=props['label'])
class Check(Element):
"""A checkbox element."""
type = 'CHECK'
def __init__(self, name, value=''):
super(Check, self).__init__(Check.type,
name=name, value=value, default_value=value)
@classmethod
def from_props(cls, props):
return Check(name=props['name'], value=props['value'])
class Button(Element):
"""A button element."""
type = 'BUTTON'
def __init__(self, name, caption):
super(Button, self).__init__(Button.type,
name=name, value=caption)
@classmethod
def from_props(cls, props):
return Button(name=props['name'], caption=props['value'])
class Label(Element):
"""A label element."""
type = 'LABEL'
def __init__(self, label_for, caption):
super(Label, self).__init__(Label.type,
name=label_for, value=caption)
@classmethod
def from_props(cls, props):
return Label(label_for=props['name'], caption=props['value'])
class RadioButton(Element):
"""A radio button element."""
type = 'RADIO_BUTTON'
def __init__(self, name, group):
super(RadioButton, self).__init__(RadioButton.type,
name=name, value=group)
@classmethod
def from_props(cls, props):
return RadioButton(name=props['name'], group=props['value'])
class RadioButtonGroup(Element):
"""A group of radio buttons."""
type = 'RADIO_BUTTON_GROUP'
def __init__(self, name, value):
super(RadioButtonGroup, self).__init__(RadioButtonGroup.type,
name=name, value=value)
@classmethod
def from_props(cls, props):
return RadioButtonGroup(name=props['name'], value=props['value'])
class Password(Element):
"""A password element."""
type = 'PASSWORD'
def __init__(self, name, value):
super(Password, self).__init__(Password.type,
name=name, value=value)
@classmethod
def from_props(cls, props):
return Password(name=props['name'], value=props['value'])
class TextArea(Element):
"""A text area element."""
type = 'TEXTAREA'
def __init__(self, name, value):
super(TextArea, self).__init__(TextArea.type,
name=name, value=value)
@classmethod
def from_props(cls, props):
return TextArea(name=props['name'], value=props['value'])
class Line(Element):
"""A line element.
Note that Lines are represented in the text as newlines.
"""
type = 'LINE'
def __init__(self,
line_type=None,
indent=None,
alignment=None,
direction=None):
super(Line, self).__init__(Line.type,
lineType=line_type,
indent=indent,
alignment=alignment,
direction=direction)
@classmethod
def from_props(cls, props):
return Line(line_type=props.get('lineType'),
indent=props.get('indent'),
alignment=props.get('alignment'),
direction=props.get('direction'))
class Gadget(Element):
"""A gadget element."""
type = 'GADGET'
def __init__(self, url, props=None):
if props is None:
props = {}
props['url'] = url
super(Gadget, self).__init__(Gadget.type, properties=props)
@classmethod
def from_props(cls, props):
return Gadget(props.get('url'), props)
class Installer(Element):
"""An installer element."""
type = 'INSTALLER'
def __init__(self, manifest):
super(Installer, self).__init__(Installer.type, manifest=manifest)
@classmethod
def from_props(cls, props):
return Installer(props.get('manifest'))
class Image(Element):
"""An image element."""
type = 'IMAGE'
def __init__(self, url='', width=None, height=None,
attachmentId=None, caption=None):
super(Image, self).__init__(Image.type, url=url, width=width,
height=height, attachmentId=attachmentId, caption=caption)
@classmethod
def from_props(cls, props):
props = dict([(key.encode('utf-8'), value)
for key, value in props.items()])
logging.info('from_props=' + str(props))
    return Image(**props)
def is_element(cls):
"""Returns whether the passed class is an element."""
try:
if not issubclass(cls, Element):
return False
return hasattr(cls, 'type')
except TypeError:
return False
ALL = dict([(item.type, item) for item in globals().copy().values()
if is_element(item)])
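# Minimal usage sketch (added for illustration, not part of the original
# module). It exercises from_json() with a hand-built payload; running it as
# a script assumes the sibling 'util' module imported above is importable.
if __name__ == '__main__':
  element = Element.from_json(
      {'type': 'IMAGE', 'properties': {'url': 'http://example.com/pic.png'}})
  print(element.type)  # -> IMAGE
  print(element.url)   # -> http://example.com/pic.png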
| franckverrot/live-note | waveapi/element.py | Python | gpl-3.0 | 8,441 |
# -*- coding: utf-8 -*-
###############################################################################
#
# ListSigningCertificates
# Returns information about the signing certificates associated with the specified user. If there are none, the action returns an empty list.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListSigningCertificates(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListSigningCertificates Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ListSigningCertificates, self).__init__(temboo_session, '/Library/Amazon/IAM/ListSigningCertificates')
def new_input_set(self):
return ListSigningCertificatesInputSet()
def _make_result_set(self, result, path):
return ListSigningCertificatesResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListSigningCertificatesChoreographyExecution(session, exec_id, path)
class ListSigningCertificatesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListSigningCertificates
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AWSAccessKeyId(self, value):
"""
Set the value of the AWSAccessKeyId input for this Choreo. ((required, string) The Access Key ID provided by Amazon Web Services.)
"""
super(ListSigningCertificatesInputSet, self)._set_input('AWSAccessKeyId', value)
def set_AWSSecretKeyId(self, value):
"""
Set the value of the AWSSecretKeyId input for this Choreo. ((required, string) The Secret Key ID provided by Amazon Web Services.)
"""
super(ListSigningCertificatesInputSet, self)._set_input('AWSSecretKeyId', value)
def set_Marker(self, value):
"""
Set the value of the Marker input for this Choreo. ((optional, string) Used for pagination to indicate the starting point of the results to return.)
"""
super(ListSigningCertificatesInputSet, self)._set_input('Marker', value)
def set_MaxItems(self, value):
"""
Set the value of the MaxItems input for this Choreo. ((optional, integer) Used for pagination to limit the number of results returned. Defaults to 100.)
"""
super(ListSigningCertificatesInputSet, self)._set_input('MaxItems', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are "xml" (the default) and "json".)
"""
super(ListSigningCertificatesInputSet, self)._set_input('ResponseFormat', value)
def set_UserName(self, value):
"""
Set the value of the UserName input for this Choreo. ((optional, string) The name of the user.)
"""
super(ListSigningCertificatesInputSet, self)._set_input('UserName', value)
class ListSigningCertificatesResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListSigningCertificates Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from Amazon.)
"""
return self._output.get('Response', None)
class ListSigningCertificatesChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListSigningCertificatesResultSet(response, path)
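# Illustrative usage sketch (not part of the generated bindings). The import
# path for TembooSession and the execute_with_results() call are assumed from
# the usual Temboo SDK pattern; the credentials below are placeholders.
#
# from temboo.core.session import TembooSession
#
# session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
# choreo = ListSigningCertificates(session)
# inputs = choreo.new_input_set()
# inputs.set_AWSAccessKeyId('AKIA...')
# inputs.set_AWSSecretKeyId('SECRET...')
# inputs.set_ResponseFormat('json')
# results = choreo.execute_with_results(inputs)
# print(results.get_Response())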
| jordanemedlock/psychtruths | temboo/core/Library/Amazon/IAM/ListSigningCertificates.py | Python | apache-2.0 | 4,648 |
# -*- coding: utf8 -*-
'''
Copyright 2009 Denis Derman <denis.spir@gmail.com> (former developer)
Copyright 2011-2012 Peter Potrowl <peter017@gmail.com> (current developer)
This file is part of Pijnu.
Pijnu is free software: you can redistribute it and/or modify it
under the terms of the GNU Lesser General Public License as published
by the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Pijnu is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with Pijnu. If not, see <http://www.gnu.org/licenses/>.
'''
"""
<definition>
# simplified constants
SPACE : ' ' : drop
SPACING : SPACE+ : drop
LPAREN : '(' : drop
RPAREN : ')' : drop
COMMA : ',' : drop
DOC : "\'\'\'" : drop
FUNCSTART : "@function" : drop
FUNCEND : "@end" : drop
COLON : ':' : drop
EOL : '\n' : drop
TAB : '\t'
INDENT : TAB / SPACE+ : drop
DEDENT : !INDENT
CODECHAR : [\x20..\x7f \t]
DOCCHAR : [\x20..\x7f \t\n]
IDENTIFIER : [a..z A..Z _] [a..z A..Z 0..9 _]* : join
# lower-level patterns
funcName : SPACING IDENTIFIER SPACING? : liftValue
argument : IDENTIFIER
codeLine : INDENT? CODECHAR+ EOL : join
# func def
type : COLON IDENTIFIER SPACING? : liftValue
typeDef : type? : keep
moreArg : COMMA SPACE* argument : liftNode
argList : LPAREN argument moreArg* RPAREN : extract
arguments : argList? : keep
docBody : INDENT* DOC (!DOC DOCCHAR)* DOC EOL? : join
doc : docBody? : keep
codeBody : INDENT codeLine+ DEDENT : liftValue
code : codeBody? : keep
funcDef : FUNCSTART funcName typeDef arguments EOL doc code FUNCEND
"""
from pijnu import *
### title: SebastienFunction ###
### <toolset>
# none
### <definition>
# simplified constants
SPACE = Char(' ', format="' '")(drop)
SPACING = Repetition(SPACE, numMin=1,numMax=False, format='SPACE+')(drop)
LPAREN = Char('(', format="'('")(drop)
RPAREN = Char(')', format="')'")(drop)
COMMA = Char(',', format="','")(drop)
DOC = Word("'''", format='"\\\'\\\'\\\'"')(drop)
FUNCSTART = Word('@function', format='"@function"')(drop)
FUNCEND = Word('@end', format='"@end"')(drop)
COLON = Char(':', format="':'")(drop)
EOL = Char('\n', format="'\\n'")(drop)
TAB = Char('\t', format="'\\t'")
INDENT = Choice([TAB, Repetition(SPACE, numMin=1,numMax=False, format='SPACE+')], format='TAB / SPACE+')(drop)
DEDENT = NextNot(INDENT, format='!INDENT')
CODECHAR = Klass(format='[\\x20..\\x7f \\t]', charset=' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f\t')
DOCCHAR = Klass(format='[\\x20..\\x7f \\t\\n]', charset=' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`abcdefghijklmnopqrstuvwxyz{|}~\x7f\t\n')
IDENTIFIER = Sequence([Klass(format='[a..z A..Z _]', charset='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_'), String(Klass(format='[a..z A..Z 0..9 _]', charset='abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_'), numMin=False,numMax=False, format='[a..z A..Z 0..9 _]*')], format='[a..z A..Z _] [a..z A..Z 0..9 _]*')(join)
# lower-level patterns
funcName = Sequence([SPACING, IDENTIFIER, Option(SPACING, format='SPACING?')], format='SPACING IDENTIFIER SPACING?')(liftValue)
argument = copy(IDENTIFIER)
codeLine = Sequence([Option(INDENT, format='INDENT?'), Repetition(CODECHAR, numMin=1,numMax=False, format='CODECHAR+'), EOL], format='INDENT? CODECHAR+ EOL')(join)
# func def
type = Sequence([COLON, IDENTIFIER, Option(SPACING, format='SPACING?')], format='COLON IDENTIFIER SPACING?')(liftValue)
typeDef = Option(type, format='type?')(keep)
moreArg = Sequence([COMMA, Repetition(SPACE, numMin=False,numMax=False, format='SPACE*'), argument], format='COMMA SPACE* argument')(liftNode)
argList = Sequence([LPAREN, argument, Repetition(moreArg, numMin=False,numMax=False, format='moreArg*'), RPAREN], format='LPAREN argument moreArg* RPAREN')(extract)
arguments = Option(argList, format='argList?')(keep)
docBody = Sequence([Repetition(INDENT, numMin=False,numMax=False, format='INDENT*'), DOC, Repetition(Sequence([NextNot(DOC, format='!DOC'), DOCCHAR], format='!DOC DOCCHAR'), numMin=False,numMax=False, format='(!DOC DOCCHAR)*'), DOC, Option(EOL, format='EOL?')], format='INDENT* DOC (!DOC DOCCHAR)* DOC EOL?')(join)
doc = Option(docBody, format='docBody?')(keep)
codeBody = Sequence([INDENT, Repetition(codeLine, numMin=1,numMax=False, format='codeLine+'), DEDENT], format='INDENT codeLine+ DEDENT')(liftValue)
code = Option(codeBody, format='codeBody?')(keep)
funcDef = Sequence([FUNCSTART, funcName, typeDef, arguments, EOL, doc, code, FUNCEND], format='FUNCSTART funcName typeDef arguments EOL doc code FUNCEND')
SebastienFunctionParser = Parser(locals(), 'funcDef', 'SebastienFunction', 'SebastienFunction.py')
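# Illustrative only (not part of the generated parser): a snippet matching the
# funcDef grammar above. The exact parse entry point depends on the pijnu
# Parser API; parseTest() is an assumption.
#
# sample = ("@function add:int (a, b)\n"
#           "\t'''add two numbers'''\n"
#           "\treturn a + b\n"
#           "@end")
# SebastienFunctionParser.parseTest(sample)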
| peter17/pijnu | samples/SebastienFunction.py | Python | gpl-3.0 | 5,130 |
"""Diffusion across rows"""
from __future__ import print_function
from math import sqrt
from ase import Atoms, Atom
from ase.io import write
from ase.visualize import view
from ase.constraints import FixAtoms
from ase.optimize import QuasiNewton
from ase.optimize import MDMin
from ase.neb import NEB
from ase.calculators.emt import EMT
a = 4.0614
b = a / sqrt(2)
h = b / 2
initial = Atoms('Al2',
positions=[(0, 0, 0),
(a / 2, b / 2, -h)],
cell=(a, b, 2 * h),
pbc=(1, 1, 0))
initial *= (2, 2, 2)
initial.append(Atom('Al', (a / 2, b / 2, 3 * h)))
initial.center(vacuum=4.0, axis=2)
final = initial.copy()
final.positions[-1, 0] += a
view([initial, final])
# Construct a list of images:
images = [initial]
for i in range(5):
images.append(initial.copy())
images.append(final)
# Make a boolean mask that selects the fixed atoms (the
# two bottom layers):
mask = initial.positions[:, 2] - min(initial.positions[:, 2]) < 1.5 * h
constraint = FixAtoms(mask=mask)
print(mask)
for image in images:
# Let all images use an EMT calculator:
image.set_calculator(EMT())
image.set_constraint(constraint)
# Relax the initial and final states:
QuasiNewton(initial).run(fmax=0.05)
QuasiNewton(final).run(fmax=0.05)
# Create a Nudged Elastic Band:
neb = NEB(images)
# Make a starting guess for the minimum energy path (a straight line
# from the initial to the final state):
neb.interpolate()
# Relax the NEB path:
minimizer = MDMin(neb)
minimizer.run(fmax=0.05)
# Write the path to a trajectory:
view(images)
# 564 meV
write('jump2.traj', images)
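# Added sketch (not in the original tutorial): estimate the barrier referred
# to by the "564 meV" comment above from the relaxed band energies.
energies = [image.get_potential_energy() for image in images]
print('Barrier estimate: %.3f eV' % (max(energies) - energies[0]))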
| misdoro/python-ase | doc/tutorials/selfdiffusion/neb2.py | Python | gpl-2.0 | 1,640 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-08-11 12:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='GeneDetailed',
fields=[
('uid', models.IntegerField(blank=True, primary_key=True, serialize=False)),
('chrom', models.TextField(blank=True, null=True)),
('gene', models.TextField(blank=True, null=True)),
('is_hgnc', models.NullBooleanField()),
('ensembl_gene_id', models.TextField(blank=True, null=True)),
('transcript', models.TextField(blank=True, null=True)),
('biotype', models.TextField(blank=True, null=True)),
('transcript_status', models.TextField(blank=True, null=True)),
('ccds_id', models.TextField(blank=True, null=True)),
('hgnc_id', models.TextField(blank=True, null=True)),
('entrez_id', models.TextField(blank=True, null=True)),
('cds_length', models.TextField(blank=True, null=True)),
('protein_length', models.TextField(blank=True, null=True)),
('transcript_start', models.TextField(blank=True, null=True)),
('transcript_end', models.TextField(blank=True, null=True)),
('strand', models.TextField(blank=True, null=True)),
('synonym', models.TextField(blank=True, null=True)),
('rvis_pct', models.TextField(blank=True, null=True)),
('mam_phenotype_id', models.TextField(blank=True, null=True)),
],
options={
'db_table': 'gene_detailed',
'managed': False,
},
),
migrations.CreateModel(
name='GeneSummary',
fields=[
('uid', models.IntegerField(blank=True, primary_key=True, serialize=False)),
('chrom', models.TextField(blank=True, null=True)),
('gene', models.TextField(blank=True, null=True)),
('is_hgnc', models.NullBooleanField()),
('ensembl_gene_id', models.TextField(blank=True, null=True)),
('hgnc_id', models.TextField(blank=True, null=True)),
('transcript_min_start', models.TextField(blank=True, null=True)),
('transcript_max_end', models.TextField(blank=True, null=True)),
('strand', models.TextField(blank=True, null=True)),
('synonym', models.TextField(blank=True, null=True)),
('rvis_pct', models.TextField(blank=True, null=True)),
('mam_phenotype_id', models.TextField(blank=True, null=True)),
('in_cosmic_census', models.NullBooleanField()),
],
options={
'db_table': 'gene_summary',
'managed': False,
},
),
migrations.CreateModel(
name='Resources',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.TextField(blank=True, null=True)),
('resource', models.TextField(blank=True, null=True)),
],
options={
'db_table': 'resources',
'managed': False,
},
),
migrations.CreateModel(
name='SampleGenotypeCounts',
fields=[
('sample_id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
('num_hom_ref', models.IntegerField(blank=True, null=True)),
('num_het', models.IntegerField(blank=True, null=True)),
('num_hom_alt', models.IntegerField(blank=True, null=True)),
('num_unknown', models.IntegerField(blank=True, null=True)),
],
options={
'db_table': 'sample_genotype_counts',
'managed': False,
},
),
migrations.CreateModel(
name='SampleGenotypes',
fields=[
('sample_id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
('gt_types', models.BinaryField(blank=True, null=True)),
],
options={
'db_table': 'sample_genotypes',
'managed': False,
},
),
migrations.CreateModel(
name='Samples',
fields=[
('sample_id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
('family_id', models.TextField(blank=True, null=True)),
('name', models.TextField(blank=True, null=True, unique=True)),
('paternal_id', models.TextField(blank=True, null=True)),
('maternal_id', models.TextField(blank=True, null=True)),
('sex', models.TextField(blank=True, null=True)),
('phenotype', models.TextField(blank=True, null=True)),
],
options={
'db_table': 'samples',
'managed': False,
},
),
migrations.CreateModel(
name='Variants',
fields=[
('chrom', models.TextField(blank=True)),
('start', models.IntegerField(blank=True, db_column='start', null=True)),
('end', models.IntegerField(blank=True, null=True)),
('variant_id', models.IntegerField(blank=True, primary_key=True, serialize=False)),
('ref', models.TextField(blank=True)),
('alt', models.TextField(blank=True)),
('quality', models.FloatField(blank=True, db_column='qual', null=True)),
('pass_filter', models.TextField(blank=True, db_column='filter')),
('gts_blob', models.BinaryField(blank=True, db_column='gts', null=True)),
('gt_types_blob', models.BinaryField(blank=True, db_column='gt_types', null=True)),
('in_dbsnp', models.NullBooleanField()),
('dbsnp', models.TextField(blank=True, db_column='rs_ids')),
('clinvar_sig', models.TextField(blank=True)),
('clinvar_disease_acc', models.TextField(blank=True)),
('gerp_bp_score', models.FloatField(blank=True, null=True)),
('gerp_element_pval', models.FloatField(blank=True, null=True)),
('gene_symbol', models.TextField(blank=True, db_column='gene')),
('transcript', models.TextField(blank=True)),
('exon', models.TextField(blank=True)),
('is_exonic', models.NullBooleanField()),
('is_coding', models.NullBooleanField()),
('is_lof', models.NullBooleanField()),
('codon_change', models.TextField(blank=True)),
('aa_change', models.TextField(blank=True)),
('impact', models.TextField(blank=True)),
('impact_so', models.TextField(blank=True)),
('impact_severity', models.TextField(blank=True)),
('polyphen_pred', models.TextField(blank=True)),
('polyphen_score', models.FloatField(blank=True)),
('sift_pred', models.TextField(blank=True, null=True)),
('sift_score', models.FloatField(blank=True, null=True)),
('read_depth', models.IntegerField(blank=True, db_column='depth', null=True)),
('rms_map_qual', models.FloatField(blank=True, null=True)),
('qual_depth', models.FloatField(blank=True, null=True)),
('allele_count', models.IntegerField(blank=True, null=True)),
('cadd_raw', models.FloatField(blank=True, null=True)),
('cadd_scaled', models.FloatField(blank=True, null=True)),
('in_esp', models.NullBooleanField()),
('in_1kg', models.NullBooleanField()),
('in_exac', models.NullBooleanField()),
('aaf_esp_all', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('aaf_1kg_all', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('aaf_exac_all', models.DecimalField(blank=True, decimal_places=2, max_digits=7, null=True)),
('aaf_max_all', models.DecimalField(blank=True, db_column='max_aaf_all', decimal_places=2, max_digits=7, null=True)),
('allele_freq', models.FloatField(blank=True, db_column='AF', null=True)),
('base_qual_rank_sum', models.FloatField(blank=True, db_column='BaseQRankSum', null=True)),
('fisher_strand_bias', models.FloatField(blank=True, db_column='FS', null=True)),
('map_qual_rank_sum', models.FloatField(blank=True, db_column='MQRankSum', null=True)),
('read_pos_rank_sum', models.FloatField(blank=True, db_column='ReadPosRankSum', null=True)),
('strand_bias_odds_ratio', models.FloatField(blank=True, db_column='SOR', null=True)),
('hgvsp', models.TextField(blank=True, db_column='vep_hgvsp')),
('hgvsc', models.TextField(blank=True, db_column='vep_hgvsc')),
],
options={
'db_table': 'variants',
'managed': False,
},
),
migrations.CreateModel(
name='VcfHeader',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('vcf_header', models.TextField(blank=True)),
],
options={
'db_table': 'vcf_header',
'managed': False,
},
),
migrations.CreateModel(
name='Version',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('version', models.TextField(blank=True)),
],
options={
'db_table': 'version',
'managed': False,
},
),
]
| 444thLiao/VarappX | varapp/migrations/0001_initial.py | Python | gpl-3.0 | 10,400 |