# Copyright (c) 2012-2015 Netforce Co. Ltd.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
# OR OTHER DEALINGS IN THE SOFTWARE.
from netforce.model import Model, fields
import uuid
class ReportTemplate(Model):
_name = "report.template"
_string = "Report Template"
_multi_company = True
_fields = {
"name": fields.Char("Template Name", required=True, search=True),
"type": fields.Selection([
["cust_invoice", "Customer Invoice"],
["cust_credit_note", "Customer Credit Note"],
["supp_invoice", "Supplier Invoice"],
["payment", "Payment"],
["account_move", "Journal Entry"],
["sale_quot", "Quotation"],
["sale_order", "Sales Order"],
["purch_order", "Purchase Order"],
["purchase_request", "Purchase Request"],
["prod_order", "Production Order"],
["goods_receipt", "Goods Receipt"],
["goods_transfer", "Goods Transfer"],
["goods_issue", "Goods Issue"],
["pay_slip", "Pay Slip"],
["tax_detail", "Tax Detail"],
["hr_expense", "HR Expense"],
["landed_cost","Landed Cost"],
["other", "Other"]], "Template Type", required=True, search=True),
"format": fields.Selection([["odt", "ODT (old)"], ["odt2", "ODT"], ["ods", "ODS"], ["docx", "DOCX (old)"], ["xlsx", "XLSX"], ["jrxml", "JRXML (old)"], ["jrxml2", "JRXML"], ["jsx","JSX"]], "Template Format", required=True, search=True),
"file": fields.File("Template File"),
"company_id": fields.Many2One("company", "Company"),
"model_id": fields.Many2One("model", "Model"),
"method": fields.Char("Method"),
}
_defaults = {
"file_type": "odt",
}
ReportTemplate.register()
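# --- Illustrative sketch (not part of the original module) ---
# How a template record could be created through the ORM, assuming Netforce's
# standard get_model helper is available; the field values are placeholders.
from netforce.model import get_model

def _create_sample_template():
    vals = {
        "name": "Default Customer Invoice",
        "type": "cust_invoice",
        "format": "odt",
    }
    return get_model("report.template").create(vals)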
|
Are you looking for a UPVC cleaning specialist in the Warrington, Wigan or Liverpool area? Then contact us today for a FREE QUOTE on 01925 223 547!
Cleaning the UPVC on the interior and exterior of your home will improve your property’s appearance.
Just Clean will clean and polish all UPVC fixtures and fittings. We always use the same cleaning method of wash, dry and polish for the best results and a gleaming finish.
We are proud to offer our UPVC cleaning services to clients in Warrington, Wigan, Liverpool and the surrounding area. If you would like more details on any of our services then contact us today on 0800 023 8797. |
# This file is part of the Hotwire Shell user interface.
#
# Copyright (C) 2007,2008 Colin Walters <walters@verbum.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
# USA.
import logging
import gobject
import gtk
from .wraplabel import WrapLabel
_logger = logging.getLogger("hotwire.ui.MsgArea")
# This file is a Python translation of gedit/gedit/gedit-message-area.c
class MsgArea(gtk.HBox):
__gtype_name__ = "MsgArea"
__gsignals__ = {
"response" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, (gobject.TYPE_INT,)),
"close" : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE, [])
}
def __init__(self, buttons, **kwargs):
super(MsgArea, self).__init__(**kwargs)
self.__contents = None
self.__labels = []
self.__changing_style = False
self.__main_hbox = gtk.HBox(False, 16) # FIXME: use style properties
self.__main_hbox.show()
self.__main_hbox.set_border_width(8) # FIXME: use style properties
self.__action_area = gtk.VBox(True, 4); # FIXME: use style properties
self.__action_area.show()
self.__main_hbox.pack_end (self.__action_area, False, True, 0)
self.pack_start(self.__main_hbox, True, True, 0)
self.set_app_paintable(True)
self.connect("expose-event", self.__paint)
# Note that we connect to style-set on one of the internal
# widgets, not on the message area itself, since gtk does
# not deliver any further style-set signals for a widget on
# which the style has been forced with gtk_widget_set_style()
self.__main_hbox.ensure_style()
self.__main_hbox.connect("style-set", self.__on_style_set)
self.add_buttons(buttons)
def __get_response_data(self, w, create):
d = w.get_data('hotwire-msg-area-data')
if (d is None) and create:
d = {'respid': None}
w.set_data('hotwire-msg-area-data', d)
return d
def __find_button(self, respid):
        children = self.__action_area.get_children()
for child in children:
rd = self.__get_response_data(child, False)
if rd is not None and rd['respid'] == respid:
return child
def __close(self):
cancel = self.__find_button(gtk.RESPONSE_CANCEL)
if cancel is None:
return
self.response(gtk.RESPONSE_CANCEL)
def __paint(self, w, event):
gtk.Style.paint_flat_box(w.style,
w.window,
gtk.STATE_NORMAL,
gtk.SHADOW_OUT,
None,
w,
"tooltip",
w.allocation.x + 1,
w.allocation.y + 1,
w.allocation.width - 2,
w.allocation.height - 2)
return False
def __on_style_set(self, w, style):
if self.__changing_style:
return
# This is a hack needed to use the tooltip background color
window = gtk.Window(gtk.WINDOW_POPUP);
window.set_name("gtk-tooltip")
window.ensure_style()
style = window.get_style()
self.__changing_style = True
self.set_style(style)
for label in self.__labels:
label.set_style(style)
self.__changing_style = False
window.destroy()
self.queue_draw()
def __get_response_for_widget(self, w):
rd = self.__get_response_data(w, False)
if rd is None:
return gtk.RESPONSE_NONE
return rd['respid']
def __on_action_widget_activated(self, w):
response_id = self.__get_response_for_widget(w)
self.response(response_id)
def add_action_widget(self, child, respid):
rd = self.__get_response_data(child, True)
rd['respid'] = respid
if not isinstance(child, gtk.Button):
raise ValueError("Can only pack buttons as action widgets")
child.connect('clicked', self.__on_action_widget_activated)
if respid != gtk.RESPONSE_HELP:
self.__action_area.pack_start(child, False, False, 0)
else:
self.__action_area.pack_end(child, False, False, 0)
def set_contents(self, contents):
self.__contents = contents
self.__main_hbox.pack_start(contents, True, True, 0)
def add_button(self, btext, respid):
button = gtk.Button(stock=btext)
button.set_focus_on_click(False)
button.set_flags(gtk.CAN_DEFAULT)
button.show()
self.add_action_widget(button, respid)
return button
def add_buttons(self, args):
_logger.debug("init buttons: %r", args)
for (btext, respid) in args:
self.add_button(btext, respid)
def set_response_sensitive(self, respid, setting):
for child in self.__action_area.get_children():
rd = self.__get_response_data(child, False)
if rd is not None and rd['respid'] == respid:
child.set_sensitive(setting)
break
def set_default_response(self, respid):
for child in self.__action_area.get_children():
rd = self.__get_response_data(child, False)
if rd is not None and rd['respid'] == respid:
child.grab_default()
break
def response(self, respid):
self.emit('response', respid)
def add_stock_button_with_text(self, text, stockid, respid):
b = gtk.Button(label=text)
b.set_focus_on_click(False)
img = gtk.Image()
img.set_from_stock(stockid, gtk.ICON_SIZE_BUTTON)
b.set_image(img)
b.show_all()
self.add_action_widget(b, respid)
return b
def set_text_and_icon(self, stockid, primary_text, secondary_text=None):
hbox_content = gtk.HBox(False, 8)
hbox_content.show()
image = gtk.Image()
image.set_from_stock(stockid, gtk.ICON_SIZE_DIALOG)
image.show()
hbox_content.pack_start(image, False, False, 0)
image.set_alignment(0.5, 0.5)
vbox = gtk.VBox(False, 6)
vbox.show()
hbox_content.pack_start (vbox, True, True, 0)
self.__labels = []
primary_markup = "<b>%s</b>" % (primary_text,)
primary_label = WrapLabel(primary_markup)
primary_label.show()
vbox.pack_start(primary_label, True, True, 0)
primary_label.set_use_markup(True)
primary_label.set_line_wrap(True)
primary_label.set_alignment(0, 0.5)
primary_label.set_flags(gtk.CAN_FOCUS)
primary_label.set_selectable(True)
self.__labels.append(primary_label)
if secondary_text:
secondary_markup = "<small>%s</small>" % (secondary_text,)
secondary_label = WrapLabel(secondary_markup)
secondary_label.show()
vbox.pack_start(secondary_label, True, True, 0)
secondary_label.set_flags(gtk.CAN_FOCUS)
secondary_label.set_use_markup(True)
secondary_label.set_line_wrap(True)
secondary_label.set_selectable(True)
secondary_label.set_alignment(0, 0.5)
self.__labels.append(secondary_label)
self.set_contents(hbox_content)
class MsgAreaController(gtk.HBox):
__gtype_name__ = "MsgAreaController"
def __init__(self):
super(MsgAreaController, self).__init__()
self.__msgarea = None
self.__msgid = None
def has_message(self):
return self.__msgarea is not None
def get_msg_id(self):
return self.__msgid
def set_msg_id(self, msgid):
self.__msgid = msgid
def clear(self):
if self.__msgarea is not None:
self.remove(self.__msgarea)
self.__msgarea.destroy()
self.__msgarea = None
self.__msgid = None
def new_from_text_and_icon(self, stockid, primary, secondary=None, buttons=[]):
self.clear()
msgarea = self.__msgarea = MsgArea(buttons)
msgarea.set_text_and_icon(stockid, primary, secondary)
self.pack_start(msgarea, expand=True)
return msgarea
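# A minimal, hypothetical usage sketch (PyGTK 2.x). The surrounding window,
# packing and main loop are assumed to exist elsewhere; the stock ids and the
# button list below are only examples.
#
#     controller = MsgAreaController()
#     controller.show()
#     msgarea = controller.new_from_text_and_icon(
#         gtk.STOCK_DIALOG_WARNING, "Command failed",
#         "See the log for details.",
#         buttons=[(gtk.STOCK_OK, gtk.RESPONSE_OK)])
#     msgarea.connect("response", lambda area, respid: controller.clear())
#     msgarea.show()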
|
I often have people come to me who are tired of being here on earth. Many have yearned to go back "home" their entire lives. They have always known earth is not their home, but the family and friends around them never understood this homesickness; no one had an answer that made any sense, so most just keep quiet about it, but the sadness has always been there.
Dolores Cannon wrote about them in her book “Three Waves of Volunteers”. They aren’t suicidal; they just can’t figure humans out. It’s uncomfortable: humans do the most inhumane things to other beings. This creates great pain, and for those who are sensitive the pain can become unbearable.
One thing rings true for every volunteer I've met: they all want to help. They passionately want to help people, animals, plants.... the Earth. They are driven to make a difference in some way, and no matter how uncomfortable it may get on this planet, they will follow this through. In fact, they have changed the earth and will continue to do so until it is done.
I feel very honored to have met so many of these beautiful souls and had the opportunity to help them find their own answers and inner strength to carry on.
….. I know this is a place we want to be. It’s like it’s the 8th inning and it’s really bad, but if you stick around, stick it out and don’t quit too early, you won’t miss the awesomeness. |
#-*- coding: utf-8 -*-
""" EOSS catalog system
catalog objects ORM model used for the db connection
"""
__author__ = "Thilo Wehrmann, Steffen Gebhardt"
__copyright__ = "Copyright 2016, EOSS GmbH"
__credits__ = ["Thilo Wehrmann", "Steffen Gebhardt"]
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "Thilo Wehrmann"
__email__ = "twehrmann@eoss.cloud"
__status__ = "Production"
from geoalchemy2 import Geometry
from sqlalchemy import Column, DateTime, String, Integer, ForeignKey, Float
from sqlalchemy.dialects.postgresql import JSONB
from sqlalchemy.orm import relationship
from sqlalchemy.schema import UniqueConstraint
from model import Context
from utilities import GUID
class Catalog_Dataset(Context().getBase()):
__tablename__ = "global_catalog"
__table_args__ = (
UniqueConstraint('entity_id', 'tile_identifier'),
{'sqlite_autoincrement': True, 'schema': 'catalogue'}
)
id = Column(Integer, primary_key=True, autoincrement=True)
entity_id = Column(String, index=True, nullable=False)
acq_time = Column(DateTime(timezone=False))
tile_identifier = Column(String, index=True, nullable=False)
clouds = Column(Float, nullable=False)
resources = Column(JSONB)
level = Column(String, index=True, nullable=False)
daynight = Column(String, index=True, nullable=False)
sensor = Column(String, index=True, nullable=False)
time_registered = Column(DateTime(timezone=False))
def __repr__(self):
return '<%s: id:%s (%s) [%s]>' % (self.__class__.__name__, self.entity_id, str(self.acq_time), self.tile_identifier)
def __eq__(self, other):
"""Override the default Equals behavior"""
if isinstance(other, self.__class__):
bools = list()
for k in ['entity_id', 'acq_time', 'tile_identifier', 'clouds']:
bools.append(str(self.__dict__[k]).replace('+00:00', '') == str(other.__dict__[k]).replace('+00:00', ''))
return all(bools)
return False
class EossProject(Context().getBase()):
__tablename__ = 'project'
__table_args__ = (
UniqueConstraint('id', name='uq_project_identfier'),
UniqueConstraint('uuid', name='uq_project_uuid'),
{'sqlite_autoincrement': True, 'schema': 'staging'}
)
id = Column(Integer, primary_key=True, autoincrement=True)
uuid = Column(GUID, index=True, nullable=False)
name = Column(String, nullable=False)
project_start = Column(DateTime(timezone=True))
project_end = Column(DateTime(timezone=True))
geom = Column(Geometry('POLYGON', srid=4326), nullable=False)
def __repr__(self):
return "<Project(name=%s, start=%s)>" % (
            self.name, self.project_start)
class Spatial_Reference_type(Context().getBase()):
__tablename__ = 'spatialreferencetype'
__table_args__ = (
{'sqlite_autoincrement': True, 'schema': 'catalogue'}
)
id = Column(Integer, primary_key=True, autoincrement=True)
name = Column(String, nullable=False)
description = Column(String, nullable=False)
shortcut = Column(String, nullable=True)
class Spatial_Reference(Context().getBase()):
__tablename__ = 'spatialreference'
__table_args__ = (
{'sqlite_autoincrement': True, 'schema': 'catalogue'}
)
id = Column(Integer, primary_key=True, autoincrement=True)
ref_id = Column(String, nullable=False)
ref_name = Column(String, nullable=False)
geom = Column(Geometry('POLYGON', srid=4326), nullable=False)
referencetype_id = Column(Integer, ForeignKey(Spatial_Reference_type.id))
referencetype = relationship("Spatial_Reference_type", uselist=False)
def __repr__(self):
        return '<%s: %s, %d>' % (self.__class__.__name__, self.ref_name, self.referencetype_id)
class SensorAggregation(Context().getBase()):
__tablename__ = "sensor_aggregation"
__table_args__ = (
UniqueConstraint('sensor', 'level', 'aggregation_type'),
{'sqlite_autoincrement': True, 'schema': 'catalogue'}
)
id = Column(Integer, primary_key=True, autoincrement=True)
sensor = Column(String, ForeignKey(Catalog_Dataset.sensor), index=True, nullable=False)
level = Column(String, ForeignKey(Catalog_Dataset.level), index=True, nullable=False)
aggregation_type = Column(String, index=True, nullable=False)
aggregation_name = Column(String, index=True, nullable=False)
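if __name__ == '__main__':
    # Illustrative sketch only: build a catalog entry in memory (no session,
    # no commit); every value below is a placeholder, not real catalog data.
    from datetime import datetime
    sample = Catalog_Dataset(
        entity_id="EXAMPLE_SCENE_ID",
        acq_time=datetime(2016, 8, 27, 10, 0, 0),
        tile_identifier="32UNE",
        clouds=12.5,
        level="L1C",
        daynight="day",
        sensor="MSI",
    )
    print(sample)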
|
Description: Custom Chopper is creative inspiration for us. Get more photos of cars and motorcycles related to Custom Chopper by looking at the photo gallery at the bottom of this page. We would like to say thanks if you share this post with other people via your Facebook, Pinterest, Google Plus or Twitter account.
This Fokker-built Starfighter was delivered to the Luftwaffe and operated at Luke in USAF markings for German pilot training. This was the final year for the Luftwaffe Starfighters in the USA, and it was later sold to the Taiwan Air Force. |
# Copyright (c) 1996-2015 PSERC. All rights reserved.
# Use of this source code is governed by a BSD-style
# license that can be found in the LICENSE file.
"""Checks for P-Q capability curve constraints.
"""
from sys import stderr
from numpy import any, zeros, nonzero
from pypower.idx_gen import QMAX, QMIN, PMAX, PC1, PC2, QC1MIN, QC1MAX, QC2MIN, QC2MAX
def hasPQcap(gen, hilo='B'):
"""Checks for P-Q capability curve constraints.
Returns a column vector of 1's and 0's. The 1's correspond to rows of
the C{gen} matrix which correspond to generators which have defined a
capability curve (with sloped upper and/or lower bound on Q) and require
that additional linear constraints be added to the OPF.
The C{gen} matrix in version 2 of the PYPOWER case format includes columns
for specifying a P-Q capability curve for a generator defined as the
intersection of two half-planes and the box constraints on P and Q. The
two half planes are defined respectively as the area below the line
connecting (Pc1, Qc1max) and (Pc2, Qc2max) and the area above the line
connecting (Pc1, Qc1min) and (Pc2, Qc2min).
If the optional 2nd argument is 'U' this function returns C{True} only for
rows corresponding to generators that require the upper constraint on Q.
If it is 'L', only for those requiring the lower constraint. If the 2nd
argument is not specified or has any other value it returns true for rows
corresponding to gens that require either or both of the constraints.
It is smart enough to return C{True} only if the corresponding linear
constraint is not redundant w.r.t the box constraints.
@author: Ray Zimmerman (PSERC Cornell)
"""
    ## check capability curve data for errors
if any( gen[:, PC1] > gen[:, PC2] ):
stderr.write('hasPQcap: Pc1 > Pc2\n')
if any( gen[:, QC2MAX] > gen[:, QC1MAX] ):
stderr.write('hasPQcap: Qc2max > Qc1max\n')
if any( gen[:, QC2MIN] < gen[:, QC1MIN] ):
stderr.write('hasPQcap: Qc2min < Qc1min\n')
L = zeros(gen.shape[0], bool)
U = zeros(gen.shape[0], bool)
    k = nonzero( gen[:, PC1] != gen[:, PC2] )[0]  ## indices of gens with a sloped capability curve
if hilo != 'U': ## include lower constraint
Qmin_at_Pmax = gen[k, QC1MIN] + (gen[k, PMAX] - gen[k, PC1]) * \
(gen[k, QC2MIN] - gen[k, QC1MIN]) / (gen[k, PC2] - gen[k, PC1])
L[k] = Qmin_at_Pmax > gen[k, QMIN]
if hilo != 'L': ## include upper constraint
Qmax_at_Pmax = gen[k, QC1MAX] + (gen[k, PMAX] - gen[k, PC1]) * \
(gen[k, QC2MAX] - gen[k, QC1MAX]) / (gen[k, PC2] - gen[k, PC1])
U[k] = Qmax_at_Pmax < gen[k, QMAX]
return L | U
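if __name__ == '__main__':
    # Illustrative sketch only: a 2-generator matrix with just the columns used
    # above filled in (all other columns stay zero); the numbers are made up.
    gen = zeros((2, 25))
    gen[:, PMAX] = [100.0, 100.0]
    gen[:, QMIN] = [-50.0, -50.0]
    gen[:, QMAX] = [50.0, 50.0]
    ## generator 0 defines a sloped capability curve; generator 1 does not
    gen[0, [PC1, PC2]] = [20.0, 90.0]
    gen[0, [QC1MIN, QC1MAX]] = [-40.0, 40.0]
    gen[0, [QC2MIN, QC2MAX]] = [-10.0, 10.0]
    print(hasPQcap(gen))  ## expected: [ True False]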
|
Join us before the library opens to exercise, have fun, and get in shape together following a Zumba routine on DVD. Please ring the bell for entry. Water will be provided. |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2007-2015, Raffaele Salmaso <raffaele@salmaso.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import io
import os
from setuptools import setup
import stua
setup(
packages=["stua"],
name="stua",
version=stua.__version__,
description = io.open(os.path.join(os.path.dirname(__file__), "README.md"), "rt").read(),
long_description="",
author=stua.__author__,
author_email=stua.__author_email__,
url="https://bitbucket.org/rsalmaso/python-stua",
license="MIT",
classifiers=[
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.2",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Topic :: Utilities",
"Development Status :: 4 - Beta",
],
include_package_data=True,
install_requires=[],
zip_safe=False,
)
|
Jay Carpet One has you covered with popular resilient flooring options such as vinyl and luxury vinyl. We also carry linoleum flooring, which offers a natural take on resilient flooring. Linoleum looks very similar to vinyl, but, while vinyl is made from plastic, linoleum is made from natural materials. These materials typically include linseed oil, wood flour, and powdered cork. As a result, linoleum is more eco friendly than vinyl, and it also may contribute to a healthier living environment.
Linoleum, like vinyl, is available in planks and tiles. These planks and tiles are usually installed using the same glue-down method used with vinyl and some hardwoods. For an easier installation, you can also grab linoleum flooring rolls that are similar to sheet vinyl.
Linoleum flooring is considered resilient because it is stain, scratch, and even water resistant. As a result, it is easy to clean and maintain. At the same time, given its construction, linoleum has a consistent color and pattern through the backing of the floor. This means that, unlike most vinyl and laminate floors, linoleum will not show wear as easily over time.
Given its natural looks and durability, linoleum is suitable for residential and commercial settings. It can be used in kitchens and bathrooms, as well as heavier traffic commercial areas such as warehouses.
To choose the right linoleum floor for your next home or commercial project, be sure to visit our Athens, PA showroom! |
import os
import argparse
import logging
logging.basicConfig(level=logging.DEBUG)
def add_data_args(parser):
data = parser.add_argument_group('Data', 'the input images')
data.add_argument('--data-train', type=str, help='the training data')
data.add_argument('--data-val', type=str, help='the validation data')
data.add_argument('--rgb-mean', type=str, default='123.68,116.779,103.939',
help='a tuple of size 3 for the mean rgb')
data.add_argument('--pad-size', type=int, default=0,
help='padding the input image')
data.add_argument('--image-shape', type=str,
help='the image shape feed into the network, e.g. (3,224,224)')
data.add_argument('--num-classes', type=int, help='the number of classes')
data.add_argument('--num-examples', type=int, help='the number of training examples')
data.add_argument('--data-nthreads', type=int, default=4,
help='number of threads for data decoding')
data.add_argument('--benchmark', type=int, default=0,
help='if 1, then feed the network with synthetic data')
return data
def add_data_aug_args(parser):
aug = parser.add_argument_group(
'Image augmentations', 'implemented in src/io/image_aug_default.cc')
aug.add_argument('--random-crop', type=int, default=1,
help='if or not randomly crop the image')
aug.add_argument('--random-mirror', type=int, default=1,
help='if or not randomly flip horizontally')
aug.add_argument('--max-random-h', type=int, default=0,
help='max change of hue, whose range is [0, 180]')
aug.add_argument('--max-random-s', type=int, default=0,
help='max change of saturation, whose range is [0, 255]')
aug.add_argument('--max-random-l', type=int, default=0,
help='max change of intensity, whose range is [0, 255]')
aug.add_argument('--max-random-aspect-ratio', type=float, default=0,
help='max change of aspect ratio, whose range is [0, 1]')
aug.add_argument('--max-random-rotate-angle', type=int, default=0,
help='max angle to rotate, whose range is [0, 360]')
aug.add_argument('--max-random-shear-ratio', type=float, default=0,
help='max ratio to shear, whose range is [0, 1]')
aug.add_argument('--max-random-scale', type=float, default=1,
help='max ratio to scale')
aug.add_argument('--min-random-scale', type=float, default=1,
help='min ratio to scale, should >= img_size/input_shape. otherwise use --pad-size')
return aug
def set_data_aug_level(aug, level):
if level >= 1:
aug.set_defaults(random_crop=1, random_mirror=1)
if level >= 2:
aug.set_defaults(max_random_h=36, max_random_s=50, max_random_l=50)
if level >= 3:
aug.set_defaults(max_random_rotate_angle=10, max_random_shear_ratio=0.1, max_random_aspect_ratio=0.25)
def add_fit_args(parser):
"""
parser : argparse.ArgumentParser
return a parser added with args required by fit
"""
train = parser.add_argument_group('Training', 'model training')
train.add_argument('--network', type=str,
help='the neural network to use')
train.add_argument('--num-layers', type=int,
help='number of layers in the neural network, required by some networks such as resnet')
train.add_argument('--gpus', type=str,
help='list of gpus to run, e.g. 0 or 0,2,5. empty means using cpu')
train.add_argument('--kv-store', type=str, default='device',
help='key-value store type')
train.add_argument('--num-epochs', type=int, default=100,
help='max num of epochs')
train.add_argument('--lr', type=float, default=0.1,
help='initial learning rate')
train.add_argument('--lr-factor', type=float, default=0.1,
help='the ratio to reduce lr on each step')
train.add_argument('--lr-step-epochs', type=str,
help='the epochs to reduce the lr, e.g. 30,60')
train.add_argument('--optimizer', type=str, default='sgd',
help='the optimizer type')
train.add_argument('--mom', type=float, default=0.9,
help='momentum for sgd')
train.add_argument('--wd', type=float, default=0.0001,
help='weight decay for sgd')
train.add_argument('--batch-size', type=int, default=128,
help='the batch size')
train.add_argument('--disp-batches', type=int, default=20,
help='show progress for every n batches')
train.add_argument('--model-prefix', type=str,
help='model prefix')
parser.add_argument('--monitor', dest='monitor', type=int, default=0,
help='log network parameters every N iters if larger than 0')
train.add_argument('--load-epoch', type=int,
help='load the model on an epoch using the model-load-prefix')
train.add_argument('--top-k', type=int, default=0,
help='report the top-k accuracy. 0 means no report.')
train.add_argument('--test-io', type=int, default=0,
help='1 means test reading speed without training')
return train
if __name__ == '__main__':
# download data
# (train_fname, val_fname) = download_cifar10()
# parse args
parser = argparse.ArgumentParser(description="train cifar100",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
add_fit_args(parser)
add_data_args(parser)
add_data_aug_args(parser)
set_data_aug_level(parser, 2)
parser.set_defaults(
# network
network = 'cifar10',
num_layers = 8,
# data
data_train = '/data/cifar/cifar100/train.rec',
data_val = '/data/cifar/cifar100/test.rec',
num_classes = 100,
num_examples = 50000,
image_shape = '3,32,32',
#pad_size = 4,
# train
batch_size = 256,
num_epochs = 200,
lr_step_epochs = '50,100,150',
optimizer = 'sgd',
disp_batches = 10,
lr = 0.1,
top_k = 5,
)
    parser.add_argument('--log', dest='log_file', type=str, default="train.log",
                        help='save training log to file')
    args = parser.parse_args() |
Every stash needs a fancy place to call home, and TrippyStore.com seems to have all the bases covered when it comes to obscuring the goods! Take this bronze Om & Lotus designed stash container, for instance. Coming in at 5 x 7 x 2.5”, this box makes for the perfect table-side storage solution for all things cannabis-related. Complete with beautiful etchings and an easy-to-handle latch, this artistic container will certainly turn heads at your next private smoking session.
Check out the Trippy Store for a myriad of affordable and aesthetic storage solutions, almost all under $30!
For anyone looking for a slightly more discreet way to store your cannabis products, diversion safes are the way to go. This ginger ale can safe from OpenTip.com is a perfect solution to divert potential burglars, keep your roommates or family away from the goods, as well as provide some stash protection from the elements. Many believe that due to these containers being the last place anybody would look, they can perhaps be more protective of your valuables than an actual safe.
OpenTip has many options available in terms of diversion containers, from shaving cream to dog food cans. Head over and throw one in your cart today!
Even joints need safe passage in this day and age. While not discreet by any means, these prescription joint tubes from Spencer’s Gifts are the perfect statement for anyone packing some serious joint heat. Each four-pack of prescription plastic tubes is illustrated with cannabis-themed insignias, making them fitting for their primary function. The tubes themselves are roomy enough to fit one, if not several, rolled creations with no problem at all. At $5.99, these bundles won’t break the bank, either.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0069_verificationrequest'),
]
operations = [
migrations.CreateModel(
name='UserImage',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('image', models.ImageField(null=True, upload_to=b'user_images/', blank=True)),
],
options={
},
bases=(models.Model,),
),
migrations.AddField(
model_name='verificationrequest',
name='comment',
field=models.TextField(help_text='If you have anything to say to the person who is going to verify your account, feel free to write it here!', null=True, verbose_name='Comment', blank=True),
preserve_default=True,
),
migrations.AddField(
model_name='verificationrequest',
name='images',
field=models.ManyToManyField(related_name='request', to='api.UserImage'),
preserve_default=True,
),
migrations.AlterField(
model_name='account',
name='verified',
field=models.PositiveIntegerField(default=0, choices=[(0, b''), (1, 'Silver Verified'), (2, 'Gold Verified'), (3, 'Bronze Verified')]),
preserve_default=True,
),
migrations.AlterField(
model_name='verificationrequest',
name='account',
field=models.ForeignKey(related_name='verificationrequest', to='api.Account', unique=True),
preserve_default=True,
),
migrations.AlterField(
model_name='verificationrequest',
name='verification',
field=models.PositiveIntegerField(default=0, verbose_name='Verification', choices=[(0, b''), (1, 'Silver Verified'), (2, 'Gold Verified'), (3, 'Bronze Verified')]),
preserve_default=True,
),
]
|
It’s been less than 2 weeks since ‘Communicating Through Cats’, our hugely successful exhibition about the life and work of Louis Wain, closed to the public, but the Exhibition Gallery at Brent Museum is already hosting an exciting new project.
The artworks on loan from Chris Beetles Gallery and Bethlem Royal Hospital Museum and Archives were packed up carefully and returned to their homes by specialist removers early last week, but the gallery wasn’t empty for long! For the next two months, Brent Museum has handed the keys to the gallery over to two local artists, who will be in residency until the end of December.
Artist Alex McIntyre and poet Vicky Wilson are sharing their experiences of PLACE, and what it means to them, and they are inviting local residents – and you – to share your own thoughts and experiences. During ‘Our Place/Your Place’, the gallery will host an interactive writing zone where you can tell them what it means to you to live in Brent, and an interactive modelling zone where you can create sculptures, as well as their own finished works and works in progress. They will also be holding family workshops and drop-in activity sessions throughout their residency; find out more at Alex’s website and visit them in the gallery to make your own contribution!
Our Place/Your Place is part of the Greenhouse Project, which has seen local creative arts organisations and community groups holding workshops and showcasing performances and exhibitions throughout Willesden Green Library Centre.
|
"""Functions for reading data sets (LSP, INRIA, Buffy, etc.)"""
from abc import abstractmethod, ABCMeta
from copy import copy
from io import BytesIO
from zipfile import is_zipfile, ZipFile
import numpy as np
from scipy.io import loadmat
from scipy.misc import imread
# Configuration files will only be allowed to specify classes with the
# following names to use as dataset loaders.
ALLOWED_LOADERS = [
'LSP',
'LSPET'
]
def split_items(items, num_groups):
"""Splits a list of items into ``num_groups`` groups fairly (i.e. every
item is assigned to exactly one group and no group is more than one item
larger than any other)."""
per_set = len(items) / float(num_groups)
assert per_set >= 1, "At least one set will be empty"
small = int(np.floor(per_set))
big = small + 1
    num_oversized = len(items) % num_groups
rv_items = []
total_allocated = 0
for i in range(num_groups):
if i < num_oversized:
l = items[total_allocated:total_allocated + big]
total_allocated += big
else:
l = items[total_allocated:total_allocated + small]
total_allocated += small
rv_items.append(l)
assert total_allocated == len(items), "Did not assign exactly 100% of " \
"items to a group"
assert len(rv_items) == num_groups, "Wrong number of groups"
return rv_items
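# Illustrative example: ten items into three groups gives sizes that differ by
# at most one.
#     >>> [len(g) for g in split_items(list(range(10)), 3)]
#     [4, 3, 3]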
class DataSet(object):
"""ABC for datasets"""
__metaclass__ = ABCMeta
def post_init(self):
"""Should be called after __init__."""
self.num_samples = len(self.joints.locations)
self.scales = self._calculate_scales()
assert np.all(self.scales >= 18)
assert np.any(self.scales > 18)
assert self.scales.shape == (self.num_samples,)
self.template_size = self._calculate_template_size()
assert self.template_size > 0
def split(self, num_groups):
"""Splits one monolothic dataset into several equally sized
datasets. May need to be overridden."""
assert num_groups > 1, "It's not splitting if there's < 2 groups :)"
# Shallow-copy myself several times
rv = tuple(copy(self) for i in range(num_groups))
# Figure out which indices each group will get
indices = np.arange(self.num_samples)
np.random.shuffle(indices)
rv_indices = split_items(indices, num_groups)
for new_dataset, new_indices in zip(rv, rv_indices):
new_dataset.joints = self.joints.for_indices(new_indices)
new_dataset.image_ids = np.array(self.image_ids)[new_indices]
new_dataset.post_init()
return rv
def _calculate_scales(self):
"""Calculates a scale factor for each image in the dataset. This is
        intended to indicate roughly how long the average limb is in each image
(in pixels), so that images taken at different distances from a person
can be considered differently for joint RP (relative position)
clustering and the like. Magic constants (75th percentile, 18px
minimum) taken from Chen & Yuille's code"""
lengths = np.zeros((self.num_samples, len(self.joints.pairs)))
# If the length of a limb is 0, then we'll mark it as invalid for our
# calculations
valid = np.ones_like(lengths, dtype=bool) # array of True
for idx, pair in enumerate(self.joints.pairs):
fst_prt, snd_prt = pair
fst_loc = self.joints.locations[:, fst_prt, :2]
snd_loc = self.joints.locations[:, snd_prt, :2]
assert fst_loc.shape == (self.num_samples, 2)
assert fst_loc.shape == snd_loc.shape
# lengths stores the length of each limb in the model
pair_dists = np.linalg.norm(fst_loc - snd_loc, axis=1)
lengths[:, idx] = pair_dists
# Mark zeros invalid
valid[pair_dists == 0, idx] = False
# The last limb is head-neck (we can consider this the "root" limb,
# since we assume that the head is the root for graphical model
# calculations). We will normalise all lengths to this value.
exp_med = np.zeros(len(self.joints.pairs) - 1)
for idx in xrange(len((self.joints.pairs[:-1]))):
# Ignore entries where head distance or joint distance is 0
valid_col = valid[:, idx] * valid[:, -1]
# No more than 15% of entries should be eliminated this way
assert np.sum(valid_col) >= 0.85 * valid_col.size
log_neck = np.log(lengths[valid_col, -1])
log_diff = np.log(lengths[valid_col, idx]) - log_neck
exp_med[idx] = np.exp(np.median(log_diff))
# Norm calculated lengths using the exponent of the median of the
# quantities we calculated above
norm_factor_nc = exp_med.reshape((1, -1))
norm_factor = np.concatenate([norm_factor_nc, [[1]]], axis=1)
assert norm_factor.shape == (1, len(self.joints.pairs))
normed_lengths = lengths / norm_factor
percentiles = np.percentile(normed_lengths, 75, axis=1)
assert percentiles.ndim == 1
assert len(percentiles) == self.num_samples
assert not np.any(np.isnan(percentiles) + np.isinf(percentiles))
# NOTE: Chen & Yuille use scale_x and scale_y, but that seems to be
# redundant, since scale_x == scale_y in their code (init_scale.m)
return np.maximum(percentiles, 18)
def _calculate_template_size(self):
"""Use calculated scales to choose template sizes for body part
detection. Follows Chen & Yuille formula."""
# This is a little different to Chen & Yuille's formula (they use a
# fixed aspect ratio, and calculate a square root which makes no sense
# in context), but it should yield the same result
side_lengths = 2 * self.scales + 1
assert side_lengths.shape == (self.num_samples,)
bottom_length = np.percentile(side_lengths, 1)
template_side = int(np.floor(bottom_length / self.STEP))
return template_side
@abstractmethod
def load_image(self, identifier):
pass
@abstractmethod
def load_all_images(self):
pass
class Joints(object):
"""Class to store the locations of key points on a person and the
connections between them."""
def __init__(self, point_locations, joint_pairs, point_names=None):
# First, some sanity checks
as_set = set(tuple(sorted(p)) for p in joint_pairs)
assert len(as_set) == len(joint_pairs), "There are duplicate joints"
assert isinstance(point_locations, np.ndarray), "Point locations " \
"must be expressed as a Numpy ndarray"
assert point_locations.ndim == 3, "Point location array must be 3D"
assert point_locations.shape[2] == 3, "Points must have (x, y) " \
"location and visibility."
num_points = point_locations.shape[1]
for first, second in joint_pairs:
assert 0 <= first < num_points and 0 <= second < num_points, \
"Joints must be between extant points."
assert point_locations.shape[1] < 64, "Are there really 64+ points " \
"in your pose graph?"
if point_names is not None:
assert len(point_names) == point_locations.shape[1], "Need as " \
"many names as points in pose graph."
# We can access these directly
self.pairs = joint_pairs
self.locations = point_locations
self.point_names = point_names
self.num_parts = point_locations.shape[1]
self.parents = self.get_parents_array()
self.adjacent = self.get_adjacency_matrix()
# pair_indices[(i, j)] contains an index into self.pairs for each joint
# i->j (or j->i; it's bidirectional).
self.pair_indices = {}
for idx, pair in enumerate(joint_pairs):
p1, p2 = (pair[0], pair[1]), (pair[1], pair[0])
self.pair_indices[p1] = self.pair_indices[p2] = idx
def for_indices(self, indices):
"""Takes a series of indices corresponding to data samples and returns
a new ``Joints`` instance containing only samples corresponding to
those indices."""
return Joints(self.locations[indices], self.pairs, self.point_names)
def get_parents_array(self):
"""Produce a p-dimensional array giving the parent of part i."""
rv = -1 * np.ones(self.num_parts, dtype='int32')
for child, parent in self.pairs:
assert 0 <= child < self.num_parts
assert 0 <= parent < self.num_parts
assert rv[child] == -1
rv[child] = parent
# Now assign the root. If this fails with "Too many values to unpack",
# then it means that there are two parts with no parents!
root_idx, = np.flatnonzero(rv == -1)
rv[root_idx] = root_idx
return rv
def get_adjacency_matrix(self):
"""Produces a p * p adjacency matrix."""
rv = np.zeros((self.num_parts, self.num_parts), dtype='bool')
for i, j in self.pairs:
assert 0 <= i < self.num_parts
assert 0 <= j < self.num_parts
rv[i, j] = rv[j, i] = True
return rv
# TODO: Enable visualisation of points! This would be a good idea if I
# wanted to check that my skeletons are correct.
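# Illustrative example (toy skeleton, made-up values): three points joined in a
# single chain 0 -> 1 -> 2, so point 2 ends up as the root.
#     >>> locs = np.zeros((4, 3, 3))   # 4 samples, 3 points, (x, y, visibility)
#     >>> toy = Joints(locs, [(0, 1), (1, 2)], ["foot", "knee", "hip"])
#     >>> toy.parents
#     array([1, 2, 2], dtype=int32)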
class LSP(DataSet):
"""Loads the Leeds Sports Poses dataset from a ZIP file."""
PATH_PREFIX = 'lsp_dataset/'
# ID_WIDTH is the number of digits in the LSP image filenames (e.g.
# im0022.jpg has width 4).
ID_WIDTH = 4
# TODO: Clarify what this does. It's analogous to conf.step (in lsp_conf
# and flic_conf) from Chen & Yuille's code.
STEP = 4
POINT_NAMES = [
"Right ankle", # 0
"Right knee", # 1
"Right hip", # 2
"Left hip", # 3
"Left knee", # 4
"Left ankle", # 5
"Right wrist", # 6
"Right elbow", # 7
"Right shoulder", # 8
"Left shoulder", # 9
"Left elbow", # 10
"Left wrist", # 11
"Neck", # 12
"Head top" # 13
]
# NOTE: 'Root' joint should be last, joints should be ordered child ->
# parent
JOINTS = [
(0, 1), # Right shin (ankle[0] -> knee[1])
(1, 2), # Right thigh (knee[1] -> hip[2])
(2, 8), # Right side of body (hip[2] -> shoulder[8])
(5, 4), # Left shin (ankle[5] -> knee[4])
(4, 3), # Left thigh (knee[4] -> hip[3])
(3, 9), # Left side of body (hip[3] -> shoulder[9])
(7, 8), # Right upper arm (elbow[7] -> shoulder[8])
(6, 7), # Right forearm (wrist[6] -> elbow[7])
(8, 12), # Right shoulder (shoulder[8] -> neck[12])
(10, 9), # Left upper arm (elbow[10] -> shoulder[9])
(9, 12), # Left shoulder (shoulder[9] -> neck[12])
(11, 10), # Left forearm (wrist[11] -> elbow[10])
(12, 13), # Neck and head
]
def __init__(self, lsp_path):
assert is_zipfile(lsp_path), "Supplied path must be to lsp_dataset.zip"
self.lsp_path = lsp_path
self.joints = self._load_joints()
self.image_ids = list(range(1, len(self.joints.locations) + 1))
self.post_init()
def _transpose_joints(self, joints):
return joints.T
def _load_joints(self):
"""Load ``joints.mat`` from LSP dataset. Return value holds a 2000x14x3
ndarray. The first dimension selects an image, the second selects a
joint, and the final dimension selects between an x-coord, a y-coord
and a visibility."""
with ZipFile(self.lsp_path) as zip_file:
target = self.PATH_PREFIX + 'joints.mat'
buf = BytesIO(zip_file.read(target))
mat = loadmat(buf)
# TODO: Return something a little more user-friendly. In
# particular, I should check whether Numpy supports some sort
# of naming for fields.
point_locations = self._transpose_joints(mat['joints'])
return Joints(point_locations, self.JOINTS, self.POINT_NAMES)
def load_image(self, zero_ident):
"""Takes an integer image idenifier in 0, 1, ..., self.num_samples - 1
and returns an associated image. The image will have dimensions
corresponding to row number, column number and channels (RGB,
usually)."""
assert isinstance(zero_ident, int)
ident = self.image_ids[zero_ident]
assert ident > 0
# Images start from 1, not 0
str_ident = str(ident).zfill(self.ID_WIDTH)
file_path = self.PATH_PREFIX + 'images/im' + str_ident + '.jpg'
with ZipFile(self.lsp_path) as zip_file:
try:
with zip_file.open(file_path) as image_file:
rv = imread(image_file)
assert rv.ndim == 3
assert np.all(np.array(rv.shape) != 0)
return rv
except Exception as e:
print("Couldn't load '{}' from '{}'".format(
file_path, self.lsp_path
))
raise e
def load_all_images(self):
"""Return a list of all images in the archive, ordered to correspond to
joints matrix."""
return [self.load_image(idx) for idx in xrange(self.num_samples)]
class LSPET(LSP):
"""Like LSP, but specific to the Leeds Extended Poses dataset."""
PATH_PREFIX = ''
ID_WIDTH = 5
def _transpose_joints(self, joints):
return joints.transpose((2, 0, 1))
|
What is Mountain Architecture? The mountain architecture vernacular consists of bold, natural and textured buildings and materials. These buildings should functionally and aesthetically withstand rugged mountainous environments, as well as blend into the topography.
Mountain homes should take advantage of nature by bringing the outdoors in through ample amounts of glazing and natural materials, and by extending indoor living spaces to the outside (outdoor living rooms) with decks, terraces and other exterior areas. A home exterior should look like it has grown out of the site rather than being lowered down from a helicopter. It can take advantage of the surrounding trees, boulders and other landforms by incorporating them into the home and the outdoor living rooms, and designing around them. Interiors should have a good range of natural materials and forms. These are often more rustic than the typical home, and sometimes have an “old world” appearance. Designed tastefully, this will often give the home a “rustic elegance”.
Mountain architecture is organically massed to taper down into the site. Multiple volumes conform to the existing terrain and are in scale with the existing landscape. Large, symmetrical, obtrusive building forms are often avoided. Some single story elements help keep a mountain home residential in scale.
Broad sheltering roofs appear to cascade down in steps or multi-level designs, and protect against winter snow, spring rain and summer sun. Roofs generally have a 4:12 to 12:12 pitch. Depending on the locality, there is little ornamentation. For instance, the rugged, bold architecture of Big Sky and Whitefish in Montana, Telluride, Colorado and Jackson Hole, Wyoming may often have less ornamentation and detail than buildings in Vail, Colorado and Lake Tahoe.
Architects who design mountain architecture most often utilize natural materials and warm, earthy colors. Roofs may consist of cedar shakes or slate, sometimes with copper accents. Siding is frequently cedar (boards, shake shingles or logs) and stone. Recycled barn wood is sometimes used for a more rustic look. If wood is to be stained it should let the natural grains show through. The stone should be bigger at the base to give it an anchored and structural appearance. Large mortar joints should be avoided in the mountain style.
In the mountains, columns, beams, rafters and other structural elements are bigger and bolder for protection against heavy snow loads. These are typically douglas fir and/or reclaimed wood, and can be built in timber frame or timber post and beam construction. Windows are typically kept to a minimum on the front elevation, while opening up to broad views on the back or view elevations, which many times have daylight basements to take advantage of the steep slopes. Windows typically have wood or metal clad (aluminum, copper or bronze) frames. These frames can have an aged patina for an older appearance. Garage doors are wood, many times in the carriage style.
Landscaping is also an important element in quality mountain architecture. Home sites should be designed for maximum privacy, minimal visibility, minimized grading and disruption of natural drainage. Sites are kept natural by containing water runoff, and enhancing the natural landforms and vegetation. Smart design should keep retaining walls to a minimum, both to preserve a natural look as well as save on costs. These retaining walls are most often natural stone. Vegetation will help these walls blend into the site.
Other landscape elements include native hardscaping (patios, walkways), and minimal walls, fences and gates. When these are incorporated they should look open and natural, and relate to the building. A good architect or landscape architect should know of some good tricks here for the benefits of privacy and the homeowner’s personal tastes. Driveways are often natural materials such as stone, but are more cost effective with concrete (colored, stamped, aggregate), pre-cast pavers, or asphalt.
Mountain style architecture has similar elements to other vernaculars. These include Montana ranch, rustic western style, Adirondack, and historic logging and mining styles. Craftsman/Arts & Crafts, Chalet, Prairie, Japanese and even Tuscan elements can be incorporated into the mountain style if done tastefully.
Mountain Architecture generally occurs in the mountains of the West, and in pockets of the Northeast and Southeast. In the West, besides the previously mentioned Vail, Jackson Hole, Big Sky, Whitefish, Telluride and Lake Tahoe, other areas with mountain style homes include Sun Valley, Coeur d’Alene, Sandpoint and Priest Lake in Idaho, Aspen, Breckenridge, Steamboat Springs, Durango and Crested Butte in Colorado, and Park City in Utah. In Southwestern Canada, Whistler is by far the most popular mountain resort area, and hosted the 2010 Winter Olympics. Canmore in Alberta is popular due to its close proximity to Banff, and was the site of the nordic events when Calgary hosted the 1988 Winter Olympics.
For further information, please also see our blog post Origins of Mountain Architecture in America.
Feel free to peruse more mountain architecture photos and renderings. Hendricks Architecture specializes in the design of luxury mountain style homes and cabins, and has been listed yearly among Mountain Living’s top mountain architects. We try to add a little bit of soul into each home, to reflect the personalities and wishes of the homeowners. Most of the homes we’ve completed are in mountain resort areas throughout the West. |
#!/usr/bin/env python
from twisted.internet.protocol import ClientFactory
from twisted.protocols.basic import LineReceiver
from twisted.internet import reactor
from twisted.internet.task import LoopingCall
import sys, re
# Connector class
class Client:
def __init__(self, core, name):
self.version = "PyAPRSd (aprs::client v1.0)"
self.core = core
self.connection = None
self.name = name
class APRSClient(LineReceiver):
def __init__(self, client, core, callsign, passcode, receive_filter):
self.client = client
self.core = core
self.callsign = callsign
self.passcode = passcode
self.receive_filter = receive_filter
print self.core.version
print self.client.version
print self.client.name
def connectionMade(self):
self.sendLine("user " + self.callsign + " pass " + self.passcode + " vers PyAPRSd 0.1 filter " + self.receive_filter)
pass
def lineReceived(self, line):
print line
pass
def sendPacket(self, packet):
pass
class APRSClientFactory(ClientFactory):
def __init__(self, client, core, protocol, callsign, passcode, receive_filter):
self.client = client
self.core = core
self.protocol = protocol
self.callsign = callsign
self.passcode = passcode
self.receive_filter = receive_filter
def clientConnectionFailed(self, connector, reason):
print 'connection failed:', reason.getErrorMessage()
self.client.disconnect()
def clientConnectionLost(self, connector, reason):
print 'connection lost:', reason.getErrorMessage()
reactor.stop()
def buildProtocol(self, addr):
return self.protocol(self.client, self.core, self.callsign, self.passcode, self.receive_filter)
class APRSClientException(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
def tick(self, server):
pass
def connect(self, server, port, callsign, passcode, receive_filter):
try:
factory = self.APRSClientFactory(self, self.core, self.APRSClient, callsign, passcode, receive_filter)
self.connection = reactor.connectTCP(server, port, factory)
lc = LoopingCall(self.tick, server)
lc.start(1)
except self.APRSClientException, e:
print e.value
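# A minimal, hypothetical usage sketch (Python 2, matching the code above).
# The core object is an assumption: anything exposing a ``version`` attribute
# will do here. Host, port, callsign, passcode and filter are placeholders.
if __name__ == '__main__':
    class DummyCore(object):
        version = "PyAPRSd (core v1.0)"
    client = Client(DummyCore(), "example-client")
    client.connect("rotate.aprs2.net", 14580, "N0CALL", "-1", "r/45.0/-93.0/100")
    reactor.run()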
|
While fast fashion is single-handedly taking over our closets with inexpensive brands like H&M and Forever 21, we may not realize the environmental and social impact this method of clothing production has on our economy and world.
That’s why more and more companies are committed to safer, cleaner, and more ethical clothing production methods. The result? Better quality clothing for you because your jacket is built to last, and a more positive experience for the environment because of less waste and more use of sustainable resources.
UBB uses organic textiles wherever they can, and removes a pound of trash from the world’s oceans and waterways for each product they sell.
traditional methods. They’re also heavily invested in responsible down, quality crops, education initiatives, and more.
Shinola is working to bring manufacturing jobs back to Detroit and back to this country. They’ve partnered with a group of extraordinary American manufacturers whose obsession with craftsmanship and quality matches their own.
With exceptional quality, ethical factories, and radical transparency, Everlane is the brand that just keeps on giving.
According to Nisolo, “All Nisolo producers receive, at a minimum, beyond fair trade wages, healthcare, and a healthy working environment.” Not only that, the shoe company works closely with small, independent artisans to help grow their businesses and production capabilities.
Nudie Jeans only sources 100% organic cotton for their jeans and has transparent production for a fair product.
“Soft. Simple. Sustainable”, are the words that Alternative Apparel lives by. Their design is rooted in sustainable and eco-friendly practices.
AG believes in being socially active and responsible. Whether it’s their Ozone Technology and use of eco-conscious fibers, or their partnership with charity: water, they’re committed to making a difference.
G-Star is committed to sustainable operations that reduce their direct impact on the environment and they mostly use organic cotton, recycled cotton, recycled polyester and Tencel®. |
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils.facts.virtual.base import Virtual, VirtualCollector
from ansible.module_utils.facts.virtual.sysctl import VirtualSysctlDetectionMixin
class FreeBSDVirtual(Virtual, VirtualSysctlDetectionMixin):
"""
This is a FreeBSD-specific subclass of Virtual. It defines
- virtualization_type
- virtualization_role
"""
platform = 'FreeBSD'
def get_virtual_facts(self):
virtual_facts = {}
# Set empty values as default
virtual_facts['virtualization_type'] = ''
virtual_facts['virtualization_role'] = ''
if os.path.exists('/dev/xen/xenstore'):
virtual_facts['virtualization_type'] = 'xen'
virtual_facts['virtualization_role'] = 'guest'
if virtual_facts['virtualization_type'] == '':
virtual_product_facts = self.detect_virt_product('kern.vm_guest') or self.detect_virt_product('hw.hv_vendor')
virtual_facts.update(virtual_product_facts)
if virtual_facts['virtualization_type'] == '':
virtual_vendor_facts = self.detect_virt_vendor('hw.model')
virtual_facts.update(virtual_vendor_facts)
return virtual_facts
class FreeBSDVirtualCollector(VirtualCollector):
_fact_class = FreeBSDVirtual
_platform = 'FreeBSD'
|
Keep updated with our garage door company's latest discounts on this page, and do come back again and again for more deals and new offers. Our contractor is experienced and works with skilled 24 hour technicians for full commercial and residential services and garage door repairs. Enjoy our great discounts today! You can print the coupons right below. |
#!/usr/bin/python
import math
import numpy as np
from props import getNode
from .logger import log
from . import transformations
# camera parameters are stored in the global property tree, but this
# class provides convenient getter/setter functions
d2r = math.pi / 180.0
r2d = 180.0 / math.pi
camera_node = getNode('/config/camera', True)
def set_defaults():
# meta data
camera_node.setString('make', 'unknown')
camera_node.setString('model', 'unknown')
camera_node.setString('lens_model', 'unknown')
# camera lens parameters
camera_node.setFloat('focal_len_mm', 0.0)
camera_node.setFloat('ccd_width_mm', 0.0)
camera_node.setFloat('ccd_height_mm', 0.0)
# camera calibration parameters
camera_node.setLen('K', 9, init_val=0.0)
camera_node.setLen('dist_coeffs', 5, init_val=0.0)
# full size of camera image (these values may be needed for
# sentera images processed through their rolling shutter
    # corrector that are not full width/height).
camera_node.setFloat('width_px', 0)
camera_node.setFloat('height_px', 0)
# camera mount parameters: these are offsets from the aircraft body
# mount_node = camera_node.getChild('mount', create=True)
# mount_node.setFloat('yaw_deg', 0.0)
# mount_node.setFloat('pitch_deg', 0.0)
# mount_node.setFloat('roll_deg', 0.0)
def set_meta(make, model, lens_model):
camera_node.setString('make', make)
camera_node.setString('model', model)
camera_node.setString('lens_model', lens_model)
def set_lens_params(ccd_width_mm, ccd_height_mm, focal_len_mm):
camera_node.setFloat('ccd_width_mm', ccd_width_mm)
camera_node.setFloat('ccd_height_mm', ccd_height_mm)
camera_node.setFloat('focal_len_mm', focal_len_mm)
def get_lens_params():
return ( camera_node.getFloat('ccd_width_mm'),
camera_node.getFloat('ccd_height_mm'),
camera_node.getFloat('focal_len_mm') )
def get_K(optimized=False):
"""
Form the camera calibration matrix K using 5 parameters of
Finite Projective Camera model. (Note skew parameter is 0)
See Eqn (6.10) in:
R.I. Hartley & A. Zisserman, Multiview Geometry in Computer Vision,
Cambridge University Press, 2004.
"""
tmp = []
if optimized and camera_node.hasChild('K_opt'):
for i in range(9):
tmp.append( camera_node.getFloatEnum('K_opt', i) )
else:
for i in range(9):
tmp.append( camera_node.getFloatEnum('K', i) )
K = np.copy(np.array(tmp)).reshape(3,3)
return K
def set_K(fx, fy, cu, cv, optimized=False):
K = np.identity(3)
K[0,0] = fx
K[1,1] = fy
K[0,2] = cu
K[1,2] = cv
# store as linear python list
tmp = K.ravel().tolist()
if optimized:
camera_node.setLen('K_opt', 9)
for i in range(9):
camera_node.setFloatEnum('K_opt', i, tmp[i])
else:
camera_node.setLen('K', 9)
for i in range(9):
camera_node.setFloatEnum('K', i, tmp[i])
# dist_coeffs = array[5] = k1, k2, p1, p2, k3
def get_dist_coeffs(optimized=False):
tmp = []
if optimized and camera_node.hasChild('dist_coeffs_opt'):
for i in range(5):
tmp.append( camera_node.getFloatEnum('dist_coeffs_opt', i) )
else:
for i in range(5):
tmp.append( camera_node.getFloatEnum('dist_coeffs', i) )
return np.array(tmp)
def set_dist_coeffs(dist_coeffs, optimized=False):
if optimized:
camera_node.setLen('dist_coeffs_opt', 5)
for i in range(5):
camera_node.setFloatEnum('dist_coeffs_opt', i, dist_coeffs[i])
else:
camera_node.setLen('dist_coeffs', 5)
for i in range(5):
camera_node.setFloatEnum('dist_coeffs', i, dist_coeffs[i])
def set_image_params(width_px, height_px):
camera_node.setInt('width_px', width_px)
camera_node.setInt('height_px', height_px)
def get_image_params():
return ( camera_node.getInt('width_px'),
camera_node.getInt('height_px') )
def set_mount_params(yaw_deg, pitch_deg, roll_deg):
mount_node = camera_node.getChild('mount', True)
mount_node.setFloat('yaw_deg', yaw_deg)
mount_node.setFloat('pitch_deg', pitch_deg)
mount_node.setFloat('roll_deg', roll_deg)
#camera_node.pretty_print()
def get_mount_params():
mount_node = camera_node.getChild('mount', True)
return [ mount_node.getFloat('yaw_deg'),
mount_node.getFloat('pitch_deg'),
mount_node.getFloat('roll_deg') ]
def get_body2cam():
yaw_deg, pitch_deg, roll_deg = get_mount_params()
body2cam = transformations.quaternion_from_euler(yaw_deg * d2r,
pitch_deg * d2r,
roll_deg * d2r,
"rzyx")
return body2cam
# def derive_other_params():
# K = get_K()
# fx = K[0,0]
# fy = K[1,1]
# cu = K[0,2]
# cv = K[1,2]
# width_px = camera_node.getFloat('width_px')
# height_px = camera_node.getFloat('height_px')
# ccd_width_mm = camera_node.getFloat('ccd_width_mm')
# ccd_height_mm = camera_node.getFloat('ccd_height_mm')
# focal_len_mm = camera_node.getFloat('focal_len_mm')
# if cu < 1.0 and width_px > 0:
# cu = width_px * 0.5
# if cv < 1.0 and height_px > 0:
# cv = height_px * 0.5
# if fx < 1 and focal_len_mm > 0 and width_px > 0 and ccd_width_mm > 0:
# fx = (focal_len_mm * width_px) / ccd_width_mm
# if fy < 1 and focal_len_mm > 0 and height_px > 0 and ccd_height_mm > 0:
# fy = (focal_len_mm * height_px) / ccd_height_mm
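# A minimal usage sketch of the setters/getters above. The numbers are
# made-up placeholders rather than a real calibration, and because of the
# relative imports at the top this module is normally used from inside its
# package rather than run directly.
if __name__ == '__main__':
    set_defaults()
    set_meta('example_make', 'example_model', 'example_lens')
    set_lens_params(ccd_width_mm=6.17, ccd_height_mm=4.55, focal_len_mm=4.5)
    set_image_params(4000, 3000)
    set_K(fx=3000.0, fy=3000.0, cu=2000.0, cv=1500.0)
    set_mount_params(0.0, -90.0, 0.0)   # e.g. a nadir-pointing camera
    print(get_K())                      # 3x3 calibration matrix (numpy array)
    print(get_body2cam())               # body-to-camera quaternion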
|
You like working with TreeSize Personal? You have comments or suggestions?
I installed the 32 bit version on Windows XP and the 64 bit version on Windows 10. Both installed and working fine. A really great program. I have many system analysis programs on my machine, but I've never seen one that gives such complete information about the contents of the file systems as TreeSize does! |
from django.contrib.auth import authenticate, get_user_model, login
from django.core.exceptions import PermissionDenied
from django.utils import timezone
from django.utils.formats import date_format
from django.utils.translation import ugettext as _
from django.views.decorators.csrf import csrf_protect
from rest_framework import status
from rest_framework.response import Response
from misago.conf import settings
from misago.core import forms
from misago.core.mail import mail_user
from misago.users import captcha
from misago.users.bans import ban_ip
from misago.users.forms.register import RegisterForm
from misago.users.models import (ACTIVATION_REQUIRED_USER,
ACTIVATION_REQUIRED_ADMIN)
from misago.users.serializers import AuthenticatedUserSerializer
from misago.users.tokens import make_activation_token
from misago.users.validators import validate_new_registration
@csrf_protect
def create_endpoint(request):
if settings.account_activation == 'closed':
raise PermissionDenied(
_("New users registrations are currently closed."))
form = RegisterForm(request.data)
try:
captcha.test_request(request)
except forms.ValidationError as e:
form.add_error('captcha', e)
if not form.is_valid():
return Response(form.errors, status=status.HTTP_400_BAD_REQUEST)
captcha.reset_session(request.session)
try:
validate_new_registration(
request.user_ip,
form.cleaned_data['username'],
form.cleaned_data['email'])
except PermissionDenied:
staff_message = _("This ban was automatically imposed on "
"%(date)s due to denied register attempt.")
message_formats = {'date': date_format(timezone.now())}
staff_message = staff_message % message_formats
validation_ban = ban_ip(
request.user_ip,
staff_message=staff_message,
length={'days': 1}
)
raise PermissionDenied(
_("Your IP address is banned from performing this action."),
{'ban': validation_ban.get_serialized_message()})
activation_kwargs = {}
if settings.account_activation == 'user':
activation_kwargs = {
'requires_activation': ACTIVATION_REQUIRED_USER
}
elif settings.account_activation == 'admin':
activation_kwargs = {
'requires_activation': ACTIVATION_REQUIRED_ADMIN
}
User = get_user_model()
new_user = User.objects.create_user(form.cleaned_data['username'],
form.cleaned_data['email'],
form.cleaned_data['password'],
joined_from_ip=request.user_ip,
set_default_avatar=True,
**activation_kwargs)
mail_subject = _("Welcome on %(forum_title)s forums!")
mail_subject = mail_subject % {'forum_title': settings.forum_name}
if settings.account_activation == 'none':
authenticated_user = authenticate(
username=new_user.email,
password=form.cleaned_data['password'])
login(request, authenticated_user)
mail_user(request, new_user, mail_subject,
'misago/emails/register/complete')
return Response({
'activation': 'active',
'username': new_user.username,
'email': new_user.email
})
else:
activation_token = make_activation_token(new_user)
activation_by_admin = new_user.requires_activation_by_admin
activation_by_user = new_user.requires_activation_by_user
mail_user(
request, new_user, mail_subject,
'misago/emails/register/inactive',
{
'activation_token': activation_token,
'activation_by_admin': activation_by_admin,
'activation_by_user': activation_by_user,
})
if activation_by_admin:
activation_method = 'activation_by_admin'
else:
activation_method = 'activation_by_user'
return Response({
'activation': activation_method,
'username': new_user.username,
'email': new_user.email
})
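# A minimal request sketch for create_endpoint above. The field names follow
# RegisterForm as used in this module; the exact captcha field name and the
# URL this endpoint is routed at depend on project configuration and are
# assumptions here:
#
#     payload = {
#         'username': 'newuser',
#         'email': 'newuser@example.com',
#         'password': 'correct horse battery staple',
#         'captcha': '<captcha response>',
#     }
#
# A valid POST returns {'activation': 'active' | 'activation_by_user' |
# 'activation_by_admin', 'username': ..., 'email': ...}; validation errors
# come back as HTTP 400 with the form errors.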
|
Part Number STDP405 is available in Stock. Fill out the below form to get a quote for Seagate part Number STDP405 with all required details and our dedicated account manager will handle your Quote promptly and give you best quote for your required parts. Also browse our Related Components section below, to find other related IT hardware parts of manufacturer Seagate. |
import json
import os
import textwrap
import time
import six
import prettytable
def add_arg(func, *args, **kwargs):
"""Bind CLI arguments to a shell.py `do_foo` function."""
if not hasattr(func, 'arguments'):
func.arguments = []
if (args, kwargs) not in func.arguments:
func.arguments.insert(0, (args, kwargs))
def arg(*args, **kwargs):
"""Decorator for CLI args.
Example:
>>> @arg("name", help="Name of the new entity")
... def entity_create(args):
... pass
"""
def _decorator(func):
add_arg(func, *args, **kwargs)
return func
return _decorator
def multi_arg(*args, **kwargs):
"""Decorator for multiple CLI args.
Example:
>>> @arg("name", help="Name of the new entity")
... def entity_create(args):
... pass
"""
def _decorator(func):
add_arg(func, *args, **kwargs)
return func
return _decorator
def print_original_dict(d):
    d = json.dumps(d, ensure_ascii=False, indent=2)
print(d)
def print_dict(d, dict_property="Property", dict_value="Value", wrap=0):
pt = prettytable.PrettyTable([dict_property, dict_value], caching=False)
pt.align = 'l'
for k, v in sorted(d.items()):
# convert dict to str to check length
if isinstance(v, (dict, list)):
# v = jsonutils.dumps(v)
v = json.dumps(v)
if wrap > 0:
v = textwrap.fill(str(v), wrap)
# if value has a newline, add in multiple rows
# e.g. fault with stacktrace
if v and isinstance(v, six.string_types) and r'\n' in v:
lines = v.strip().split(r'\n')
col1 = k
for line in lines:
pt.add_row([col1, line])
col1 = ''
else:
if v is None:
v = '-'
pt.add_row([k, v])
# result = encodeutils.safe_encode(pt.get_string())
result = pt.get_string()
    if isinstance(result, bytes):
        result = result.decode()
print(result)
def print_list(objs, fields, formatters={}, sortby_index=None):
    '''
    Print the given fields of each object in objs as a table.
    :param objs: list of dict-like objects to print
    :param fields: the fields (keys) to be printed
    :param formatters: optional per-field formatter callables keyed by field
    :param sortby_index: index into fields of the column to sort by
    :return:
    '''
if sortby_index is None:
sortby = None
else:
sortby = fields[sortby_index]
mixed_case_fields = ['serverId']
pt = prettytable.PrettyTable([f for f in fields], caching=False)
pt.align = 'l'
for o in objs:
row = []
for field in fields:
if field in formatters:
row.append(formatters[field](o))
else:
if field in mixed_case_fields:
field_name = field.replace(' ', '_')
# else:
# field_name = field.lower().replace(' ', '_')
field_name = field
data = o.get(field_name, '')
if data is None:
data = '-'
row.append(data)
pt.add_row(row)
if sortby is not None:
result = pt.get_string(sortby=sortby)
else:
result = pt.get_string()
    if isinstance(result, bytes):
        result = result.decode()
print(result)
def env(*args, **kwargs):
"""Returns environment variable set."""
for arg in args:
value = os.environ.get(arg)
if value:
return value
return kwargs.get('default', '')
def parse_time(d):
for (k, v) in d.items():
if 'Time' in k and isinstance(v, int) and v > 1000000000:
d[k] = time.strftime('%F %T', time.localtime(v))
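# A minimal usage sketch of the printing helpers above (prettytable and six
# must be installed; the data is illustrative only):
if __name__ == '__main__':
    print_dict({'name': 'demo', 'status': 'ACTIVE', 'createdTime': 1500000000})
    print_list(
        [{'id': 2, 'name': 'beta'}, {'id': 1, 'name': 'alpha'}],
        fields=['id', 'name'],
        sortby_index=1,
    )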
|
This week The IRA will be at the MBA Secondary Market Conference & Expo, as always held at the Marriott Marquis in Times Square. The 8th floor reception and bar is where folks generally hang out. Attendees should not miss the panel on mortgage servicing rights at 3:00 PM Monday. We’ll give our impressions of this important conference in the next edition of The Institutional Risk Analyst.
First, we heard Banque de France Governor Villeroy de Galhau confirm that the European Central Bank intends to continue reinvesting its portfolio of securities indefinitely. This means continued low interest rates in Europe and, significantly, increasing monetary policy divergence between the EU and the US.
Second and following from the first point, the banking system in Europe remains extremely fragile, this despite happy talk from various bankers we met during the trip. The fact of sustained quantitative easing by the ECB, however, is a tacit admission that the state must continue to tax savings in order to transfer value to debtors such as banks. Overall, the ECB clearly does not believe that economic growth has reached sufficiently robust levels such that extraordinary policy steps should end.
Italian banks, for example, admit to bad loans equal to 14.5 percent of total loans. Double that number to capture the economic reality under so-called international accounting rules. Italian banks have packaged and securitized non-performing loans (NPLs) to sell them to investors, supported by Italian government guarantees on senior tranches. These NPL deals are said to be popular with foreign hedge funds, yet this explicit state bailout of the banks illustrates the core fiscal problem facing Italy.
And third, the fact of agreement between the opposition parties in Italy means that the days of the Eurozone as we know it today may be numbered. The accord between the Five Star Movement (M5S) and the far-right League Party (Lega) of Matteo Salvini marks a deterioration in the commitment to fiscal discipline in Europe. Specifically, the M5S/Lega coalition wants EU assent to increased spending and cutting taxes – an explicit embrace of the Trumpian economic model operating in the US.
The M5S/Lega coalition is essentially asking (or rather blackmailing) the EU into waiving the community’s fiscal rules as a concession to keep Italy in the Union. The M5S/Lega coalition manifesto, entitled appropriately “Government for Change,” suggests plans have been made for Italy to leave the single currency, calls for sanctions against Russia to be scrapped and reveals plans to ask the European Central Bank to forgive all of the Italian debt the ECB purchased as part of QE.
John Dizard, writing in the Financial Times on Friday, notes the new spending in Italy will be funded via “mini-BoTs,” referring to Italian T-bills. The M5S/Lega coalition apparently wishes to issue small (euro) denomination, non-interest-bearing Treasury bills. The paper would be in the form of bearer securities that would be secured by Italian state tax revenues.
Dizard notes that the logical conclusion of the Italian scheme, which allows the printing of a de facto fiat currency in the form of bearer bonds, will result in either Germany or Italy leaving the EU.
The Italian evolution suggests that Ben Bernanke, Mario Draghi and their counterparts in Japan, by embracing mass purchases of securities via Quantitative Easing, have opened Pandora’s Box when it comes to sovereign debt forgiveness. We especially like the fact that mini-BOTs will be in physical form, printed like lottery tickets. The spread on Italy is now trading 1.65 percent over German Bunds vs 1.5 percent last week and is likely to widen further.
Among the biggest challenges facing Italy’s new government and all EU heads of state is the growing economic policy divergence between the US and Europe. Again, to repeat point two above, the Europeans have no intention of raising interest rates anytime soon and, to this end, will continue to reinvest returns of principal from the ECB’s securities portfolio.
Given the Fed’s focus on raising interest rates in 2018, it seems reasonable to assume that the euro is headed lower vs the dollar. The assumption on the Federal Open Market Committee, of course, is that US inflation is near 2 percent, giving us a real interest rate measured against LIBOR at 3 percent, for example, of one hundred basis points.
But what if the FOMC is wrong about inflation and, particularly, if the favorite inflation measure used by American economists is overstated? Is the broadly defined personal consumption expenditure (PCE) index, which the FOMC relies upon for assessing economic conditions and fiscal policy, inflation, and employment, really the best measure of price change? And is the FOMC currently making a rather gigantic mistake in raising interest rates further?
Our friend Brian Barnier at Fed Dashboard has done some interesting work on this question over the past several years, including his May 10, 2018 missive (“Concentrated price changes mean less control for the Fed”). The chart below from Fed Dashboard shows the components of PCE.
The San Francisco Fed has also done some great work on this issue of “PCE diffusion.” If you are indeed a data dependent monetary agency, the idea of using the center point average of the diverse factors in PCE as a bellwether for monetary inflation is a bit odd. Notice, for example, that increases in the cost of financial services such as banks, auto insurance and financial advice are among the biggest positive factors in the PCE index. Increases in interest paid on excess reserves (IOER) by the Fed also positively impacts PCE, Barnier tells The IRA.
Important for Europe, the Fed’s use of PCE is leading to rising interest rates, which in turn is driving up dollar borrowing costs in Europe, as shown in the FRED chart below of three month LIBOR vs three month Treasury bills. The FOMC’s view of inflation also is supporting a rally in the much battered dollar. But what if the Fed’s favorite indicator, namely PCE, is overstating the actual rate of price change?
Economists on both sides of the Atlantic like to neatly separate “real-world” indicators like interest rates and debt from supposed monetary factors such as PCE. But the divergence of monetary policy in the US and Europe suggests this is difficult in practice.
There seems to be a basic conflict in how inflation is perceived in Washington and Brussels. This conflict of visions promises to be increasingly problematic in the weeks and months ahead, with a stronger dollar and higher US interest rates pressuring emerging nations. The big risk we see for Europe is that the narrative being followed by the FOMC assumes that inflation is rising, at least as measured by PCE, when in fact deflation driven by excessive debt may still be the central tendency of aggregate price change. If PCE is overstating monetary price change, then the FOMC should not raise rates further.
So the good news is that Europe is showing some signs of life in terms of economic growth. A weaker euro may help in the near term. The bad news is that the EU’s banks remain largely crippled by non-performing loans accumulated during previous economic slumps. And the level of debt held by nations such as Italy is growing steadily. With the UK already headed for the door, the latest political developments in Italy may presage the end of the EU as it stands today. How the Germans and other euro nations deal with the new government in Italy will tell the tale.
This entry was posted on Monday, May 21st, 2018 at 2:09 pm and is filed under Immediately available to public. You can leave a response, or trackback from your own site. |
import os
from django.test import TestCase
from django.test.utils import override_settings
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.models import User, Group
from userprofile.forms import USERNAME_MIN_LENGTH
from userprofile.models import Producer, UserImage
TEST_ROOT = settings.MEDIA_ROOT + '/private/unittest'
TESTPRODUCER = 'unittest_prod'
TESTUSER = 'unittest_user'
TESTPASSWORD = 'testpassword'
TESTEMAIL = 'test@example.com'
TESTPAYPAL = 'paypal@example.com'
PRODUCER_URLS = [
'/profile/registration/pictureupload',
'/profile/registration/pictures',
'/profile/registration/info',
'/profile/payment',
'/profile/pictureupload',
'/profile/pictures',
'/profile/info',
]
USER_URLS = [
'/profile/changepassword',
]
@override_settings(PREPEND_WWW=False)
class LoggedOutTest(TestCase):
def setUp(self):
os.environ['RECAPTCHA_TESTING'] = 'True'
settings.OPEN = True
def tearDown(self):
os.environ['RECAPTCHA_TESTING'] = 'False'
def test_home(self):
r = self.client.get('/')
self.assertEqual(r.status_code, 200)
def test_urls(self):
for url in PRODUCER_URLS + USER_URLS:
r = self.client.get(url)
self.assertEqual(r.status_code, 302)
def test_login_redirect(self):
url = USER_URLS[0]
r = self.client.get(url)
self.assertRedirects(r, '/accounts/login/?next=/profile/changepassword')
def test_register_user(self):
r = self.client.post('/register_user', {
'username': TESTUSER,
'password1': TESTPASSWORD,
'password2': TESTPASSWORD,
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
# Check for redirect upon successful registration
self.assertRedirects(r, '/video/list')
u = User.objects.get(username=TESTUSER)
self.assertEqual(u.username, TESTUSER)
self.assertEqual(u.email, TESTEMAIL)
# Check that no producer entry was made for the user
self.assertEqual(0, Producer.objects.filter(user=u).count())
def test_register_producer(self):
r = self.client.post('/register_producer', {
'username': TESTPRODUCER,
'password1': TESTPASSWORD,
'password2': TESTPASSWORD,
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
# Check for redirect upon successful registration
self.assertRedirects(r, '/profile/registration/pictureupload')
u = User.objects.get(username=TESTPRODUCER)
self.assertEqual(u.username, TESTPRODUCER)
self.assertEqual(u.email, TESTEMAIL)
p = u.producer
self.assertTrue(p)
self.assertFalse(p.approved)
def test_bad_registration(self):
r = self.client.post('/register_user', {
'username': TESTUSER,
'password1': TESTPASSWORD,
'password2': 'wrong!!!',
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
self.assertFormError(r, 'form', None, 'The two passwords you entered didn\'t match.')
r = self.client.post('/register_user', {
'username': TESTUSER,
'password1': 'wrong!!!',
'password2': TESTPASSWORD,
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
self.assertFormError(r, 'form', None, 'The two passwords you entered didn\'t match.')
def test_valid_username(self):
r = self.client.post('/register_user', {
'username': 'aa',
'password1': TESTPASSWORD,
'password2': TESTPASSWORD,
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
self.assertFormError(r, 'form', 'username', 'Ensure this value has at least 3 characters (it has 2).')
r = self.client.post('/register_user', {
'username': ' a ',
'password1': TESTPASSWORD,
'password2': TESTPASSWORD,
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
self.assertFormError(r, 'form', 'username', "Your username must be at least %d characters long." % USERNAME_MIN_LENGTH)
r = self.client.post('/register_user', {
'username': '8whatever',
'password1': TESTPASSWORD,
'password2': TESTPASSWORD,
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
self.assertFormError(r, 'form', 'username', "Your username must begin with a letter a-z.")
@override_settings(PREPEND_WWW=False)
class UserTest(TestCase):
def setUp(self):
self.user = User.objects.create_user(TESTUSER, TESTEMAIL, TESTPASSWORD)
def test_urls(self):
for url in PRODUCER_URLS:
r = self.client.get(url)
self.assertEqual(r.status_code, 302)
def test_login_redirect(self):
url = PRODUCER_URLS[0]
r = self.client.get(url)
self.assertRedirects(r, '/accounts/login/?next=/profile/registration/pictureupload')
@override_settings(PREPEND_WWW=False)
class ProducerTest(TestCase):
def setUp(self):
os.environ['RECAPTCHA_TESTING'] = 'True'
# This registers a new producer
r = self.client.post('/register_producer', {
'username': TESTPRODUCER,
'password1': TESTPASSWORD,
'password2': TESTPASSWORD,
'email': TESTEMAIL,
'recaptcha_response_field': 'PASSED',
})
self.assertTrue(self.client.login(username=TESTPRODUCER, password=TESTPASSWORD))
def tearDown(self):
os.environ['RECAPTCHA_TESTING'] = 'False'
# Delete all image files
images = UserImage.objects.all()
for i in images:
i.image.delete()
i.thumbnail.delete()
def get_user(self):
return User.objects.get(username=TESTPRODUCER)
def test_urls(self):
for url in PRODUCER_URLS:
r = self.client.get(url)
self.assertEqual(r.status_code, 200)
r = self.client.get('/u/' + TESTPRODUCER)
self.assertEqual(r.status_code, 200)
def test_payment(self):
url = '/profile/ajax/payment'
p = self.get_user().producer
# First the producer is not approved, so this shouldn't work
self.client.post(url, {
'paypal': TESTPAYPAL,
})
# Need to re-load the producer object
p = Producer.objects.get(id=p.id)
self.assertEqual(p.paypal, '')
# Now approve them, and it should save
p.approved = True
p.save()
self.client.post(url, {
'paypal': TESTPAYPAL,
})
# Need to re-load the producer object
p = Producer.objects.get(id=p.id)
self.assertEqual(p.paypal, TESTPAYPAL)
p.approved = False
p.save()
def test_picture_upload(self):
imagepath = os.path.join(TEST_ROOT, 'test.jpg')
# We'll try uploading two pictures, and then delete one of them.
for i in range(2):
with open(imagepath) as fp:
self.client.post('/profile/pictureupload', {
'form-0-image': fp,
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': 1000,
})
# Make sure the pictures uploaded successfully
images = self.get_user().userimage_set.all()
self.assertEqual(len(images), 2)
# Test that the files exist
for i in range(2):
userimage = images[i]
pathname = os.path.join(settings.MEDIA_ROOT, 'p', 'uploads', 'userimages', 'full', os.path.basename(userimage.image.url))
self.assertTrue(os.path.exists(pathname))
if i == 0:
path_delete = pathname
else:
path_profile = pathname
# Now try to delete one
self.client.post('/profile/ajax/pictures', {
'delete': (images[0].id, )
})
# Make sure one picture was deleted
images = self.get_user().userimage_set.all()
self.assertEqual(len(images), 1)
self.assertFalse(os.path.exists(path_delete))
self.assertTrue(os.path.exists(path_profile))
# Delete the remaining picture manually
images[0].image.delete()
images[0].delete()
self.assertFalse(os.path.exists(path_profile))
def test_bad_picture_upload(self):
imagepath = os.path.join(TEST_ROOT, 'test.jp;g')
with open(imagepath) as fp:
self.client.post('/profile/pictureupload', {
'form-0-image': fp,
'form-TOTAL_FORMS': 1,
'form-INITIAL_FORMS': 0,
'form-MAX_NUM_FORMS': 1000,
})
# No images should have gotten uploaded
self.assertEqual(self.get_user().userimage_set.all().count(), 0)
|
The AgaviFormPopulationFilter is handy for re-filling a form after validation has failed and the user needs to check the values again (without re-entering them by hand).
But the FormPopulationFilter (FPF) is also capable of pre-filling forms.
Provide the default values to the filter and your form fields 'name' and 'description' will be pre-filled.
I added the check for the 'read' method, since you don't want to auto-populate in the case where you have just submitted the data. |
#! /usr/bin/env python2
'''
This script converts a VCF file to SIFT4G input.
#Example input:
#CHROM POS ID REF
scaffold_1 1 . C
scaffold_1 2 . CAA
scaffold_1 3 . T
scaffold_1 4 . A
scaffold_1 5 . A
scaffold_1 6 . A
scaffold_1 7 . C
scaffold_1 8 . C
scaffold_1 9 . C
#Example output:
#CHROM POS ID REF ALT QUAL FILTER INFO
scaffold_1 1 . C A . . .
scaffold_1 1 . C T . . .
scaffold_1 1 . C G . . .
scaffold_1 1 . C C . . .
scaffold_1 3 . T A . . .
scaffold_1 3 . T T . . .
scaffold_1 3 . T G . . .
scaffold_1 3 . T C . . .
scaffold_1 4 . A A . . .
scaffold_1 4 . A T . . .
scaffold_1 4 . A G . . .
scaffold_1 4 . A C . . .
scaffold_1 5 . A A . . .
scaffold_1 5 . A T . . .
scaffold_1 5 . A G . . .
scaffold_1 5 . A C . . .
scaffold_1 6 . A A . . .
scaffold_1 6 . A T . . .
scaffold_1 6 . A G . . .
scaffold_1 6 . A C . . .
scaffold_1 7 . C A . . .
scaffold_1 7 . C T . . .
scaffold_1 7 . C G . . .
scaffold_1 7 . C C . . .
scaffold_1 8 . C A . . .
scaffold_1 8 . C T . . .
scaffold_1 8 . C G . . .
scaffold_1 8 . C C . . .
scaffold_1 9 . C A . . .
scaffold_1 9 . C T . . .
scaffold_1 9 . C G . . .
scaffold_1 9 . C C . . .
#command:
$ python vcf_to_SIFT4G.py -i input.vcf -o output.vcf
#contact:
Dmytro Kryvokhyzha dmytro.kryvokhyzha@evobio.eu
'''
############################# modules #############################
import calls # my custom module
############################# options #############################
parser = calls.CommandLineParser()
parser.add_argument('-i', '--input', help = 'name of the input file', type=str, required=True)
parser.add_argument('-o', '--output', help = 'name of the output file', type=str, required=True)
args = parser.parse_args()
############################# program #############################
counter = 0
print('Opening the file...')
with open(args.input) as datafile:
header_line = datafile.readline()
print('Creating the output...')
outfile = open(args.output, 'w')
outfile.write('#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n')
print('Converting...')
for line in datafile:
words = line.split()
chr_pos_ref = words[0:4]
ref = words[3]
if len(ref) == 1: # to skip insertions
for nucl in ['A', 'T', 'G', 'C']:
chr_pos_refP = '\t'.join(str(e) for e in chr_pos_ref)
outfile.write("%s\t%s\t.\t.\t.\n" % (chr_pos_refP, nucl))
# track progress
counter += 1
if counter % 1000000 == 0:
            print(str(counter) + ' lines processed')
datafile.close()
outfile.close()
print('Done!')
|
When you use a CambodiaYP.com Website, our system automatically collects information relating to your visit to that website, such as your IP address.
CambodiaYP.com Website will not rent, sell, or share information about you with other people or non-affiliated companies. Any other data collected on the site is purely for the purpose of running the site and will not be shared, rented or sold. Please also refer to our Terms of Business.
The type of information that we collect from you may include the following: your name, phone number, address, email address, company position/title, cookies, personal information that you include in your business profile or personal information that you email/send using email functionality on a CambodiaYP.com Website website.
We use this information to provide you with a better service experience, with content and advertising that is more relevant to your interests, and to create a business information database and associated content for use as part of GBD's online business. Whilst CambodiaYP.com Website's database is designed to operate as a business directory (not a directory of individuals), it is possible that personal information may become included in the business information database.
If you are receiving promotional information from CambodiaYP.com Website and do not wish to receive this information any longer, you may remove your name from our list by contacting us and asking to be removed from our mailing list. Please allow 7 days for this request to be processed. |
'''A utility for finding Python packages that may not be in use.
'''
from __future__ import print_function
import os
import sys
import codecs
__all__ = ('__version__', 'main')
__version__ = '0.1.2'
if sys.stdout.encoding is None:
sys.stdout = codecs.getwriter('utf8')(sys.stdout)
if sys.stderr.encoding is None:
sys.stderr = codecs.getwriter('utf8')(sys.stderr)
def is_venv():
'''Redefinition of pip's running_under_virtualenv().
'''
return hasattr(sys, 'real_prefix') \
or sys.prefix != getattr(sys, 'base_prefix', sys.prefix)
def main():
if 'VIRTUAL_ENV' in os.environ and not is_venv():
# Activate the virtualenv before importing moult's program to avoid
# loading modules.
print('Activating', os.environ['VIRTUAL_ENV'])
activate = os.path.join(os.environ['VIRTUAL_ENV'], 'bin', 'activate_this.py')
if os.path.exists(activate):
with open(activate) as fp:
exec(compile(fp.read(), activate, 'exec'), {'__file__': activate})
from moult.program import run
return run()
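# A minimal entry-point sketch: running this module directly defers to
# moult.program.run() through main(), much as a console_scripts entry point
# for the `moult` command would.
if __name__ == '__main__':
    sys.exit(main())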
|
Civilization cannot endure if it is unfaithful to reality and engages in delusional behavior. The delusional behavior produces dyscivic and dysgenic results, results that always terminate the afflicted and end with extinction of those so deluded. Track the decline of historical empires, and you will see all sorts of delusional behaviors arise prior to decline and fall. In short, foolish prosperity produces disconnection from reality by removing immediate and regular routine encounters with reality. Too long and too good an isolation creates bubbles of unreality from which the aforementioned delusions and degeneracy stem.
The solution is for the leaders of a nation to deliberately and resolutely remain faithful. They must ensure that their youths have any such mistaken perceptions on what reality demands of them disabused good and hard, so that even the most stubborn get the message. Reality is king, to which all men must bend the knee, and not the dictates of men wearing crowns or miters.
That's all there is to it. The thing to take forward is to remove all of the barriers between you and reality. Reality is harsh, but it is sovereign. Be faithful to it, and all that you accomplish will endure well after you are gone. It is building on the hardest stone, certain to withstand. Iron seems stronger, but never endures as stone does. |
__author__ = 'amentis'
from RxAPI.RxGUI import LineEdit
class PasswordEdit(LineEdit):
"""
password input field
"""
def __init__(self, parent, name, text=" "):
"""
@param parent: RxGUIObject parent REXI object
@param name: str name of the REXI object
@param text: str value of the line edit field
"""
LineEdit.__init__(self, parent, name, text)
def get(self):
"""
@return: str HTML of the password edit field
"""
style = " "
if self._style_internal_enabled:
style += self._style_internal
style += """
#%s {color: %s;font: %s; %s background-color: %s; }
""" % (self.get_name(), self._text_color.get(), self._font.get(), self._border.get(),
self._background_color.get())
style += self._css
self._parent.add_css(style)
self._parent.append_javascript(self.get_javascript())
return """
<input type=\"password\" id="{0}" class="LineEdit" value=\"{1}\" />
""".format(self.get_name(), self._text)
javascript_class = """
function PasswordEdit (name) {
this.name = name;
this.set_size = function(width, height) {
$(\"#\" + this.name).style.width = width;
$(\"#\" + this.name).style.height = height;
};
this.get_font = function() {
return $(\"#\" + this.name).style.font;
};
this.get_colors = function() {
return [$(\"#\" + this.name).style.color, $(\"#\" + this.name).background-color];
};
    this.get_text = function () {
        return $(\"#\" + this.name).val();
    };
this.set_text = function (text) {
$(\"#\" + this.name).val(text);
};
this.append_text = function (text) {
$(\"#\" + this.name).val($(\"#\" + this.name).val() + text);
};
this.prepend_text = function (text) {
$(\"#\" + this.name).val(text + $(\"#\" + this.name).val());
};
this.clear_text = function () {
$(\"#\" + this.name).val(\"\");
};
}
""" |
Black Caps captain Brendon McCullum is highly unlikely to play in tomorrow's final one day cricket match against Sri Lanka in Mt Maunganui, while senior bowler Tim Southee's definitely out.
McCullum's back injury hasn't progressed as well as medical staff would like, while Southee won't play tomorrow with his foot injury, and he's set to miss the two Twenty20 matches too.
The skipper hurt his back fielding in the 10-wicket victory in Christchurch, and McCullum sat out the big loss in game three in Nelson, as well as the washed-out fourth match, also at Saxton Oval.
"He's really struggling," New Zealand coach Mike Hesson said.
"(He) hasn't progressed as well as we would've thought so he's probably unlikely tomorrow."
McCullum wasn't part of the Twenty20 squad anyway so Hesson thought a good break would be best for the veteran long-term, with the Pakistan series starting late next week now the target for his return.
Southee pulled up lame during the third one day international against Sri Lanka after having a break in the first two fixtures, but he clearly needs longer out of the game now.
"We're going to find out a little more today but he certainly won't be playing in this game and probably unlikely during the T20s, but we'll know probably a little bit more tomorrow," Hesson said.
"But there's nothing major, it's just a matter of time really."
The country's fastest bowler Trent Boult is likely to feature, while seamer Doug Bracewell, who was rested for the Nelson washout, is set to play too.
New Zealand lead the five-match series 2-1.
Rain has forced the abandonment of the fourth ODI between New Zealand and Sri Lanka in Nelson.
The Sri Lankan cricketers have kept the one day series against New Zealand alive, cruising to an eight-wicket win in game three in Nelson.
New Zealand fast bowler Tim Southee will miss the fourth one-day cricket international against Sri Lanka with a foot injury and will be replaced by Matt Henry.
The New Zealand Cricket boss sees no reason why Pakistan bowler Mohamed Amir shouldn't be allowed to play the Black Caps in the upcoming series.
The Black Caps could host a day-night Test against South Africa when they tour New Zealand next year. |
'''
Test chassis operation
@author chenyuan.xu
'''
import zstackwoodpecker.operations.baremetal_operations as baremetal_operations
import zstackwoodpecker.operations.account_operations as acc_ops
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.operations.cluster_operations as cluster_ops
import zstackwoodpecker.operations.net_operations as net_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as zstack_vm_header
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import test_stub
import threading
import zstacklib.utils.shell as shell
import time
import os
import sys
vm = None
baremetal_cluster_uuid = None
pxe_uuid = None
host_ip = None
exc_info = []
def inspect_chassis(chassis_uuid):
try:
baremetal_operations.inspect_chassis(chassis_uuid)
except:
exc_info.append(sys.exc_info())
def check_thread_exception():
if exc_info:
info1 = exc_info[0][1]
info2 = exc_info[0][2]
raise info1, None, info2
def test():
global vm, baremetal_cluster_uuid, pxe_uuid, host_ip
test_util.test_dsc('Create baremetal cluster and attach network')
zone_uuid = res_ops.query_resource(res_ops.ZONE)[0].uuid
cond = res_ops.gen_query_conditions('type', '=', 'baremetal')
cluster = res_ops.query_resource(res_ops.CLUSTER, cond)
if not cluster:
baremetal_cluster_uuid = test_stub.create_cluster(zone_uuid).uuid
else:
baremetal_cluster_uuid = cluster[0].uuid
cond = res_ops.gen_query_conditions('name', '=', os.environ.get('l3NoVlanNetworkName1'))
l3_network = res_ops.query_resource(res_ops.L3_NETWORK, cond)[0]
cidr = l3_network.ipRanges[0].networkCidr
cond = res_ops.gen_query_conditions('l3Network.uuid', '=', l3_network.uuid)
l2_uuid = res_ops.query_resource(res_ops.L2_NETWORK, cond)[0].uuid
sys_tags = "l2NetworkUuid::%s::clusterUuid::%s::cidr::{%s}" %(l2_uuid, baremetal_cluster_uuid, cidr)
net_ops.attach_l2(l2_uuid, baremetal_cluster_uuid, [sys_tags])
test_util.test_dsc('Create pxe server')
pxe_servers = res_ops.query_resource(res_ops.PXE_SERVER)
[pxe_ip, interface] = test_stub.get_pxe_info()
if not pxe_servers:
pxe_uuid = test_stub.create_pxe(dhcp_interface = interface, hostname = pxe_ip, zoneUuid = zone_uuid).uuid
baremetal_operations.attach_pxe_to_cluster(pxe_uuid, baremetal_cluster_uuid)
test_util.test_dsc('Create vms to simulate baremetal host')
#mn_ip = res_ops.query_resource(res_ops.MANAGEMENT_NODE)[0].hostName
#cond = res_ops.gen_query_conditions('managementIp', '=', mn_ip)
#host = res_ops.query_resource(res_ops.HOST, cond)[0]
host_list = []
hosts = res_ops.query_resource(res_ops.HOST)
num = len(hosts)
for i in range (0, num):
host_list.append(hosts[i])
host_uuid = hosts[0].uuid
host_ip = hosts[0].managementIp
cond = res_ops.gen_query_conditions('hypervisorType', '=', 'KVM')
cluster_uuid = res_ops.query_resource(res_ops.CLUSTER, cond)[0].uuid
cond = res_ops.gen_query_conditions('name', '=', os.environ.get('scenl3VPCNetworkName1'))
l3_network = res_ops.query_resource(res_ops.L3_NETWORK, cond)[0]
l3_uuid_list = []
l3_uuid_list.append(l3_network.uuid)
cond = res_ops.gen_query_conditions('name', '=', os.environ.get('l3PublicNetworkName'))
public_network = res_ops.query_resource(res_ops.L3_NETWORK, cond)[0]
l3_uuid_list.append(public_network.uuid)
vm_list = []
for i in range (0, num):
vm_name = 'baremetal_vm_%s' % str(i)
vm = test_stub.create_vm_multi_l3(l3_uuid_list=l3_uuid_list, default_l3_uuid = l3_network.uuid, vm_name = vm_name, host_uuid = hosts[i].uuid, cluster_uuid = cluster_uuid)
vm_list.append(vm)
test_util.test_dsc('Create chassis')
chassis_list = []
for i in range (0, num):
test_stub.create_vbmc(vm_list[i], hosts[i].managementIp, 623)
chassis = test_stub.create_chassis(baremetal_cluster_uuid, address = hosts[i].managementIp)
chassis_list.append(chassis)
#Hack inspect ks file to support vbmc, include ipmi device logic and ipmi addr to 127.0.0.1
node_ip = os.environ.get('node1Ip')
ks = '/home/%s/zstack-woodpecker/integrationtest/vm/baremetal/inspector_ks.cfg' % node_ip
path = '/var/lib/zstack/baremetal/ftp/ks/inspector_ks.cfg'
session_uuid = acc_ops.login_as_admin()
cmd = "ip r | grep %s | awk '{print $NF}'" % interface
pxe_server_ip = test_lib.lib_execute_ssh_cmd(pxe_ip, 'root', 'password', cmd, 180).strip()
os.system("sed -i 's/session_uuid/%s/g' %s" %(session_uuid, ks))
os.system("sed -i 's/pxe_server_ip/%s/g' %s" %(pxe_server_ip, ks))
os.system("sed -i 's/pxe_server_uuid/%s/g' %s" %(pxe_uuid, ks))
shell.call('scp %s %s:%s' %(ks, pxe_ip, path))
test_util.test_dsc('Inspect chassis, Because vbmc have bugs, \
reset vm unable to enable boot options, power off/on then reset is worked')
# baremetal_operations.inspect_chassis(chassis_uuid)
for i in range(0, num):
baremetal_operations.inspect_chassis(chassis_list[i].uuid)
baremetal_operations.power_off_baremetal(chassis_list[i].uuid)
time.sleep(3)
status = baremetal_operations.get_power_status(chassis_list[i].uuid).status
if status != "Chassis Power is off":
test_util.test_fail('Fail to power off chassis %s, current status is %s' %(chassis_list[i].uuid, status))
baremetal_operations.power_on_baremetal(chassis_list[i].uuid)
time.sleep(3)
status = baremetal_operations.get_power_status(chassis_list[i].uuid).status
if status != "Chassis Power is on":
test_util.test_fail('Fail to power on chassis %s, current status is %s' %(chassis_list[i].uuid, status))
n = 0
while n < num:
thread_threshold = 10
check_thread_exception()
thread = threading.Thread(target=inspect_chassis, args=(chassis_list[n].uuid,))
n += 1
while threading.active_count() > thread_threshold:
time.sleep(1)
thread.start()
while threading.active_count() > 1:
time.sleep(0.05)
time.sleep(120)
test_util.test_dsc('Check hardware info')
for i in (0, num):
hwinfo = test_stub.check_hwinfo(chassis_list[i].uuid)
if not hwinfo:
test_util.test_fail('Fail to get hardware info during the first inspection')
test_util.test_dsc('Clear env')
for i in range (0, num):
test_stub.delete_vbmc(vm_list[i], hosts[i].managementIp)
baremetal_operations.delete_chassis(chassis_list[i].uuid)
vm_list[i].destroy()
baremetal_operations.delete_pxe(pxe_uuid)
cluster_ops.delete_cluster(baremetal_cluster_uuid)
test_util.test_pass('Create chassis Test Success')
def error_cleanup():
global vm, baremetal_cluster_uuid, pxe_uuid, host_ip
for i in range (0, num):
if vm_list[i]:
test_stub.delete_vbmc(vm_list[i], hosts[i].managementIp)
baremetal_operations.delete_chassis(chassis_list[i].uuid)
vm_list[i].destroy()
if hosts[i].managementIp:
test_stub.delete_vbmc(vm_list[i], hosts.managementIp)
if baremetal_cluster_uuid:
cluster_ops.delete_cluster(baremetal_cluster_uuid)
if pxe_uuid:
        baremetal_operations.delete_pxe(pxe_uuid)
|
This basic tank is made of a ribbed knit material. It has a deep V-back and a flowy fit. It goes great with skinny jeans or denim shorts, or with a kimono over the top! |
"""Models related to aliases management."""
import hashlib
import random
from django.core.urlresolvers import reverse
from django.db import models
from django.utils.encoding import python_2_unicode_compatible, smart_text
from django.utils.translation import ugettext as _, ugettext_lazy
from reversion import revisions as reversion
from modoboa.core import signals as core_signals
from modoboa.lib.email_utils import split_mailbox
from modoboa.lib.exceptions import (
PermDeniedException, BadRequest, Conflict, NotFound
)
from .base import AdminObject
from .domain import Domain
from .mailbox import Mailbox
from .. import signals
@python_2_unicode_compatible
class Alias(AdminObject):
"""Mailbox alias."""
address = models.CharField(
ugettext_lazy("address"), max_length=254,
help_text=ugettext_lazy(
"The alias address."
)
)
domain = models.ForeignKey(Domain, null=True)
enabled = models.BooleanField(
ugettext_lazy("enabled"),
help_text=ugettext_lazy("Check to activate this alias"),
default=True
)
internal = models.BooleanField(default=False)
description = models.TextField(
ugettext_lazy("Description"), blank=True)
expire_at = models.DateTimeField(
ugettext_lazy("Expire at"), blank=True, null=True)
_objectname = 'MailboxAlias'
class Meta:
permissions = (
("view_aliases", "View aliases"),
)
ordering = ["address"]
unique_together = (("address", "internal"), )
app_label = "admin"
def __str__(self):
return smart_text(self.address)
@classmethod
def generate_random_address(cls):
"""Generate a random address (local part)."""
m = hashlib.md5()
        for x in random.sample(range(10000000), 60):
            m.update(str(x).encode("utf-8"))
return m.hexdigest()[:20]
@property
def identity(self):
return self.address
@property
def name_or_rcpt(self):
rcpts_count = self.recipients_count
if not rcpts_count:
return "---"
rcpts = self.recipients
if rcpts_count > 1:
return "%s, ..." % rcpts[0]
return rcpts[0]
@property
def type(self):
"""FIXME: deprecated."""
return "alias"
@property
def tags(self):
return [{"name": "alias", "label": _("alias"), "type": "idt"}]
def get_absolute_url(self):
"""Return detail url for this alias."""
return reverse("admin:alias_detail", args=[self.pk])
def post_create(self, creator):
from modoboa.lib.permissions import grant_access_to_object
super(Alias, self).post_create(creator)
if creator.is_superuser:
for admin in self.domain.admins:
grant_access_to_object(admin, self)
def set_recipients(self, address_list):
"""Set recipients for this alias.
Special recipients:
* local mailbox + extension: r_mailbox will be set to local mailbox
* alias address == recipient address: valid only to keep local copies
(when a forward is defined) and to create exceptions when a catchall
is defined on the associated domain
"""
to_create = []
for address in set(address_list):
if not address:
continue
if self.aliasrecipient_set.filter(address=address).exists():
continue
local_part, domname, extension = (
split_mailbox(address, return_extension=True))
if domname is None:
raise BadRequest(
u"%s %s" % (_("Invalid address"), address)
)
domain = Domain.objects.filter(name=domname).first()
kwargs = {"address": address, "alias": self}
if (
(domain is not None) and
(
any(
r[1] for r in signals.use_external_recipients.send(
self, recipients=address)
) is False
)
):
rcpt = Mailbox.objects.filter(
domain=domain, address=local_part).first()
if rcpt is None:
rcpt = Alias.objects.filter(
address='%s@%s' % (local_part, domname)
).first()
if rcpt is None:
raise NotFound(
_("Local recipient {}@{} not found")
.format(local_part, domname)
)
if rcpt.address == self.address:
raise Conflict
kwargs["r_alias"] = rcpt
else:
kwargs["r_mailbox"] = rcpt
to_create.append(AliasRecipient(**kwargs))
AliasRecipient.objects.bulk_create(to_create)
# Remove old recipients
self.aliasrecipient_set.exclude(
address__in=address_list).delete()
@property
def recipients(self):
"""Return the recipient list."""
return (
self.aliasrecipient_set.order_by("address")
.values_list("address", flat=True)
)
@property
def recipients_count(self):
"""Return the number of recipients of this alias."""
return self.aliasrecipient_set.count()
def from_csv(self, user, row, expected_elements=5):
"""Create a new alias from a CSV file entry."""
if len(row) < expected_elements:
raise BadRequest(_("Invalid line: %s" % row))
address = row[1].strip()
localpart, domname = split_mailbox(address)
try:
domain = Domain.objects.get(name=domname)
except Domain.DoesNotExist:
raise BadRequest(_("Domain '%s' does not exist" % domname))
if not user.can_access(domain):
raise PermDeniedException
core_signals.can_create_object.send(
sender="import", context=user, object_type="mailbox_aliases")
core_signals.can_create_object.send(
sender="import", context=domain, object_type="mailbox_aliases")
if Alias.objects.filter(address=address).exists():
raise Conflict
self.address = address
self.domain = domain
self.enabled = (row[2].strip() in ["True", "1", "yes", "y"])
self.save()
self.set_recipients([raddress.strip() for raddress in row[3:]])
self.post_create(user)
def to_csv(self, csvwriter):
row = ["alias", self.address.encode("utf-8"), self.enabled]
row += self.recipients
csvwriter.writerow(row)
reversion.register(Alias)
@python_2_unicode_compatible
class AliasRecipient(models.Model):
"""An alias recipient."""
address = models.EmailField()
alias = models.ForeignKey(Alias)
# if recipient is a local mailbox
r_mailbox = models.ForeignKey(Mailbox, blank=True, null=True)
# if recipient is a local alias
r_alias = models.ForeignKey(
Alias, related_name="alias_recipient_aliases", blank=True, null=True)
class Meta:
app_label = "admin"
db_table = "modoboa_admin_aliasrecipient"
unique_together = [
("alias", "r_mailbox"),
("alias", "r_alias")
]
def __str__(self):
"""Return alias and recipient."""
return smart_text(
"{} -> {}".format(self.alias.address, self.address)
)
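# A minimal usage sketch for the models above (requires a configured
# Django/modoboa environment with an existing Domain and a user holding the
# appropriate permissions; all names are illustrative only):
#
#     domain = Domain.objects.get(name="example.com")
#     alias = Alias(address="sales@example.com", domain=domain, enabled=True)
#     alias.save()
#     alias.post_create(admin_user)   # grant permissions, as in from_csv()
#     alias.set_recipients(["alice@example.com", "bob@external.org"])
#     print(alias.recipients, alias.recipients_count)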
|
This week, Murthy Law Firm attorneys will answer questions regarding AC21 portability issues.
I am working for employer "A", which has two office locations, one on the West Coast and one on the East Coast. My PERM and I-140 have been filed and approved from the East Coast location. There are job openings with my current employer "A" at the West Coast location with the same job title and job responsibilities I currently have at the East Coast location. Can you please clarify the points below?
1. If I move from the East Coast to the West Coast, as there is a change of work location with the same employer "A", do I need to file AC21?
2. Do I need to file a 485J again for the West Coast location along with AC21? I have already filed a 485J for the East Coast location.
3. Within how many days do I need to inform USCIS about my change of job location (AC21)?
4. Is AC21 replaced by the 485J, or are both the same?
I got my GC EAD in 2015. After 8 months, I moved to a new employer using an EVL, since there was no 485J at that time.
A few months ago, I got a 485J RFE, and it was approved after my attorney's response.
The current date is 4 weeks behind my priority date. I am planning to change employers again now.
Is it OK to change employers now using a new 485J? Do you see any complexity with this?
My GC is filed under the EB-1C category with multinational manager duties. I currently have both an EAD and an L1-A visa.
My GC interview was completed last October, and I am waiting for my priority date to become current. It might become current around Jan 2020.
Can I change companies on the EAD?
Do we need to inform USCIS about the job change through AC21?
What happens if the new company files AC21 with some other job duties?
Your flexibility depends on how the labor certification was prepared and/or if you have a pending AOS. If you have a pending AOS you may be able to utilize AC21. It is advisable to discuss your specific circumstance directly with a U.S. immigration attorney.
It is permissible to utilize AC21 multiple times.
You can utilize an EAD to change employers, however filing an AC21 notification to USCIS is still not required unless specifically requested by USCIS (such as an RFE). Before making any changes it is advisable to confirm the new position is same/similar as required for AC21. |
from pylab import pi,floor,sign
class Angle:
lower_bound = 0.0
upper_bound = 2*pi
include_upper_bound = False
cyclical = True
value = None
def __init__(self, value, lower_bound=0, upper_bound=2*pi, include_upper_bound=False,type='rad', cyclical=True):
"""type may be 'rad' 'hms' or 'sdms'"""
self.lower_bound = lower_bound
self.upper_bound = upper_bound
self.include_upper_bound = include_upper_bound
self.cyclical=cyclical
if type == 'rad':
self.set_rad(value)
elif type == 'hms':
self.set_hms(*value)
elif type == 'sdms':
self.set_sdms(*value)
pass
def adjust(self, x=None):
v = self.value
if x is not None:
v = x
if self.cyclical:
if self.include_upper_bound and v == self.upper_bound:
return self.value
range = self.upper_bound - self.lower_bound
steps = floor((v - self.lower_bound)/range)
v -= steps*range
else:
v=max(self.lower_bound, min(v,self.upper_bound))
if x is None:
self.value = v
return v
def set_rad(self, new_value):
self.value = new_value
return self.adjust()
def set_hms(self, h,m,s):
self.value = (h+m/60.0+s/3600.0)*pi/12.0
return self.adjust()
def set_sdms(self, sign_char, d, m, s):
self.value = (d+m/60.0+s/3600.0)*pi/180.0
if sign_char == '-':
self.value = -self.value
return self.adjust()
def as_hms(self, decimals=0):
h_float = abs(self.value)*12.0/pi
h_int = int(floor(h_float))
m_float = 60*(h_float - h_int)
m_int = int(floor(m_float))
s_float = 60*(m_float - m_int)
s_int = int(floor(s_float))
frac_int = int(floor(10**decimals*(s_float - s_int)+0.5))
if frac_int >= 10**decimals:
frac_int -= 10**decimals
s_int +=1
if s_int >= 60:
s_int -= 60
m_int += 1
if m_int >= 60:
m_int -= 60
h_int += 1
max_h = int(floor(self.upper_bound*12/pi+0.5))
min_h = int(floor(self.lower_bound*12/pi+0.5))
if h_int >= max_h and self.cyclical and not self.include_upper_bound:
h_int -= (max_h-min_h)
sign_char=''
if self.value < 0:
sign_char = '-'
base_str = sign_char+str(h_int).rjust(2,'0')+':'+str(m_int).rjust(2,'0')+':'+str(s_int).rjust(2,'0')
        if decimals == 0:
return base_str
else:
return base_str+'.'+str(frac_int).rjust(decimals,'0')
def as_sdms(self,decimals=0):
min_val_size = len(str(int(floor(abs(self.lower_bound)*180/pi))))
max_val_size = len(str(int(floor(abs(self.upper_bound)*180/pi))))
deg_size=max(min_val_size, max_val_size)
sign_char = '- +'[int(sign(self.value))+1]
d_float = abs(self.value)*180/pi
d_int = int(floor(d_float))
m_float = 60*(d_float - d_int)
m_int = int(floor(m_float))
s_float = 60*(m_float - m_int)
s_int = int(floor(s_float))
frac_int = int(floor(10**decimals*(s_float - s_int)+0.5))
if frac_int >= 10**decimals:
frac_int -= 10**decimals
s_int +=1
if s_int >= 60:
s_int -= 60
m_int += 1
if m_int >= 60:
m_int -= 60
d_int += 1
max_d = int(floor(self.upper_bound*180/pi+0.5))
min_d = int(floor(self.lower_bound*180/pi+0.5))
if d_int >= max_d and self.cyclical and not self.include_upper_bound:
d_int -= (max_d-min_d)
base_str = sign_char+str(d_int).rjust(deg_size,'0')+':'+str(m_int).rjust(2,'0')+':'+str(s_int).rjust(2,'0')
        if decimals == 0:
return base_str
else:
return base_str+'.'+str(frac_int).rjust(decimals,'0')
pass
class RightAscension(Angle):
def __init__(self, value, type='rad'):
Angle.__init__(self,value, 0.0, 2*pi, include_upper_bound=False, cyclical=True, type=type)
pass
pass
class Declination(Angle):
def __init__(self, value, type='rad'):
Angle.__init__(self, value, -pi/2, pi/2, include_upper_bound=True, cyclical=False, type=type)
pass
pass
class HourAngle(Angle):
def __init__(self, value, type='rad'):
Angle.__init__(self, value, -pi, pi, include_upper_bound=False, cyclical=True, type=type)
pass
pass
class EquatorialDirection:
ra = RightAscension(0.0)
dec = Declination(0.0)
ref_frame = 'J2000'
def __init__(self,ra,dec, ref_frame='J2000'):
self.ra.set_rad(ra.value)
self.dec.set_rad(dec.value)
self.ref_frame = ref_frame
pass
def __str__(self):
return '%(ref_frame)s RA: %(ra)s, DEC: %(dec)s' % \
{'ra': self.ra.as_hms(),
'dec': self.dec.as_sdms(),
'ref_frame': self.ref_frame}
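# A minimal usage sketch of the classes above (values are illustrative):
if __name__ == '__main__':
    ra = RightAscension((12, 30, 0.0), type='hms')
    dec = Declination(('-', 45, 15, 0.0), type='sdms')
    direction = EquatorialDirection(ra, dec)
    print(direction)   # J2000 RA: 12:30:00, DEC: -45:15:00
    print(ra.as_hms(decimals=2), dec.as_sdms(decimals=1))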
|
This paper presents a view of nature as a network of infocomputational agents organized in a dynamical hierarchy of levels. It provides a framework for unification of currently disparate understandings of natural, formal, technical, behavioral and social phenomena based on information as a structure, differences in one system that cause the differences in another system, and computation as its dynamics, i.e. the physical process of morphological change in the informational structure. We address some of the frequent misunderstandings regarding the natural/morphological computational models and their relationships to physical systems, especially cognitive systems such as living beings. Natural morphological infocomputation as a conceptual framework necessitates generalization of models of computation beyond the traditional Turing machine model presenting symbol manipulation, and requires agent-based concurrent resource-sensitive models of computation in order to be able to cover the whole range of phenomena from physics to cognition. The central role of agency, particularly material vs. cognitive agency, is highlighted. |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys, os
import time,wave
import math
import json
import urllib
import urllib.request, urllib.error, urllib.parse
import http.cookiejar as cookielib
import base64
class RecaiusAsr():
def __init__(self, service_id="", passwd=""):
self._baseAuthUrl="https://api.recaius.jp/auth/v2/"
self._baseAsrUrl="https://api.recaius.jp/asr/v2/"
self._service_id=service_id
self._passwd=passwd
self._token = ''
self._uuid = ''
self._vid=1
self._silence = getWavData("silence.wav")
self._expiry=0
self._boundary = "----Boundary"
opener = urllib.request.build_opener(urllib.request.HTTPSHandler(debuglevel=0),
            urllib.request.HTTPCookieProcessor(http.cookiejar.CookieJar()))
urllib.request.install_opener(opener)
def setAccount(self, service_id, passwd):
self._service_id=service_id
self._passwd=passwd
#-------- Recaius Authorization
def requestAuthToken(self, ex_sec=600):
url = self._baseAuthUrl+'tokens'
headers = {'Content-Type' : 'application/json' }
data = { "speech_recog_jaJP": { "service_id" : self._service_id, "password" : self._passwd}, "expiry_sec" : ex_sec }
        request = urllib.request.Request(url, data=json.dumps(data).encode('utf-8'), headers=headers)
        try:
            result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            print ('Error code:', e.code)
            return None
        except urllib.error.URLError as e:
            print ('URLError reason:', e.reason)
            return None
else:
response = result.read()
res = response.decode('utf-8')
self._expiry = time.time() + ex_sec
print (res)
data=json.loads(res)
self._token=data['token']
return self._token
def refreshAuthToken(self, ex_sec=600):
url = self._baseAuthUrl+'tokens'
headers = {'Content-Type' : 'application/json', 'X-Token' : self._token }
data = { "speech_recog_jaJP": { "service_id" : self._service_id, "password" : self._passwd}, "expiry_sec" : ex_sec }
        request = urllib.request.Request(url, data=json.dumps(data).encode('utf-8'), headers=headers)
request.get_method = lambda : 'PUT'
        try:
            result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            print ('Error code:', e.code)
            return -1
        except urllib.error.URLError as e:
            print ('URLError reason:', e.reason)
            return -1
else:
response = result.read()
res = response.decode('utf-8')
self._expiry = time.time() + ex_sec
#print (res)
return self._expiry
def checkAuthToken(self):
query_string = {'service_name' : 'speech_recog_jaJP'}
        url = '{0}?{1}'.format(self._baseAuthUrl+'tokens', urllib.parse.urlencode(query_string))
headers = {'Content-Type' : 'application/json', 'X-Token' : self._token }
request = urllib.request.Request(url, headers=headers)
        try:
            result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            print ('Error code:', e.code)
            return -1
        except urllib.error.URLError as e:
            print ('URLError reason:', e.reason)
            return -1
else:
response = result.read()
res = response.decode('utf-8')
data=json.loads(res)
return data['remaining_sec']
#-------- Voice Recognition
def startVoiceRecogSession(self, model=1):
url = self._baseAsrUrl+'voices'
headers = {'Content-Type' : 'application/json', 'X-Token' : self._token }
data = { "audio_type": "audio/x-linear",
"result_type": "nbest",
#"push_to_talk": True,
"model_id": model,
"comment": "Start" }
        request = urllib.request.Request(url, data=json.dumps(data).encode('utf-8'), headers=headers)
        try:
            result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            print ('Error code:', e.code)
            print ('Reason:', e.reason)
            return False
        except urllib.error.URLError as e:
            print ('URLError reason:', e.reason)
            return False
else:
response = result.read()
res = response.decode('utf-8')
data=json.loads(res)
self._uuid = data['uuid']
            self._boundary = "----Boundary"+base64.b64encode(self._uuid.encode('utf-8')).decode('ascii')
return True
def endVoiceRecogSession(self):
url = self._baseAsrUrl+'voices/'+self._uuid
headers = {'X-Token' : self._token }
request = urllib.request.Request(url, headers=headers)
request.get_method = lambda : 'DELETE'
        try:
            result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            print ('Error code:', e.code)
            print ('Reason:', e.reason)
            return False
        except urllib.error.URLError as e:
            print ('URLError reason:', e.reason)
            return False
else:
response = result.read()
res = response.decode('utf-8')
if res : print (res)
return True
def getVoiceRecogResult(self, data):
#data = self._silence+data
data += self._silence+self._silence
voice_data = divString(data, 16364)
#voice_data = divString(data, 32728)
self._vid=0
for d in voice_data:
self._vid += 1
res = self.sendSpeechData(self._vid, d)
if res :
data=json.loads(res)
for d in data:
if d['type'] == 'RESULT' :
return d
print (res)
return self.flushVoiceRecogResult()
def sendSpeechData(self, vid, data):
url = self._baseAsrUrl+'voices/'+self._uuid
headers = {'Content-Type' : 'multipart/form-data','X-Token' : self._token }
form_data = ""
form_data += self._boundary+"\r\n"
form_data += "Content-Disposition: form-data;name=\"voice_id\"\r\n\r\n"
form_data += str(vid)+"\r\n"
form_data += self._boundary+"\r\n"
form_data += "Content-Disposition: form-data;name=\"voice\"\r\n"
form_data += "Content-Type: application/octet-stream\r\n\r\n"
form_data += data
form_data += "\r\n"
form_data += self._boundary+"\r\n"
request = urllib.request.Request(url)
request.add_header( 'X-Token', self._token )
request.add_header( 'Content-Type', 'multipart/form-data')
request.add_data(bytearray(form_data))
request.get_method = lambda : 'PUT'
        try:
            result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            print ('Error code:', e.code)
            print ('Reason:', e.reason)
            return False
        except urllib.error.URLError as e:
            print ('URLError reason:', e.reason)
            return False
else:
response = result.read()
res = response.decode('utf-8')
if res :
return res
return False
def flushVoiceRecogResult(self):
url = self._baseAsrUrl+'voices/'+self._uuid+"/flush"
headers = {'Content-Type' : 'application/json', 'X-Token' : self._token }
data = { "voice_id": self._vid }
        request = urllib.request.Request(url, data=json.dumps(data).encode('utf-8'), headers=headers)
request.get_method = lambda : 'PUT'
        try:
            result = urllib.request.urlopen(request)
        except urllib.error.HTTPError as e:
            print ('Error code:', e.code)
            print ('Reason:', e.reason)
            return False
        except urllib.error.URLError as e:
            print ('URLError reason:', e.reason)
            return False
else:
response = result.read()
res = response.decode('utf-8')
return res
def request_speech_recog(self, data):
result = ""
self.requestAuthToken()
recaius = self.startVoiceRecogSession()
if recaius :
result = self.getVoiceRecogResult(data)
self.endVoiceRecogSession()
return result
def getWavData(fname):
try:
f = wave.open(fname)
data = f.readframes(f.getnframes())
f.close()
return data
except:
return ""
def divString(s, n):
ll=len(s)
res = []
for x in range(int(math.ceil(float(ll) / n))):
res.append( s[ x*n : x*n + n ] )
return res
#
# Main
#
if __name__ == '__main__':
import glob
recaius = RecaiusAsr('haraisao_MAj34mD8GZ', 'isao11038867')
files = glob.glob('log/*.wav')
files.sort()
for f in files:
print (f)
data = getWavData(f)
result = recaius.request_speech_recog(data)
if result :
try:
data = json.loads( result )
i=1
for d in data[0]['result'] :
if 'confidence' in d :
score=str(d['confidence'])
else:
score="0.0"
print ("#"+str(i)+":"+d['str']+" ("+score+")")
#print d
i+=1
except:
print( result)
else:
print ("No Result")
print( "")
|
Get the best euro exchange rate on a credit card. The pound has fallen against the euro uSwitch Limited is authorised and regulated by the Financial Conduct.
Get the best exchange rates and save money now! Want to wait until the exchange rate has improved? UKForex offers a range of options so you get the best deal.
Getting the Best Euro Exchange Rate on Your Credit Well help you find the best ways to get the best Euro exchange rates so that you dont lose out on any.
Order Tesco Travel Money online Choose your currency and enter the amount you would like to buy and click check online exchange rates Best Travel Money. The British pound to euro exchange rate forecast and outlook vs the US British Pound To Euro Exchange Rate Best GBPEUR Exchange Rate Predicted.
Explore Easter at Tesco For gift Helping you save. Brand Guarantee Find out more. Tesco Clubcard Exchange your points for a heap Rate this page; Gift cards. Pound is up over 6pc as Brexit vote begins: should I buy dollars or and the current exchange rate against the euro best deal, with a 1. 28 rate.
Please visit a Tesco Travel Money bureau to see our in store Fixed rate savings. Fixed rate Are the in store exchange rates the same as the online. Best Euro Exchange Rates; Best US Dollar Exchange Rates; Best Australian Dollar Exchange Rates; Best Exchange Rate Finder @ Pound Sterling Live (C). Calculate live currency and foreign exchange rates with this free currency converter. Access premium XE Services like Rate Alerts.
savings accounts, insurance as well as foreign currency exchange. Tesco bank offers Tesco Club card Euro, US Dollars Lock in exchange rate each.
Compare euro exchange rates to find the best this tells you how many euros you get for each pound you exchange. How can I find the best euro exchange rate. Beat the Brexit effect when buying your holiday money fortune if you secure the best exchange an exchange rate of just 1. 14 to the pound.
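To put those figures in context (illustrative arithmetic only, not live rates): at 1.14 euros to the pound, exchanging £500 gets you about €570, while at a 1.28 rate the same £500 would get you about €640, so finding the better rate is worth roughly €70 on that exchange.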
Tesco Exchange Rates. money against the UKs leading foreign currency providers to find the best exchange rate. not currently offer a Euro travellers. Watch videoPound sterling set for alltime low to be leaning toward the first part of 2017 as the best moment to trigger the exchange rate; Euro; pound sterling.
Live Pound to Euro exchange rate Best Pound to Euro Exchange Rate To convert Pounds to Euros or determine the Pound Euro exchange rate simply use the. Compare today's best Euro exchange rates from the biggest currency suppliers in the UK including the Post Pound To Turkish Lira Exchange Rate Better Than Before.
Buy Euros. The Euro is the most used foreign currency of all the Thomas Cook Travel Money offers a competitive Euro exchange rate with the option to buy online. Watch video Airports start exchanging less than one Euro for each Pound Sterling. Caxton said the best London airport euro exchange rate was on offer at Gatwick. |
#! /usr/bin/env python
import os
import ipdb as pdb
import errno
from datetime import datetime
#import caffe
from loaders import load_imagenet_mean, load_labels, caffe
from jby_misc import WithTimer
from caffe_misc import shownet, RegionComputer, save_caffe_image
import numpy as np
default_layers = ['conv1', 'conv2', 'conv3', 'conv4', 'conv5', 'fc6', 'fc7', 'fc8', 'prob']
default_is_conv = [('conv' in ll) for ll in default_layers]
def hardcoded_get():
prototxt = '/home/jyosinsk/results/140311_234854_afadfd3_priv_netbase_upgraded/deploy_1.prototxt'
weights = '/home/jyosinsk/results/140311_234854_afadfd3_priv_netbase_upgraded/caffe_imagenet_train_iter_450000'
datadir = '/home/jyosinsk/imagenet2012/val'
filelist = 'mini_valid.txt'
imagenet_mean = load_imagenet_mean()
net = caffe.Classifier(prototxt, weights,
mean=imagenet_mean,
channel_swap=(2,1,0),
raw_scale=255,
image_dims=(256, 256))
net.set_phase_test()
net.set_mode_cpu()
labels = load_labels()
return net, imagenet_mean, labels, datadir, filelist
def mkdir_p(path):
# From https://stackoverflow.com/questions/600268/mkdir-p-functionality-in-python
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
class MaxTracker(object):
def __init__(self, is_conv, n_channels, n_top = 10, initial_val = -1e99, dtype = 'float32'):
self.is_conv = is_conv
self.max_vals = np.ones((n_channels, n_top), dtype = dtype) * initial_val
self.n_top = n_top
if is_conv:
self.max_locs = -np.ones((n_channels, n_top, 4), dtype = 'int') # image_idx, image_class, i, j
else:
self.max_locs = -np.ones((n_channels, n_top, 2), dtype = 'int') # image_idx, image_class
def update(self, blob, image_idx, image_class):
data = blob[0] # Note: makes a copy of blob, e.g. (96,55,55)
n_channels = data.shape[0]
data_unroll = data.reshape((n_channels, -1)) # Note: no copy eg (96,3025). Does nothing if not is_conv
maxes = data_unroll.argmax(1) # maxes for each channel, eg. (96,)
#insertion_idx = zeros((n_channels,))
#pdb.set_trace()
for ii in xrange(n_channels):
idx = np.searchsorted(self.max_vals[ii], data_unroll[ii, maxes[ii]])
if idx == 0:
# Smaller than all 10
continue
# Store new max in the proper order. Update both arrays:
# self.max_vals:
self.max_vals[ii,:idx-1] = self.max_vals[ii,1:idx] # shift lower values
self.max_vals[ii,idx-1] = data_unroll[ii, maxes[ii]] # store new max value
# self.max_locs
self.max_locs[ii,:idx-1] = self.max_locs[ii,1:idx] # shift lower location data
# store new location
if self.is_conv:
self.max_locs[ii,idx-1] = (image_idx, image_class) + np.unravel_index(maxes[ii], data.shape[1:])
else:
self.max_locs[ii,idx-1] = (image_idx, image_class)
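# Illustrative sketch of the top-k bookkeeping used in update() above (not part of
# the original file). max_vals[ii] is kept sorted ascending, so searchsorted gives
# the rank of a new activation and everything below it shifts left:
#
#   vals = np.array([1., 3., 5.])      # current top-3 for one channel, ascending
#   idx = np.searchsorted(vals, 4.0)   # -> 2: larger than two of the stored values
#   vals[:idx-1] = vals[1:idx]         # drop the smallest, shift the rest left
#   vals[idx-1] = 4.0                  # vals is now [3., 4., 5.]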
class NetMaxTracker(object):
def __init__(self, layers = default_layers, is_conv = default_is_conv, n_top = 10, initial_val = -1e99, dtype = 'float32'):
self.layers = layers
self.is_conv = is_conv
self.init_done = False
self.n_top = n_top
self.initial_val = initial_val
def _init_with_net(self, net):
self.max_trackers = {}
for layer,is_conv in zip(self.layers, self.is_conv):
blob = net.blobs[layer].data
self.max_trackers[layer] = MaxTracker(is_conv, blob.shape[1], n_top = self.n_top,
initial_val = self.initial_val,
dtype = blob.dtype)
self.init_done = True
def update(self, net, image_idx, image_class):
'''Updates the maxes found so far with the state of the given net. If a new max is found, it is stored together with the image_idx.'''
if not self.init_done:
self._init_with_net(net)
for layer in self.layers:
blob = net.blobs[layer].data
self.max_trackers[layer].update(blob, image_idx, image_class)
def load_file_list(filelist):
image_filenames = []
image_labels = []
with open(filelist, 'r') as ff:
for line in ff.readlines():
fields = line.strip().split()
image_filenames.append(fields[0])
image_labels.append(int(fields[1]))
return image_filenames, image_labels
def scan_images_for_maxes(net, datadir, filelist, n_top):
image_filenames, image_labels = load_file_list(filelist)
print 'Scanning %d files' % len(image_filenames)
print ' First file', os.path.join(datadir, image_filenames[0])
tracker = NetMaxTracker(n_top = n_top)
for image_idx in xrange(len(image_filenames)):
filename = image_filenames[image_idx]
image_class = image_labels[image_idx]
#im = caffe.io.load_image('../../data/ilsvrc12/mini_ilsvrc_valid/sized/ILSVRC2012_val_00000610.JPEG')
do_print = (image_idx % 100 == 0)
if do_print:
print '%s Image %d/%d' % (datetime.now().ctime(), image_idx, len(image_filenames))
with WithTimer('Load image', quiet = not do_print):
im = caffe.io.load_image(os.path.join(datadir, filename))
with WithTimer('Predict ', quiet = not do_print):
net.predict([im], oversample = False) # Just take center crop
with WithTimer('Update ', quiet = not do_print):
tracker.update(net, image_idx, image_class)
print 'done!'
return tracker
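# Rough end-to-end sketch (assumes the paths hard-coded in hardcoded_get() exist):
#
#   net, imagenet_mean, labels, datadir, filelist = hardcoded_get()
#   tracker = scan_images_for_maxes(net, datadir, filelist, n_top=10)
#   # tracker.max_trackers['conv5'].max_locs then holds, per channel, the
#   # (image_idx, image_class, i, j) locations of the top-10 activations.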
def save_representations(net, datadir, filelist, layer, first_N = None):
image_filenames, image_labels = load_file_list(filelist)
if first_N is None:
first_N = len(image_filenames)
assert first_N <= len(image_filenames)
image_indices = range(first_N)
print 'Scanning %d files' % len(image_indices)
assert len(image_indices) > 0
print ' First file', os.path.join(datadir, image_filenames[image_indices[0]])
indices = None
rep = None
for ii,image_idx in enumerate(image_indices):
filename = image_filenames[image_idx]
image_class = image_labels[image_idx]
do_print = (image_idx % 10 == 0)
if do_print:
print '%s Image %d/%d' % (datetime.now().ctime(), image_idx, len(image_indices))
with WithTimer('Load image', quiet = not do_print):
im = caffe.io.load_image(os.path.join(datadir, filename))
with WithTimer('Predict ', quiet = not do_print):
net.predict([im], oversample = False) # Just take center crop
with WithTimer('Store ', quiet = not do_print):
if rep is None:
rep_shape = net.blobs[layer].data[0].shape # e.g. (256,13,13)
rep = np.zeros((len(image_indices),) + rep_shape) # e.g. (1000,256,13,13)
indices = [0] * len(image_indices)
indices[ii] = image_idx
rep[ii] = net.blobs[layer].data[0]
print 'done!'
return indices,rep
def get_max_data_extent(net, layer, rc, is_conv):
'''Gets the maximum size of the data layer that can influence a unit on layer.'''
if is_conv:
conv_size = net.blobs[layer].data.shape[2:4] # e.g. (13,13) for conv5
layer_slice_middle = (conv_size[0]/2,conv_size[0]/2+1, conv_size[1]/2,conv_size[1]/2+1) # e.g. (6,7,6,7,), the single center unit
data_slice = rc.convert_region(layer, 'data', layer_slice_middle)
return data_slice[1]-data_slice[0], data_slice[3]-data_slice[2] # e.g. (163, 163) for conv5
else:
# Whole data region
return net.blobs['data'].data.shape[2:4] # e.g. (227,227) for fc6,fc7,fc8,prop
def output_max_patches(max_tracker, net, layer, idx_begin, idx_end, num_top, datadir, filelist, outdir, do_which):
do_maxes, do_deconv, do_deconv_norm, do_backprop, do_backprop_norm, do_info = do_which
assert do_maxes or do_deconv or do_deconv_norm or do_backprop or do_backprop_norm or do_info, 'nothing to do'
mt = max_tracker
rc = RegionComputer()
image_filenames, image_labels = load_file_list(filelist)
print 'Loaded filenames and labels for %d files' % len(image_filenames)
print ' First file', os.path.join(datadir, image_filenames[0])
num_top_in_mt = mt.max_locs.shape[1]
assert num_top <= num_top_in_mt, 'Requested %d top images but MaxTracker contains only %d' % (num_top, num_top_in_mt)
assert idx_end >= idx_begin, 'Range error'
size_ii, size_jj = get_max_data_extent(net, layer, rc, mt.is_conv)
data_size_ii, data_size_jj = net.blobs['data'].data.shape[2:4]
n_total_images = (idx_end-idx_begin) * num_top
for cc, channel_idx in enumerate(range(idx_begin, idx_end)):
unit_dir = os.path.join(outdir, layer, 'unit_%04d' % channel_idx)
mkdir_p(unit_dir)
if do_info:
info_filename = os.path.join(unit_dir, 'info.txt')
info_file = open(info_filename, 'w')
print >>info_file, '# is_conv val image_idx image_class i(if is_conv) j(if is_conv) filename'
# iterate through maxes from highest (at end) to lowest
for max_idx_0 in range(num_top):
max_idx = num_top_in_mt - 1 - max_idx_0
if mt.is_conv:
im_idx, im_class, ii, jj = mt.max_locs[channel_idx, max_idx]
else:
im_idx, im_class = mt.max_locs[channel_idx, max_idx]
recorded_val = mt.max_vals[channel_idx, max_idx]
filename = image_filenames[im_idx]
do_print = (max_idx_0 == 0)
if do_print:
print '%s Output file/image(s) %d/%d' % (datetime.now().ctime(), cc * num_top, n_total_images)
if mt.is_conv:
# Compute the focus area of the data layer
layer_indices = (ii,ii+1,jj,jj+1)
data_indices = rc.convert_region(layer, 'data', layer_indices)
data_ii_start,data_ii_end,data_jj_start,data_jj_end = data_indices
touching_imin = (data_ii_start == 0)
touching_jmin = (data_jj_start == 0)
# Compute how much of the data slice falls outside the actual data [0,max] range
ii_outside = size_ii - (data_ii_end - data_ii_start) # possibly 0
jj_outside = size_jj - (data_jj_end - data_jj_start) # possibly 0
if touching_imin:
out_ii_start = ii_outside
out_ii_end = size_ii
else:
out_ii_start = 0
out_ii_end = size_ii - ii_outside
if touching_jmin:
out_jj_start = jj_outside
out_jj_end = size_jj
else:
out_jj_start = 0
out_jj_end = size_jj - jj_outside
else:
ii,jj = 0,0
data_ii_start, out_ii_start, data_jj_start, out_jj_start = 0,0,0,0
data_ii_end, out_ii_end, data_jj_end, out_jj_end = size_ii, size_ii, size_jj, size_jj
if do_info:
print >>info_file, 1 if mt.is_conv else 0, '%.6f' % mt.max_vals[channel_idx, max_idx],
if mt.is_conv:
print >>info_file, '%d %d %d %d' % tuple(mt.max_locs[channel_idx, max_idx]),
else:
print >>info_file, '%d %d' % tuple(mt.max_locs[channel_idx, max_idx]),
print >>info_file, filename
if not (do_maxes or do_deconv or do_deconv_norm or do_backprop or do_backprop_norm):
continue
with WithTimer('Load image', quiet = not do_print):
im = caffe.io.load_image(os.path.join(datadir, filename))
with WithTimer('Predict ', quiet = not do_print):
net.predict([im], oversample = False) # Just take center crop, same as in scan_images_for_maxes
if len(net.blobs[layer].data.shape) == 4:
reproduced_val = net.blobs[layer].data[0,channel_idx,ii,jj]
else:
reproduced_val = net.blobs[layer].data[0,channel_idx]
if abs(reproduced_val - recorded_val) > .1:
print 'Warning: recorded value %s is suspiciously different from reproduced value %s. Is the filelist the same?' % (recorded_val, reproduced_val)
if do_maxes:
#grab image from data layer, not from im (to ensure preprocessing / center crop details match between image and deconv/backprop)
out_arr = np.zeros((3,size_ii,size_jj), dtype='float32')
out_arr[:, out_ii_start:out_ii_end, out_jj_start:out_jj_end] = net.blobs['data'].data[0,:,data_ii_start:data_ii_end,data_jj_start:data_jj_end]
with WithTimer('Save img ', quiet = not do_print):
save_caffe_image(out_arr, os.path.join(unit_dir, 'maxim_%03d.png' % max_idx_0),
autoscale = False, autoscale_center = 0)
if do_deconv or do_deconv_norm:
diffs = net.blobs[layer].diff * 0
if len(diffs.shape) == 4:
diffs[0,channel_idx,ii,jj] = 1.0
else:
diffs[0,channel_idx] = 1.0
with WithTimer('Deconv ', quiet = not do_print):
net.deconv_from_layer(layer, diffs)
out_arr = np.zeros((3,size_ii,size_jj), dtype='float32')
out_arr[:, out_ii_start:out_ii_end, out_jj_start:out_jj_end] = net.blobs['data'].diff[0,:,data_ii_start:data_ii_end,data_jj_start:data_jj_end]
if out_arr.max() == 0:
print 'Warning: Deconv out_arr in range', out_arr.min(), 'to', out_arr.max(), 'ensure force_backward: true in prototxt'
if do_deconv:
with WithTimer('Save img ', quiet = not do_print):
save_caffe_image(out_arr, os.path.join(unit_dir, 'deconv_%03d.png' % max_idx_0),
autoscale = False, autoscale_center = 0)
if do_deconv_norm:
out_arr = np.linalg.norm(out_arr, axis=0)
with WithTimer('Save img ', quiet = not do_print):
save_caffe_image(out_arr, os.path.join(unit_dir, 'deconvnorm_%03d.png' % max_idx_0))
if do_backprop or do_backprop_norm:
diffs = net.blobs[layer].diff * 0
diffs[0,channel_idx,ii,jj] = 1.0
with WithTimer('Backward ', quiet = not do_print):
net.backward_from_layer(layer, diffs)
out_arr = np.zeros((3,size_ii,size_jj), dtype='float32')
out_arr[:, out_ii_start:out_ii_end, out_jj_start:out_jj_end] = net.blobs['data'].diff[0,:,data_ii_start:data_ii_end,data_jj_start:data_jj_end]
if out_arr.max() == 0:
print 'Warning: Deconv out_arr in range', out_arr.min(), 'to', out_arr.max(), 'ensure force_backward: true in prototxt'
if do_backprop:
with WithTimer('Save img ', quiet = not do_print):
save_caffe_image(out_arr, os.path.join(unit_dir, 'backprop_%03d.png' % max_idx_0),
autoscale = False, autoscale_center = 0)
if do_backprop_norm:
out_arr = np.linalg.norm(out_arr, axis=0)
with WithTimer('Save img ', quiet = not do_print):
save_caffe_image(out_arr, os.path.join(unit_dir, 'backpropnorm_%03d.png' % max_idx_0))
if do_info:
info_file.close()
|
Hacker’s cheeks reddened. “Seems sorta stupid now, but at the time it didn’t. I wanted to know if his treatment centers were just for Mexicans, or if a white guy could get in, too.
I thought he’d ask for details, but he didn’t. He nodded once, still rubbing his jaw.
“That’s right. And that’s all. At least that’s suppose to be all. But when I was still an addict, I needed money all the time. And I met a guy.” He continued to pace, scratching the back of his head so hard he reminded me of a dog with fleas. I winced at the thought, realizing this might not be so far from the truth.
“He was a myste, like you,” Hacker went on. “I saw that right away. He said he could help me, and that far from havin’ to pay him he’d go ahead and pay me on top of what he could do for me. How could I say no?
Something stirred in the back of my mind, grasses rustling in a light wind. A memory, though I couldn’t place it.
He laughed, short and bitter. “It worked just the way they wanted it to. I don’t need to wait for the moons to become coyote. And I can change into him anytime I want. Changin’ back is . . . well that’s more complicated. Sometimes it’s quick, sometimes it takes a day or more. But all of that is beside the point. Always was, as it happens.
He lifted his t-shirt and pointed at a crater-like scar on his side, beneath his left arm.
So much had clicked into place for me while Hacker talked. That memory — it was from the seeing spell I’d cast in Sweetwater Park. This is what Dimples and Bear had been doing with the blood from the homeless man. Dimples’s spell made it possible for Bear, who must have been a were, to change anytime Dimples wanted him to. The roar of pain I had heard before their victim lost consciousness was Bear turning. For all I knew, he really was part bear.
Weres like Hacker and Bear had been made into servants of the dark sorcerers who changed them; wereslaves, in a manner of speaking. Being a were still carried a stigma, in some ways even more so than being a weremyste. At least we kept our human form. Our phasings were misunderstood, as was the more permanent psychological damage they caused. But some people valued the spells we could cast, and few ever questioned our humanity.
Weres, however, had been portrayed in movies and on television as monsters, and from all that I had heard — I’d never seen it for myself — their transformation to and from animal form could be terrifying for the uninitiated. Others in Hacker’s position had no recourse. Hacker could talk to Amaya, though clearly Jacinto had not been able to do much for him. But others like him would be reluctant to admit to anyone what they were, much less that they had been stripped of their freedom in this way. And having no magic of their own, they couldn’t fight back, not against a sorcerer.
But their plight also begged a question that chilled me to my core: If this could be done to weres, could it also be done to weremystes? Could a myste who was powerful enough cast a similar spell on me, so that he or she could induce in me at will the insanity and enhanced power of the phasings? Sure, I had access to spells, too. I could defend myself. To a point. But what if the myste in question was more skilled than I was, more powerful? Could I be used as a magical slave as well? Could my Dad? Could a myste, or a cabal of them, create an entire army of ensorcelled magical warriors, beyond reason, wielding spells too powerful for those not in the midst of a magically induced phasing to withstand? |
# -*- coding: utf-8 -*-
#
# HnTool rules - ssh
# Copyright (C) 2009-2010 Hugo Doria <mail@hugodoria.org>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
import os
import HnTool.modules.util
from HnTool.modules.rule import Rule as MasterRule
class Rule(MasterRule):
def __init__(self, options):
MasterRule.__init__(self, options)
self.short_name="ssh"
self.long_name="Checks security problems on sshd config file"
self.type="config"
self.required_files = ['/etc/ssh/sshd_config', '/etc/sshd_config']
def requires(self):
return self.required_files
def analyze(self, options):
check_results = self.check_results
ssh_conf_file = self.required_files
for sshd_conf in ssh_conf_file:
if os.path.isfile(sshd_conf):
# dict with all the lines
lines = HnTool.modules.util.hntool_conf_parser(sshd_conf)
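                # For reference, the directives inspected below appear in sshd_config
                # roughly like this (illustrative snippet only, not a recommendation):
                #
                #   Port 22
                #   Protocol 2
                #   PermitRootLogin no
                #   PermitEmptyPasswords no
                #   X11Forwarding no
                #   AllowTcpForwarding no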
# Checking if SSH is using the default port
if 'Port' in lines:
if int(lines['Port']) == 22:
check_results['low'].append('SSH is using the default port')
else:
check_results['ok'].append('SSH is not using the default port')
else:
check_results['low'].append('SSH is using the default port')
# Checking if the Root Login is allowed
if 'PermitRootLogin' in lines:
if lines['PermitRootLogin'] == 'yes':
check_results['medium'].append('Root access allowed')
else:
check_results['ok'].append('Root access is not allowed')
else:
check_results['medium'].append('Root access is allowed')
# Checking if SSH is using protocol v2 (recommended)
if 'Protocol' in lines:
if int(lines['Protocol']) == 2:
check_results['ok'].append('SSH is using protocol v2')
else:
check_results['high'].append('SSH is not using protocol v2')
else:
check_results['high'].append('SSH is not using protocol v2')
# Checking if empty password are allowed (shouldn't)
if 'PermitEmptyPasswords' in lines:
if lines['PermitEmptyPasswords'] == 'yes':
check_results['high'].append('Empty passwords are allowed')
else:
check_results['ok'].append('Empty passwords are not allowed')
else:
check_results['high'].append('Empty passwords are allowed')
# Checking if X11 Forward is allowed (shouldn't)
if 'X11Forwarding' in lines:
if lines['X11Forwarding'] == 'yes':
check_results['low'].append('X11 forward is allowed')
else:
check_results['ok'].append('X11 forward is not allowed')
else:
check_results['ok'].append('X11 forward is not allowed')
# Checking if SSH allow TCP Forward (shouldn't)
if 'AllowTcpForwarding' in lines:
if lines['AllowTcpForwarding'] == 'yes':
check_results['low'].append('TCP forwarding is allowed')
else:
check_results['ok'].append('TCP forwarding is not allowed')
else:
check_results['low'].append('TCP forwarding is allowed')
return check_results |
We outperform the competition on Dell motherboard repair hands down, so much so that in the last 90 days we have repaired over 100 laptops shipped to us from all over the world.
Yes, you read that correctly. Our Dell motherboard repairs are so good that we have had parts sent to us for repair from all over the world, including Australia, Germany, Japan, the Dominican Republic and many more, as well as from almost all 50 states. We are hands down head and shoulders above our competition when it comes to Dell motherboard repair. We have been working on laptops at a component level since 2004. Our staff is trained, skilled, and trusted to complete almost any task you need when getting your Dell motherboard repaired. Our staff has full access to all Dell and HP schematics to aid in our repairs. Our electronic technicians have real-world experience in the avionics, automotive, and electronics industries, and we employ a mixed staff including military veterans and minorities.
What do the customers say about our Dell motherboard repair?
One of our clients sent us an email well after 30 days, actually almost a year later, saying that we had fixed their Dell motherboard.
Does your Dell motherboard problem involve a blank screen, no wireless, no webcam, sound that has stopped working, a spinning wheel, a circle with a line, or a DVD or SATA hard drive that is no longer recognized?
Does this sound like a question you have today? If it does, we can help you repair your laptop, from reflowing to reballing to component-level repairs, based on your issue. We specialize in Dell motherboard repair. Most of our Dell motherboard repair services run between $75 and $125.
What models do you support Dell motherboard repair on?
A list of models we support and perform Dell motherboard repair include: All Dell models. |
# -*- coding: utf-8; -*-
#
# This file is part of Superdesk.
#
# Copyright 2017 Sourcefabric z.u. and contributors.
#
# For the full copyright and license information, please see the
# AUTHORS and LICENSE files distributed with this source code, or
# at https://www.sourcefabric.org/superdesk/license
from apps.archive.common import ARCHIVE
from superdesk import get_resource_service
from superdesk.metadata.item import ITEM_TYPE, CONTENT_TYPE, PUBLISH_STATES, \
ITEM_STATE
from superdesk.resource import Resource
from apps.packages.package_service import PackageService, create_root_group,\
get_item_ref
from eve.utils import config
from eve.validation import ValidationError
from superdesk.errors import SuperdeskApiError
from superdesk.services import BaseService
from superdesk.metadata.packages import GROUPS, GROUP_ID, REFS, RESIDREF,\
ROOT_GROUP, ID_REF, PACKAGE_TYPE
class PublishedPackageItemsResource(Resource):
schema = {
'package_id': {
'type': 'string',
'required': True
},
'new_items': {
'type': 'list',
'required': True,
'schema': {
'type': 'dict',
'schema': {
'group': {'type': 'string'},
'item_id': {'type': 'string'}
}
}
}
}
datasource = {
'source': 'archive'
}
resource_methods = ['POST']
privileges = {'POST': ARCHIVE}
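# A POST body accepted by the schema above would look roughly like this
# (illustrative identifiers only):
#
#   {
#       "package_id": "<published package _id>",
#       "new_items": [
#           {"group": "main", "item_id": "<archive item _id>"}
#       ]
#   }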
class PublishedPackageItemsService(BaseService):
package_service = PackageService()
def create(self, docs, **kwargs):
ids = []
for doc in docs:
original = get_resource_service(ARCHIVE).find_one(req=None, _id=doc['package_id'])
if not original or original[ITEM_TYPE] != CONTENT_TYPE.COMPOSITE:
raise SuperdeskApiError.badRequestError('Invalid package identifier')
if original[ITEM_STATE] not in PUBLISH_STATES:
raise SuperdeskApiError.badRequestError('Package was not published')
items = {}
for new_item in doc['new_items']:
item = get_resource_service(ARCHIVE).find_one(req=None, _id=new_item['item_id'])
if not item:
raise SuperdeskApiError.badRequestError('Invalid item identifier %s' % new_item['item_id'])
try:
self.package_service.check_for_circular_reference(original, new_item['item_id'])
except ValidationError:
                    raise SuperdeskApiError.badRequestError('Circular reference in item %s' % new_item['item_id'])
items[item[config.ID_FIELD]] = item
updates = {key: original[key] for key in [config.ID_FIELD, PACKAGE_TYPE, GROUPS]
if key in original}
create_root_group([updates])
items_refs = []
for new_item in doc['new_items']:
items_refs.append(self._set_item_assoc(updates, new_item, items[new_item['item_id']]))
get_resource_service(ARCHIVE).system_update(original[config.ID_FIELD], updates, original)
for item_ref in items_refs:
self.package_service.update_link(updates, item_ref)
items_published = [new_item[ITEM_STATE] in PUBLISH_STATES for new_item in items.values()]
if any(items_published):
get_resource_service('archive_correct').patch(id=doc['package_id'], updates=updates)
ids.append(original[config.ID_FIELD])
return ids
def _set_item_assoc(self, package, new_item, item_doc):
group = self._get_group(package, new_item['group'])
for assoc in group[REFS]:
if assoc.get(RESIDREF) == new_item['item_id']:
return assoc
item_ref = get_item_ref(item_doc)
group[REFS].append(item_ref)
return item_ref
def _get_group(self, package, group):
for package_group in package[GROUPS]:
if group == package_group[GROUP_ID]:
return package_group
self._add_group_in_root(group, package[GROUPS])
package[GROUPS].append({GROUP_ID: group, REFS: []})
return package[GROUPS][-1]
def _add_group_in_root(self, group, groups):
root_refs = []
for group_meta in groups:
if group_meta.get(GROUP_ID) == ROOT_GROUP:
root_refs = [ref[ID_REF] for ref in group_meta[REFS]]
if group not in root_refs:
group_meta[REFS].append({ID_REF: group})
|
Joseph Henry Glennon was born on 22 January 1885 at Chelsea, Suffolk County, Massachusetts.1 He was the son of Daniel Glennon and Catherine Gilman.
Joseph Henry Glennon was listed as the head of household on the 1920 census on 3 January 1920 at 27 School Street, Waltham, Middlesex County, Massachusetts. Also living in the house was his wife, Mary Glennon, his son, Robert Glennon and Thomas Jennings Glennon.2 Joseph Henry Glennon was on the census of 3 January 1920 at 27 School Street, Waltham, Middlesex County, Massachusetts, ED 486, Ward 2, Page 6B, Image 700, Line 70, Dwelling 103, Family 129 Film# T625-719. He lived in December 1931 at 411 Saratoga Street, East Boston, Suffolk County, Massachusetts; Mass card for F.J. Jennings. |
import nickmab.async_util.web as w
from _utils import expect_specific_err as _expect_specific_err
'''To be run by executing py.test in the parent dir'''
def test_json_query_pool():
q = {
'ip': 'http://ip.jsontest.com/',
'headers': 'http://headers.jsontest.com/',
}
p = w.JSONQueryPool(1, q)
assert not p.is_finished
p.run()
assert p.is_finished
assert not p.has_exception
assert isinstance(p.result['ip'], dict)
assert isinstance(p.result['headers'], dict)
def test_json_query_pool_with_exception():
q = {
'nil': 'dslkfjlsdkfjlksd'
}
p = w.JSONQueryPool(1, q)
p.run()
assert p.has_exception
def test_json_query_pool_no_target():
_expect_specific_err(w.JSONQueryPool, ValueError)
def test_json_query_pool_wrong_type():
_expect_specific_err(w.JSONQueryPool, TypeError,
kwargs={ 'num_worker_threads': 'fun', 'queries': { 's': 's' } })
_expect_specific_err(w.JSONQueryPool, TypeError,
kwargs={ 'queries': { 1: 'fun' } })
_expect_specific_err(w.JSONQueryPool, TypeError,
kwargs={ 'queries': { '1': 2 } })
_expect_specific_err(w.JSONQueryPool, TypeError, kwargs={ 'queries': 123 })
|
The first night in Beijing was much less than I expected. I was told by who I was couch surfing with that I would be rooming with women teachers and students. I ended up in a room with 3 late teenage boys and the guy who offered me the "couch". He first put me on the top bunk above the teens. I haven't had to climb up on a top bunk for a few years and I'm not the type of person who sleeps or climbs on the top bunk. I get up there and was waiting for people to go to bed as I was completely exhausted and all the lights were on. Finally, the guy told me I could sleep in his bottom bunk which was pretty much in a closet with windows.
I am usually quite content and figured if I can't deal I will just find a hotel. I guess I am just getting to an age where I want a bit more comfort... Especially if I'm not home in TicTac.
The bathrooms in China are a bit different. It is one open space... No shower curtain or anything so when you shower the entire room gets wet... Including the toilet. I was quite happy to have a western toilet and not a hole in the ground but it was always wet... And the floor was always wet so you had to wipe down the toilet and hold up your pant legs so your clothes wouldn't get wet. Also, sharing a bathroom with teenage boys is disgusting. It smelled so bad I would gag while brushing my teeth. I wasn't courageous enough to shower in that bathroom. Yuck-O! I have no idea what this guy was thinking when he offered me the place to stay.
The first day I went to the Forbidden City and the 2 parks surrounding the Forbidden City. It was a lot of walking and a lot of stairs, but it was a beautiful day. I will have more pictures when I get back to the states and can take them off my camera but here are a few to start from my iPad.
Better photos when I get back to the states.
What I learned quickly is the subway is super simple in Beijing. I downloaded a Beijing Metro app on my iPad which was helpful but there are subway maps everywhere to check to see where you need to go. I bought one ticket each time I went to the subway. The temp tickets only last the day and you scan to get in and insert them to get out without them being returned. Don't be afraid to push to get a spot. It wasn't as crowded as I thought it would be. Certain times of the day were quite crowded but nothing like you see on YouTube where they push people in.
Every place has security so if you can get away with not carrying a bag then don't. I never carried my bag and got thru security quickly but if you bring a bag you have to wait in huge lines to get the bag checked thru the scanner. I'm not a huge fan of crowds and people so if I can avoid I do.
On the way back to where I was staying I stopped at the Super Walmart for some food since there weren't many places to eat in the area. What an interesting place. First, there would be no RV parking there. They had underground parking and a lot of people rode their bikes.
There are multiple floors... Just confusing. I only went to the grocery section which had fresh fish on ice and in water. Everything was strange looking but you could find what you needed. In the produce section they had a counter with workers that weighed your produce and put a tag on it so there is no looking up at the checkout. I guess to save time but not sure how efficient that really is.
I found a couple things to eat and had to go in what felt like circles and escalators to the check out... And guess what... Not enough checkers... And it was hot... Either no AC or it wasn't on very high. Lines were much closer together and crowds of people. I almost just left as it was taking forever but I didn't have any food so I had to suffer thru... I survived.
It was an enjoyable day but I was dreading going back to where I was staying with the teenage boys. I told myself after my tour the next day I could find a new place... So that meant two more nights. I went back to where I was staying and the door was locked... The place was supposed to be open all the time. Luckily I knocked and someone let me in. I went straight to my bed. It was a crowded place with many dorm-type rooms and the open spaces were filled with stuff. I just stayed in my bottom bunk, watched a movie and fell asleep not talking to anyone. It was very confusing because the deal was I spoke with his students for an hour a day and I would get a meal and a place to stay. No one was ever around for conversation or a meal... And to be honest the place was dirty and smelled funny so there was no way I would be able to eat there.
The lovely view from my closet bunk bed.
Just kept telling myself... It is a free place sleep. Fell asleep excited about the Beijing Sideways tour to the Great Wall in the morning. |
from functools import reduce

from server.logic.grid.interval import into_interval
###############################################################################
# DEBUGGING AND PRINTING STATIC DATA #
###############################################################################
def store_coverage(grid):
""" output filled cells to text file "Ghent.txt" """
with open("ghent.txt", "w+") as f:
for row in grid.data:
f.write("%s\n" % ''.join(" " if len(field)
== 0 else "##" for field in row))
print("SIZE: %i %i" % (len(grid.data), len(grid.data[0])))
def store_graph(graph):
""" output city roads to svg file. """
bounds = reduce(lambda x, y: x + y, (into_interval(node, node, 0.0)
for (_, node) in graph.iter_nodes()))
SCALE = 100
with open("ghent.svg", "w+") as f:
f.write('<svg xmlns="http://www.w3.org/2000/svg" \
xmlns:xlink="http://www.w3.org/1999/xlink">\n')
for (start_id, _, end_id) in graph.iter_edges():
f.write('<line x1="%f" y1="%f" x2="%f" y2="%f" style="stroke:#000000;"/>\n' %
((-graph.get(start_id).x + bounds.maxx) * SCALE,
(-graph.get(start_id).y + bounds.maxy) * SCALE,
(-graph.get(end_id).x + bounds.maxx) * SCALE,
(-graph.get(end_id).y + bounds.maxy) * SCALE))
f.write("</svg>")
|
This is my favorite crochet project so far. I love it!!
SOOOO pretty! I love the colors you chose. |
import weakref
import numba.cuda
import numpy as np
from .cuda import cuda_deserialize, cuda_serialize
from .serialize import dask_deserialize, dask_serialize
try:
from .rmm import dask_deserialize_rmm_device_buffer
except ImportError:
dask_deserialize_rmm_device_buffer = None
@cuda_serialize.register(numba.cuda.devicearray.DeviceNDArray)
def cuda_serialize_numba_ndarray(x):
# Making sure `x` is behaving
if not (x.flags["C_CONTIGUOUS"] or x.flags["F_CONTIGUOUS"]):
shape = x.shape
t = numba.cuda.device_array(shape, dtype=x.dtype)
t.copy_to_device(x)
x = t
header = x.__cuda_array_interface__.copy()
header["strides"] = tuple(x.strides)
header["lengths"] = [x.nbytes]
frames = [
numba.cuda.cudadrv.devicearray.DeviceNDArray(
shape=(x.nbytes,), strides=(1,), dtype=np.dtype("u1"), gpu_data=x.gpu_data,
)
]
return header, frames
@cuda_deserialize.register(numba.cuda.devicearray.DeviceNDArray)
def cuda_deserialize_numba_ndarray(header, frames):
(frame,) = frames
shape = header["shape"]
strides = header["strides"]
arr = numba.cuda.devicearray.DeviceNDArray(
shape=shape,
strides=strides,
dtype=np.dtype(header["typestr"]),
gpu_data=numba.cuda.as_cuda_array(frame).gpu_data,
)
return arr
@dask_serialize.register(numba.cuda.devicearray.DeviceNDArray)
def dask_serialize_numba_ndarray(x):
header, frames = cuda_serialize_numba_ndarray(x)
frames = [memoryview(f.copy_to_host()) for f in frames]
return header, frames
@dask_deserialize.register(numba.cuda.devicearray.DeviceNDArray)
def dask_deserialize_numba_array(header, frames):
if dask_deserialize_rmm_device_buffer:
frames = [dask_deserialize_rmm_device_buffer(header, frames)]
else:
frames = [numba.cuda.to_device(np.asarray(memoryview(f))) for f in frames]
for f in frames:
weakref.finalize(f, numba.cuda.current_context)
arr = cuda_deserialize_numba_ndarray(header, frames)
return arr
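# Rough round-trip sketch (illustrative; assumes a CUDA-capable GPU is available):
#
#   x = numba.cuda.to_device(np.arange(10, dtype="f8"))
#   header, frames = cuda_serialize_numba_ndarray(x)     # frames stay on the device
#   y = cuda_deserialize_numba_ndarray(header, frames)   # zero-copy view of the same data
#
#   header, frames = dask_serialize_numba_ndarray(x)     # frames copied to host memory
#   z = dask_deserialize_numba_array(header, frames)     # copied back onto the device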
|
The United Nations refused to recognize the Taliban government, with the United States imposing heavy sanctions on them, similar to those placed on North Korea.
She was married to an Afghan businessman and had recently relocated to Afghanistan. They live as housewives for the remainder of their lives. He was released in late after serving just four years - a common phenomenon in Afghanistan, where the Taliban often hold influence over the government. One picture, taken by the Afghan photojournalist Farzana Wahidy, resonated especially profoundly with women and men alike. |
#!/usr/bin/env python
from setuptools import setup, find_packages
long_description = open('README.md').read()
setup(
name='pyspark-flame',
description='A low-overhead sampling profiler for PySpark, that outputs Flame Graphs',
long_description=long_description,
long_description_content_type='text/markdown',
author='James Pickering',
author_email='james_pic@hotmail.com',
license='MIT',
url='https://github.com/jamespic/pyspark-flame',
packages=find_packages('src'),
package_dir={'': 'src'},
scripts=['FlameGraph/flamegraph.pl'],
install_requires=['pyspark'],
use_scm_version=True,
setup_requires=['setuptools_scm'],
test_suite='test',
classifiers=[
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8'
]
)
|
Yehuda Amichai, the prominent, internationally known Israeli poet, was on campus in Fall 1998 as the first Sidney Harman Writer-in-Residence. Mr. Amichai’s poetry has been translated into over thirty languages, most recently Czech and Albanian, and he is the author of many collections of poetry, including Now and in Other Days (1955), Two Hopes Away (1958), In the Park (1960), Collected Poems (1963), Poems (1969), Songs of Jerusalem and Myself (1973), Amen (1977), Time (1979), Love Poems (1981), Even a Fist Was Once an Open Palm with Fingers (1991), and Yehuda Amichai: A Life of Poetry, 1948-1994 (1994). His most recent collection, Open Closed Open, appeared in this country in the year 2000. His play A Journey to Nineveh was produced by the Habimah National Theatre in Tel Aviv in 1964. He is also the author of a novel, Not of This Time, Not of This Place (1964), and a collection of short stories, The World Is a Room (1985). An honorary member of the American Academy of Arts and Letters, Mr. Amichai has been the recipient of numerous international honors.
Born in 1924 in Wurzburg, Germany, Mr. Amichai immigrated with his parents to Palestine in 1936. He served in the British Army in World War II and afterward in the Haganah underground. He also saw active service in the Israeli War of Independence and the Sinai Campaign. He lives in Jerusalem.
On grownups, He won't take pity anymore.
Now and in other days. |
# ------------------------------------------------------------------------------------------------
# Copyright (c) 2016 Microsoft Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
# associated documentation files (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge, publish, distribute,
# sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or
# substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT
# NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# ------------------------------------------------------------------------------------------------
import MalmoPython
agent_host = MalmoPython.AgentHost()
agent_host.setVideoPolicy( MalmoPython.VideoPolicy.LATEST_FRAME_ONLY )
agent_host.setRewardsPolicy( MalmoPython.RewardsPolicy.SUM_REWARDS )
agent_host.setObservationsPolicy( MalmoPython.ObservationsPolicy.LATEST_OBSERVATION_ONLY )
world_state = agent_host.getWorldState()
assert not world_state.has_mission_begun, 'World state says mission has already begun.'
assert not world_state.is_mission_running, 'World state says mission is already running.'
assert world_state.number_of_observations_since_last_state == 0, 'World state says observations already received.'
assert world_state.number_of_rewards_since_last_state == 0, 'World state says rewards already received.'
assert world_state.number_of_video_frames_since_last_state == 0, 'World state says video frames already received.'
assert len( world_state.observations ) == 0, 'World state has observations stored.'
assert len( world_state.rewards ) == 0, 'World state has rewards stored.'
assert len( world_state.video_frames ) == 0, 'World state has video frames stored.'
print(agent_host.getUsage())
|
Position summary: Broken Arrow Nursery is seeking applicants for a Horticulturist Assistant position. The position averages 32-48 hours per week and runs from mid-March through mid-November. Opportunities to extend the work season into December may also be available. We offer competitive compensation that is commensurate with experience. Main responsibilities will include the daily maintenance and care of the remarkably exciting diversity of rare and unusual plants we grow. Additional opportunities exist to help with all phases of woody and herbaceous plant production including propagation and integrated pest management. If you’re passionate and excited about plants, this is the job for you!
Requirements: Applicants should be energetic, physically fit and have the ability to follow directions and work independently as well as part of a team. Knowledge and experience with plant care and production is desirable. Basic knowledge of plant material is beneficial but not essential. The flexibility to work on some weekend days is preferred. |
# Copyright 2012-2013 John Sullivan
# Copyright 2012-2013 Other contributers as noted in the CONTRIBUTERS file
#
# This file is part of Galah.
#
# Galah is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Galah is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Galah. If not, see <http://www.gnu.org/licenses/>.
import galah.sheep.utility.universal as universal
import galah.sheep.utility.exithelpers as exithelpers
from galah.base.flockmail import FlockMessage
import threading
import logging
import consumer
import producer
import time
import zmq
# Load Galah's configuration.
from galah.base.config import load_config
config = load_config("sheep")
# Set up logging
import logging
logger = logging.getLogger("galah.sheep.maintainer")
poll_timeout = 10
# A counter used to generate names for consumer threads, not guaranteed to be
# the number of consumers currently extant.
_consumer_counter = 0
def start_consumer():
global _consumer_counter
consumerThread = threading.Thread(target = consumer.run,
name = "consumer-%d" % _consumer_counter)
consumerThread.start()
_consumer_counter += 1
return consumerThread
def start_producer():
producer_thread = threading.Thread(target = producer.run, name = "producer")
producer_thread.start()
return producer_thread
@universal.handleExiting
def run(znconsumers):
log = logging.getLogger("galah.sheep.maintainer")
log.info("Maintainer starting")
producer = start_producer()
consumers = []
# Continually make sure that all of the threads are up until it's time to
# exit
while not universal.exiting:
if not universal.orphaned_results.empty():
logger.warning(
"Orphaned results detected, going into distress mode."
)
while not universal.orphaned_results.empty():
try:
                    # We want to create a whole new socket every time so we don't
# stack messages up in the queue. We also don't want to just
# send it once and let ZMQ take care of it because it might
# be eaten by a defunct shepherd and then we'd be stuck forever.
shepherd = universal.context.socket(zmq.DEALER)
shepherd.linger = 0
shepherd.connect(config["shepherd/SHEEP_SOCKET"])
shepherd.send_json(FlockMessage("distress", "").to_dict())
logger.info(
"Sent distress message to shepherd, waiting for response."
)
message = exithelpers.recv_json(shepherd, timeout = 1000 * 60)
message = FlockMessage.from_dict(message)
if message.type == "bloot" and message.body == "":
while not universal.orphaned_results.empty():
result = universal.orphaned_results.get()
try:
shepherd.send_json(
FlockMessage("result", result).to_dict()
)
confirmation = exithelpers.recv_json(
shepherd, timeout = 1000 * 5
)
confirmation = FlockMessage.from_dict(confirmation)
if confirmation.type == "bloot" and \
confirmation.body == "":
continue
except:
universal.orphaned_results.put(result)
raise
except universal.Exiting:
logger.warning(
"Orphaned results have not been sent back to the "
"shepherd. I WILL NOT ABANDON THEM, YOU WILL HAVE TO "
"KILL ME WITH FIRE! (SIGKILL is fire in this analogy)."
)
# Nah man.
universal.exiting = False
continue
except exithelpers.Timeout:
continue
# Remove any dead consumers from the list
dead_consumers = 0
for c in consumers[:]:
if not c.isAlive():
dead_consumers += 1
consumers.remove(c)
if dead_consumers > 0:
logger.warning(
"Found %d dead consumers, restarting them.", dead_consumers
)
# Start up consumers until we have the desired amount
while len(consumers) < znconsumers:
consumers.append(start_consumer())
# If the producer died, start it again
if not producer.isAlive():
log.warning("Found dead producer, restarting it.")
producer = start_producer()
# Sleep for awhile
time.sleep(poll_timeout)
raise universal.Exiting()
|
If Manchester United were to let Marouane Fellaini leave in this transfer window, it would be a mistake. Plain and simple. He offers a great deal to the squad in terms of versatility and adding an extra dimension, so to move him on would be short sighted to say the least.
We now know that AC Milan almost certainly won't be taking Fellaini this January. Rossoneri chief executive Adriano Galliani recently confirmed that while he is a keen admirer of the player, there is "no way" that an agreement will be struck.
But what was disturbing is that so many United fans were jumping for joy when it looked like the Belgian was actually going. And those same people will be praying that something else comes up before the deadline on 1st February.
Fellaini has had a raw deal from supporters ever since he joined United. That is, barring a few weeks in March and April 2015 when he was in an excellent run of form, perhaps highlighting the fickle nature of many modern 'fans'.
As the only major signing in the summer of 2013, he unfairly bore the brunt of the frustrations that should have been directed at David Moyes and Ed Woodward. Injury and niggling fitness issues then prevented him from making any kind of impact in his debut season as a United player.
To put it bluntly, he was never given a chance by people who immediately decided he was going to be a flop. Even when he did start performing under the tutelage of Louis van Gaal, 'begrudging' is an appropriate word for how that particular upturn was viewed.
The summer arrivals of Bastian Schweinsteiger, Morgan Schneiderlin and Anthony Martial increased the competition for places, but Fellaini doesn't have to be a regular starter to still be an invaluable member of the squad at Old Trafford.
Against Bournemouth in December, he looked to be the only United player who offered any attacking threat and was rewarded with a goal. If he's going to play in an advanced role he needs service, and if he's deeper there are few better in the league at bullying opponents.
Fellaini can terrorise even the best Premier League players - that much was clear when he used to play against United and other big clubs in his Everton days. And to deridingly call him a 'Plan B player' really isn't accurate, although it certainly doesn't hurt that he has that ability in his locker.
It probably won't now happen in this window, but it would be a mistake for United to let Fellaini leave, and fans need to understand that.
#!/usr/bin/env python
# encoding: utf-8
'''
Created on June 19, 2016
@author: David Moss
'''
# This module will emulate a light bulb device.
# input function behaves differently in Python 2.x and 3.x. And there is no raw_input in 3.x.
if hasattr(__builtins__, 'raw_input'):
input=raw_input
import requests
import sys
import json
import threading
import time
import logging
from argparse import ArgumentParser
from argparse import RawDescriptionHelpFormatter
_https_proxy = None
def main(argv=None):
if argv is None:
argv = sys.argv
else:
sys.argv.extend(argv)
parser = ArgumentParser(formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-d", "--deviceId", dest="deviceId", help="Globally unique device ID")
parser.add_argument("-u", "--username", dest="username", help="Username")
parser.add_argument("-p", "--password", dest="password", help="Password")
parser.add_argument("-s", "--server", dest="server", help="Base server URL (app.presencepro.com)")
parser.add_argument("-b", "--brand", dest="brand", help="Brand name partner to interact with the correct servers: 'myplace', 'origin', 'presence', etc.")
parser.add_argument("--httpdebug", dest="httpdebug", action="store_true", help="HTTP debug logger output");
parser.add_argument("--https_proxy", dest="https_proxy", help="If your corporate network requires a proxy, type in the full HTTPS proxy address here (i.e. http://10.10.1.10:1080)")
# Process arguments
args = parser.parse_args()
# Extract the arguments
deviceId = args.deviceId
username = args.username
password = args.password
server = args.server
httpdebug = args.httpdebug
brand = args.brand
if brand is not None:
brand = brand.lower()
if brand == 'presence':
print(Color.BOLD + "\nPresence by People Power" + Color.END)
server = "app.presencepro.com"
elif brand == 'myplace':
print(Color.BOLD + "\nMyPlace - Smart. Simple. Secure." + Color.END)
server = "iot.peoplepowerco.com"
elif brand == 'origin':
print(Color.BOLD + "\nOrigin Home HQ" + Color.END)
server = "app.originhomehq.com.au"
elif brand == 'innogy':
print(Color.BOLD + "\ninnogy SmartHome" + Color.END)
server = "innogy.presencepro.com"
else:
sys.stderr.write("This brand does not exist: " + str(brand) + "\n\n")
return 1
if not deviceId:
sys.stderr.write("Specify a device ID for this virtual device with the -d option. Use --help for more info.")
return 1
global _https_proxy
_https_proxy = None
if args.https_proxy is not None:
_https_proxy = {
'https': args.https_proxy
}
# Define the bot server
if not server:
server = "https://app.presencepro.com"
if "http" not in server:
server = "https://" + server
# HTTP Debugging
if httpdebug:
try:
import http.client as http_client
except ImportError:
# Python 2
import httplib as http_client
http_client.HTTPConnection.debuglevel = 1
# You must initialize logging, otherwise you'll not see debug output.
logging.basicConfig()
logging.getLogger().setLevel(logging.DEBUG)
requests_log = logging.getLogger("requests.packages.urllib3")
requests_log.setLevel(logging.DEBUG)
requests_log.propagate = True
# Grab the device server
device_server = _get_ensemble_server_url(server, deviceId)
# Login to your user account
app_key, user_info = _login(server, username, password)
# This is the device type of this virtual device
deviceType = 10071
# Grab the user's primary location ID
locationId = user_info['locations'][0]['id']
# Register the virtual device to your user's account
_register_device(server, app_key, locationId, deviceId, deviceType, "Virtual Light Bulb")
# Persistent connection to listen for commands
t = threading.Thread(target=_listen, args=(device_server, deviceId))
t.start()
def _listen(device_server, deviceId):
"""Listen for commands"""
global _https_proxy
while True:
try:
print("\n[" + deviceId + "]: Listening for commands")
http_headers = {"Content-Type": "application/json"}
r = requests.get(device_server + "/deviceio/mljson", params={"id":deviceId, "timeout":60}, headers=http_headers, timeout=60, proxies=_https_proxy)
command = json.loads(r.text)
print("[" + deviceId + "]: Command received: " + str(command))
# Ack the command
commandId = command['commands'][0]['commandId']
ackPayload = {"version":2, "proxyId": deviceId, "sequenceNumber": 1, "responses": [{"commandId":commandId, "result":1}]}
result = requests.post(device_server + "/deviceio/mljson", headers=http_headers, data=json.dumps(ackPayload), proxies=_https_proxy)
except Exception as e:
print("Exception: " + str(e))
time.sleep(1)
def _login(server, username, password):
"""Get an Bot API key and User Info by login with a username and password"""
global _https_proxy
if not username:
username = input('Email address: ')
if not password:
import getpass
password = getpass.getpass('Password: ')
try:
import requests
# login by username and password
http_headers = {"PASSWORD": password, "Content-Type": "application/json"}
r = requests.get(server + "/cloud/json/login", params={"username":username}, headers=http_headers, proxies=_https_proxy)
j = json.loads(r.text)
_check_for_errors(j)
app_key = j['key']
# get user info
http_headers = {"PRESENCE_API_KEY": app_key, "Content-Type": "application/json"}
r = requests.get(server + "/cloud/json/user", headers=http_headers, proxies=_https_proxy)
j = json.loads(r.text)
_check_for_errors(j)
return app_key, j
except BotError as e:
sys.stderr.write("Error: " + e.msg)
sys.stderr.write("\nCreate an account on " + server + " and use it to sign in")
sys.stderr.write("\n\n")
raise e
def _register_device(server, appKey, locationId, deviceId, deviceType, description):
"""Register a device to the user's account"""
global _https_proxy
http_headers = {"API_KEY": appKey, "Content-Type": "application/json"}
r = requests.post(server + "/cloud/json/devices", params={"locationId":locationId, "deviceId":deviceId, "deviceType":deviceType, "desc":description}, headers=http_headers, proxies=_https_proxy)
j = json.loads(r.text)
_check_for_errors(j)
return j
def _get_ensemble_server_url(server, device_id=None):
"""Get Ensemble server URL"""
import requests
global _https_proxy
http_headers = {"Content-Type": "application/json"}
params = {"type": "deviceio", "ssl": True}
if not device_id:
# to be removed
params['deviceId'] = "nodeviceid"
else:
params['deviceId'] = device_id
r = requests.get(server + "/cloud/json/settingsServer", params=params, headers=http_headers, proxies=_https_proxy)
return r.text
def _check_for_errors(json_response):
"""Check some JSON response for BotEngine errors"""
if not json_response:
raise BotError("No response from the server!", -1)
if json_response['resultCode'] > 0:
msg = "Unknown error!"
if 'resultCodeMessage' in json_response.keys():
msg = json_response['resultCodeMessage']
elif 'resultCodeDesc' in json_response.keys():
msg = json_response['resultCodeDesc']
raise BotError(msg, json_response['resultCode'])
del(json_response['resultCode'])
class BotError(Exception):
"""BotEngine exception to raise and log errors."""
def __init__(self, msg, code):
        super(BotError, self).__init__(msg)
self.msg = msg
self.code = code
def __str__(self):
return self.msg
def __unicode__(self):
return self.msg
#===============================================================================
# Color Class for CLI
#===============================================================================
class Color:
"""Color your command line output text with Color.WHATEVER and Color.END"""
PURPLE = '\033[95m'
CYAN = '\033[96m'
DARKCYAN = '\033[36m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
if __name__ == "__main__":
sys.exit(main())
|
Questions about "Chrooted SSH/SFTP Tutorial (Debian Etch)"
Discussion in 'HOWTO-Related Questions' started by Leszek, Dec 17, 2007.
Recently I was installing a Debian Etch based server for a school.
Since students can be really naughty sometimes and do something bad while exploring the filesystem by SSH, I've used the second way to chroot them into a jail. After logging in by SSH everything works fine: the user is locked in his jail and doesn't see too much. Unfortunately, chrooted users cannot log in by SFTP or SCP.
I tried to connect with Midnight Commander and in effect got an empty directory. I believe WinSCP gave an "Access Denied" error (can't remember).
Other (not chrooted) users can connect without problems.
One other thing that concerns me is that mod_userdir doesn't work for chrooted users. All the browser gives back is error 404.
I've even chmod 777 index.html in /home/chroot/home/klient2/public_html to be sure.
I hope Apache doesn't have to be in the jail for it to work.
Can it be fixed somehow ? Thanks for Your help.
you could use mysecureshell instead. That will only allow scp connections and they can't get outside their home directory.
Yes, I could. But students wouldn't be able to practice bash commands then.
That's why it's important. I know the chroot jail doesn't have all of the commands, but the lesson plan doesn't require many commands, so teachers could add these commands.
I'm not sure why, but without touching anything I tried to connect to that server (which is at home) from the mentioned school by WinSCP and it worked. I don't know why two other people couldn't.
/home/chroot/usr/lib/openssh/sftp-server and /usr/bin/sftp are present.
Maybe a problem with the personal firewalls (e.g. the Windows firewall or ZoneAlarm) on the clients?
All firewalls were off while testing.
I've found out that something is wrong with the DNS settings.
The server's domain wasn't found while connecting.
Entering the IP address helped. Strange!
Well, everything is OK now. I think this was the solution. Sometimes it happens that you miss something small but important.
I followed the tutorial for installing mysecureshell v0.95 (Download failed for v1.0-2) which worked great the first time on my debian 4 box. In other words, sftp clients connect fine and see only jailed homedirs.
However, those same users, when they ssh in (from another machine on the soho network), get a welcome message, then the connection is closed. Same thing if they scp. My best guess is that the users' old shell pref is... uh... conflicting with whatever mysecureshell does.
More of a nuisance than a problem for now... just seems odd. As does the fact that the openssh server defaults to NOT chrooting remote users. Huh? Why do I have to install the mysecureshell pkg to achieve this? Seems like a slightly ugly workaround to what could be a simple ssh pref.
Only a handful of *nix installations' worth of experience here (and not from source) so it's very likely I'm missing something. Hours of googling didn't make me any more clued in. Nor did searching the logs.
I'm not sure if SSH is possible with MySecureShell. I think it's SFTP only. However, you can try to change the configuration in /etc/ssh/sftp_config.
Denying ssh by redirecting shell - all or nothing?
Thanks for confirming -- apparently in a shell, even a jailed cracker can still escape, so I guess denying a shell completely to sftp users makes sense.
I haven't disabled shell for root - that would be very inconvenient. But if root password is guessable then your box is owned anyway.
So I guess jailing is really just for multiple, honest, normal users' privacy. And to protect themselves and each other from borking improperly permissioned files.
Gad, configuration is slow slogging... hoping SQL goes smoothly.
from django.shortcuts import render
from django.http import HttpResponse, Http404

from .models import Article
def special_case_2016(request):
item = {'title':'Special Case 2016','topics':10}
return render(request, "blog/special_case_2016.html", {'item':item})
def year_archive(request,yy):
item = {'title':'Year Archive','content':yy}
return render(request, "blog/year_archive.html", {'item':item})
def month_archive(request,yy,mm):
    item = {'title':'Month Archive','content':mm}
return render(request, "blog/month_archive.html", {'item':item})
def article_detail(request,yy,mm,id):
item = {'title':'Article Detail','content':id}
return render(request, "blog/article_detail.html", {'item':item})
def index(request):
blog_list = Article.objects.order_by('-publish_date')
context = {'blog_list': blog_list}
return render(request, 'blog/index.html', context)
def latest(request):
latest_blog_list = Article.objects.order_by('-publish_date')[:10]
context = {'latest_blog_list': latest_blog_list}
return render(request, 'blog/index.html', context)
def detail0(request, blog_id):
return HttpResponse("You're looking at article %s." % blog_id)
def detail1(request, blog_id):
item = Article.objects.get(pk=blog_id)
return render(request, 'blog/detail.html', {'item': item})
def detail(request, blog_id):
try:
item = Article.objects.get(pk=blog_id)
except Article.DoesNotExist:
raise Http404("Article does not exist")
return render(request, 'blog/detail.html', {'item': item})
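# --- Illustrative URLconf (not part of the original file) ---
# A minimal urls.py sketch, shown as comments, of how these views might be
# wired up. The regex patterns and the 'blog' module path are assumptions
# inferred from the view signatures above (yy, mm, id, blog_id).
#
#   from django.conf.urls import url
#   from blog import views
#
#   urlpatterns = [
#       url(r'^articles/2016/$', views.special_case_2016),
#       url(r'^articles/(?P<yy>[0-9]{4})/$', views.year_archive),
#       url(r'^articles/(?P<yy>[0-9]{4})/(?P<mm>[0-9]{2})/$', views.month_archive),
#       url(r'^articles/(?P<yy>[0-9]{4})/(?P<mm>[0-9]{2})/(?P<id>[0-9]+)/$', views.article_detail),
#       url(r'^$', views.index),
#       url(r'^(?P<blog_id>[0-9]+)/$', views.detail),
#   ]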
|
Modus™ is a high capacity bin housing, which securely contains a wheeled bin in a tidy manner. Modus™ 1280 Food Waste Recycling Housing features a secure chute to prevent odours escaping the unit.
Covering large wheeled bins with a Modus™ Housing increases recycling rates and improves the appearance of the area. The Modus™ range of housings accepts a wide range of standard-sized 4-wheeled containers. Modus 770 is designed to accept 600, 660 or 770-litre containers, while Modus 1280 will hold either 1100 or 1280-litre containers. |
# -*- coding: utf-8 -*-
#
# NOTE: this snippet should come *after* the other celery_*
# because it uses the BROKER_URL that should have been
# defined in these.
#
"""
Copyright 2013 Olivier Cortès <oc@1flow.io>
This file is part of the 1flow project.
1flow is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of
the License, or (at your option) any later version.
1flow is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public
License along with 1flow. If not, see http://www.gnu.org/licenses/
"""
#from datetime import timedelta

import os
import sys

import djcelery
djcelery.setup_loader()
from celery.schedules import crontab
from kombu import Exchange, Queue
# Avoid sharing the same celery states file
# when multiple workers run on the same machine.
try:
index = sys.argv.index('--hostname')
except:
CELERYD_STATE_DB = 'celery.states'
else:
# get 'medium' from 'medium.worker-03.1flow.io'
CELERYD_STATE_DB = 'celery.states.{0}'.format(
sys.argv[index + 1].split('.', 1)[0])
del index
# 2014-03-09: I benchmarked with 0/1/2 on a 15K-items queue, with various
# other parameters (mtpc=0/1/4/16/64, crc=16/32/64) and having no prefetching
# is the option that gives the best continuous throughput, with excellent
# peaks. All other options make the process-group master stop children to
# ack and re-prefetch next jobs, which in turn make all other process groups
# wait. This produces a lot of hiccups in the global processing tunnel. Thus, 0.
CELERYD_PREFETCH_MULTIPLIER = 0
CELERY_DEFAULT_QUEUE = 'medium'
CELERY_QUEUES = (
Queue('high', Exchange('high'), routing_key='high'),
Queue('medium', Exchange('medium'), routing_key='medium'),
Queue('low', Exchange('low'), routing_key='low'),
Queue('fetch', Exchange('fetch'), routing_key='fetch'),
Queue('swarm', Exchange('swarm'), routing_key='swarm'),
Queue('clean', Exchange('clean'), routing_key='clean'),
Queue('background', Exchange('background'), routing_key='background'),
)
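# Illustrative note (not part of the original settings): with the queues
# declared above, a task can be sent to a specific queue at call time, e.g.
#
#   refresh_all_feeds.apply_async(queue='high', routing_key='high')
#
# or routed statically with the CELERY_ROUTES setting, e.g.
#
#   CELERY_ROUTES = {'oneflow.core.tasks.refresh_all_feeds': {'queue': 'high'}}
#
# The task path is taken from the beat schedule below; routing it to 'high'
# is only an example, not a statement about where it actually runs.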
BROKER_URL = os.environ.get('BROKER_URL')
# Disabling the heartbeat because workers often seem disabled in flower,
# thanks to http://stackoverflow.com/a/14831904/654755
BROKER_HEARTBEAT = 0
CELERY_RESULT_BACKEND = BROKER_URL
CELERY_RESULT_PERSISTENT = True
# Allow to recover from any unknown crash.
CELERY_ACKS_LATE = True
# Sometimes, Ask asks us to enable this to debug issues.
# BTW, it will save some CPU cycles.
CELERY_DISABLE_RATE_LIMITS = True
# Allow our remote workers to get tasks faster if they have a
# slow internet connection (yes Gurney, I'm thinking of you).
#
# 20140309: no more remote worker and we have very small messages (only
# IDs, no full instance), so stop wasting CPU cycles.
#CELERY_MESSAGE_COMPRESSION = 'gzip'
# Avoid long running and retried tasks to be run over-and-over again.
BROKER_TRANSPORT_OPTIONS = {'visibility_timeout': 86400}
# Half a day is enough
CELERY_TASK_RESULT_EXPIRES = 43200
# The default being 5000, we need more than this.
CELERY_MAX_CACHED_RESULTS = 32768
# NOTE: I don't know if this is compatible with upstart.
CELERYD_POOL_RESTARTS = True
# Since Celery 3.1/3.2, no 'pickle' anymore.
# JSON is my preferred option, anyway.
CELERY_ACCEPT_CONTENT = ['pickle', 'json']
CELERY_TASK_SERIALIZER = 'pickle'
CELERY_RESULT_SERIALIZER = 'json'
#CELERY_ALWAYS_EAGER=True
CELERY_TRACK_STARTED = True
CELERY_SEND_TASK_SENT_EVENT = True
# Disabled by default and I like it, because we use Sentry for this.
#CELERY_SEND_TASK_ERROR_EMAILS = False
CELERYBEAT_SCHEDULER = 'djcelery.schedulers.DatabaseScheduler'
CELERYBEAT_SCHEDULE = {
# 'celery-beat-test': {
# 'task': 'oneflow.base.tasks.celery_beat_test',
# 'schedule': timedelta(seconds=15),
# 'schedule': timedelta(seconds=5),
# 'schedule': crontab(minute='*'),
# },
#
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• Core tasks
'refresh-all-feeds': {
'task': 'oneflow.core.tasks.refresh_all_feeds',
'schedule': crontab(hour='*', minute='*'),
},
'global-checker-task': {
'task': 'oneflow.core.tasks.global_checker_task',
'schedule': crontab(hour='1', minute='1'),
},
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••••••• Statistics
    # We update stats regularly to avoid "losing" data and desynchronization.
# UDP packets are not reliable. But that's the point of it, isn't it?
'synchronize-statsd-gauges': {
'task': 'oneflow.core.stats.synchronize_statsd_gauges',
'schedule': crontab(minute='59'),
'args': (True, ),
},
# •••••••••••••••••••••••••••••••••••••••••••••••••••••••••• Cleaning tasks
'clean-obsolete-redis-keys': {
'task': 'oneflow.core.tasks.clean_obsolete_redis_keys',
'schedule': crontab(hour='2', minute='2'),
},
# ••••••••••••••••••••••••••••••••••••••••••••••••••••• Social auth refresh
'refresh-access-tokens-00': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='*/4', minute='0,48'),
},
'refresh-access-tokens-12': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='3,7,11,15,19,23', minute=12),
},
'refresh-access-tokens-24': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='2,6,10,14,18,22', minute=24),
},
'refresh-access-tokens-36': {
'task': 'oneflow.base.tasks.refresh_access_tokens',
'schedule': crontab(hour='1,5,9,13,17,21', minute=36),
},
}
|
Australian Prime Minister Julia Gillard is the country's first female leader, and she showed this week that she won't stand for misogyny. In a male-dominated profession such as politics, she has shown considerable audacity in pulling the leader of the Australian opposition up for his inappropriate statements towards her and women in general. In this 15-minute video, see her confront him with his own statements, which include the claim that an under-representation of women in parliament is not "a bad thing". Yikes.
So, was Gillard awesome, awesome, or awesome? Choose carefully. |
# -*- coding: utf-8 -*-
import os
import pygame
class Dialog:
def __init__(self, game_board):
self.game_board = game_board
self.color = (255, 255, 255, 150)
self.scheme = "white"
if self.game_board.mainloop.scheme is not None:
if self.game_board.mainloop.scheme.dark:
self.scheme = "black"
self.color = (0, 0, 0, 150)
self.img_src = "congrats.png"
self.img_src2 = "game_over.png"
self.sizer = game_board.mainloop.sizer
self.layout_update()
self.level = game_board.level
def layout_update(self):
self.color = (255, 255, 255, 150)
self.scheme = "white"
if self.game_board.mainloop.scheme is not None:
if self.game_board.mainloop.scheme.dark:
self.scheme = "black"
self.color = (0, 0, 0, 150)
self.width = self.sizer.screen_w
self.height = self.sizer.screen_h
self.image = pygame.Surface((self.width, self.height), flags=pygame.SRCALPHA)
self.image.fill(self.color)
self.rect = self.image.get_rect()
self.rect.topleft = [0, 0]
self.img = pygame.image.load(os.path.join('res', 'images', self.img_src)).convert_alpha()
self.img2 = pygame.image.load(os.path.join('res', 'images', self.img_src2)).convert_alpha()
# img2 has the same size
img_pos_x = self.img.get_rect(centerx=self.image.get_width() // 2)
img_pos_y = self.img.get_rect(centery=self.image.get_height() // 2)
self.img_pos = (img_pos_x[0], img_pos_y[1])
def update(self, screen):
self.image.fill(self.color)
if self.level.dialog_type == 0:
self.image.blit(self.img, self.img_pos)
elif self.level.dialog_type == 1:
self.image.blit(self.img2, self.img_pos)
elif self.level.dialog_type == 2:
pass
screen.blit(self.image, (0, 0))
|
The Servicemembers Civil Relief Act (SCRA) is a federal statute passed by Congress to allow military members to suspend or postpone some civil obligations so that the military member can devote his or her full attention to military duties. The original Soldiers' and Sailors' Civil Relief Act (SSCRA) was passed during World War I. The statute was reenacted during World War II, and was later modified during Operation Desert Storm.
The Servicemembers Civil Relief Act is designed to protect active duty military members, reservists who are in active federal service, and National Guardsmen who are in active federal service. Some of the benefits under the SCRA extend to dependents of active duty military members as well.
What Kind Of Relief Can It Provide?
The SCRA can provide many forms of relief to military members. Below are some of the most common forms of relief.
6% CAP ON INTEREST RATES: Under the SCRA, a military member can cap the interest rate at 6% for all obligations entered into before beginning active duty. This can include interest rates on credit cards, mortgages, and even some student loans (except for Federal guaranteed student loans), to name a few. To qualify for the interest rate cap the military member has to show that he or she is now on active duty, that the obligation or debt was incurred prior to entry on active duty, and that military service materially affects the members' ability to pay. To begin the process, the military member needs to send a letter along with a copy of current military orders to the lender requesting relief under the SCRA. The interest rate cap lasts for the duration of active duty service.
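To illustrate with hypothetical figures: a servicemember who entered active duty owing $10,000 on a credit card at 15% APR could have the rate capped at 6%, a difference of roughly $900 in interest over a year (9% of $10,000).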
STAY OF PROCEEDINGS: If you are served with a complaint indicating that you are being sued for some reason, you can obtain a "stay" or postponement of those proceedings. A stay can be used to stop the action altogether, or to hold up some phase of it. According to the SCRA, you can request a "stay" during any stage of the proceedings. However, the burden is on you, the military member, to show that your military service has materially affected your ability to appear in court. In general, you can request a stay of the proceedings for a reasonable period of time (30-60 days). For example, if you are being sued for divorce, you can put off the hearing for some period of time, but it is unlikely that a court will allow you to put off the proceedings indefinitely.
DEFAULT JUDGMENTS: A default judgment is entered against a party who has failed to defend against a claim that has been brought by another party. To obtain a default judgment, a plaintiff must file an affidavit (written declaration of fact) stating that the defendant is not in the military service and has not requested a stay. If you are sued while on active duty, you fail to respond and as a result a default judgment is obtained against you, you can reopen the default judgment by taking several steps. First, you must show that the judgment was entered during your military service or within 30 days after you've left the service. Second, you must write to the court requesting that the default judgment be reopened while you are still on active duty or within 90 days of leaving the service. Third, you must not have made any kind of appearance in court, through filing an answer or otherwise, prior to the default judgment being entered. Finally, you must indicate that your military service prejudiced your ability to defend your case and show that you had a valid defense to the action against you.
PROTECTION FROM EVICTION: If you are leasing a house or apartment, the SCRA can protect you from being evicted for a period of time, usually three months. The dwelling place must be occupied by either the active duty member or his or her dependents and the rent on the premises cannot exceed $1200.00 a month. Additionally, the military member must show that military service materially affects his or her ability to pay rent. If a landlord continues to try to evict the military member or does actually evict the member, he or she is subject to criminal sanctions such as fines or even imprisonment. However, if you feel that you are in this situation, don't just stop paying rent and wait three months. Talk to an attorney.
TERMINATION OF PRE-SERVICE LEASES: The SCRA also allows military members who are just entering active duty service to lawfully terminate a lease without repercussions. To do this, the service member needs to show that the lease was entered into prior to the commencement of active duty service, that the lease was signed by or on behalf of the service member, and that the service member is currently in military service.
MORTGAGES: The SCRA can also provide temporary relief from paying your mortgage. To obtain relief, a military member must show that their mortgage was entered into prior to beginning active duty, that the property was owned prior to entry into military service, that the property is still owned by the military member, and that military service materially affects the member's ability to pay the mortgage.
As you can see, the Servicemembers Civil Relief Act can be a big help to military members in times of need. In fact, the United States Supreme Court has declared that the Act must be read with "an eye friendly to those who dropped their affairs to answer their country's call." It actually provides many more protections than those listed here. If you think that you may qualify for protection under the SCRA, you should talk to an attorney.
This information is from the U.S. Department of Homeland Security, U.S. Coast Guard Legal Assistance website. For details and links to additional information, go to this link: Soldiers' and Sailors' Civil Relief Act Provides Umbrella of Protection.
import datetime
import pandas as pd
import pytest
import ibis
import ibis.expr.datatypes as dt
pytestmark = pytest.mark.bigquery
pytest.importorskip('google.cloud.bigquery')
def test_timestamp_accepts_date_literals(alltypes, project_id):
date_string = '2009-03-01'
param = ibis.param(dt.timestamp).name('param_0')
expr = alltypes.mutate(param=param)
params = {param: date_string}
result = expr.compile(params=params)
expected = """\
SELECT *, @param AS `param`
FROM `{}.testing.functional_alltypes`""".format(
project_id
)
assert result == expected
@pytest.mark.parametrize(
('distinct', 'expected_keyword'), [(True, 'DISTINCT'), (False, 'ALL')]
)
def test_union(alltypes, distinct, expected_keyword, project_id):
expr = alltypes.union(alltypes, distinct=distinct)
result = expr.compile()
expected = """\
SELECT *
FROM `{project}.testing.functional_alltypes`
UNION {}
SELECT *
FROM `{project}.testing.functional_alltypes`""".format(
expected_keyword, project=project_id
)
assert result == expected
def test_ieee_divide(alltypes, project_id):
expr = alltypes.double_col / 0
result = expr.compile()
expected = """\
SELECT IEEE_DIVIDE(`double_col`, 0) AS `tmp`
FROM `{}.testing.functional_alltypes`""".format(
project_id
)
assert result == expected
def test_identical_to(alltypes, project_id):
t = alltypes
pred = t.string_col.identical_to('a') & t.date_string_col.identical_to('b')
expr = t[pred]
result = expr.compile()
expected = """\
SELECT *
FROM `{}.testing.functional_alltypes`
WHERE (((`string_col` IS NULL) AND ('a' IS NULL)) OR (`string_col` = 'a')) AND
(((`date_string_col` IS NULL) AND ('b' IS NULL)) OR (`date_string_col` = 'b'))""".format( # noqa: E501
project_id
)
assert result == expected
@pytest.mark.parametrize('timezone', [None, 'America/New_York'])
def test_to_timestamp(alltypes, timezone, project_id):
expr = alltypes.date_string_col.to_timestamp('%F', timezone)
result = expr.compile()
if timezone:
expected = """\
SELECT PARSE_TIMESTAMP('%F', `date_string_col`, 'America/New_York') AS `tmp`
FROM `{}.testing.functional_alltypes`""".format(
project_id
)
else:
expected = """\
SELECT PARSE_TIMESTAMP('%F', `date_string_col`) AS `tmp`
FROM `{}.testing.functional_alltypes`""".format(
project_id
)
assert result == expected
@pytest.mark.parametrize(
('case', 'expected', 'dtype'),
[
(datetime.date(2017, 1, 1), "DATE '{}'".format('2017-01-01'), dt.date),
(
pd.Timestamp('2017-01-01'),
"DATE '{}'".format('2017-01-01'),
dt.date,
),
('2017-01-01', "DATE '{}'".format('2017-01-01'), dt.date),
(
datetime.datetime(2017, 1, 1, 4, 55, 59),
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
),
(
'2017-01-01 04:55:59',
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
),
(
pd.Timestamp('2017-01-01 04:55:59'),
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
),
],
)
def test_literal_date(case, expected, dtype):
expr = ibis.literal(case, type=dtype).year()
result = ibis.bigquery.compile(expr)
assert result == "SELECT EXTRACT(year from {}) AS `tmp`".format(expected)
@pytest.mark.parametrize(
('case', 'expected', 'dtype', 'strftime_func'),
[
(
datetime.date(2017, 1, 1),
"DATE '{}'".format('2017-01-01'),
dt.date,
'FORMAT_DATE',
),
(
pd.Timestamp('2017-01-01'),
"DATE '{}'".format('2017-01-01'),
dt.date,
'FORMAT_DATE',
),
(
'2017-01-01',
"DATE '{}'".format('2017-01-01'),
dt.date,
'FORMAT_DATE',
),
(
datetime.datetime(2017, 1, 1, 4, 55, 59),
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
'FORMAT_TIMESTAMP',
),
(
'2017-01-01 04:55:59',
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
'FORMAT_TIMESTAMP',
),
(
pd.Timestamp('2017-01-01 04:55:59'),
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
'FORMAT_TIMESTAMP',
),
],
)
def test_day_of_week(case, expected, dtype, strftime_func):
date_var = ibis.literal(case, type=dtype)
expr_index = date_var.day_of_week.index()
result = ibis.bigquery.compile(expr_index)
assert (
result
== "SELECT MOD(EXTRACT(DAYOFWEEK FROM {}) + 5, 7) AS `tmp`".format(
expected
)
) # noqa: E501
expr_name = date_var.day_of_week.full_name()
result = ibis.bigquery.compile(expr_name)
if strftime_func == 'FORMAT_TIMESTAMP':
assert result == "SELECT {}('%A', {}, 'UTC') AS `tmp`".format(
strftime_func, expected
)
else:
assert result == "SELECT {}('%A', {}) AS `tmp`".format(
strftime_func, expected
)
@pytest.mark.parametrize(
('case', 'expected', 'dtype'),
[
(
datetime.datetime(2017, 1, 1, 4, 55, 59),
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
),
(
'2017-01-01 04:55:59',
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
),
(
pd.Timestamp('2017-01-01 04:55:59'),
"TIMESTAMP '{}'".format('2017-01-01 04:55:59'),
dt.timestamp,
),
(datetime.time(4, 55, 59), "TIME '{}'".format('04:55:59'), dt.time),
('04:55:59', "TIME '{}'".format('04:55:59'), dt.time),
],
)
def test_literal_timestamp_or_time(case, expected, dtype):
expr = ibis.literal(case, type=dtype).hour()
result = ibis.bigquery.compile(expr)
assert result == "SELECT EXTRACT(hour from {}) AS `tmp`".format(expected)
def test_window_function(alltypes, project_id):
t = alltypes
w1 = ibis.window(
preceding=1, following=0, group_by='year', order_by='timestamp_col'
)
expr = t.mutate(win_avg=t.float_col.mean().over(w1))
result = expr.compile()
expected = """\
SELECT *,
avg(`float_col`) OVER (PARTITION BY `year` ORDER BY `timestamp_col` ROWS BETWEEN 1 PRECEDING AND CURRENT ROW) AS `win_avg`
FROM `{}.testing.functional_alltypes`""".format( # noqa: E501
project_id
)
assert result == expected
w2 = ibis.window(
preceding=0, following=2, group_by='year', order_by='timestamp_col'
)
expr = t.mutate(win_avg=t.float_col.mean().over(w2))
result = expr.compile()
expected = """\
SELECT *,
avg(`float_col`) OVER (PARTITION BY `year` ORDER BY `timestamp_col` ROWS BETWEEN CURRENT ROW AND 2 FOLLOWING) AS `win_avg`
FROM `{}.testing.functional_alltypes`""".format( # noqa: E501
project_id
)
assert result == expected
w3 = ibis.window(
preceding=(4, 2), group_by='year', order_by='timestamp_col'
)
expr = t.mutate(win_avg=t.float_col.mean().over(w3))
result = expr.compile()
expected = """\
SELECT *,
avg(`float_col`) OVER (PARTITION BY `year` ORDER BY `timestamp_col` ROWS BETWEEN 4 PRECEDING AND 2 PRECEDING) AS `win_avg`
FROM `{}.testing.functional_alltypes`""".format( # noqa: E501
project_id
)
assert result == expected
def test_range_window_function(alltypes, project_id):
t = alltypes
w = ibis.range_window(
preceding=1, following=0, group_by='year', order_by='month'
)
expr = t.mutate(two_month_avg=t.float_col.mean().over(w))
result = expr.compile()
expected = """\
SELECT *,
avg(`float_col`) OVER (PARTITION BY `year` ORDER BY `month` RANGE BETWEEN 1 PRECEDING AND CURRENT ROW) AS `two_month_avg`
FROM `{}.testing.functional_alltypes`""".format( # noqa: E501
project_id
)
assert result == expected
w3 = ibis.range_window(
preceding=(4, 2), group_by='year', order_by='timestamp_col'
)
expr = t.mutate(win_avg=t.float_col.mean().over(w3))
result = expr.compile()
expected = """\
SELECT *,
avg(`float_col`) OVER (PARTITION BY `year` ORDER BY UNIX_MICROS(`timestamp_col`) RANGE BETWEEN 4 PRECEDING AND 2 PRECEDING) AS `win_avg`
FROM `{}.testing.functional_alltypes`""".format( # noqa: E501
project_id
)
assert result == expected
@pytest.mark.parametrize(
('preceding', 'value'),
[
(5, 5),
(ibis.interval(nanoseconds=1), 0.001),
(ibis.interval(microseconds=1), 1),
(ibis.interval(seconds=1), 1000000),
(ibis.interval(minutes=1), 1000000 * 60),
(ibis.interval(hours=1), 1000000 * 60 * 60),
(ibis.interval(days=1), 1000000 * 60 * 60 * 24),
(2 * ibis.interval(days=1), 1000000 * 60 * 60 * 24 * 2),
(ibis.interval(weeks=1), 1000000 * 60 * 60 * 24 * 7),
],
)
def test_trailing_range_window(alltypes, preceding, value, project_id):
t = alltypes
w = ibis.trailing_range_window(
preceding=preceding, order_by=t.timestamp_col
)
expr = t.mutate(win_avg=t.float_col.mean().over(w))
result = expr.compile()
expected = """\
SELECT *,
avg(`float_col`) OVER (ORDER BY UNIX_MICROS(`timestamp_col`) RANGE BETWEEN {} PRECEDING AND CURRENT ROW) AS `win_avg`
FROM `{}.testing.functional_alltypes`""".format( # noqa: E501
value, project_id
)
assert result == expected
@pytest.mark.parametrize(
('preceding', 'value'), [(ibis.interval(years=1), None)]
)
def test_trailing_range_window_unsupported(alltypes, preceding, value):
t = alltypes
w = ibis.trailing_range_window(
preceding=preceding, order_by=t.timestamp_col
)
expr = t.mutate(win_avg=t.float_col.mean().over(w))
with pytest.raises(ValueError):
expr.compile()
@pytest.mark.parametrize(
('distinct1', 'distinct2', 'expected1', 'expected2'),
[
(True, True, 'UNION DISTINCT', 'UNION DISTINCT'),
(True, False, 'UNION DISTINCT', 'UNION ALL'),
(False, True, 'UNION ALL', 'UNION DISTINCT'),
(False, False, 'UNION ALL', 'UNION ALL'),
],
)
def test_union_cte(
alltypes, distinct1, distinct2, expected1, expected2, project_id
):
t = alltypes
expr1 = t.group_by(t.string_col).aggregate(metric=t.double_col.sum())
expr2 = expr1.view()
expr3 = expr1.view()
expr = expr1.union(expr2, distinct=distinct1).union(
expr3, distinct=distinct2
)
result = expr.compile()
expected = """\
WITH t0 AS (
SELECT `string_col`, sum(`double_col`) AS `metric`
FROM `{project}.testing.functional_alltypes`
GROUP BY 1
)
SELECT *
FROM t0
{}
SELECT `string_col`, sum(`double_col`) AS `metric`
FROM `{project}.testing.functional_alltypes`
GROUP BY 1
{}
SELECT `string_col`, sum(`double_col`) AS `metric`
FROM `{project}.testing.functional_alltypes`
GROUP BY 1""".format(
expected1, expected2, project=project_id
)
assert result == expected
def test_projection_fusion_only_peeks_at_immediate_parent():
schema = [
('file_date', 'timestamp'),
('PARTITIONTIME', 'date'),
('val', 'int64'),
]
table = ibis.table(schema, name='unbound_table')
table = table[table.PARTITIONTIME < ibis.date('2017-01-01')]
table = table.mutate(file_date=table.file_date.cast('date'))
table = table[table.file_date < ibis.date('2017-01-01')]
table = table.mutate(XYZ=table.val * 2)
expr = table.join(table.view())[table]
result = ibis.bigquery.compile(expr)
expected = """\
WITH t0 AS (
SELECT *
FROM unbound_table
WHERE `PARTITIONTIME` < DATE '2017-01-01'
),
t1 AS (
SELECT CAST(`file_date` AS DATE) AS `file_date`, `PARTITIONTIME`, `val`
FROM t0
),
t2 AS (
SELECT t1.*
FROM t1
WHERE t1.`file_date` < DATE '2017-01-01'
),
t3 AS (
SELECT *, `val` * 2 AS `XYZ`
FROM t2
)
SELECT t3.*
FROM t3
CROSS JOIN t3 t4"""
assert result == expected
def test_bool_reducers(alltypes):
b = alltypes.bool_col
expr = b.mean()
result = expr.compile()
expected = """\
SELECT avg(CAST(`bool_col` AS INT64)) AS `mean`
FROM `ibis-gbq.testing.functional_alltypes`"""
assert result == expected
expr2 = b.sum()
result = expr2.compile()
expected = """\
SELECT sum(CAST(`bool_col` AS INT64)) AS `sum`
FROM `ibis-gbq.testing.functional_alltypes`"""
assert result == expected
def test_bool_reducers_where(alltypes):
b = alltypes.bool_col
m = alltypes.month
expr = b.mean(where=m > 6)
result = expr.compile()
expected = """\
SELECT avg(CASE WHEN `month` > 6 THEN CAST(`bool_col` AS INT64) ELSE NULL END) AS `mean`
FROM `ibis-gbq.testing.functional_alltypes`""" # noqa: E501
assert result == expected
expr2 = b.sum(where=((m > 6) & (m < 10)))
result = expr2.compile()
expected = """\
SELECT sum(CASE WHEN (`month` > 6) AND (`month` < 10) THEN CAST(`bool_col` AS INT64) ELSE NULL END) AS `sum`
FROM `ibis-gbq.testing.functional_alltypes`""" # noqa: E501
assert result == expected
def test_approx_nunique(alltypes):
d = alltypes.double_col
expr = d.approx_nunique()
result = expr.compile()
expected = """\
SELECT APPROX_COUNT_DISTINCT(`double_col`) AS `approx_nunique`
FROM `ibis-gbq.testing.functional_alltypes`"""
assert result == expected
b = alltypes.bool_col
m = alltypes.month
expr2 = b.approx_nunique(where=m > 6)
result = expr2.compile()
expected = """\
SELECT APPROX_COUNT_DISTINCT(CASE WHEN `month` > 6 THEN `bool_col` ELSE NULL END) AS `approx_nunique`
FROM `ibis-gbq.testing.functional_alltypes`""" # noqa: E501
assert result == expected
def test_approx_median(alltypes):
d = alltypes.double_col
expr = d.approx_median()
result = expr.compile()
expected = """\
SELECT APPROX_QUANTILES(`double_col`, 2)[OFFSET(1)] AS `approx_median`
FROM `ibis-gbq.testing.functional_alltypes`"""
assert result == expected
m = alltypes.month
expr2 = d.approx_median(where=m > 6)
result = expr2.compile()
expected = """\
SELECT APPROX_QUANTILES(CASE WHEN `month` > 6 THEN `double_col` ELSE NULL END, 2)[OFFSET(1)] AS `approx_median`
FROM `ibis-gbq.testing.functional_alltypes`""" # noqa: E501
assert result == expected
@pytest.mark.parametrize(
('unit', 'expected_unit', 'expected_func'),
[
('Y', 'YEAR', 'TIMESTAMP'),
('Q', 'QUARTER', 'TIMESTAMP'),
('M', 'MONTH', 'TIMESTAMP'),
('W', 'WEEK', 'TIMESTAMP'),
('D', 'DAY', 'TIMESTAMP'),
('h', 'HOUR', 'TIMESTAMP'),
('m', 'MINUTE', 'TIMESTAMP'),
('s', 'SECOND', 'TIMESTAMP'),
('ms', 'MILLISECOND', 'TIMESTAMP'),
('us', 'MICROSECOND', 'TIMESTAMP'),
('Y', 'YEAR', 'DATE'),
('Q', 'QUARTER', 'DATE'),
('M', 'MONTH', 'DATE'),
('W', 'WEEK', 'DATE'),
('D', 'DAY', 'DATE'),
('h', 'HOUR', 'TIME'),
('m', 'MINUTE', 'TIME'),
('s', 'SECOND', 'TIME'),
('ms', 'MILLISECOND', 'TIME'),
('us', 'MICROSECOND', 'TIME'),
],
)
def test_temporal_truncate(unit, expected_unit, expected_func):
t = ibis.table([('a', getattr(dt, expected_func.lower()))], name='t')
expr = t.a.truncate(unit)
result = ibis.bigquery.compile(expr)
expected = """\
SELECT {}_TRUNC(`a`, {}) AS `tmp`
FROM t""".format(
expected_func, expected_unit
)
assert result == expected
@pytest.mark.parametrize('kind', ['date', 'time'])
def test_extract_temporal_from_timestamp(kind):
t = ibis.table([('ts', dt.timestamp)], name='t')
expr = getattr(t.ts, kind)()
result = ibis.bigquery.compile(expr)
expected = """\
SELECT {}(`ts`) AS `tmp`
FROM t""".format(
kind.upper()
)
assert result == expected
def test_now():
expr = ibis.now()
result = ibis.bigquery.compile(expr)
expected = 'SELECT CURRENT_TIMESTAMP() AS `tmp`'
assert result == expected
def test_bucket():
t = ibis.table([('value', 'double')], name='t')
buckets = [0, 1, 3]
expr = t.value.bucket(buckets).name('foo')
result = ibis.bigquery.compile(expr)
expected = """\
SELECT
CASE
WHEN (`value` >= 0) AND (`value` < 1) THEN 0
WHEN (`value` >= 1) AND (`value` <= 3) THEN 1
ELSE CAST(NULL AS INT64)
END AS `tmp`
FROM t"""
assert result == expected
@pytest.mark.parametrize(
('kind', 'begin', 'end', 'expected'),
[
('preceding', None, 1, 'UNBOUNDED PRECEDING AND 1 PRECEDING'),
('following', 1, None, '1 FOLLOWING AND UNBOUNDED FOLLOWING'),
],
)
def test_window_unbounded(kind, begin, end, expected):
t = ibis.table([('a', 'int64')], name='t')
kwargs = {kind: (begin, end)}
expr = t.a.sum().over(ibis.window(**kwargs))
result = ibis.bigquery.compile(expr)
assert (
result
== """\
SELECT sum(`a`) OVER (ROWS BETWEEN {}) AS `tmp`
FROM t""".format(
expected
)
)
|
Last year, U.S. Customs & Border Protection — CBP — in Puerto Rico and the U.S. Virgin Islands seized $10.6 million dollars in unreported currency, smuggled bulk cash, or unlawful currency structuring violations. Last year I called that an overwhelming amount of seized currency.
Most of these customs currency seizures occur at airports, ferry crossings, etc. CBP for Puerto Rico and the U.S. Virgin Islands reported their annual fiscal year statistics for 2014, and this currency seizure number has more than doubled. That is more than overwhelming, it is tremendous. The story also compares currency seizures with those in California, Texas, New Mexico, and Arizona, all areas in which drug smuggling is pervasive.
In Puerto Rico and the U.S. Virgin Islands, CBP officers and agents seized over 51,043 pounds of narcotics with an estimated street value of approximately $650 million and seized approximately $10.6 million in unreported currency in FY 2014, which runs from October 1, 2013 to September 30, 2014.
Furthermore, $8.4 million of currency interdictions were reported and over 80 firearms were seized.
$8.4M or $10.6M? Who's counting over there?! And this is the same government that seizes currency for any mis-report, yet can't get its own facts straight in its news releases.
class MonoMacPackage (Package):
def __init__ (self):
self.pkgconfig_version = '1.0'
self.maccore_tag = '0b71453'
self.maccore_source_dir_name = 'mono-maccore-0b71453'
self.monomac_tag = 'ae428c7'
self.monomac_source_dir_name = 'mono-monomac-ae428c7'
Package.__init__ (self, 'monomac', self.monomac_tag)
self.sources = [
'https://github.com/mono/maccore/tarball/%{maccore_tag}',
'https://github.com/mono/monomac/tarball/%{monomac_tag}'
]
def prep (self):
self.sh ('tar xf "%{sources[0]}"')
self.sh ('tar xf "%{sources[1]}"')
self.sh ('mv %{maccore_source_dir_name} maccore')
self.sh ('mv %{monomac_source_dir_name} monomac')
self.cd ('monomac/src')
def build (self):
self.sh ('make')
def install (self):
self.sh ('mkdir -p %{prefix}/lib/monomac')
self.sh ('mkdir -p %{prefix}/share/pkgconfig')
self.sh ('echo "Libraries=%{prefix}/lib/monomac/MonoMac.dll\n\nName: MonoMac\nDescription: Mono Mac bindings\nVersion:%{pkgconfig_version}\nLibs: -r:%{prefix}/lib/monomac/MonoMac.dll" > %{prefix}/share/pkgconfig/monomac.pc')
self.sh ('cp MonoMac.dll %{prefix}/lib/monomac')
MonoMacPackage ()
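# For reference, the echo in install () above renders
# %{prefix}/share/pkgconfig/monomac.pc roughly as follows (with %{prefix}
# expanded to the install prefix and %{pkgconfig_version} to 1.0):
#
#   Libraries=<prefix>/lib/monomac/MonoMac.dll
#
#   Name: MonoMac
#   Description: Mono Mac bindings
#   Version:1.0
#   Libs: -r:<prefix>/lib/monomac/MonoMac.dll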
|
Very large abstract wall art canvas print triptych in. Large vertical wall art very modern also goes. 35 large outdoor metal wall art my wall of life. Jolipa home decoration dream house experience. |
import numpy as np

from research.three_phase.Engine.Devices.branch import *
from research.three_phase.Engine.Devices.bus import *
class TransformerType1p:
def __init__(self, name, conn_f: Connection, conn_t: Connection, r, x, Vf_rate, Vt_rate, rating=1e-6):
"""
Single phase transformer constructor
:param conn_f: Connection type at the from bus
:param conn_t: Connection type at the to bus
:param r: leakage resistance in per unit
:param x: leakage reactance in per unit
:param Vf_rate: Voltage rate at the "from" side in kV
:param Vt_rate: Voltage rate at the "to" side in kV
:param rating: Power rating in MVA
"""
self.name = name
# from-bus connection
self.conn_f = conn_f
# to-bus connection
self.conn_t = conn_t
# voltage rate at the from side
self.Vf = Vf_rate
# voltage rate at the to side
self.Vt = Vt_rate
# power rating in MVA
self.Srate = rating
# resistance
self.r = r
# reactance
self.x = x
self.number_of_phases = 1
def get_ABCD(self, tap_f=1.0, tap_t=1.0):
"""
ABCD parameters of a single-phase transformer depending on the connections
Reference: Load Flow Optimization and Optimal Power Flow - J.C. Das, pag 332 (2017)
| If | | A B | | Vf |
| | = | | * | |
| It | | C D | | Vt |
:param tap_f: tap value at the from side
:param tap_t: tap value at the to side
:return: A, B, C, D parameters (float values not matrices)
"""
yt = 1.0 / (self.r + 1j * self.x)
# tap changer coefficients
ka = tap_f * tap_f
kb = tap_f * tap_t
kc = tap_t * tap_f
kd = tap_t * tap_t
return yt / ka, -yt / kb, -yt / kc, yt / kd
class TransformerType3p:
def __init__(self, name, conn_f: Connection, conn_t: Connection, r, x, Vf_rate, Vt_rate, rating=1e-6):
"""
Three-phase transformer type
:param conn_f: Connection type at the from bus
:param conn_t: Connection type at the to bus
:param r: leakage resistance in per unit
:param x: leakage reactance in per unit
:param Vf_rate: Voltage rate at the "from" side in kV
:param Vt_rate: Voltage rate at the "to" side in kV
:param rating: power rating in MVA
"""
self.name = name
# from-bus connection
self.conn_f = conn_f
# to-bus connection
self.conn_t = conn_t
# voltage rate at the from side
self.Vf = Vf_rate
# voltage rate at the to side
self.Vt = Vt_rate
# power rating in MVA
self.Srate = rating
self.number_of_phases = 3
# resistance
self.r = r
# reactance
self.x = x
def get_ABCD(self, tap_f=1.0, tap_t=1.0):
"""
ABCD parameters of a three-phase transformer depending on the connections
Reference: Load Flow Optimization and Optimal Power Flow - J.C. Das, pag 332 (2017)
| If | | A B | | Vf |
| | = | | * | |
| It | | C D | | Vt |
:param tap_f: tap value at the from side
:param tap_t: tap value at the to side
:return: A, B, C, D parameters (4 matrices of 3x3)
"""
# single-phase transformer admittance
yt = 1.0 / (self.r + 1j * self.x)
# fundamental sub matrices
YI = np.array([[yt, 0, 0], [0, yt, 0], [0, 0, yt]])
YII = (1 / 3) * np.array([[2 * yt, -yt, -yt], [-yt, 2 * yt, -yt], [-yt, -yt, 2 * yt]])
YIII = (1 / np.sqrt(3)) * np.array([[-yt, yt, 0], [0, -yt, yt], [yt, 0, -yt]])
# tap changer coefficients
ka = tap_f * tap_f
kb = tap_f * tap_t
kc = tap_t * tap_f
kd = tap_t * tap_t
if self.conn_f == Connection.WyeG and self.conn_t == Connection.WyeG:
# YI, YI, -YI, -YI = A, D, B, C
A, B, C, D = YI / ka, -YI / kb, -YI / kc, YI / kd
elif self.conn_f == Connection.WyeG and self.conn_t == Connection.Wye:
# YII, YII, -YII, -YII = A, D, B, C
A, B, C, D = YII / ka, -YII / kb, -YII / kc, YII / kd
elif self.conn_f == Connection.Wye and self.conn_t == Connection.WyeG:
# YII, YII, -YII, -YII = A, D, B, C
A, B, C, D = YII / ka, -YII / kb, -YII / kc, YII / kd
elif self.conn_f == Connection.Wye and self.conn_t == Connection.Wye:
# YII, YII, -YII, -YII = A, D, B, C
A, B, C, D = YII / ka, -YII / kb, -YII / kc, YII / kd
elif self.conn_f == Connection.WyeG and self.conn_t == Connection.Delta:
# YI, YII, YIII, YIII.transpose() = A, D, B, C
A, B, C, D = YI / ka, YIII / kb, YIII.transpose() / kc, YII / kd
elif self.conn_f == Connection.Wye and self.conn_t == Connection.Delta:
# YII, YII, YIII, YIII.transpose() = A, D, B, C
A, B, C, D = YII / ka, YIII / kb, YIII.transpose() / kc, YII / kd
elif self.conn_f == Connection.Delta and self.conn_t == Connection.Wye:
# YII, YIII, YIII.transpose(), YIII = A, D, B, C
A, B, C, D = YII / ka, YIII.transpose() / kb, YIII / kc, YIII / kd
elif self.conn_f == Connection.Delta and self.conn_t == Connection.WyeG:
# YII, YII, YIII.transpose(), YIII = A, D, B, C
A, B, C, D = YII / ka, YIII.transpose() / kb, YIII / kc, YII / kd
elif self.conn_f == Connection.Delta and self.conn_t == Connection.Delta:
# YII, YII, -YII, -YII = A, D, B, C
A, B, C, D = YII / ka, -YII / kb, -YII / kc, YII / kd
else:
raise Exception('Transformer connections not understood')
return A, B, C, D, A, D
class Transformer(Branch):
def __init__(self, name, transformer_type, bus_from: Bus, bus_to: Bus,
conn_from=Phases.ABC, conn_to=Phases.ABC):
"""
Model of a three-phase transformer
        :param name: name of the transformer
:param transformer_type: transformer type object
:param bus_from: bus from object
:param bus_to: bus to object
:param conn_from: vector of connection in the bus from i.e. [0, 1, 2]
:param conn_to: vector of connection in the bus to, i.e. [0, 1, 2]
"""
self.name = name
self.f = bus_from
self.t = bus_to
self.tap_f = 1.0
self.tap_t = 1.0
self.rating = transformer_type.Srate
self.transformer_type = transformer_type
self.number_of_phases = transformer_type.number_of_phases
self.phases_from = conn_from
self.phases_to = conn_to
# check connection compatibility
if len(self.phases_from) != len(self.phases_to):
raise Exception('Wrong phases')
if len(self.phases_from) != self.transformer_type.number_of_phases:
raise Exception('The number of phases of the line type do not match the specified connection phases')
if self.f.Vnom != self.transformer_type.Vf:
raise Exception(self.name + ':The transformer rated voltage at the from side does not '
'match the bus rated voltage')
if self.t.Vnom != self.transformer_type.Vt:
raise Exception(self.name + ':The transformer rated voltage at the to side does not '
'match the bus rated voltage')
def get_ABCD(self, Sbase):
"""
get the ABCD parameters
| If | | A B | | Vf |
| | = | | * | |
| It | | C D | | Vt |
        :param Sbase: Base power in MVA (not used, but kept for interface compatibility)
"""
return self.transformer_type.get_ABCD(self.tap_f, self.tap_t)
def __str__(self):
return self.name
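# Illustrative usage sketch (not part of the original module); the electrical
# values below are arbitrary placeholders:
#
#   tr_type = TransformerType3p('TR1',
#                               conn_f=Connection.Delta, conn_t=Connection.WyeG,
#                               r=0.01, x=0.05,
#                               Vf_rate=20.0, Vt_rate=0.4, rating=0.63)
#   A, B, C, D, _, _ = tr_type.get_ABCD(tap_f=1.0, tap_t=1.0)
#
# A Transformer instance then ties such a type to two Bus objects whose Vnom
# values must match Vf_rate and Vt_rate, otherwise __init__ raises an Exception.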
|
Wrist Length Satin Stretch Gloves. Wrist length satin gloves with 10% lycra or spandex for stretch. One size fits most, about sizes 5-9. These gloves are versatile and work well on a bare arm or under a jacket or dress sleeve. They are perfect for weddings, work well both outside and in, and can be worn for formal and informal events. Color: purple (hue code #A500CE).
from pts.evolve.simplega import GAEngine, RawScoreCriteria
from pts.evolve.genomes.list1d import G1DList
from pts.evolve import Mutators, Initializators
from pts.evolve import Selectors
from pts.evolve import Consts
import math
# This is the Rastrigin Function, a deceptive function
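# Its closed form is f(x) = 10*n + sum_i (x_i**2 - 10*cos(2*pi*x_i)),
# with global minimum f(0) = 0 at the origin.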
def rastrigin(genome):
n = len(genome)
total = 0
    for i in range(n):
total += genome[i]**2 - 10*math.cos(2*math.pi*genome[i])
return (10*n) + total
def run_main():
# Genome instance
genome = G1DList(20)
genome.setParams(rangemin=-5.2, rangemax=5.30, bestrawscore=0.00, rounddecimal=2)
genome.initializator.set(Initializators.G1DListInitializatorReal)
genome.mutator.set(Mutators.G1DListMutatorRealGaussian)
genome.evaluator.set(rastrigin)
# Genetic Algorithm Instance
ga = GAEngine(genome)
ga.terminationCriteria.set(RawScoreCriteria)
ga.setMinimax(Consts.minimaxType["minimize"])
ga.setGenerations(3000)
ga.setCrossoverRate(0.8)
ga.setPopulationSize(100)
ga.setMutationRate(0.06)
ga.evolve(freq_stats=50)
best = ga.bestIndividual()
    print(best)
if __name__ == "__main__":
run_main() |
Critical Review, or Annals of Literature, 3rd Series, 8 (May 1806), 108.
Historical Dialogues for young Persons, Vol. I. 8vo. 4s. Johnson. 1806.
The design of the volumes now offered to the public, is, by a selection of interesting narratives, scenes, and events, from popular historical productions, to overcome this inaptitude; it has also been the author’s object to lead the mind to reflect on the facts presented, without which the knowledge of them is but of little value.
The style is clear, uniform, and not ungraceful: we need only add, that the work is not designed for children, to whose capacities the reflections generally arising out of the subjects are by no means adapted, but for youth from the age of twelve years and upwards. |
import pdb
import time
import math
import copy
import warnings
import numpy as np
class HorsetailMatching(object):
'''Class for using horsetail matching within an optimization. The main
functionality is to evaluate the horsetail matching
metric (and optionally its gradient) that can be used with external
optimizers.
The code is written such that all arguments that can be used at the
initialization of a HorsetailMatching object can also be set as
attributes after creation to achieve exactly the same effect.
:param function fqoi: function that returns the quantity of interest, it
must take two ordered arguments - the value of the design variable
vector and the value of the uncertainty vector.
:param list prob_uncertainties: list of probabilistic uncertainties.
Each can be an instance of the UncertainParameter class,
in which case they will be sampled using the getSample() method.
        Alternatively each can be a function which returns sample(s) using
whatever method is desired.
:param list int_uncertainties: list of interval uncertainties [default []].
Each can be an instance of the IntervalParameter class,
in which case they will be sampled using the getSample() method.
        Alternatively each can be specified as a tuple/list of the bounds.
:param function ftarget: function that returns the value of the target
inverse CDF given a value in [0,1]. Can be a tuple that gives two
target fuctions, one for the upper bound and one for the lower bound on
the CDF under mixed uncertainties [default t(h) = 0]
:param bool/function jac: Argument that
specifies how to evaluate the gradient of the quantity of interest.
If False no gradients are propagated, if True the fqoi should return
a second argument g such that g_i = dq/dx_i. If a function, it should
have the same signature as fqoi but return g. [default False]
    :param str method: method with which to evaluate the horsetail matching
metric, can be 'empirical' or 'kernel' [default 'empirical' if
jac is False else default 'kernel'].
:param int samples_prob: number of samples to take from the
        probabilistic uncertainties. [default 1000]
:param int samples_int: number of samples to take from the
interval uncertainties. Note that under mixed uncertainties, a nested
loop is used to evaluate the metric so the total number of
samples will be samples_prob*samples_int (at each interval uncertainty
sample samples_prob samples are taken from the probabilistic
uncertainties). [default 50]
:param list integration_points: Only for method='kernel'.
The integration point values to use when evaluating the metric using
kernels [by default 100 points spread over 3 times the range of
the samples of q obtained the first time the metric is evaluated]
:param number kernel_bandwidth: Only for method='kernel'. The bandwidth
used in the kernel function [by default it is found the first time
the metric is evaluated using Scott's rule]
:param str kernel_type: Only for method='kernel'. The type of kernel to
use, can be 'gaussian', 'uniform', or 'triangle' [default 'gaussian'].
:param function surrogate: Surrogate that is created at every design
point to be sampled instead of fqoi. It should be a function that
takes two arguments - an array with values of the uncertainties at
which to fit the surrogate of size (num_quadrature_points,
num_uncertainties), and an array of quantity of interest values
corresponding to these uncertainty values to which to fit the surrogate
        of size (num_quadrature_points). It should return a function that
        predicts the qoi at an arbitrary value of the uncertainties.
[default None]
:param list surrogate_points: Only with a surrogate. List of points at
which fqoi is evaluated to give values to fit the surrogates to. These
are passed to the surrogate function along with the qoi evaluated at
these points when the surrogate is fitted [by default tensor
quadrature of 5 points in each uncertain dimension is used]
:param bool/function surrogate_jac: Only with a surrogate. Specifies how
to take surrogates of the gradient. It works similarly to the
jac argument: if False, the same surrogate is fitted to fqoi and each
component of its gradient, if True, the surrogate function is
expected to take a third argument - an array that is the gradient
at each of the quadrature points of size
(num_quadrature_points, num_design_variables). If a function, then
instead the array of uncertainty values and the array of gradient
values are passed to this function and it should return a function for
the surrogate model of the gradient.
:param bool reuse_samples: If True will reuse the same set of samples of
the uncertainties for evaluating the metric at any value of the
        design variables; if False it will re-sample every time evalMetric
is called [default True]
:param bool verbose: If True will print out details [default False].
*Example Declarations*::
>>> from horsetailmatching import HorsetailMatching,
UncertainParameter, PolySurrogate
>>> def myFunc(x, u): return x[0]*x[1] + u
>>> def myGrad(x, u): return [x[1], x[0]]
>>> def myTarg1(h): return 1-h**3
>>> def myTarg2(h): return 2-h**3
>>> u1 = UniformParameter()
>>> u2 = IntervalParameter()
>>> U = [u1, u2]
>>> poly = PolySurrogate(dimensions=2)
>>> poly_points = poly.getQuadraturePoints()
>>> theHM = HorsetailMatching(myFunc, U)
>>> theHM = HorsetailMatching(myFunc, U, jac=myGrad, method='kernel')
>>> theHM = HorsetailMatching(myFunc, U, ftarget=myTarg1)
>>> theHM = HorsetailMatching(myFunc, U, ftarget=(myTarg1, myTarg2))
>>> theHM = HorsetailMatching(myFunc, U, samples_prob=500,
samples_int = 50)
>>> theHM = HorsetailMatching(myFunc, U, method='kernel',
integration_points=numpy.linspace(0, 10, 100),
kernel_bandwidth=0.01)
>>> theHM = HorsetailMatching(myFunc, U,
surrogate=poly.surrogate, surrogate_jac=False,
surrogate_points=poly_points)
>>> theHM = HorsetailMatching(myFunc, U, verbose=True,
reuse_samples=True)
'''
def __init__(self, fqoi, prob_uncertainties, int_uncertainties=[],
ftarget=None, jac=False, method=None,
samples_prob=100, samples_int=50, integration_points=None,
kernel_bandwidth=None, kernel_type='gaussian', alpha=400,
surrogate=None, surrogate_points=None, surrogate_jac=False,
reuse_samples=True, verbose=False):
self.fqoi = fqoi
# self.uncertain_parameters = uncertain_parameters
self.prob_uncertainties = prob_uncertainties
self.int_uncertainties = int_uncertainties
self.ftarget = ftarget
self.jac = jac
self.method = method # Must be done after setting jac
self.samples_prob = samples_prob
self.samples_int = samples_int
self.integration_points = integration_points
self.kernel_bandwidth = kernel_bandwidth
self.kernel_type = kernel_type
self.alpha = alpha
self.reuse_samples = reuse_samples
self.u_samples = None
self.surrogate = surrogate
self.surrogate_points = surrogate_points
self.surrogate_jac = surrogate_jac
self.verbose = verbose
###############################################################################
## Properties with non-trivial setting behaviour
###############################################################################
# @property
# def uncertain_parameters(self):
# return self._u_params
#
# @uncertain_parameters.setter
# def uncertain_parameters(self, params):
# self._u_params = _makeIter(params)
# if len(self._u_params) == 0:
# raise ValueError('No uncertain parameters provided')
#
# self._u_int, self._u_prob = [], []
# for ii, u in enumerate(self._u_params):
# if u.is_interval_uncertainty:
# self._u_int.append((ii, u))
# else:
# self._u_prob.append((ii, u))
@property
def prob_uncertainties(self):
return self._prob_uncertainties
@prob_uncertainties.setter
def prob_uncertainties(self, params):
self._prob_uncertainties = _makeIter(params)
@property
def int_uncertainties(self):
return self._int_uncertainties
@int_uncertainties.setter
def int_uncertainties(self, params):
self._int_uncertainties = _makeIter(params)
@property
def samples_prob(self):
return self._samples_prob
@samples_prob.setter
def samples_prob(self, value):
if len(self.prob_uncertainties) > 0:
self._samples_prob = value
else:
self._samples_prob = 1
@property
def samples_int(self):
return self._samples_int
@samples_int.setter
def samples_int(self, value):
if len(self.int_uncertainties) > 0:
self._samples_int = value
else:
self._samples_int = 1
@property
def method(self):
return self._method
@method.setter
def method(self, value):
if value is None:
if self.jac is False:
self._method = 'empirical'
else:
self._method = 'kernel'
else:
self._method = value
@property
def ftarget(self):
return self._ftarget
@ftarget.setter
def ftarget(self, value):
def standardTarget(h):
return 0
try:
iter(value)
self._ftarg_u = value[0]
self._ftarg_l = value[1]
self._ftarget = value
except:
if value is None:
self._ftarget = standardTarget
else:
self._ftarget = value
self._ftarg_u = self._ftarget
self._ftarg_l = self._ftarget
@property
def u_samples(self):
return self._u_samples
@u_samples.setter
def u_samples(self, samples):
if samples is not None:
N_u = len(self.prob_uncertainties) + len(self.int_uncertainties)
if (not isinstance(samples, np.ndarray) or
samples.shape != (self.samples_int, self.samples_prob, N_u)):
raise TypeError('u_samples should be a np.array of size'
                    ' (samples_int, samples_prob, num_uncertainties)')
self._u_samples = samples
@property
def kernel_type(self):
return self._kernel_type
@kernel_type.setter
def kernel_type(self, value):
allowed_types = ['gaussian', 'uniform', 'triangle']
if value not in allowed_types:
raise ValueError('Kernel type must be one of'+
', '.join([str(t) for t in allowed_types]))
else:
self._kernel_type = value
##############################################################################
## Public Methods
##############################################################################
def evalSamples(self, x):
        '''Evaluates the samples of quantity of interest and its gradient
(if supplied) at the given values of the design variables
:param iterable x: values of the design variables, this is passed as
the first argument to the function fqoi
:return: (values of the quantity of interest, values of the gradient)
:rtype: Tuple
'''
# Make sure dimensions are correct
# u_sample_dimensions = self._processDimensions()
self._N_dv = len(_makeIter(x))
if self.verbose:
print('Evaluating surrogate')
if self.surrogate is None:
def fqoi(u):
return self.fqoi(x, u)
def fgrad(u):
return self.jac(x, u)
jac = self.jac
else:
fqoi, fgrad, surr_jac = self._makeSurrogates(x)
jac = surr_jac
u_samples = self._getParameterSamples()
if self.verbose:
print('Evaluating quantity of interest at samples')
q_samples, grad_samples = self._evalSamples(u_samples, fqoi, fgrad, jac)
return q_samples, grad_samples
def evalMetric(self, x, method=None):
'''Evaluates the horsetail matching metric at given values of the
design variables.
:param iterable x: values of the design variables, this is passed as
the first argument to the function fqoi
:param str method: method to use to evaluate the metric ('empirical' or
'kernel')
:return: metric_value - value of the metric evaluated at the design
point given by x
:rtype: float
*Example Usage*::
>>> def myFunc(x, u): return x[0]*x[1] + u
>>> u1 = UniformParameter()
>>> theHM = HorsetailMatching(myFunc, u)
>>> x0 = [1, 2]
>>> theHM.evalMetric(x0)
'''
# Make sure dimensions are correct
# u_sample_dimensions = self._processDimensions()
if self.verbose:
print('----------')
print('At design: ' + str(x))
q_samples, grad_samples = self.evalSamples(x)
if self.verbose:
print('Evaluating metric')
return self.evalMetricFromSamples(q_samples, grad_samples, method)
def evalMetricFromSamples(self, q_samples, grad_samples=None, method=None):
'''Evaluates the horsetail matching metric from given samples of the quantity
of interest and gradient instead of evaluating them at a design.
:param np.ndarray q_samples: samples of the quantity of interest,
size (M_int, M_prob)
        :param np.ndarray grad_samples: samples of the gradient,
size (M_int, M_prob, n_x)
:return: metric_value - value of the metric
:rtype: float
'''
# Make sure dimensions are correct
# u_sample_dimensions = self._processDimensions()
q_samples = np.array(q_samples)
if not (q_samples.shape[0] == self.samples_int and
q_samples.shape[1] == self.samples_prob):
raise ValueError('Shape of q_samples should be [M_int, M_prob]')
if grad_samples is not None:
grad_samples = np.array(grad_samples)
if not (grad_samples.shape[0] == self.samples_int and
grad_samples.shape[1] == self.samples_prob):
raise ValueError('''Shape of grad_samples
should be [M_int, M_prob, n_dv]''')
if method is None:
method = self.method
if method.lower() == 'empirical':
return self._evalMetricEmpirical(q_samples, grad_samples)
elif method.lower() == 'kernel':
return self._evalMetricKernel(q_samples, grad_samples)
else:
            raise ValueError('Unsupported metric evaluation method')
def getHorsetail(self):
'''Function that gets vectors of the horsetail plot at the last design
evaluated.
:return: upper_curve, lower_curve, CDFs - returns three parameters,
the first two are tuples containing pairs of x/y vectors of the
upper and lower bounds on the CDFs (the horsetail plot). The
third parameter is a list of x/y tuples for individual CDFs
propagated at each sampled value of the interval uncertainties
*Example Usage*::
>>> def myFunc(x, u): return x[0]*x[1] + u
>>> u = UniformParameter()
>>> theHM = HorsetailMatching(myFunc, u)
>>> (x1, y1, t1), (x2, y2, t2), CDFs = theHM.getHorsetail()
>>> matplotlib.pyplot(x1, y1, 'b')
>>> matplotlib.pyplot(x2, y2, 'b')
>>> for (x, y) in CDFs:
... matplotlib.pyplot(x, y, 'k:')
>>> matplotlib.pyplot.show()
'''
if hasattr(self, '_ql'):
ql, qu, hl, hu = self._ql, self._qu, self._hl, self._hu
qh, hh = self._qh, self._hh
if self._qis is not None:
ql, hl = _appendPlotArrays(ql, hl, self._qis)
qu, hu = _appendPlotArrays(qu, hu, self._qis)
CDFs = []
for qi, hi in zip(qh, hh):
CDFs.append((qi, hi))
upper_target = [self._ftarg_u(h) for h in hu]
upper_curve = (qu, hu, upper_target)
lower_target = [self._ftarg_l(h) for h in hl]
lower_curve = (ql, hl, lower_target)
return upper_curve, lower_curve, CDFs
else:
raise ValueError('''The metric has not been evaluated at any
design point so the horsetail does not exist''')
##############################################################################
## Private methods ##
##############################################################################
def _evalMetricEmpirical(self, q_samples, grad_samples=None):
M_prob = self.samples_prob
M_int = self.samples_int
if M_int > 1:
alpha = self.alpha
else:
alpha = 1
h_htail = np.zeros([M_int, M_prob])
q_htail = np.zeros([M_int, M_prob])
q_l = np.zeros(M_prob)
q_u = np.zeros(M_prob)
if grad_samples is not None:
g_htail = np.zeros([M_int, M_prob, self._N_dv])
g_l = np.zeros([M_prob, self._N_dv])
g_u = np.zeros([M_prob, self._N_dv])
Du_grad = np.zeros(self._N_dv)
Dl_grad = np.zeros(self._N_dv)
for ii in np.arange(M_int):
# Get empirical CDF by sorting samples at each value of intervals
sortinds = np.argsort(q_samples[ii, :])
q_htail[ii, :] = q_samples[ii, sortinds]
M = q_samples.shape[1]
h_htail[ii, :] = [(1./M)*(0.5 + j) for j in range(M)]
if grad_samples is not None:
for ix in np.arange(self._N_dv):
g_htail[ii, :, ix] = grad_samples[ii, sortinds, ix]
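        # Envelope step: at each probability level jj the upper and lower
        # horsetail curves take the extreme q values across the interval-
        # uncertainty samples (smoothed via _extalg/_extgrad when gradients
        # are required).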
for jj in np.arange(M_prob):
q_u[jj] = min(q_htail[:, jj])
q_l[jj] = max(q_htail[:, jj])
if grad_samples is not None:
q_u[jj] = _extalg(q_htail[:, jj], -1*alpha)
q_l[jj] = _extalg(q_htail[:, jj], alpha)
for ix in np.arange(self._N_dv):
gtemp = _extgrad(q_htail[:, jj], -1*alpha)
g_u[jj, ix] = gtemp.dot(g_htail[:, jj, ix])
gtemp = _extgrad(q_htail[:, jj], alpha)
g_l[jj, ix] = gtemp.dot(g_htail[:, jj, ix])
h_u, h_l = h_htail[0], h_htail[0] # h is same for all ECDFs
t_u = [self._ftarg_u(hi) for hi in h_u]
t_l = [self._ftarg_l(hi) for hi in h_u]
self._ql, self._qu, self._hl, self._hu = q_l, q_u, h_l, h_u
self._qh, self._hh = q_htail, h_htail
self._tl, self._tu = t_l, t_u
self._qis = None
Du = (1./M_prob)*sum((q_u - t_u)**2)
Dl = (1./M_prob)*sum((q_l - t_l)**2)
dhat = np.sqrt(Du + Dl)
if self.verbose:
print('Metric: ' + str(dhat))
if grad_samples is not None:
for ix in np.arange(self._N_dv):
Du_grad[ix] = (1./M_prob)*sum(2*(q_u - t_u)*g_u[:, ix])
Dl_grad[ix] = (1./M_prob)*sum(2*(q_l - t_l)*g_l[:, ix])
dhat_grad = (0.5*(Du+Dl)**(-0.5)*(Du_grad + Dl_grad))
if self.verbose:
print('Gradient: ' + str([g for g in dhat_grad]))
return dhat, dhat_grad
else:
return dhat
def _getKernelParameters(self, q_samples):
# If kernel bandwidth not specified, find it using Scott's rule
if self.kernel_bandwidth is None:
if len(self.prob_uncertainties) > 0:
if abs(np.max(q_samples) - np.min(q_samples)) < 1e-6:
bw = 1e-6
else:
bw = 0.33*((4/(3.*q_samples.shape[1]))**(1/5.)
*np.std(q_samples[0,:]))
else:
bw = 1e-3
self.kernel_bandwidth = bw
else:
bw = self.kernel_bandwidth
        ## Initialize arrays and prepare calculation
q_min = np.amin(q_samples)
q_max = np.amax(q_samples)
if self.integration_points is None:
q_range = q_max - q_min
qis_full = np.linspace(q_min - q_range, q_max + q_range, 10000)
self.integration_points = qis_full
else:
qis_full = np.array(self.integration_points)
ii_low, ii_high = 0, len(qis_full)
try:
ii_high, qi_high = next((iq, qi) for iq, qi in enumerate(qis_full) if
qi > q_max + 20*bw)
except StopIteration:
warnings.warn('Sample found higher than range of integration points')
try:
iiN_low, qi_low = next((iq, qi) for iq, qi in enumerate(qis_full[::-1]) if
qi < q_min - 20*bw)
ii_low = len(qis_full) - (iiN_low+1)
except StopIteration:
warnings.warn('Sample found lower than range of integration points')
qis = qis_full[ii_low:ii_high+1] # Only evaluate over range of samples
self._qis = qis
return qis, bw
def _evalMetricKernel(self, q_samples, grad_samples=None):
qis, bw = self._getKernelParameters(q_samples)
N_quad = len(qis)
M_prob = self.samples_prob
M_int = self.samples_int
if M_int > 1:
alpha = self.alpha
else:
alpha = 1
fhtail = np.zeros([N_quad, M_int])
qhtail = np.zeros([N_quad, M_int])
if grad_samples is not None:
fht_grad = np.zeros([N_quad, M_int, self._N_dv])
hu_grad = np.zeros([N_quad, self._N_dv])
hl_grad = np.zeros([N_quad, self._N_dv])
Du_grad = np.zeros(self._N_dv)
Dl_grad = np.zeros(self._N_dv)
# ALGORITHM 1 from publication
# Evaluate all individual CDFs and their gradients
for mm in np.arange(M_int):
qjs = q_samples[mm, :]
rmat = qis.reshape([N_quad, 1])-qjs.reshape([1, M_prob])
if grad_samples is not None:
Kcdf, Kprime = _kernel(rmat, M_prob, bw=bw,
ktype=self.kernel_type, bGrad=True)
for ix in np.arange(self._N_dv):
grad_js = grad_samples[mm, :, ix]
fht_grad[:, mm, ix] = Kprime.dot(-1*grad_js)
else:
Kcdf = _kernel(rmat, M_prob, bw=bw, ktype=self.kernel_type,
bGrad=False)
fhtail[:, mm] = Kcdf.dot(np.ones([M_prob, 1])).flatten()
qhtail[:, mm] = qis
# ALGORITHM 2 from publication
# Find horsetail curves - envelope of the CDFs and their gradients
# In Matrix form
if grad_samples is None:
hu = np.max(fhtail, axis=1).flatten()
hl = np.min(fhtail, axis=1).flatten()
else:
hu = _extalg(fhtail, alpha, axis=1).flatten()
hl = _extalg(fhtail, -1*alpha, axis=1).flatten()
Su_prime = _extgrad(fhtail, alpha, axis=1)
Sl_prime = _extgrad(fhtail, -1*alpha, axis=1)
for kx in np.arange(self._N_dv):
fis_grad = fht_grad[:, :, kx]
for ii in np.arange(N_quad):
hu_grad[ii, kx] = Su_prime[ii, :].dot(fis_grad[ii, :])
hl_grad[ii, kx] = Sl_prime[ii, :].dot(fis_grad[ii, :])
# ALGORITHM 3 from publication
# Evaluate overall metric and gradient using matrix multipliation
tu = np.array([self._ftarg_u(hi) for hi in hu])
tl = np.array([self._ftarg_l(hi) for hi in hl])
Du = _matrix_integration(qis, hu, tu)
Dl = _matrix_integration(qis, hl, tl)
dhat = float(np.sqrt(Du + Dl))
self._ql, self._qu, self._hl, self._hu = qis, qis, hl, hu
self._qh, self._hh = qhtail, fhtail
self._tl, self._tu = tl, tu
if self.verbose:
print('Metric: ' + str(dhat))
if grad_samples is not None:
tu_pr = np.array([_finDiff(self._ftarg_u, hi) for hi in hu])
tl_pr = np.array([_finDiff(self._ftarg_l, hi) for hi in hl])
for kx in np.arange(self._N_dv):
Du_grad[kx] = _matrix_grad(qis, hu, hu_grad[:, kx], tu, tu_pr)
Dl_grad[kx] = _matrix_grad(qis, hl, hl_grad[:, kx], tl, tl_pr)
dhat_grad = (0.5*(Du+Dl)**(-0.5)*(Du_grad + Dl_grad))
if self.verbose:
print('Gradient: ' + str([g for g in dhat_grad]))
return dhat, dhat_grad
else:
return dhat
def _makeSurrogates(self, x):
# Get quadrature points
if self.surrogate_points is None:
N_u = len(self.prob_uncertainties) + len(self.int_uncertainties)
mesh = np.meshgrid(*[np.linspace(-1, 1, 5) for n in np.arange(N_u)],
copy=False)
u_sparse = np.vstack([m.flatten() for m in mesh]).T
else:
u_sparse = self.surrogate_points
N_sparse = u_sparse.shape[0]
q_sparse = np.zeros(N_sparse)
# Get surrogates in correct form
if not self.jac:
for iu, u in enumerate(u_sparse):
q_sparse[iu] = self.fqoi(x, u)
surr_qoi = self.surrogate(u_sparse, q_sparse)
def fqoi(u):
return surr_qoi(u)
fgrad = False
surr_jac = False
else:
g_sparse = np.zeros([N_sparse, self._N_dv])
for iu, u in enumerate(u_sparse):
if isinstance(self.jac, bool) and self.jac:
q_sparse[iu], g_sparse[iu, :] = self.fqoi(x, u)
else:
q_sparse[iu] = self.fqoi(x, u)
g_sparse[iu, :] = self.jac(x, u)
if not self.surrogate_jac:
fpartial = [lambda u: 0 for _ in np.arange(self._N_dv)]
surr_qoi = self.surrogate(u_sparse, q_sparse)
for k in np.arange(self._N_dv):
fpartial[k] = self.surrogate(u_sparse, g_sparse[:, k])
def surr_grad(u):
return [f(u) for f in fpartial]
else:
if isinstance(self.surrogate_jac, bool) and self.surrogate_jac:
surr_qoi, surr_grad = self.surrogate(
u_sparse, q_sparse, g_sparse)
else:
surr_qoi = self.surrogate(u_sparse, q_sparse)
surr_grad = self.surrogate_jac(u_sparse, g_sparse)
def fqoi(u):
return(surr_qoi(u))
def fgrad(u):
return(surr_grad(u))
surr_jac = fgrad
return fqoi, fgrad, surr_jac
def _getParameterSamples(self):
N_u = len(self.prob_uncertainties) + len(self.int_uncertainties)
get_new = True
if self.reuse_samples and self.u_samples is not None:
if self.u_samples.shape != (self.samples_int, self.samples_prob, N_u):
if self.verbose:
print('''Stored samples do not match current dimensions,
getting new samples''')
else:
get_new = False
if get_new:
if self.verbose:
print('Getting uncertain parameter samples')
N_u = len(self.prob_uncertainties) + len(self.int_uncertainties)
N_prob = len(self.prob_uncertainties)
N_int = len(self.int_uncertainties)
# u_samples = np.zeros([self.samples_int, self.samples_prob, N_u])
u_samples_prob = np.zeros([self.samples_int, self.samples_prob,
len(self.prob_uncertainties)])
u_samples_int = np.zeros([self.samples_int, self.samples_prob,
len(self.int_uncertainties)])
u_ints = np.zeros([self.samples_int, len(self.int_uncertainties)])
for kk, uk in enumerate(self.int_uncertainties):
if callable(uk):
samps = np.array(uk()).flatten()
                    if len(samps) != self.samples_int:
                        raise Exception('Number of samples returned not equal ' +
                            'to specified number of samples: please set number of ' +
                            'samples with samples_int attribute')
else:
u_ints[:, kk] = samps
elif isinstance(uk, (tuple, list)): ## See if given as tuple/list of bounds
lb, ub = uk[0], uk[1]
u_ints[:, kk] = np.random.uniform(lb, ub, size=self.samples_int)
u_ints[0, kk] = lb
u_ints[-1, kk] = ub
elif hasattr(uk, 'getSample'):
for ii in np.arange(self.samples_int):
u_ints[ii, kk] = uk.getSample()
else:
raise TypeError('Unsupported interval uncertainty type')
u_samples_int = np.tile(u_ints[:, np.newaxis], (1, self.samples_prob, 1))
u_probs = np.zeros([self.samples_prob, len(self.prob_uncertainties)])
for kk, uk in enumerate(self.prob_uncertainties):
if callable(uk):
samps = np.array(uk()).flatten()
if len(samps) != self.samples_prob:
raise Exception('Number of samples returned not equal ' +
'to specified number of samples: please set number of ' +
'samples with samples_prob attribute')
else:
u_probs[:, kk] = samps
elif hasattr(uk, 'getSample'):
for jj in np.arange(self.samples_prob):
u_probs[jj, kk] = uk.getSample()
else:
raise TypeError('Unsupported probabilistic uncertainty type')
u_samples_prob = np.tile(u_probs[np.newaxis, :], (self.samples_int, 1, 1))
u_samples = np.concatenate((u_samples_int, u_samples_prob), axis=2)
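            # Resulting layout is (samples_int, samples_prob, N_u): the interval
            # uncertainty values occupy the first columns and the probabilistic
            # ones follow, so fqoi receives one such combined row at a time.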
self.u_samples = u_samples
return u_samples
else:
if self.verbose:
print('Re-using stored samples')
return self.u_samples
def _evalSamples(self, u_samples, fqoi, fgrad, jac):
# Array of shape (M_int, M_prob)
grad_samples = None
q_samples = np.zeros([self.samples_int, self.samples_prob])
if not jac:
for ii in np.arange(q_samples.shape[0]):
for jj in np.arange(q_samples.shape[1]):
q_samples[ii, jj] = fqoi(u_samples[ii, jj])
else:
grad_samples = np.zeros([self.samples_int, self.samples_prob,
self._N_dv])
for ii in np.arange(q_samples.shape[0]):
for jj in np.arange(q_samples.shape[1]):
if isinstance(jac, bool) and jac:
(q, grad) = fqoi(u_samples[ii, jj])
q_samples[ii, jj] = float(q)
grad_samples[ii, jj, :] = [_ for _ in grad]
else:
q_samples[ii, jj] = fqoi(u_samples[ii, jj])
grad_samples[ii, jj, :] = fgrad(u_samples[ii, jj])
self.grad_samples = grad_samples
self.q_samples = q_samples
return q_samples, grad_samples
##############################################################################
## Private functions
##############################################################################
def _extalg(xarr, alpha=100, axis=None):
'''Given an array xarr of values, smoothly return the max/min'''
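    # Exponentially weighted average of the entries: alpha > 0 approximates
    # the maximum, alpha < 0 the minimum; larger |alpha| gives a sharper but
    # less smooth approximation of the true extreme value.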
return (np.sum(xarr * np.exp(alpha*xarr), axis=axis, keepdims=True)/
np.sum(np.exp(alpha*xarr), axis=axis, keepdims=True))
def _extgrad(xarr, alpha=100, axis=None):
'''Given an array xarr of values, return the gradient of the smooth min/max
    with respect to each entry in the array'''
term1 = (np.exp(alpha*xarr)/
np.sum(np.exp(alpha*xarr), axis=axis, keepdims=True))
term2 = 1 + alpha*(xarr - _extalg(xarr, alpha, axis=axis))
return term1*term2
def _ramp(x, width):
return _minsmooth(1, _maxsmooth(0, (x - width/2)*(1/width)))
def _trint(x, width):
w = width/2.
xb = _maxsmooth(-w, _minsmooth(x, w))
y1 = 0.5 + xb/w + xb**2/(2*w**2)
y2 = xb/w - xb**2/(2*w**2)
return _minsmooth(y1, 0.5) + _maxsmooth(y2, 0.0)
def _minsmooth(a, b, eps=0.0000):
return 0.5*(a + b - np.sqrt((a-b)**2 + eps**2))
def _maxsmooth(a, b, eps=0.0000):
return 0.5*(a + b + np.sqrt((a-b)**2 + eps**2))
def _step(x):
return 1 * (x > 0)
def _erf(r):
    ## Numerical implementation of the error function for matrix compatibility
# save the sign of x
sign = np.sign(r)
x = np.absolute(r)
# constants
a1 = 0.254829592
a2 = -0.284496736
a3 = 1.421413741
a4 = -1.453152027
a5 = 1.061405429
p = 0.3275911
# A&S formula 7.1.26
t = 1.0/(1.0 + p*x)
y = 1.0 - (((((a5*t + a4)*t) + a3)*t + a2)*t + a1)*t*np.exp(-x*x)
return sign*y # erf(-x) = -erf(x)
def _kernel(points, M, bw, ktype='gauss', bGrad=False):
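    # Builds the kernel CDF matrix: entry [i, j] is the contribution of sample j
    # to the estimated CDF at integration point i. With bGrad=True the matching
    # kernel PDF matrix is also returned for use in the gradient computation.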
if ktype == 'gauss' or ktype == 'gaussian':
KernelMat = (1./M)*((1 + _erf((points/bw)/np.sqrt(2.)))/2.)
# KernelMat = np.zeros(points.shape)
# for ir in np.arange(points.shape[0]):
# for ic in np.arange(points.shape[1]):
# KernelMat[ir, ic] = (1./M)*((1. +
# math.erf((points[ir, ic]/bw)/math.sqrt(2.)))/2.)
elif ktype == 'uniform' or ktype == 'uni':
KernelMat = (1./M)*_ramp(points, width=bw*np.sqrt(12))
elif ktype == 'triangle' or ktype == 'tri':
KernelMat = (1./M)*_trint(points, width=bw*2.*np.sqrt(6))
if bGrad:
if ktype == 'gauss' or ktype == 'gaussian':
const_term = 1.0/(M * np.sqrt(2*np.pi*bw**2))
KernelGradMat = const_term * np.exp(-(1./2.) * (points/bw)**2)
elif ktype == 'uniform' or ktype == 'uni':
width = bw*np.sqrt(12)
const = (1./M)*(1./width)
KernelGradMat = const*(_step(points+width/2) -
_step(points-width/2))
elif ktype == 'triangle' or ktype == 'tri':
width = bw*2.*np.sqrt(6)
const = (1./M)*(2./width)
KernelGradMat = const*(_ramp(points+width/4, width/2) -
_ramp(points-width/4, width/2))
return KernelMat, KernelGradMat
else:
return KernelMat
def _matrix_integration(q, h, t):
''' Returns the dp metric for a single horsetail
curve at a given value of the epistemic uncertainties'''
N = len(q)
# correction if CDF has gone out of trapezium range
if h[-1] < 0.9: h[-1] = 1.0
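    # W carries trapezoidal quadrature weights on its diagonal, so the quadratic
    # form below approximates the integral of (q - t)^2 over the CDF height h.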
W = np.zeros([N, N])
for i in range(N):
W[i, i] = 0.5*(h[min(i+1, N-1)] - h[max(i-1, 0)])
dp = (q - t).T.dot(W).dot(q - t)
return dp
def _matrix_grad(q, h, h_dx, t, t_prime):
''' Returns the gradient with respect to a single variable'''
N = len(q)
W = np.zeros([N, N])
Wprime = np.zeros([N, N])
for i in range(N):
W[i, i] = 0.5*(h[min(i+1, N-1)] - h[max(i-1, 0)])
Wprime[i, i] = \
0.5*(h_dx[min(i+1, N-1)] - h_dx[max(i-1, 0)])
tgrad = np.array([t_prime[i]*h_dx[i] for i in np.arange(N)])
grad = 2.0*(q - t).T.dot(W).dot(-1.0*tgrad) \
+ (q - t).T.dot(Wprime).dot(q - t)
return grad
def _appendPlotArrays(q, h, integration_points):
q = np.insert(q, 0, q[0])
h = np.insert(h, 0, 0)
q = np.insert(q, 0, min(integration_points))
h = np.insert(h, 0, 0)
q = np.append(q, q[-1])
h = np.append(h, 1)
q = np.append(q, max(integration_points))
h = np.append(h, 1)
return q, h
def _finDiff(fobj, dv, f0=None, eps=10**-6):
if f0 is None:
f0 = fobj(dv)
fbase = copy.copy(f0)
fnew = fobj(dv + eps)
return float((fnew - fbase)/eps)
def _makeIter(x):
try:
iter(x)
return [xi for xi in x]
except:
return [x]
def _intervalSample(returned_samples, bounds):
if len(returned_samples) < 1:
return bounds[0]
elif len(returned_samples) < 2:
return bounds[1]
else:
return np.random.uniform(bounds[0], bounds[1])
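
# -----------------------------------------------------------------------------
# Minimal usage sketch (illustration only, mirroring the class docstring).
# The quantity of interest and the uncertainty below are invented for the
# example; the probabilistic uncertainty is passed as a callable returning
# exactly samples_prob samples, which is one of the accepted input forms.
# -----------------------------------------------------------------------------
if __name__ == '__main__':

    def example_fqoi(x, u):
        # toy quantity of interest: quadratic in the design, shifted by u
        return (x[0] - 1.0)**2 + x[1]**2 + u[0]

    def example_uncertainty():
        return np.random.uniform(-1.0, 1.0, 1000)

    theHM = HorsetailMatching(example_fqoi, [example_uncertainty],
                              samples_prob=1000, verbose=True)
    print('Metric value: ' + str(theHM.evalMetric([0.5, 0.25])))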
|
When it comes to online casinos being household names, few can trump Bet365. Equally popular as a leading UK sportsbook and as an online casino, Bet365 holds a lot of sway in the industry. The brand has a wealth of experience, having debuted in 2001, and is owned and operated by Hillside Media Casino. Bet365 can be played as an instant-play (browser-based), downloadable or mobile gaming site.
This site is one of a handful of top domains which are eCOGRA certified, and it holds a license issued out of Malta. The domain offers multilingual gameplay, although it mainly appeals to UK-based players and offers the GBP currency option. Newbies who choose to sign up and play at Bet365 can claim a New Player Welcome Bonus worth £100 when they deposit for the first time at this site.
Why Play At Bet365 Online?
Few players will find a reason to grumble at the types of games they can find at Bet365. They dish out games from the likes of Microgaming, Playtech and Net Entertainment, as well as BetSoft Gaming, NextGen Gaming, Play ‘n Go, Quickspin, WMS Gaming, Leander Games, Genesis Gaming, Bally Tech, Eyecon, Big Time Gaming, Rabcat, Blueprint Gaming, and more.
With so many games in their collection, players might think it a challenge to find what they are looking for. However, Bet365 have broken their games down into manageable genres, some of which include featured games, slots, card games, table games, video poker, scratch cards and jackpot games. Expect to find plenty of major cash prizes with the casino’s range of progressive jackpots, chief amongst which are Playtech’s DC superheroes range of jackpot titles, and their Age of Gods series of slots.
Known table games you can play at this casino include live dealer games. These include Blackjack, baccarat, heads-up hold’em, roulette, and other variants. Slots easily form the largest collection of games at the site, though, and games worth a peek include any of the Age of Gods slots, Grease, Everybody’s Jackpot, Halloween Fortune, The Dark Knight, Frankie Dettori’s Magic Seven. If you want to find a greater array of games (the casino primarily features solely Playtech games), then pop into the Games or Vegas sites with your Bet365 account. Most of the titles from other software providers are held there.
Of course, you can also use your Bet365 Casino account to place sports bets. That is primarily what Bet365 is known for. Betting markets open to players at Bet365 include American football, baseball, basketball, boxing, cricket, Gaelic sports, golf, greyhounds and horse racing, ice hockey, motor sports, rugby, snooker, soccer, tennis, and half-a-dozen other options. Betting odds (check oddschecker) can also be displayed in fractional, decimal and American formats. Other famous sports betting sites are Betfair and Skybet.
Bet365 makes it extra simple for players to wager on their favourite games from their mobile phone or tablet. If you want a streamlined, more mobile-specific online gambling experience, there is an app available from the App Store and Google Play Store, guaranteeing compatibility with modern iOS and Android-powered devices.
Alternatively, players can simply open the Bet365 website in the web browser on their devices. This will see a mobile-friendly version of the site accessible. Bear in mind, though, that the games collection and the array of sporting bets may be a touch limited (compared to the instant play, browser-based site) on the apps.
Few online casinos are as well-regulated or as secure as Bet365. This online casino and sports betting site is secured using Thawte technology and is one of the major eCOGRA certified casinos you can choose to play at. They are impeccably well licensed out of Malta, and rarely ever receive complaints against their service. Given the sheer volume of television commercials, bus stop posters, football stadium hoardings and magazine ads, Bet365 is a brand that players clearly recognise and trust.
The customer support at the site is just as you might expect. You can choose to obtain assistance via FAQ, live chat, e-mail or telephone. Their customer support team is on hand around the clock. They provide you with answers to any questions and queries you may have. The FAQ guide is particularly useful as it covers all the basic do’s and don’ts of the casino and answers plenty of the more commonly asked queries by players.
UK-based players have lots of potential banking options open to them at Bet365 Casino. Maestro, VISA, MasterCard, VISA Electron, Apple Pay, PayPal, Google Pay, Paysafecard, Skrill, Neteller, and Entropay all offer instant transaction times, with minimum deposit limits ranging from £5 to £10. Fast bank transfer, bank wire and cheques understandably take a lot longer and often have £100 minimum deposit limits.
Withdrawal options include VISA, MasterCard, Maestro, VISA Electron, bank wire, PayPal, Skrill, Neteller, Paysafecard, Entropay and cheques. Transaction times range from under 24 hours for e-wallets, to 1-5 days for most bank options. Cheques can take between 5 and 28 days to land in your account. There are no fees for deposits or withdrawals at Bet365. Players can also efficiently and freely transfer their funds to the various other sectors of the Bet365 empire, such as their sportsbook, Vegas or general games sites.
A new player who chooses to play with Bet365 and deposits at least £10 into their account is eligible to receive a 100% casino bonus on that deposit. This can double the sum to the tune of up to £100, with players required to enter the BONUS100 promo code to claim the offer. This welcome bonus has some of the most agreeable wagering requirements around, at just 15x.
Bet365 is not short of other top deals, either. They regularly offer cashback promos on live dealer games and are renowned for their monthly specials. The sportsbook may also provide an alternative welcome bonus for new players, which may be worth checking out if you favour sports bets. Keep in mind that Bet365 runs a loyalty scheme, which can see players snap up points based on their wagering habits. Over time, those points can be used to claim rewards when playing at the site.
# =============================================================================
# Copyright [2013] [Kevin Carter]
# License Information :
# This software has no warranty, it is provided 'as is'. It is your
# responsibility to validate the behavior of the routines and its accuracy
# using the code provided. Consult the GNU General Public license for further
# details (see GNU General Public License).
# http://www.gnu.org/licenses/gpl.html
# =============================================================================
import os
from genastack.common import utils
ARGS = utils.get_role_config('openssl')
PROJECT_URL = ARGS.get(
'project_url',
'http://ftp.postgresql.org/pub/source/v9.2.7/postgresql-9.2.7.tar.gz'
)
TEMP_PATH = utils.return_temp_dir()
WORK_PATH = utils.return_rax_dir()
LIBS_PATH = utils.return_rax_dir(path='openstack/lib')
INCLUDE_PATH = utils.return_rax_dir(path='openstack/include')
NAME = 'postgresql-9.2.7.tgz'
INSTALL_COMMANDS = [
'./configure --prefix=%s' % WORK_PATH,
'make install'
]
EXPORTS = [
'CFLAGS=-I%s -I/usr/include/x86_64-linux-gnu' % INCLUDE_PATH,
'LDFLAGS=-L%s -L/usr/lib/x86_64-linux-gnu' % LIBS_PATH,
'LD_RUN_PATH=%s' % LIBS_PATH
]
BUILD_DATA = {
'postgres_connector': {
'help': 'Install upstream postgresql_connector.',
'build': [
{
'get': {
'url': PROJECT_URL,
'path': TEMP_PATH,
'name': NAME,
'md5sum': 'a61a63fc08b0b27a43b6ca325f49ab4b',
'uncompress': True
},
'export': EXPORTS,
'not_if_exists': os.path.join(LIBS_PATH, 'postgresql'),
'build_commands': INSTALL_COMMANDS,
},
],
'package_install': {
'apt': {
'packages': [
'bison',
'flex'
]
}
}
}
}
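
# =============================================================================
# Sketch only (this is not how genastack itself consumes BUILD_DATA): the
# build entry above can be reproduced by hand by applying EXPORTS as
# environment variables and running INSTALL_COMMANDS from the unpacked
# postgresql source directory.
# =============================================================================
def run_install_commands(source_dir):
    import subprocess
    env = dict(os.environ)
    for export in EXPORTS:                # e.g. 'LDFLAGS=-L<libs_path> ...'
        key, _, value = export.partition('=')
        env[key] = value
    for command in INSTALL_COMMANDS:      # './configure --prefix=...', 'make install'
        subprocess.check_call(command, shell=True, cwd=source_dir, env=env)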
|
We are a team of professional video editors with more than 10 years of experience. We have been working with quite a few clients in different areas - from wedding and family to drone and marketing videos and post-production. Now we are coming to patreon.com to offer our services exclusively!
We can edit any other footage you would like transformed into something nice and professional.
All of our productions are delivered in the highest possible quality in any format you would like.
We could work with 4K footage with no problem.
Our goal is satisfied and happy clients.
Revisions and fast delivery are always guaranteed.
We are using Adobe Premiere, After Effects, Adobe Audition and Da Vinci Resolve.
Please make sure to contact us as every video project is different and needs our personal attention.
Basic + cuts/ audio adjustments/ text etc. Contact us for more info.
For big projects like wedding, commercials etc. Contact us for more information.
Regular video editing for small projects. Contact us for more information.
When we reach $1000 per month, we'll offer one month of editing at a big discount.
import os
import sys
import versioneer
from distutils.core import setup
from setuptools import find_packages
from distutils.extension import Extension
from distutils.command.sdist import sdist as _sdist
from distutils.command.install import install as _install
try:
import numpy as np
except ImportError:
print("ERROR: Numpy not found, please install numpy")
sys.exit(1)
USE_CYTHON = ("--cython" in sys.argv) or ("USE_CYTHON" in os.environ)
CYTHON_INSTALLED = False
try:
import Cython
CYTHON_INSTALLED = True
except ImportError:
    if USE_CYTHON:
        print("ERROR: Cython flag was given but cython was not found")
        sys.exit(1)
#
source_pyx = "cyhdfs3/cyhdfs3.pyx"
source_c = "cyhdfs3/cyhdfs3.c"
if not os.path.exists(source_c):
if CYTHON_INSTALLED:
print("Generated `.c` files not found will default to use cython")
USE_CYTHON = True
else:
print("ERROR: Generated `.c` files not found and Cython not installed, please install cython")
sys.exit(1)
if USE_CYTHON:
source = source_pyx
else:
source = source_c
if USE_CYTHON:
from distutils.extension import Extension
from Cython.Compiler.Options import directive_defaults
directive_defaults["linetrace"] = True
directive_defaults["embedsignature"] = True
macros = [("CYTHON_TRACE", "1")]
else:
macros = []
include_dirs = ["/usr/local/include", "/usr/local/include/hdfs"]
include_dirs.append(np.get_include())
library_dirs = ["/usr/local/lib/"]
# If conda PREFIX is present add conda paths
prefix = os.getenv("PREFIX", None)
if prefix is not None:
include_dirs.append(os.path.join(prefix, "include"))
include_dirs.append(os.path.join(prefix, "include", "hdfs"))
library_dirs.append(os.path.join(prefix, "lib"))
ext_modules = [
Extension(name="cyhdfs3.cyhdfs3",
sources=[source],
include_dirs=include_dirs,
library_dirs=library_dirs,
libraries=["hdfs3", "avro", "m", "snappy"],
define_macros=macros
)
]
# Versioneer class
cmdclass = versioneer.get_cmdclass()
# Cythonize on `sdist`: Always to make sure the compiled Cython files in the pkg are up-to-date
class sdist(_sdist):
def run(self):
from Cython.Build import cythonize
cythonize(["cyhdfs3/*.pyx"])
_sdist.run(self)
cmdclass["sdist"] = sdist
# Cythonize on `install`: If specified
class install(_install):
def run(self):
if USE_CYTHON:
from Cython.Build import cythonize
global ext_modules
ext_modules = cythonize(ext_modules)
_install.run(self)
cmdclass["install"] = install
with open("requirements.txt") as f:
required = f.read().splitlines()
setup(
name="cyhdfs3",
version=versioneer.get_version(),
author="Daniel Rodriguez",
author_email="df.rodriguez143@gmail.com",
url="https://github.com/danielfrg/cyhdfs3",
cmdclass=cmdclass,
license="Apache License Version 2.0, January 2004",
install_requires=required,
packages=find_packages(),
ext_modules=ext_modules,
entry_points="""
[console_scripts]
hdfs3=cyhdfs3.cli:main
""",
)
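
# Sketch (not part of the package): the two ways this script enables Cython.
# Passing --cython on the command line or setting the USE_CYTHON environment
# variable both force cythonize() to regenerate the C sources before building.
def build_inplace(use_cython=False):
    import subprocess
    env = dict(os.environ)
    if use_cython:
        env["USE_CYTHON"] = "1"   # picked up by the USE_CYTHON check at the top
    subprocess.check_call(
        [sys.executable, "setup.py", "build_ext", "--inplace"], env=env)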
|
Xiaomi officially announced the arrival of a new walkie talkie under the Mijia brand. As mentioned, this device is called the Xiaomi Mijia Walkie Talkie 1S, and it is named after the Mijia Walkie Talkie released last March. The first generation of the Mijia Walkie Talkie was well received at the time of sale, and even won the Good Design Prize in 2017 for its sleek design. The device was available in white and blue colors and claimed a standby time of up to 8 days.
The new Xiaomi Mijia Walkie Talkie 1S comes with a lighter and thinner design, and its performance is much more powerful. The device is also portable and now comes with a handle on the back. It has 3W of transmission power, which means its range has increased. This model offers up to 5 days of standby time. Just like the first-generation walkie talkie, it is equipped with an FM radio, so it offers entertainment as well as communication. The device also supports azimuth sharing.
The sale of xiaomi mijia walkie talkie 1s starts October 16th. This model is also available in white and blue. |
# -*- coding: utf-8 -*-
# This file is part of the Horus Project
__author__ = 'Jesús Arroyo Torrens <jesus.arroyo@bq.com>'
__copyright__ = 'Copyright (C) 2014-2015 Mundo Reader S.L.'
__license__ = 'GNU General Public License v2 http://www.gnu.org/licenses/gpl2.html'
import cv2
import time
import struct
import platform
import threading
import numpy as np
from horus.engine.driver.driver import Driver
from horus.engine.calibration.pattern import Pattern
from horus.engine.calibration.calibration_data import CalibrationData
from horus.engine.algorithms.image_capture import ImageCapture
from horus.engine.algorithms.image_detection import ImageDetection
from horus.engine.algorithms.laser_segmentation import LaserSegmentation
from horus.engine.algorithms.point_cloud_generation import PointCloudGeneration
system = platform.system()
"""
Calibrations:
- Autocheck Algorithm
- Camera Intrinsics Calibration
- Laser Triangulation Calibration
- Platform Extrinsics Calibration
"""
class CalibrationCancel(Exception):
def __init__(self):
Exception.__init__(self, _("CalibrationCancel"))
class Calibration(object):
"""Generic class for threading calibration"""
def __init__(self):
self.driver = Driver()
self.pattern = Pattern()
self.calibration_data = CalibrationData()
self.image_capture = ImageCapture()
self.image_detection = ImageDetection()
self.laser_segmentation = LaserSegmentation()
self.point_cloud_generation = PointCloudGeneration()
# TODO: Callbacks to Observer pattern
self._before_callback = None
self._progress_callback = None
self._after_callback = None
self._is_calibrating = False
def set_callbacks(self, before, progress, after):
self._before_callback = before
self._progress_callback = progress
self._after_callback = after
def start(self):
if not self._is_calibrating:
if self._before_callback is not None:
self._before_callback()
if self._progress_callback is not None:
self._progress_callback(0)
self._is_calibrating = True
threading.Thread(target=self._start).start()
def _start(self):
pass
def cancel(self):
self._is_calibrating = False
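
# -----------------------------------------------------------------------------
# Sketch (not part of Horus) of how a concrete calibration plugs into the
# base class above: override _start(), report through the registered callbacks
# and honour the _is_calibrating flag cleared by cancel(). The value passed to
# the after-callback here is an assumption made for the example.
# -----------------------------------------------------------------------------
class ExampleCalibration(Calibration):

    def _start(self):
        completed = True
        for step in range(10):
            if not self._is_calibrating:  # cancel() was called from the UI
                completed = False
                break
            if self._progress_callback is not None:
                self._progress_callback(10 * (step + 1))
            time.sleep(0.1)  # stands in for real capture/detection work
        self._is_calibrating = False
        if self._after_callback is not None:
            self._after_callback(completed)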
|
Green Force Keto is a weight loss dietary supplement; it helps you get rid of those extra pounds and get back in shape in a natural and healthy way. You should keep up a keto diet plan and exercise plan consistently to get the results you want quickly. There are several reasons for using the Green Force Keto supplement, which is a natural composition: it mainly helps you bring down the excess fat in your body and turn that fat into energy to maintain your body's health.
QUICKBOOKS SUPPORT: Resolve Error -6000 when opening a company file. You are trying to open your company file when you suddenly get a -6000, XXXX error. This error is usually followed by 3 or 4 more digits and a message indicating that you are unable to open the company file.
a ketogenic weight-reduction plan. Mainly, the Keto Ultra Diet transitions your body from utilizing glucose to using ketones as its most important energy source. It is a quick way to diminish fat. This diet plan can also be used by people who are already on a low-carb diet but want to make their eating routine work faster; they can use this supplement in an efficient way.
# -*- coding: utf-8 -*-
# Copyright (c) 2014 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""cros deploy: Deploy the packages onto the target device."""
from __future__ import print_function
import sys
from chromite.cli import command
from chromite.cli import deploy
from chromite.lib import commandline
from chromite.lib import cros_logging as logging
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
@command.CommandDecorator('deploy')
class DeployCommand(command.CliCommand):
"""Deploy the requested packages to the target device.
This command assumes the requested packages are already built in the
chroot. This command needs to run inside the chroot for inspecting
the installed packages.
Note: If the rootfs on your device is read-only, this command
remounts it as read-write. If the rootfs verification is enabled on
your device, this command disables it.
"""
EPILOG = """
To deploy packages:
cros deploy device power_manager cherrypy
cros deploy device /path/to/package
To uninstall packages:
cros deploy --unmerge cherrypy
For more information of cros build usage:
cros build -h
"""
@classmethod
def AddParser(cls, parser):
"""Add a parser."""
super(cls, DeployCommand).AddParser(parser)
cls.AddDeviceArgument(parser, positional=True)
parser.add_argument(
'packages', help='Packages to install. You can specify '
'[category/]package[:slot] or the path to the binary package. '
'Use @installed to update all installed packages (requires --update).',
nargs='+')
parser.add_argument(
'--board',
help='The board to use. By default it is automatically detected. You '
'can override the detected board with this option.')
parser.add_argument(
'--no-strip', dest='strip', action='store_false', default=True,
help='Do not run strip_package to filter out preset paths in the '
'package. Stripping removes debug symbol files and reduces the size '
'of the package significantly. Defaults to always strip.')
parser.add_argument(
'--unmerge', dest='emerge', action='store_false', default=True,
help='Unmerge requested packages.')
parser.add_argument(
'--root', default='/',
help="Package installation root, e.g. '/' or '/usr/local'"
" (default: '%(default)s').")
parser.add_argument(
'--no-clean-binpkg', dest='clean_binpkg', action='store_false',
default=True, help='Do not clean outdated binary packages. '
' Defaults to always clean.')
parser.add_argument(
'--emerge-args', default=None,
help='Extra arguments to pass to emerge.')
parser.add_argument(
'--private-key', type='path', default=None,
help='SSH identify file (private key).')
parser.add_argument(
'--no-ping', dest='ping', action='store_false', default=True,
help='Do not ping the device before attempting to connect to it.')
parser.add_argument(
'--dry-run', '-n', action='store_true',
help='Output deployment plan but do not deploy anything.')
advanced = parser.add_argument_group('Advanced options')
advanced.add_argument(
'--force', action='store_true',
help='Ignore sanity checks, just do it.')
# TODO(garnold) Make deep and check installed the default behavior.
advanced.add_argument(
'--update', action='store_true',
help='Check installed versions on target (emerge only).')
advanced.add_argument(
'--deep', action='store_true',
help='Install dependencies. Implies --update.')
advanced.add_argument(
'--deep-rev', action='store_true',
help='Install reverse dependencies. Implies --deep.')
def Run(self):
"""Run cros deploy."""
commandline.RunInsideChroot(self)
self.options.Freeze()
deploy.Deploy(
self.options.device,
self.options.packages,
board=self.options.board,
emerge=self.options.emerge,
update=self.options.update,
deep=self.options.deep,
deep_rev=self.options.deep_rev,
clean_binpkg=self.options.clean_binpkg,
root=self.options.root,
strip=self.options.strip,
emerge_args=self.options.emerge_args,
ssh_private_key=self.options.private_key,
ping=self.options.ping,
force=self.options.force,
dry_run=self.options.dry_run)
logging.info('cros deploy completed successfully.')
|
Wesham members are greatly saddened this week by the loss of former club chairman Graham Vickers who sadly passed away at the weekend aged 60 after 5 years battling throat cancer. This follows the loss of long standing member, treasurer and 10k race director Les Ward.
Rob Danson scored another clear win at the penultimate fixture of the 2018 Inter Club Grand Prix in Astley Park in Chorley with a fast time of 24:43 for the hilly 4.5-mile 2 lapper. A strong duo of Steve Swarbrick and Steve Littler came in 6th and 7th in 27:50 and 28:00 respectively with David Taylor running 28:55 for 13th place. Despite this good start the Wesham Open team finished 3rd with Steve Abbott 28th (30:19); Lee Barlow 29th (30:22); Paul Hetherington 32nd (30:30); Tessa Robinson 36th (2nd woman 31:07); Andrew Harling 45th (31:34) and Jason Barlow 50th (22:00). The Wesham women finished 5th with Tessa Robinson 2nd; Helen Lawrenson 9th (92nd 34:41); Tracey Hulme 21st (144th 38:46); Maureen Danson 37th (185th 41:37); and Jo McCaffery 60th (227th 44:56). The Women W40+ were 4th with Lawrenson 4th; Hulme 9th; Danson 19th; McCaffery 34th and Pauline Eccleston 35th (230th 45:18). The Open Masters were 2nd with Swarbrick 2nd; Littler 3rd; Abbott 12th; L Barlow 13th; Hetherington 15th and Robinson 18th, while the M50s were 4th (Nigel Shepherd 10th and 72nd 33:28; John Collier 20th / 93rd 34:49; Peter Cruse 24th / 110th 35:42 and Stuart Clayton 33rd / 128th 37:15); and the M60s 2nd with Shepherd 2nd; Collier 5th; and George Kennedy 11th (134th 37:57). Overall Wesham are 2nd in the Open, W40+ and M60 team rankings, and 3rd in the overall masters, with the women and M50s 4th. Full Wesham results from Wednesday: Thomas Crabtree was 52nd in 32:16; Carl Groome 58th (32:36); Mark Renshall 60th (32:48); Leigh Doughty 61st (33:01); Matty Chamberlain 63rd (33:05); Neil Gregson 65th (33:14); Matthew Atherton 68th (33:24); Elliot Costello 98th (34:58); Paul Lancashire 103rd (34:14); Lee Nixon 113th (35:52); Rob Wallace 122nd (36:39); Martin Allison 124th (36:46); Phil Leaver 152nd (39:13); Ryan Azzopardi 172nd (40:45); James Danson 187th (41:48); Dave Young 194th (42:14); Paul Carter 201st (42:42); Graham Cunliffe 218th (44:14); Peter Bartlett 225th (44:49); Diane Blagden 62nd and 233rd (45:33); Kerry Eccles 64th and 237th (45:50); Jonathon Sanderson 242nd (46:07); Nicola Ball 73rd and 251st (47:36); Anne Berry 83rd and 263rd (48:48); Tanya Barlow 84th and 264th (48:49); Julie Rooney 90th and 275th (50:30); James Birchall 282nd (51:52); Emma Davies 100th and 286th (52:10); Sue Rigby 109th and 298th (59:22); Antoinette Holton 110th and 299th (59:25); and Sophie Scott 111th and 300th (59:26).
Rob Danson tore up the course record at the Catforth Canter 5k on Saturday evening, steaming round to a win in 15:02. It was a Wesham club championship counter and there were many personal best performances logged. Paul Hetherington finished 8th in 18:19 (2nd M45) with Lee Barlow chasing in 9th with 18:32 (3rd M45) finishing in tandem with Andrew Harling 10th with a massive improvement on his PB with 18:33. Carl Groome (4th M45) came in 16th with 20:01; Lee Nixon 21st in 20:31; Jonathan Lawson 25th in 21:52; and Ben Wrigley 33rd with 22:51. Kath Hoyer finished 7th woman and 1st W55 in 23:40 (39th overall); Sharon Cooper 9th woman, 2nd W45 and 41st in 23:53; Alan Hudson was 1st M65 in his return to racing finishing 42nd in 23:56; Maureen Danson 10th and 1st W50 (46th overall) in 24:58; Pauline Eccleston 11th and 47th with 25:10; Diane Blagden 15th and 53rd in 26:25; Gemma Owen 16th (3rd W35) and 44th with 27:15; Tanya Barlow 17th and 58th in 27:31; Julie Rooney 21st and 64th in 28:38 with James Danson 65th also 28:38.
There were two new events on the local athletics circuit with the City of Preston 5- and 10-mile races on Sunday. James Mulvany scorched around the 5 Mile race finishing in 3rd place with a time of 29:36, closely followed by Steve Abbott who came in 4th in 30:15 with the pair 1st and 2nd in the M40 category. In the 10-mile race Helen Lawrenson finished 7th woman and 1st W45 in 1:14:04, 46th place overall; Paul Lancashire was 58th in 1:17:20; and Stuart Clayton 60th with 1:17:37. There were 209 in the 10-miler and the winning time was 57:57, the only runner under the hour, and 107 in the 5-miler with the fastest time 26:12 with just 3 runners inside 30 minutes.
In the Lytham Hall parkrun on Saturday morning Helen Lawrenson ran 21:03 for the 2nd fastest woman with an age graded performance of 77.75% for W45. Jason Bladgen had 21:38 for M45 66.02%; Martin Allison 22:41 with an M35 grade of 58.12%; Phil Leaver 23:57 for M65 70.01%; Andrew Moore 26:39 with M55 58.16%; Diane Blagden 27:37 and W50 62.22%; Peter Rooney 27:38 with M55 57.52%; James Birchall 29:41 and 43.51%; Nicola Carter 31:12 for W35 48.08%; and Ann Knowles 35:51 for W45 46.96%. Chris Pike ran St Helens parkrun in 25:33 for M40 54.66%, his best run since 16th June. Stuart Clayton ran the Southport run in 21:43 for M50 67.92%, his second run there. The Gores travelled to the Clumber Park run in Nottinghamshire near Worksop for Vicky’s 100th different venue. Megan ran 32:48 for JW11 50.71% with Steven and Vicky in support in 32:49 and 32:50. Emma Lund was in action at the Lancaster parkrun with a time of 22:57 for W35 65.14%. Stephen Browne was left at Blackpool parkrun where he ran 22:44 for M50 65.91% while he was deserted for the 2nd running of the new Fleetwood Promenade run where Paul Freary took the course record in 17:03 on a windy day. Rob Wallace ran 22:15 for 57.98%; Dave Marsland 22:34 with M60 72.30%; Robert Brown 23:49 and M60 67.95%; George Kennedy 23:55 and M60 67.04%; Graham Cunliffe 30:26 with M60 53.61% and Lynn Brown 31:31 and W55 58.22%. |
#!/usr/bin/env python3
import textutil, source
from json import JSONEncoder
class DocumentEncoder(JSONEncoder):
def default(self, document):
if isinstance(document, Document):
return {'interesting': document.interesting, 'data': document.data}
else:
            return JSONEncoder.default(self, document)
class Document():
def __init__(self, data, interesting = None):
self.data = data
self.interesting = interesting
self.predicted_interesting = None
self.score = None
self._new = True
@property
def id(self):
return self.data['id']
@property
def url(self):
pass
@property
def title(self):
pass
@property
def text(self):
pass
@property
def user(self):
pass
@property
def image_url(self):
pass
@property
def classification_text(self):
text_items = filter(None, self.classification_text_items)
text = ' '.join(list(map(textutil.normalized_text, text_items)))
return text
@property
def classification_text_items(self):
return []
@property
def source(self):
return source.name_from_module(self.__module__)
def __repr__(self):
return '{}.{}({}, interesting={})'.format(self.__module__, self.__class__.__name__, self.data, self.interesting)
@property
def new(self):
current = self._new
self._new = False
return current
@property
def children(self):
return []
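
# -----------------------------------------------------------------------------
# Sketch (not part of the codebase) of a concrete Document subclass and of
# serialising it with DocumentEncoder; the keys inside `data` and the URL
# pattern are invented for the example.
# -----------------------------------------------------------------------------
if __name__ == '__main__':
    import json

    class ExampleDocument(Document):
        @property
        def url(self):
            return 'https://example.com/items/{}'.format(self.id)

        @property
        def text(self):
            return self.data.get('text', '')

        @property
        def classification_text_items(self):
            return [self.text]

    doc = ExampleDocument({'id': 42, 'text': 'Hello world'}, interesting=True)
    print(json.dumps(doc, cls=DocumentEncoder))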
|
Generate leads, close deals & manage your stages, automatically from your Magento webshop. With the Infusionsoft’s CRM integration you can grow like a company twice your size while connecting like a real human being.
Use Infusionsoft to keep track of customers that request a quote from your Magento webshop via the Magento Quotation Manager (Cart2Quote). Use this add-on to synchronize Quote Requests from your Magento store with stages in your Infusionsoft account.
Customers requesting a quote via your Magento store will be added as a contact in Infusionsoft.
Magento Quotation Requests will be synced automatically with Infusionsoft by adding or updating an opportunity.
Create a dedicated Infusionsoft track for Quote Requests via Magento and link Statuses to Probabilities and Milestones.
Infusionsoft will sync the sales rep assigned to the Magento Quote with the mapped Infusionsoft User. |
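As an illustration only (the status names, stage names and probabilities below are hypothetical, not taken from the add-on's documentation), the status-to-milestone link described above amounts to a simple mapping along these lines:
# Hypothetical sketch of linking Magento quote statuses to Infusionsoft
# opportunity milestones and probabilities; the real values depend on your setup.
QUOTE_STATUS_TO_STAGE = {
    'quote_requested': {'milestone': 'Quote Requested', 'probability': 10},
    'quote_sent':      {'milestone': 'Proposal Sent',   'probability': 50},
    'quote_accepted':  {'milestone': 'Won',             'probability': 100},
    'quote_expired':   {'milestone': 'Lost',            'probability': 0},
}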
import numpy as np
import nengo
import nengo.utils.function_space
import nengo.spa as spa
from nengo.spa import Vocabulary
from . import forcing_functions
from . import oscillator
from . import point_attractor
nengo.dists.Function = nengo.utils.function_space.Function
nengo.FunctionSpace = nengo.utils.function_space.FunctionSpace
def generate(input_signal, alpha=1000.0):
beta = alpha / 4.0
# generate the Function Space
forces, _, goals = forcing_functions.load_folder(
'models/locomotion_trajectories', rhythmic=True,
alpha=alpha, beta=beta)
# make an array out of all the possible functions we want to represent
force_space = np.vstack(forces)
# use this array as our space to perform svd over
fs = nengo.FunctionSpace(space=force_space, n_basis=10)
# store the weights for each movement
weights_a = [] # ankle
weights_k = [] # knee
weights_h = [] # hip
# NOTE: things are added to weights based on the order files are read
for ii in range(int(len(goals) / 6)):
forces = force_space[ii*6:ii*6+6]
# load up the forces to be output by the forcing function
# calculate the corresponding weights over the basis functions
weights_a.append(np.hstack([
np.dot(fs.basis.T, forces[0]), # ankle 1
np.dot(fs.basis.T, forces[1])])) # ankle 2
weights_h.append(np.hstack([
np.dot(fs.basis.T, forces[2]), # hip 1
np.dot(fs.basis.T, forces[3])])) # hip 2
weights_k.append(np.hstack([
np.dot(fs.basis.T, forces[4]), # knee 1
np.dot(fs.basis.T, forces[5])])) # knee 2
# Create our vocabularies
sps_labels = ['GALLOP', 'RUNNING', 'WALKING']
rng = np.random.RandomState(0)
dimensions = 50 # some arbitrary number
vocab_input = Vocabulary(dimensions=dimensions, rng=rng)
vocab_dmp_weights_a = Vocabulary(dimensions=fs.n_basis*2, rng=rng)
vocab_dmp_weights_k = Vocabulary(dimensions=fs.n_basis*2, rng=rng)
vocab_dmp_weights_h = Vocabulary(dimensions=fs.n_basis*2, rng=rng)
for ii, (label, wa, wk, wh) in enumerate(zip(
sps_labels, weights_a, weights_k, weights_h)):
vocab_input.parse(label) # randomly generate input vector
vocab_dmp_weights_a.add(label, wa)
vocab_dmp_weights_k.add(label, wk)
vocab_dmp_weights_h.add(label, wh)
net = spa.SPA()
net.config[nengo.Ensemble].neuron_type = nengo.LIFRate()
with net:
config = nengo.Config(nengo.Ensemble)
config[nengo.Ensemble].neuron_type = nengo.Direct()
with config:
# --------------------- Inputs --------------------------
# def input_func(t):
# return vocab_input.parse(input_signal).v
# net.input = nengo.Node(input_func)
net.input = spa.State(dimensions, subdimensions=10,
vocab=vocab_input)
# ------------------- Point Attractors --------------------
zero = nengo.Node([0])
net.a1 = point_attractor.generate(
n_neurons=1000, alpha=alpha, beta=beta)
nengo.Connection(zero, net.a1.input[0], synapse=None)
net.a2 = point_attractor.generate(
n_neurons=1000, alpha=alpha, beta=beta)
            nengo.Connection(zero, net.a2.input[0], synapse=None)
net.k1 = point_attractor.generate(
n_neurons=1000, alpha=alpha, beta=beta)
nengo.Connection(zero, net.k1.input[0], synapse=None)
net.k2 = point_attractor.generate(
n_neurons=1000, alpha=alpha, beta=beta)
nengo.Connection(zero, net.k2.input[0], synapse=None)
net.h1 = point_attractor.generate(
n_neurons=1000, alpha=alpha, beta=beta)
nengo.Connection(zero, net.h1.input[0], synapse=None)
net.h2 = point_attractor.generate(
n_neurons=1000, alpha=alpha, beta=beta)
nengo.Connection(zero, net.h2.input[0], synapse=None)
# -------------------- Oscillators ----------------------
kick = nengo.Node(nengo.utils.functions.piecewise({0: 1, .05: 0}),
label='kick')
osc = oscillator.generate(net, n_neurons=3000, speed=.01)
osc.label = 'oscillator'
nengo.Connection(kick, osc[0])
# ------------------- Forcing Functions --------------------
with config:
net.assoc_mem_a = spa.AssociativeMemory(
input_vocab=vocab_input,
output_vocab=vocab_dmp_weights_a,
wta_output=False)
nengo.Connection(net.input.output, net.assoc_mem_a.input)
net.assoc_mem_k = spa.AssociativeMemory(
input_vocab=vocab_input,
output_vocab=vocab_dmp_weights_k,
wta_output=False)
nengo.Connection(net.input.output, net.assoc_mem_k.input)
net.assoc_mem_h = spa.AssociativeMemory(
input_vocab=vocab_input,
output_vocab=vocab_dmp_weights_h,
wta_output=False)
nengo.Connection(net.input.output, net.assoc_mem_h.input)
# -------------------- Product for decoding -----------------------
product_a1 = nengo.Network('Product A1')
nengo.networks.Product(
n_neurons=1000, dimensions=fs.n_basis, net=product_a1)
product_a2 = nengo.Network('Product A2')
nengo.networks.Product(
n_neurons=1000, dimensions=fs.n_basis, net=product_a2)
product_h1 = nengo.Network('Product H1')
nengo.networks.Product(
n_neurons=1000, dimensions=fs.n_basis, net=product_h1)
product_h2 = nengo.Network('Product H2')
nengo.networks.Product(
n_neurons=1000, dimensions=fs.n_basis, net=product_h2)
product_k1 = nengo.Network('Product K1')
nengo.networks.Product(
n_neurons=1000, dimensions=fs.n_basis, net=product_k1)
product_k2 = nengo.Network('Product K2')
nengo.networks.Product(
n_neurons=1000, dimensions=fs.n_basis, net=product_k2)
# get the largest basis function value for normalization
max_basis = np.max(fs.basis*fs.scale)
domain = np.linspace(-np.pi, np.pi, fs.basis.shape[0])
domain_cossin = np.array([np.cos(domain), np.sin(domain)]).T
for ff, product in zip(
[net.assoc_mem_a.output[:fs.n_basis],
net.assoc_mem_a.output[fs.n_basis:],
net.assoc_mem_k.output[:fs.n_basis],
net.assoc_mem_k.output[fs.n_basis:],
net.assoc_mem_h.output[:fs.n_basis],
net.assoc_mem_h.output[fs.n_basis:]],
[product_a1, product_a2, product_k1,
product_k2, product_h1, product_h2]):
for ii in range(fs.n_basis):
# find the value of a basis function at a value of (x, y)
target_function = nengo.utils.connection.target_function(
domain_cossin, fs.basis[:, ii]*fs.scale/max_basis)
nengo.Connection(osc, product.B[ii], **target_function)
# multiply the value of each basis function at x by its weight
nengo.Connection(ff, product.A)
nengo.Connection(product_a1.output, net.a1.input[1],
transform=np.ones((1, fs.n_basis)) * max_basis)
nengo.Connection(product_a2.output, net.a2.input[1],
transform=np.ones((1, fs.n_basis)) * max_basis)
nengo.Connection(product_k1.output, net.k1.input[1],
transform=np.ones((1, fs.n_basis)) * max_basis)
nengo.Connection(product_k2.output, net.k2.input[1],
transform=np.ones((1, fs.n_basis)) * max_basis)
nengo.Connection(product_h1.output, net.h1.input[1],
transform=np.ones((1, fs.n_basis)) * max_basis)
nengo.Connection(product_h2.output, net.h2.input[1],
transform=np.ones((1, fs.n_basis)) * max_basis)
# -------------------- Output ------------------------------
net.output = nengo.Node(size_in=6, label='output')
nengo.Connection(net.a1.output, net.output[0], synapse=0.01)
nengo.Connection(net.a2.output, net.output[1], synapse=0.01)
nengo.Connection(net.k1.output, net.output[2], synapse=0.01)
nengo.Connection(net.k2.output, net.output[3], synapse=0.01)
nengo.Connection(net.h1.output, net.output[4], synapse=0.01)
nengo.Connection(net.h2.output, net.output[5], synapse=0.01)
# add in the goal offsets
nengo.Connection(net.assoc_mem_a.output[[-2, -1]],
net.output[[0, 1]], synapse=None)
nengo.Connection(net.assoc_mem_k.output[[-2, -1]],
net.output[[2, 3]], synapse=None)
nengo.Connection(net.assoc_mem_h.output[[-2, -1]],
net.output[[4, 5]], synapse=None)
# create a node to give a plot of the represented function
ff_plot_a = fs.make_plot_node(domain=domain, lines=2,
ylim=[-1000000, 1000000])
nengo.Connection(net.assoc_mem_a.output, ff_plot_a, synapse=0.1)
ff_plot_k = fs.make_plot_node(domain=domain, lines=2,
ylim=[-1000000, 1000000])
nengo.Connection(net.assoc_mem_k.output, ff_plot_k, synapse=0.1)
ff_plot_h = fs.make_plot_node(domain=domain, lines=2,
ylim=[-1000000, 1000000])
nengo.Connection(net.assoc_mem_h.output, ff_plot_h, synapse=0.1)
return net
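# Usage sketch (illustrative, not part of the original file): assumes the
# trajectory data in 'models/locomotion_trajectories' is available and that
# this version of Nengo provides the function_space utilities used above.
#
#     model = generate(input_signal='WALKING')
#     with nengo.Simulator(model) as sim:
#         sim.run(1.0)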
|
During inflammation, vascular permeability is increased by various proteolytic events, such as the generation of bradykinin, that augment local tissue responses by enabling tissue penetration of serum proteins, including complement and acute-phase proteins. Proteases also govern inflammatory responses by processing extracellular matrix proteins and soluble bioactive mediators. We quantified changes in the proteome and the nature of protein amino termini (the N-terminome) and the altered abundance of murine proteases and inhibitors during skin inflammation. Through analysis of the N-terminome by iTRAQ-TAILS, we identified cotranslational and posttranslational αN-acetylation motifs, quantitative increases in protein abundance, and qualitative changes in the proteolytic signature during inflammation. Of the proteins identified in normal skin, about half were cleaved, and phorbol ester–induced inflammation increased the proportion of cleaved proteins, including chemokines and complement proteins, that were processed at previously uncharacterized sites. In response to phorbol ester–induced inflammation, mice deficient in matrix metalloproteinase 2 (MMP2) showed reduced accumulation of serum proteins in the skin and exhibited different proteolytic networks from those of wild-type mice. We found that the complement 1 (C1) inhibitor attenuated the increase in serum protein accumulation in inflamed skin. Cleavage and inactivation of the C1 inhibitor by MMP2 increased complement activation and bradykinin generation in wild-type mice, leading to increased vessel permeability during inflammation, which was diminished in Mmp2−/− mice. Thus, our systems-level analysis of proteolysis dissected cleavage events associated with skin inflammation and demonstrated that loss of a single protease could perturb the proteolytic signaling network and enhance inflammation. |
import factory
from rest_framework.reverse import reverse
from waldur_mastermind.marketplace.tests import factories as marketplace_factories
from .. import models
class GoogleCredentialsFactory(factory.DjangoModelFactory):
class Meta:
model = models.GoogleCredentials
service_provider = factory.SubFactory(marketplace_factories.ServiceProviderFactory)
client_id = factory.Sequence(lambda n: 'client_id-%s' % n)
project_id = factory.Sequence(lambda n: 'project_id-%s' % n)
client_secret = factory.Sequence(lambda n: 'client_secret-%s' % n)
@classmethod
def get_url(cls, credentials=None):
if credentials is None:
credentials = GoogleCredentialsFactory()
return (
'http://testserver'
+ reverse(
'google_credential-detail',
kwargs={'uuid': credentials.service_provider.uuid.hex},
)
+ 'google_credentials/'
)
@classmethod
def get_authorize_url(cls, credentials=None):
if credentials is None:
credentials = GoogleCredentialsFactory()
return 'http://testserver' + reverse(
'google-auth-detail',
kwargs={'uuid': credentials.service_provider.uuid.hex},
)
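# Usage sketch (illustrative): the factory and its URL helpers can be used
# directly from API tests, e.g.
#
#     credentials = GoogleCredentialsFactory()
#     detail_url = GoogleCredentialsFactory.get_url(credentials)
#     authorize_url = GoogleCredentialsFactory.get_authorize_url(credentials)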
|
The Children Welfare Consultation Offices Are Destroying Families in Japan, Now!!
The following story is told in the words of "Leona".
Leona was confined in a room of a house in the mountains and kept away from ordinary society for one and a half months, in a very cold winter, without heating. Takita and Uetani kept watch over Leona all the time. Takita gave him only one blanket.
"Your father used violence on you."
"Your father is bad. Your father is a criminal."
"No, my father is good. My father didn't use violence on me."
Every time Leona opposed their comments about his father, Chan and Takita struck him with their fists. When Leona fell to the floor, they kicked his body, head, or back.
Again they struck him with their fists, and when he fell to the floor, they kicked him.
Uetani also stayed at the house and she also struck him with her fists. The main person who used violence against Leona was Takita.
Then, one day, Leona bitterly opposed their comments about his father. Nobuyuki Takita (a helper of DV victims; DV = Domestic Violence) and Chan Hannnyon (Uetani's lawyer) held him in their arms and threw him out of the window of the upstairs room.
Leona held on to a handrail of the balcony, but soon he dropped to the ground. He hit the ground hard and hurt his lower back and his legs. For a long time, he could not move because his lower back and his legs hurt so much. But Uetani, Chan, and Takita didn't take him to see a doctor.
"If you don't say 'My father used violence against me', we will make the police arrest your father."
On 20th December 2007, at about 8:15 A.M., Leona was taken away to the house in the mountains. (He was a grade 5 student of Tastumi No.2 Elementary School and was walking on his way to school.) He was confined in the house for about one and a half months.
In February of the next year (2008), Uetani, Chan, and Takita moved him to another place where they could confine him: an apartment in Edogawa ward of Tokyo. This was Uetani's home. Leona was confined in the apartment again.
While Leona was confined in the apartment, he went to Seishin No.3 Elementary School for only 2 weeks. He studied in a special room without other students.
In Uetani's apartment, Leona found subpoenas from the Family Court addressed to his father when he opened the filing cabinet.
By 21st March 2008, Leona's legs and lower back had become better, so he went out shopping with Uetani. In the supermarket, he said to Uetani, "I'm just going to the toilet."
He then ran away from Uetani, called his father, asked him for help, and went back to his father's house. Leona's father took his son to a lawyer who belonged to the organization "Children Human Rights Number 110". Leona told his story for 4 hours: the kidnapping, the prison-like life, and the violence by Takita, Chan, and Uetani.
"I have just talked about Leona's story with the Sumida Children Welfare Consultation Office. The officers of Sumida understood that Uetani, Chan, and Takita used violence on Leona."
"The staff of the Sumida Children Welfare Consultation Office want Leona to come to the office and stay there for 1 or 2 days to talk with the police about the details of the case."
But on 22nd March 2008, the Sumida Children Welfare Consultation Office issued a temporary protection order against Leona and confined him in a temporary protection institution in Shinjuku ward of Tokyo.
But soon, the Sumida Office sent Leona to an institution named “Carillon”. “Carillon” is operated by “Children Human Rights Number 110”. This institution, “Carillon”, is entrusted with children’s upbringing by the Tokyo Metropolitan Government.
But this was not a good reason, the officers quibbled.
Leona was confined in “Carillon” for about 1 week. Next, the Sumida Children Welfare Consultation Office sent Leona back to the temporary protection institution in Shinjuku ward of Tokyo. He was confined there for about 2 months.
And the staff hit him.
And then the Sumida Children Welfare Consultation Office sent Leona to “Nishidai Children House” in Itabashi ward of Tokyo. This institution is also entrusted with children’s upbringing by the Tokyo Metropolitan Government.
From April to June 2008, Leona’s father and Leona’s friend demanded that the Sumida Children Welfare Consultation Office give Leona their letters and let them meet.
But, the staff of Sumida Office refused all their demands.
“We told your father that you are staying in Nishidai Children House.” But it was a lie. Leona’s father didn’t know where Leona was staying. The staff of Nishidai and the staff of Sumida told the father nothing.
And the father told the Shimura Police the same story on another telephone.
On the 30th, Shimura Police officers came to Hara’s home and strongly demanded that Hara come to the Shimura Police station with Leona. Uetani, who had parental rights over Leona, and the Sumida Welfare Consultation Office had asked the Shimura Police to arrest Toyohiko Hara for kidnapping. Hara and Leona talked with the policemen about the story, and Hara’s innocence became clear. The policemen recommended that Hara and Leona stay at the Shimura Police station, so they stayed there that night. |
# General
CLASS_CATEGORIES = (
(0, 'Stargate Command'),
(1, 'System Lords')
)
USER_AGENT = 'UE3-SGB'
IP_TO_COUNTRY_URL = 'http://api.hostip.info/get_html.php?ip='
# FireSky API related constants
SERVER_LIST_URL = 'http://ws.firesky.com/SGBLogin/ServerListAll'
OBJECTIVE_LIST_URL = 'http://rep1.firesky.com/RegistrationWS/AccountObjectiveGet'
ACHIEVEMENT_LIST_URL = 'http://rep1.firesky.com/RegistrationWS/AccountAchievementGet'
OBJECTIVE_LIST_NS = 'http://www.cheyenneme.com/xml/registration'
ACHIEVEMENT_LIST_NS = 'http://www.cheyenneme.com/xml/registration'
# Rankings related variables
ACCOUNT_OBJECTIVES_ALL = ['SGR_Account_TimePlayedTotal', 'SGR_Account_Headshots',
'SGR_Account_ExperiencePointsEarned', 'SGR_Account_HighestMatchKillStreak',
'SGR_Account_KillsTotal', 'SGR_Account_KilledTotal',
'SGR_Account_WinsTotal', 'SGR_Account_LossesTotal',
'SGR_Account_ShotsFired', 'SGR_Account_ShotsHit',
'SGR_Account_DamageDealtTotal', 'SGR_Account_HealingGivenByHandDevice',
'SGR_Account_HealingGivenByHaraKesh', 'SGR_Account_HealingGivenByHypoDispenser',
'SGR_Account_HealingGivenByHypoSpray', 'SGR_Account_HealingGivenTotal',
'SGR_Account_HealingReceivedTotal']
# Leonops - Court = TDM game type, Arena = Arena game type
AVAILABLE_MAPS = ('Amarna', 'SGC', 'Whiteout', 'Court', 'Arena')
OBJECTIVES_MAPS = ('SGR_Account_WinsOn%s',
'SGR_Account_LossesOn%s', 'SGR_Account_TimePlayedOn%s')
OBJECTIVES_MAPS_ALL = [(objective % map) for objective in OBJECTIVES_MAPS for map in AVAILABLE_MAPS]
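# e.g. OBJECTIVES_MAPS_ALL expands to names such as 'SGR_Account_WinsOnAmarna',
# 'SGR_Account_LossesOnSGC' and 'SGR_Account_TimePlayedOnArena'.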
AVAILABLE_CLASSES = ('Soldier', 'Commando', 'Scientist', 'Goauld', 'Jaffa', 'Ashrak')
OBJECTIVES_CLASSES = ('SGR_%s_KillsTotal', 'SGR_%s_KilledTotal', 'SGR_%s_DamageDealtTotal',
'SGR_%s_Headshots', 'SGR_%s_TimePlayedTotal')
OBJECTIVES_CLASSES_ALL = [(objective % player_class) for objective in OBJECTIVES_CLASSES for player_class in AVAILABLE_CLASSES]
AVAILABLE_WEAPONS = ('AshrakBlade', 'Beretta', 'Claymore', 'DesertEagle', 'DiseaseCloud',
'GrenadeLauncher', 'HandDevicePush', 'HandDeviceZap', 'P90', 'SniperRifle',
'StaffBlast', 'StaffMelee', 'Turret')
OBJECTIVES_WEAPONS = ('SGR_Account_KillsUsing%s', 'SGR_Account_DamageDealtWith%s', 'SGR_Account_DamageTakenBy%s')
OBJECTIVES_WEAPONS_ALL = [(objective % weapon) for objective in OBJECTIVES_WEAPONS for weapon in AVAILABLE_WEAPONS] |
Get more YouTube Subscribers to any channel, guaranteed.
This service is unavailable until YouTube Update is finished. Thank you and sorry for the inconvenience.
It will be back very soon. |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_device_dns
short_description: Manage BIG-IP device DNS settings
description:
- Manage BIG-IP device DNS settings.
version_added: 2.2
options:
cache:
description:
- Specifies whether the system caches DNS lookups or performs the
operation each time a lookup is needed. Please note that this applies
only to Access Policy Manager features, such as ACLs, web application
rewrites, and authentication.
type: str
choices:
- enabled
- disabled
- enable
- disable
name_servers:
description:
      - A list of name servers that the system uses to validate DNS lookups.
type: list
search:
description:
- A list of domains that the system searches for local domain lookups,
to resolve local host names.
type: list
ip_version:
description:
- Specifies whether the DNS specifies IP addresses using IPv4 or IPv6.
type: int
choices:
- 4
- 6
state:
description:
      - The state of the DNS settings on the system. When C(present), ensures that
        the provided settings are configured on the device. When C(absent), removes
        the provided name servers or search domains from the device configuration.
type: str
choices:
- absent
- present
default: present
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Set the DNS settings on the BIG-IP
bigip_device_dns:
name_servers:
- 208.67.222.222
- 208.67.220.220
search:
- localdomain
- lab.local
provider:
password: secret
server: lb.mydomain.com
user: admin
delegate_to: localhost
'''
RETURN = r'''
cache:
description: The new value of the DNS caching
returned: changed
type: str
sample: enabled
name_servers:
description: List of name servers that were set
returned: changed
type: list
sample: ['192.0.2.10', '172.17.12.10']
search:
description: List of search domains that were set
returned: changed
type: list
sample: ['192.0.2.10', '172.17.12.10']
ip_version:
  description: The IP version that was set for DNS to use when specifying IP addresses
returned: changed
type: int
sample: 4
warnings:
description: The list of warnings (if any) generated by module based on arguments
returned: always
type: list
sample: ['...', '...']
'''
from ansible.module_utils.basic import AnsibleModule
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import is_empty_list
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import is_empty_list
class Parameters(AnsibleF5Parameters):
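    # api_map translates attribute names used by the BIG-IP REST API
    # (e.g. 'nameServers') into this module's parameter names.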
api_map = {
'dns.cache': 'cache',
'nameServers': 'name_servers',
'include': 'ip_version',
}
api_attributes = [
'nameServers', 'search', 'include',
]
updatables = [
'cache', 'name_servers', 'search', 'ip_version',
]
returnables = [
'cache', 'name_servers', 'search', 'ip_version',
]
absentables = [
'name_servers', 'search',
]
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def search(self):
search = self._values['search']
if search is None:
return None
if isinstance(search, str) and search != "":
result = list()
result.append(str(search))
return result
if is_empty_list(search):
return []
return search
@property
def name_servers(self):
name_servers = self._values['name_servers']
if name_servers is None:
return None
if isinstance(name_servers, str) and name_servers != "":
result = list()
result.append(str(name_servers))
return result
if is_empty_list(name_servers):
return []
return name_servers
@property
def cache(self):
if self._values['cache'] is None:
return None
if str(self._values['cache']) in ['enabled', 'enable']:
return 'enable'
else:
return 'disable'
@property
def ip_version(self):
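        # BIG-IP exposes this preference through the 'include' field: the
        # string "options inet6" selects IPv6, an empty string selects IPv4.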
if self._values['ip_version'] == 6:
return "options inet6"
elif self._values['ip_version'] == 4:
return ""
else:
return None
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
change = getattr(self, returnable)
if isinstance(change, dict):
result.update(change)
else:
result[returnable] = change
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def ip_version(self):
if self._values['ip_version'] == 'options inet6':
return 6
elif self._values['ip_version'] == "":
return 4
else:
return None
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def ip_version(self):
if self.want.ip_version is None:
return None
if self.want.ip_version == "" and self.have.ip_version is None:
return None
if self.want.ip_version == self.have.ip_version:
return None
if self.want.ip_version != self.have.ip_version:
return self.want.ip_version
@property
def name_servers(self):
state = self.want.state
if self.want.name_servers is None:
return None
if state == 'absent':
if self.have.name_servers is None and self.want.name_servers:
return None
if set(self.want.name_servers) == set(self.have.name_servers):
return []
if set(self.want.name_servers) != set(self.have.name_servers):
return list(set(self.want.name_servers).difference(self.have.name_servers))
if not self.want.name_servers:
if self.have.name_servers is None:
return None
if self.have.name_servers is not None:
return self.want.name_servers
if self.have.name_servers is None:
return self.want.name_servers
if set(self.want.name_servers) != set(self.have.name_servers):
return self.want.name_servers
@property
def search(self):
state = self.want.state
if self.want.search is None:
return None
if not self.want.search:
if self.have.search is None:
return None
if self.have.search is not None:
return self.want.search
if state == 'absent':
if self.have.search is None and self.want.search:
return None
if set(self.want.search) == set(self.have.search):
return []
if set(self.want.search) != set(self.have.search):
return list(set(self.want.search).difference(self.have.search))
if self.have.search is None:
return self.want.search
if set(self.want.search) != set(self.have.search):
return self.want.search
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.pop('module', None)
self.client = F5RestClient(**self.module.params)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def _absent_changed_options(self):
diff = Difference(self.want, self.have)
absentables = Parameters.absentables
changed = dict()
for k in absentables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state == "present":
changed = self.update()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def should_absent(self):
result = self._absent_changed_options()
if result:
return True
return False
def absent(self):
self.have = self.read_current_from_device()
if not self.should_absent():
return False
if self.module.check_mode:
return True
self.absent_on_device()
return True
def read_dns_cache_setting(self):
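        # The DNS cache flag is stored as the sys/db variable 'dns.cache'
        # rather than on the sys/dns resource, so it is fetched separately.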
uri = "https://{0}:{1}/mgmt/tm/sys/db/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
'dns.cache'
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response
def read_current_from_device(self):
cache = self.read_dns_cache_setting()
uri = "https://{0}:{1}/mgmt/tm/sys/dns/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
if cache:
response['cache'] = cache['value']
return ApiParameters(params=response)
def update_on_device(self):
params = self.changes.api_params()
if params:
uri = "https://{0}:{1}/mgmt/tm/sys/dns/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
if self.want.cache:
uri = "https://{0}:{1}/mgmt/tm/sys/db/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
'dns.cache'
)
payload = {"value": self.want.cache}
resp = self.client.api.patch(uri, json=payload)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def absent_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/sys/dns/".format(
self.client.provider['server'],
self.client.provider['server_port'],
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
cache=dict(
choices=['disabled', 'enabled', 'disable', 'enable']
),
name_servers=dict(
type='list'
),
search=dict(
type='list'
),
ip_version=dict(
choices=[4, 6],
type='int'
),
state=dict(
default='present',
choices=['absent', 'present']
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.required_one_of = [
['name_servers', 'search', 'ip_version', 'cache']
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
required_one_of=spec.required_one_of
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
|
I have the same problem; turning off the electricity did not help, the probe light is still on, and I cannot start the oven. Does anyone have any other ideas? My wife is on my case, and Christmas coming is not going to help matters.
Same problem with Thermador double oven. Probe light goes on by itself, and I can't turn it off. Then I can't use the upper oven. It usually happens at the worst times like Thanksgiving and Xmas, when there are a lot of dishes being cooked at once. I suspect that it might be moisture getting into the jack inside the oven which the probe plugs into. If there were a way to block off the jack, it might solve the problem.
Exact same thing is happening with my oven. Probe light came on while I was roasting veggies at 425 degrees. I can't get it to go off.
Sounds like your probe connection may be shorted. Can you inspect the probe itself, and perhaps do a test for continuity with an ohm meter? Also, you might be able to disable the probe in the menu settings and run the oven on manual, just for a test.
Do you still have the user's manual for this unit?
display will show F (Fahrenheit).
display will show C (Celsius).
will be displayed for both ovens.
Probe light on convection oven stays on. Clock won't display. Oven still works though.
Sounds like the control board is not communicating correctly with the rest of the unit, consult a professional.
After calling an authorized technician and the Thermador support line, neither of which solved my problem, I was told a HARD RESET of the PROBE is all that is required. This problem originally occurred on my Thermador double oven after a power outage and rendered my oven unusable because the probe light would not go off -- even though I had never used the probe!
To hard reset the PROBE, turn off the oven at the breaker box, plug in the probe for 1 minute and remove it, then turn the power back on at the breaker box, and the probe light goes off!! Also, since I didn't have the original probe and a new one would cost $80, I borrowed a friend's Thermador probe (a different model, but it still worked). It cost me $85 for the service visit, 2 weeks of a nonoperational oven, and about 5 hours of my time -- hence the reason I am making this post. It is a golden trick!!
What did NOT work -- turning off power to house again for 15 minutes to reset the OVEN, and disconnecting the probe wires from control panel so probe isn't recognized.
You might want to try using a Q-tip and some rubbing alcohol to clean the inside of the probe connector in the oven. Some liquid or something might be in it, causing the control board to think the probe is connected. Make sure to do this procedure with the range unplugged. Then wait about 20 minutes before plugging the range back in. Hopefully that will stop the issue. If not, it could be a control board problem.
You could have a bad probe receptacle, or a board problem. Kill power to the oven, wait 10 minutes, cross your fingers, and restore power.
I had a similar problem today with my KitchenAid Superba wall convection oven (2002 model). I was roasting tomatoes at 225 degrees F for 3 or 4 hours when the control pad changed and showed that the probe was in use (never have I used it and had to look for it). I, too, tried to plug in the probe and take it out, but nothing changed. My worry was that I could no longer use the timed bake function. After trying to reset by turning off the oven at the breaker, with no change, I turned on the oven to 400 F and left it on for 10 - 15 minutes hoping that the high temperature would dry out whatever moisture that must have gotten in the place where the probe connects. Then I turned it off and just waited. After a half hour or so the control pad cleared and nothing showed (that is what you want when the oven is off!). Everything is back to working now.
The "F10" error code is caused by runaway temperature.
The first thing I would check is the resistance across the oven temperature probe (oven temperature sensor) terminals at the EOC (electronic oven control) plug.
It will be about 1100 Ohms at the room temperature.
If the resistance at the EOC plug is wrong, then measure the oven temperature probe resistance at the probe plug.
If it's wrong - replace the oven temperature probe. It's better to hardwire it, using high-temperature ceramic wire nuts.
If the readings at the EOC and at the oven temperature probe plug are different - check the wire harness between the oven temperature probe and the EOC.
If there is nothing wrong with the oven temperature probe resistance - replace the EOC.
KEMS377 Lower electric Convection Oven - no probe, won't work!
Probe light came on during operation and the oven won't work. We have never used the probe. How do I turn it off???? Seven-year-old Thermador built-in oven. I can't find the specific model number.
Have a manual for Thermador SEC272BPSS Electric Double Oven? |
# -*- coding: utf-8 -*-
#
# This file is part of TracForge Project
#
# Copyright (C) 2008 TracForge Project
#
# See AUTHORS for more information
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from trac.env import open_environment
from dircache import listdir
from os import path as ospath
from time import time
import sys
class TracProject:
    """Project description."""
def __init__(self, path, href):
# env information
self.path = path
self.env = open_environment(path)
self.name = self.env.project_name
self.href = href
self.description = self.env.project_description
# last commit information
self.last_author = ""
self.last_message = "This repository has not yet been modified"
# hack to get an time object with value = 0
self.last_date = 0
self.last_rev = "0"
self._set_info()
def _set_info(self):
        # it crashes if no commits have been made.
try :
# last commit information
repo = self.env.get_repository()
last_action = repo.get_changeset(repo.youngest_rev)
self.last_rev = repo.youngest_rev
self.last_author = last_action.author
self.last_message = last_action.message
self.last_date = last_action.date
        except Exception:
print "Unexpected error:", sys.exc_info()
def get_repos(self):
return self.env.get_repository()
def get_path(self):
return self.path
def get_env(self):
return self.env
def get_name(self):
return self.name
def get_href(self):
return self.href
def get_description(self):
return self.description
def get_last_author(self):
return self.last_author
def get_last_message(self):
return self.last_message
def get_last_date(self):
return self.last_date
def get_last_rev(self):
return self.last_rev
class TracProjects:
    """All the projects."""
def __init__(self, trac_dir, trac_href):
self.trac_dir = trac_dir
self._projects = self._get_projects(trac_href)
self.index = 0
def next(self):
nb = len(self._projects)
if self.index < nb:
project = self._projects[self.index]
self.index = self.index + 1
return project
else:
raise StopIteration
def __iter__(self):
self.index = 0
return self
def _get_projects(self, trac_href):
projects = listdir(self.trac_dir)
tracprojects = []
for project in projects:
path = "%s/%s" % (self.trac_dir, project)
href = trac_href + ospath.basename(path)
tracprojects.append(TracProject(path, href))
return tracprojects
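# Minimal usage sketch (the path and href below are illustrative):
#
#     projects = TracProjects('/var/lib/trac', 'https://example.org/trac/')
#     for project in projects:
#         print project.get_name(), project.get_last_rev()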
|
Availability: Mondays from 5:00 - 6:30 PM; individual sessions determined by need.
Provides a 12-week group program for youth based on the SAMHSA CBT (Cognitive Behavior Therapy) curriculum. The population served is adolescents (13-18) who are currently involved with juvenile probation, referred by Juvenile Court probation officers, and youth with Medicaid. For children under the age of 13 or children with severe emotional disturbances who may not be able to participate in a group setting, anger management services can be provided one-on-one in the home. |
from flask import Blueprint, request, current_app, jsonify
from flask_restless.helpers import to_dict
from flask_jwt import current_user
from werkzeug.utils import secure_filename
from werkzeug.exceptions import BadRequest
from croplands_api.utils.s3 import upload_image
import uuid
import cStringIO
from croplands_api.models.location import Image, db
from croplands_api.auth import is_anonymous
upload = Blueprint('upload', __name__, url_prefix='/upload')
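# Handlers registered on this blueprint are served under the /upload prefix,
# so the view below is reached via POST /upload/image.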
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1].lower() in current_app.config['ALLOWED_IMG_EXTENSIONS']
@upload.route('/image', methods=['POST'])
def image_view():
"""
This view allows users to upload photos of locations from their mobile device.
"""
# get the accompanying data
data = request.form
for field in ['location_id', 'lat', 'lon', 'date_acquired']:
if field not in data:
print "missing %s" % field
raise BadRequest(description='Image requires %s.' % field)
if 'file' in request.files and request.files['file'] is not None:
# get the file from the request object
f = request.files['file']
# sanitize the file name
filename = secure_filename(f.filename)
# check that file type is allowed NAIVE check
if not allowed_file(filename):
print "bad file type"
raise BadRequest('Bad File Type')
# get file for processing and uploading
f_io = cStringIO.StringIO()
f.save(dst=f_io)
# create key for file
url = 'images/mobile/' + str(uuid.uuid4()) + '.jpg'
# upload image to s3 bucket
upload_image(f_io, encoded_image=False, filename=url)
elif 'url' in data:
url = data['url']
else:
raise BadRequest(description='Not enough data')
# save to database
image = Image(location_id=data['location_id'], lat=data['lat'], lon=data['lon'],
url=url,
date_acquired=data['date_acquired'])
# get the user from the token
if not is_anonymous():
image.user_id = current_user.id
if 'source' in data:
image.source = data['source']
db.session.add(image)
db.session.commit()
return jsonify(to_dict(image)), 201 |
What a Day Yesterday Was digital sheet music. Contains printable sheet music plus an interactive, downloadable digital sheet music file.
Looking through these old photographs, don't they bring some good mem'ries back?
The Arrangement Details Tab gives you detailed information about this particular arrangement of What a Day Yesterday Was - not necessarily the song.
There are no reviews written for What a Day Yesterday Was. |