#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
"""
A State is a frozen copy of a Value at a certain moment
"""
from pybush.value import Value
from pybush.basic import Basic
from pybush.constants import __dbug__
from pybush.functions import set_attributes
class State(Value, Basic):
"""
    A State is a frozen state of a param
"""
def __init__(self, **kwargs):
super(State, self).__init__(**kwargs)
if __dbug__:
print('creating a state')
set_attributes(self, kwargs)
def __repr__(self):
printer = 'State(name:{name}, '\
'description:{description}, '\
'tags:{tags}, '\
'raw:{raw}, '\
'value:{value}, '\
'datatype:{datatype}, '\
'domain:{domain}, '\
'clipmode:{clipmode}, '\
'unique:{unique})'
        return printer.format(name=self.name,
                              description=self.description,
                              tags=self.tags,
                              raw=self.raw,
                              value=self.value,
                              datatype=self.datatype,
                              domain=self.domain,
                              clipmode=self.clipmode,
                              unique=self.unique)
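# --- Hedged usage sketch (not part of the original module) -------------------
# A State snapshots a parameter's attributes at creation time. The keyword
# names below mirror the attributes printed by __repr__; the values are
# illustrative only.
#
# state = State(name='volume', value=0.8, raw=0.8, datatype='decimal',
#               domain=[0, 1], clipmode='both', tags=['audio'])
# print(state)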
|
When someone adds a product to their cart with a quantity of 1, the rate is calculated correctly. But if we change the product quantity to 2, the rate is doubled. It seems to be working as if the products will be shipped separately. They shouldn't get charged the shipping rate twice, only once. How do we make it calculate this correctly and charge them only once for the shipping?
This issue can be easily resolved by changing the parcel packing option.
You must have selected “Pack items individually” as the Parcel packing option (in the plugin settings).
Kindly change that to “weight based”, because you need weight-based calculation (as I can make out from your last query).
Pack into boxes with weight and dimensions: If packing items individually does not suit your business, you can define the required box sizes under the Box Dimensions section. All the cart items are packed into the custom boxes defined in the Box Dimensions settings, and the best-fit box is chosen automatically from the defined boxes. A rough sketch of this best-fit selection follows below.
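Below is a minimal, illustrative sketch (assumed function and field names, not the plugin's actual code) of that best-fit selection: the cart items' weights are summed and the smallest defined box that can hold the total is chosen, so the shipping rate is charged once per box rather than once per item.

def choose_box(item_weights, boxes):
    # boxes: (name, max_weight) pairs, as defined under Box Dimensions
    total = sum(item_weights)
    fitting = [box for box in boxes if box[1] >= total]
    return min(fitting, key=lambda box: box[1]) if fitting else None

# Two 0.4 kg units fit one 'medium' box together -> a single shipping charge
print(choose_box([0.4, 0.4], [('small', 0.5), ('medium', 1.0), ('large', 5.0)]))
|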
from ansi2html import Ansi2HTMLConverter
import argparse
from flask import Flask
from flask import Markup
from flask import Response
from flask import make_response
from flask import render_template
from flask import request
from flask import send_file
import github
import laulik
import os
app = Flask(__name__)
repopath = os.environ.get('REPOPATH')
app.logger.info("Repo path: %s", repopath)
laulik_api = laulik.API(repopath=repopath)
github_api = github.API(repopath=repopath)
conv = Ansi2HTMLConverter(markup_lines=True)
@app.route('/', methods=['GET', 'POST'])
def root():
data = {}
data['git_info'] = github_api.info()
data['server_version'] = laulik_api.server_version()
if request.method == 'POST':
key = request.form['key']
result = laulik_api.build(key)
data['msg'] = Markup('Built project <strong>{0}</strong>'.format(key))
data['output'] = Markup(conv.convert(result.stdout, full=False))
data['projects'] = laulik_api.projects()
return render_template('index.html', **data)
@app.route('/build/<key>/<version>.pdf')
def pdf(key, version):
meta = laulik_api.safe_get_meta(key, version)
if meta is None:
return 'Not found!', 404
return send_file(
meta.paths.pdf,
mimetype='application/pdf',
as_attachment=True,
attachment_filename='{0}-{1}.pdf'.format(meta.key, meta.version))
@app.route('/build/<key>/<version>.tex')
def tex(key, version):
meta = laulik_api.safe_get_meta(key, version)
if meta is None:
return 'Not found!', 404
return send_file(
meta.paths.latex,
mimetype='text/plain',
as_attachment=True,
attachment_filename='{0}-{1}.tex'.format(meta.key, meta.version))
@app.route('/webhook', methods=['POST'])
def webhook():
data = {}
if request.headers.get('X-GitHub-Event') == 'push':
data['req'] = github_api.parse_webhook(request.get_json(force=True))
result = github_api.pull()
data['stdout'] = result.stdout
data['stderr'] = result.stderr
data['action'] = 'Pulled git repo'
resp = make_response(render_template('webhook.txt', **data), 200)
resp.headers['Content-Type'] = 'text/plain'
return resp
if __name__ == '__main__':
app.run(host='0.0.0.0', debug=True, port=int(os.environ.get('PORT', 8080)))
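# --- Hedged usage sketch (not part of the original app) ----------------------
# Simulating a GitHub-style push event against the dev server above (assumes
# it is running on localhost:8080; the payload is illustrative, not GitHub's
# full push schema):
#
# import requests
# resp = requests.post('http://localhost:8080/webhook',
#                      headers={'X-GitHub-Event': 'push'},
#                      json={'ref': 'refs/heads/master'})
# print(resp.text)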
|
Superman #75 NM- 9.2 DC High Grade 1993 Polybagged Collector's Edition!!!
Superman #82 Chromium Collector's Cover!
|
'''
__
/ _|_ __ __ _ _ __ ___ ___ ___ _ __ _ _
| |_| '__/ _` | '_ ` _ \ / _ \/ __| | '_ \| | | |
| _| | | (_| | | | | | | __/\__ \_| |_) | |_| |
|_| |_| \__,_|_| |_| |_|\___||___(_) .__/ \__, |
|_| |__/
'''
__all__ = [
'FrameNotFound', 'FrameType', 'Frame', 'current_frame', 'locate_frame'
]
import sys
NATIVE = hasattr(sys, '_getframe')
def _getframe(*args, **kw):
    # Delegates to the underlying `_getframe`. If we don't do that, we lose
    # the ability to force `NATIVE` to `False` after importing the module.
from .compat import _getframe
return _getframe(*args, **kw).f_back
# Make classes new-style by default.
__metaclass__ = type
class Frame:
'''
Wrapper object for the internal frames.
'''
class NotFound(LookupError):
'''
Raised when no frame is found.
'''
Type = sys._getframe().__class__
@staticmethod
def current_frame(raw=False):
'''
Gives the current execution frame.
:returns:
The current execution frame that is actually executing this.
'''
# `import sys` is important here, because the `sys` module is special
# and we will end up with the class frame instead of the `current` one.
if NATIVE:
import sys
frame = sys._getframe()
else:
frame = _getframe()
frame = frame.f_back
if not raw:
frame = Frame(frame)
return frame
@staticmethod
def locate(callback, root_frame=None, include_root=False, raw=False):
'''
Locates a frame by criteria.
:param callback:
            One-argument function to check the frame against. The frame we
            are currently on is given as that argument.
:param root_frame:
The root frame to start the search from. Can be a callback taking
no arguments.
:param include_root:
`True` if the search should start from the `root_frame` or the one
beneath it. Defaults to `False`.
:param raw:
whether to use raw frames or wrap them in our own object. Defaults to
`False`.
        :raises Frame.NotFound:
            When no matching frame is found.
:returns:
The first frame which responds to the `callback`.
'''
def get_from(maybe_callable):
if callable(maybe_callable):
return maybe_callable()
return maybe_callable
# Creates new frames, whether raw or not.
new = lambda frame: frame if raw else Frame(frame)
current_frame = get_from(root_frame or Frame.current_frame(raw=True))
current_frame = new(current_frame)
if not include_root:
current_frame = new(current_frame.f_back)
# The search will stop, because at some point the frame will be falsy.
while current_frame:
found = callback(current_frame)
if found:
return current_frame
current_frame = new(current_frame.f_back)
raise Frame.NotFound('No matching frame found')
def __init__(self, frame):
'''
Wraps the raw frame object.
:param frame:
The frame object to wrap.
'''
self.frame = frame
if not frame:
return
# Read-only attributes go below.
#: Shortcut for `f_back`
self.back = frame.f_back
#: Shortcut for `f_builtins`
self.builtins = frame.f_builtins
#: Shortcut for `f_code`
self.code = frame.f_code
#: Shortcut for `f_globals`
self.globals = frame.f_globals
#: Shortcut for `f_locals`.
self.locals = frame.f_locals
#: Shortcut for `f_restricted`.
self.restricted = frame.f_restricted
# Special attributes are defined as properties.
@property
def exc_traceback(self):
'''
Shortcut for `f_exc_traceback`.
:returns:
The frame exception traceback, if any.
'''
return self.frame.f_exc_traceback
@property
def exc_type(self):
'''
Shortcut for `f_exc_type`.
:returns:
The frame exception class, if any.
'''
return self.frame.f_exc_type
@property
def exc_value(self):
'''
Shortcut for `f_exc_value`.
:returns:
The frame exception instance, if any.
'''
return self.frame.f_exc_value
@property
def last_instruction(self):
'''
Shortcut for `f_lasti`
:returns:
The last frame instruction.
'''
return self.frame.f_lasti
@property
def lineno(self):
'''
Shortcut for `f_lineno`.
:returns:
The line of the code at the current frame.
'''
return self.frame.f_lineno - 1
@property
def trace(self):
'''
Shortcut for `f_trace`.
:returns:
The trace function, if any.
'''
return self.frame.f_trace
@property
def __class__(self):
# Make us look like a regular frame in front of `isinstance`.
return Frame.Type
def __getattr__(self, name):
# Proxy some methods back to the raw frame object.
if not hasattr(self.frame, name):
raise AttributeError(name)
return getattr(self.frame, name)
def __bool__(self):
return True if self.frame else False
__nonzero__ = __bool__
# More standard, non classy Python interface.
FrameNotFound = Frame.NotFound
FrameType = Frame.Type
locate_frame = Frame.locate
current_frame = Frame.current_frame
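# --- Hedged usage sketch (not part of the original module) -------------------
# Locating the frame of a calling function by name; `locate_frame` raises
# `FrameNotFound` when no frame matches the callback.
if __name__ == '__main__':
    def outer():
        return inner()

    def inner():
        # Walk up the stack until we hit the frame executing `outer`.
        frame = locate_frame(lambda f: f.code.co_name == 'outer')
        return frame.lineno

    print(outer())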
|
Rappel, or abseiling, is an outdoor activity in which participants descend a vertical wall using a rope and specialized equipment.
This is a great activity for developing self-confidence and self-esteem and overcoming any innate fear.
Abseiling is a very safe activity, and it can be done in two places in Sintra: Capuchos or Penedo da Amizade (near the Moorish Castle).
Send the name, the identification number or birth date.
Snack 7,00€: sandwiches, miniature cakes or biscuits, potato fries, juices and water.
In case it rains, the party can be postponed or replaced by an indoor activity.
Always contact us; we have more offers. |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
""" This module contains execptions of IronPyCompiler.
"""
class IPCError(Exception):
"""This is the base class for exceptions in this module.
"""
pass
class IronPythonDetectionError(IPCError):
"""This exception will be raised if IronPython cannot be found in your system.
:param str executable: (optional) The name of the IronPython
executable looked for. This argument remains
for backward compatibility.
:param msg: (optional) The detailed information of the error.
.. versionchanged:: 0.9.0
The argument ``executable`` became optional, and ``msg`` was added.
"""
def __init__(self, executable=None, msg=None):
self.executable = executable
self.msg = msg
def __str__(self):
if self.executable is not None:
return "IronPython (%s) cannot be found." % str(self.executable)
elif self.msg is not None:
return str(self.msg)
else:
return "IronPython cannot be found."
class ModuleCompilationError(IPCError):
"""This exception means an error during compilation.
:param msg: (optional) The detailed information of the error.
.. versionadded:: 0.10.0
"""
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
if self.msg is not None:
return str(self.msg)
else:
return "An error occurred during compilation."
class IronPythonValidationError(IPCError):
"""Raised if the specified executable is not a valid IronPython executable.
:param msg: (optional) The detailed information of the error.
.. versionadded:: 1.0.0
"""
def __init__(self, msg=None):
self.msg = msg
def __str__(self):
if self.msg is not None:
return str(self.msg)
else:
return "Not a valid IronPython executable."
|
Located in Baulkham Hills, one of the finest suburbs of the country, the Adina Apartment Hotel Norwest Sydney is one of the most vibrant hotels around. The area is also a fast-growing commercial hub. The hotel is an excellent base for business travellers and sits near the Norwest Business Park.
The area sources a lot of wonderful outdoor options for the visitors including a beautiful lake. The hotel is very well maintained and is located at a premium spot in the city. There are numerous attractions around the hotel. Some of the most popular attractions of the suburb of Baulkham Hills are located nearby. The hotel is one of the most opted for staying places in the local suburb and also for the neighboring suburbs including Bella Vista and Seven Hills.
The hotel offers a lot of amenities, and you will enjoy your stay. It offers plenty of variety in its rooms, which are not only comfortable but raise luxury to another level. The rooms are quite spacious and include all the facilities, such as free Wi-Fi and much more. The interiors of the hotel, and even of the rooms, are so gorgeous that you will not get enough of them.
If hotel rooms excite you, these will take your excitement to another level. And there is a lot more to this hotel.
The hotel rooms offer fantastic views of the cool lake and pools outside and of the beautiful suburb. Also, they are very well maintained and are astonishingly beautiful. Other than this, the hotel offers all the other amenities like the gym, restaurant, dining area, and a lot more.
Every time you want to choose a hotel for your stay, you will come back to the Adina Apartment Hotel Norwest Sydney for its amazing stay experience, its impeccable service and the quality of the area in which it is located. You can walk to most of the attractions, and a cab ride takes about 10 minutes, so little time is wasted and you can make the most of your trip. It is one of the best hotels in the suburb. |
"""
Copyright (c) 2014, Are Hansen - Honeypot Development.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted
provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice, this list of conditions
and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions
and the following disclaimer in the documentation and/or other materials provided with the
distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR
IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND
FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER
IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
__author__ = 'Are Hansen'
__date__ = '2014, July 25'
__version__ = '0.0.1'
import GeoIP
def accessCC(item_list):
"""Preforms a geoip lookup insertion directly from the daily log lines, appends the results to
the output list and returned. """
gip = GeoIP.new(GeoIP.GEOIP_MEMORY_CACHE)
output = []
for item in item_list:
item = item.split(' ')
geo = gip.country_code_by_addr(item[2])
out = item[0], item[1], item[3], item[4], item[2], geo
output.append(out)
    return output
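# --- Hedged usage sketch (not part of the original module) -------------------
# Assumes log lines shaped like "<date> <time> <ip> <user> <event>", i.e. the
# IP address sits at field index 2, matching the split in accessCC() above.
if __name__ == '__main__':
    sample = ['2014-07-25 09:13:37 8.8.8.8 root login-attempt']
    for entry in accessCC(sample):
        print(entry)
|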
The City of La Vista has baseball, softball, and soccer fields available at La Vista Sports Complex, City Park and Kelly McMahon Park. The fields open on April 1 and close on October 31 of each year, weather permitting. These fields are available through permit only from the Recreation Department. The fee for obtaining a permit is $40 per two hours/per field. Tournament rates vary.
To request a field, please submit the proper form below via email to Denny Dinan or drop it off in person to the La Vista Recreation Department, 8116 Park View Boulevard, La Vista. All field requests must be submitted at least 24 hours in advance to be considered. If you are interested in hosting a tournament, please contact Denny for more information.
Please note: we do not begin accepting field requests for the following year until January 1 of each year. If you are requesting a field outside of the current year, please hold your request until January 1 and submit it per the steps outlined above.
For updates on field conditions and rain outs (updated and available after 3:30 p.m. M-F & after 7:30 a.m. Saturday & Sunday), please call 402-697-7756.
Did you know you can rent one of our park shelters for special events? If interested, please visit our online reservation site at http://www.cityoflavista.org/active or contact the Recreation Department at 402-331-3455. |
"""
AWS Lambda platform for notify component.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.aws_lambda/
"""
import logging
import json
import base64
import voluptuous as vol
from homeassistant.const import (
CONF_PLATFORM, CONF_NAME)
from homeassistant.components.notify import (
ATTR_TARGET, PLATFORM_SCHEMA, BaseNotificationService)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
REQUIREMENTS = ["boto3==1.3.1"]
CONF_REGION = 'region_name'
CONF_ACCESS_KEY_ID = 'aws_access_key_id'
CONF_SECRET_ACCESS_KEY = 'aws_secret_access_key'
CONF_PROFILE_NAME = 'profile_name'
CONF_CONTEXT = 'context'
ATTR_CREDENTIALS = 'credentials'
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_REGION, default="us-east-1"): cv.string,
vol.Inclusive(CONF_ACCESS_KEY_ID, ATTR_CREDENTIALS): cv.string,
vol.Inclusive(CONF_SECRET_ACCESS_KEY, ATTR_CREDENTIALS): cv.string,
vol.Exclusive(CONF_PROFILE_NAME, ATTR_CREDENTIALS): cv.string,
vol.Optional(CONF_CONTEXT, default=dict()): vol.Coerce(dict)
})
def get_service(hass, config):
"""Get the AWS Lambda notification service."""
context_str = json.dumps({'hass': hass.config.as_dict(),
'custom': config[CONF_CONTEXT]})
context_b64 = base64.b64encode(context_str.encode("utf-8"))
context = context_b64.decode("utf-8")
# pylint: disable=import-error
import boto3
aws_config = config.copy()
del aws_config[CONF_PLATFORM]
del aws_config[CONF_NAME]
del aws_config[CONF_CONTEXT]
profile = aws_config.get(CONF_PROFILE_NAME)
if profile is not None:
boto3.setup_default_session(profile_name=profile)
del aws_config[CONF_PROFILE_NAME]
lambda_client = boto3.client("lambda", **aws_config)
return AWSLambda(lambda_client, context)
# pylint: disable=too-few-public-methods
class AWSLambda(BaseNotificationService):
"""Implement the notification service for the AWS Lambda service."""
def __init__(self, lambda_client, context):
"""Initialize the service."""
self.client = lambda_client
self.context = context
def send_message(self, message="", **kwargs):
"""Send notification to specified LAMBDA ARN."""
targets = kwargs.get(ATTR_TARGET)
if not targets:
_LOGGER.info("At least 1 target is required")
return
if not isinstance(targets, list):
targets = [targets]
for target in targets:
cleaned_kwargs = dict((k, v) for k, v in kwargs.items() if v)
payload = {"message": message}
payload.update(cleaned_kwargs)
self.client.invoke(FunctionName=target,
Payload=json.dumps(payload),
ClientContext=self.context)
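# --- Hedged usage sketch (not part of the original module) -------------------
# An illustrative configuration.yaml entry for this platform; the keys come
# from PLATFORM_SCHEMA above, while the credentials and ARN are made up.
#
# notify:
#   - platform: aws_lambda
#     name: lambda_notify
#     region_name: us-east-1
#     aws_access_key_id: AKIA...
#     aws_secret_access_key: ...
#
# Calling the service then invokes the target Lambda:
#   service: notify.lambda_notify
#   data:
#     message: "hello"
#     target: "arn:aws:lambda:us-east-1:123456789012:function:my-fn"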
|
The Guiding Light series draws from the two most weighty things the Prophet (S) left behind: the Quran and his Ahlul Bayt. This series consists of eight books that transmit important Akhlaqi messages through Quranic verses, sayings from the Ma’soomeen, and inspirational anecdotes. Plus, children can internalize what they are learning through special coloring pages! This series will inshaAllah be a great interactive way for your child to learn Akhlaq with the help of relevant ahadith and Ayahs!
Individual books are sold only in paperback.
Hard cover books only sold in USA.
Please find a preview of The Guiding Light below! |
import Oger
import scipy as sp
import time
import mdp.parallel
if __name__ == '__main__':
    ''' Example of using CMA_ES to optimize the parameters of a reservoir+readout on the NRMSE for NARMA30, run in parallel if the machine is multicore.
    The CMA-ES is given an initial value x0 and a standard deviation for each of the parameters.
'''
input_size = 1
inputs, outputs = Oger.datasets.narma30()
data = [[], zip(inputs, outputs)]
# construct individual nodes
reservoir = Oger.nodes.ReservoirNode(input_size, 100)
readout = Oger.nodes.RidgeRegressionNode()
# build network with MDP framework
flow = mdp.Flow([reservoir, readout])
# Nested dictionary
# For cma_es, each parameter 'range' consists of an initial value and a standard deviation
# For input_scaling, x0=.3 and std = .5
# For spectral_radius, x0 = .9 and std = .5
gridsearch_parameters = {reservoir:{'input_scaling': mdp.numx.array([0.3, .5]), 'spectral_radius':mdp.numx.array([.9, .5])}}
# Instantiate an optimizer
opt = Oger.evaluation.Optimizer(gridsearch_parameters, Oger.utils.nrmse)
# # Additional options to be passed to the CMA-ES algorithm. We impose a lower bound on the input_scaling such that values of zero
# # do not occur (this causes an error in the training of the readout because the reservoir output is all zeros).
options = {'maxiter':20, 'bounds':[0.01, None]}
# Do the optimization
print 'Parallel execution...'
    # Instantiate a new optimizer, otherwise CMA_ES doesn't start from a fresh state
opt = Oger.evaluation.Optimizer(gridsearch_parameters, Oger.utils.nrmse)
opt.scheduler = mdp.parallel.ProcessScheduler(n_processes=2)
#opt.scheduler = Oger.parallel.GridScheduler()
mdp.activate_extension("parallel")
start_time = time.time()
opt.cma_es(data, flow, cross_validate_function=Oger.evaluation.n_fold_random, n_folds=5, options=options)
par_duration = int(time.time() - start_time)
print 'Duration: ' + str(par_duration) + 's'
# Get the optimal flow and run cross-validation with it
opt_flow = opt.get_optimal_flow()
print 'Performing cross-validation with the optimal flow. Note that this result can differ slightly from the one above because of different choices of randomization of the folds.'
errors = Oger.evaluation.validate(data, opt_flow, Oger.utils.nrmse, cross_validate_function=Oger.evaluation.n_fold_random, n_folds=5, progress=False)
print 'Mean error over folds: ' + str(sp.mean(errors))
|
Up to 90% discount with baseballexpress.com sale promo codes. Using dayscoupon.com's thousands of reliable discount voucher codes, you can get your favorite merchandise at a more affordable price. Limited time offer!
Closeouts and Clearance – Big Discounts on Baseball Gear.
$6.95 Flat Rate Shipping on orders under 10lbs. |
# -*- coding: utf-8 -*-
"""
Created on Wed May 8 12:49:09 2013
@author: teaera
"""
import os
import cv2.cv as cv
import pydestin as pd
cl = pd.czt_lib()  # needed by train_2flds() below
cm = pd.CztMod()
#############################################################################
"""
Save the current user's home folder.
"""
homeFld = os.getenv("HOME")
if not homeFld:
homeFld = os.getenv("USERPROFILE")
"""
Display centroids images!
"""
def dcis(network, layer):
network.displayLayerCentroidImages(layer,1000)
cv.WaitKey(100)
"""
Save centroids images!
"""
def saveCens(network, layer, saveLoc):
network.saveLayerCentroidImages(layer, saveLoc)
"""
Load images in one folder into an 'ims'!!!
"""
def load_ims_fld(ims, fld):
if not fld.endswith("/"):
fld += "/"
for each in os.listdir(fld):
ims.addImage(fld + each)
"""
Used to init DeSTIN, but compatible by setting 'extRatio'!
"""
def init_destin(siw=pd.W512, nLayer=8, centroids=[4,8,16,32,64,32,16,8],
isUniform=True, imageMode=pd.DST_IMG_MODE_GRAYSCALE):
temp_network = pd.DestinNetworkAlt(siw, nLayer, centroids, isUniform, imageMode)
#temp_network.setBeliefTransform(pd.DST_BT_NONE)
return temp_network
"""
Use the existing network and ims to train!
Default number is 16,000.
"""
def train_ims(network, ims, maxCount=16000):
for i in range(maxCount):
if i % 10 == 0:
print "Iteration " + str(i)
ims.findNextImage()
f = ims.getGrayImageFloat()
network.doDestin(f)
"""
Use one folder as input, and use another folder as additional info!
"""
def train_2flds(network, fld1, fld2, repeatCount=1600):
if not fld1.endswith("/"):
fld1 += "/"
if not fld2.endswith("/"):
fld2 += "/"
for i in range(repeatCount):
if i % 10 == 0:
print "RepeatTime: " + str(i)
for each in os.listdir(fld1):
f = cl.combineImgs(fld1+each, fld2+each)
network.doDestin(f)
"""
Get the time stamp for today
"""
import datetime
def getTimeStamp():
now = datetime.datetime.now()
return str(now.year) + "." + str(now.month) + "." + str(now.day)
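# --- Hedged usage sketch (not part of the original script) -------------------
# Typical flow with the helpers above; the image-source object must provide
# addImage/findNextImage/getGrayImageFloat (its construction is omitted here
# because the exact pydestin factory is not shown in this file).
#
# ims = ...  # a pydestin image source
# load_ims_fld(ims, homeFld + "/destin_images")
# network = init_destin()
# train_ims(network, ims, maxCount=16000)
# saveCens(network, 7, homeFld + "/centroids_" + getTimeStamp() + ".png")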
|
Highgrove is one of the UK’s fastest growing bed brands and they currently produce approximately 5000 beds a week from their new manufacturing plant in Liversedge, West Yorkshire – one of the biggest and most modern in Europe.
A privately owned family business, Highgrove is totally committed to providing customers with the widest choice of sleep products and the highest levels of quality and value. With their Fastersleep express delivery service and their own distribution fleet of more than 20 new delivery vehicles, 98% of orders are fulfilled within 14 days.
Due to the nature of their products and the huge selection of options available with their divans, mattresses and headboards, this listing gives a quick insight into the company and its offerings. Pop in store, where you will find ample displays and helpful staff on hand to guide you through the myriad of options. |
# coding: utf-8
"""
Snooze: a backend-agnostic REST API provider for Flask.
e.g.
from flask import app, Blueprint
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.snooze import Snooze, SqlAlchemyEndpoint
from my_model import sqlalchemy_db, Book
api = Blueprint('api_v1', __name__)
apimgr = Snooze(api)
apimgr.add(SqlAlchemyEndpoint(sqlalchemy_db, Book, ['author', 'title']))
app.register_blueprint(api, url_prefix='/api_v1')
"""
from flask import request, make_response
import re
try:
import simplejson as json
except ImportError:
import json
class NotFoundError(Exception):
"""
Resource not found.
"""
def __init__(self, cls, path):
super(NotFoundError, self).__init__()
self.cls = cls
self.path = path
self.message = 'No %(cls)s exists with an ID of %(path)s' % dict(
cls=cls.__name__,
path=path
)
def error_dict(etype, message, **kwargs):
d = dict(type=etype, message=message)
if kwargs:
d['detail'] = kwargs
return d
class CoerceToDictEncoder(json.JSONEncoder):
"""
A fairly naive encoder that will try to convert unrecognised types to dict.
The idea being that objects can be made iterable quite easily as a bridge
to being converted to JSON.
"""
def default(self, obj):
if obj is None or type(obj) in (
dict,
list, tuple,
str, unicode,
int, long, float,
bool):
return json.JSONEncoder.default(self, obj)
return dict(obj)
def wrap_verb_call(call, endpoint, data_in, data_out):
"""
Construct a callback that will wrap a given HTTP Verb call, passing a path.
"""
def f(path=None):
data = data_in(request.data) if request.data != '' else dict()
assert isinstance(data, dict), "Data must be a dict"
try:
res = call(endpoint, path, data)
try:
# NB. error_data used because Flask stringifies stuff we put
# into res.data, which isn't good for us
res.data = data_out(res.error_data)
except AttributeError:
try:
res.data = data_out(res.data)
except AttributeError:
res = data_out(res)
except NotFoundError, e:
res = make_response()
res.status = '404'
res.data = data_out(error_dict(**{
'etype': type(e).__name__,
'message': e.message,
'class': e.cls.__name__,
'path': e.path
}))
except:
import sys
from traceback import extract_tb
exc_type, exc_value, exc_traceback = sys.exc_info()
res = data_out(error_dict(exc_type.__name__,
exc_value.message,
traceback=extract_tb(exc_traceback))), '500'
return res
return f
def response_redirect(endpoint, o, code):
r = make_response()
r.headers['Location'] = '%(path)s%(id)s' % dict(
path=re.sub('[^/]*$', '', request.path),
id=getattr(o, endpoint.id_key)
)
r.status = str(code)
return r
class Snooze(object):
"""
    The API context manager.
    At the API level, every verb takes in and gives out data in the same way.
"""
def __init__(self, app, hooks=None):
self._app = app
hooks = dict() if hooks is None else hooks
self._hook_data_in = hooks.get('data_in', json.loads)
self._hook_data_out = hooks.get('data_out', CoerceToDictEncoder().encode)
self._routes = {}
def add(self, endpoint, name=None, methods=(
'OPTIONS', 'POST', 'GET', 'PUT', 'PATCH', 'DELETE')):
"""
Add an endpoint for a class, the name defaults to a lowercase version
        of the class name but can be overridden.
Methods can be specified, note that HEAD is automatically generated by
Flask to execute the GET method without returning a body.
"""
obj_name = endpoint.cls.__name__.lower() if name is None else name
methods = [m.upper() for m in methods]
for verb in 'OPTIONS', 'POST', 'GET', 'PUT', 'PATCH', 'DELETE':
if verb not in methods:
continue
l = wrap_verb_call(call=getattr(self, '_%s' % verb.lower()),
endpoint=endpoint,
data_in=self._hook_data_in,
data_out=self._hook_data_out)
self._register(obj_name=obj_name,
verb=verb,
func=l)
#
# Verbs
#
def _options(self, endpoint, path, data):
"""HTTP Verb endpoint"""
return self._routes
def _post(self, endpoint, path, data):
"""HTTP Verb endpoint"""
o = endpoint.create(path)
if data is not None:
self._fill(endpoint, o, data)
return response_redirect(endpoint, o, 201)
def _get(self, endpoint, path, data):
"""HTTP Verb endpoint"""
return endpoint.read(path)
def _put(self, endpoint, path, data):
"""HTTP Verb endpoint"""
created = False
try:
o = endpoint.read(path)
except NotFoundError:
o = endpoint.create(path)
created = True
self._fill(endpoint, o, data)
if created:
return response_redirect(endpoint, o, 201)
def _patch(self, endpoint, path, data):
"""HTTP Verb endpoint"""
o = endpoint.read(path)
self._update(endpoint, o, data)
def _delete(self, endpoint, path, data):
"""HTTP Verb endpoint"""
endpoint.delete(path)
#
# Tools
#
def _update(self, endpoint, o, data):
for k in data:
assert k in endpoint.writeable_keys, \
"Cannot update key %s, valid keys for update: %s" % \
(k, ', '.join(endpoint.writeable_keys))
setattr(o, k, data[k])
endpoint.finalize(o)
def _fill(self, endpoint, o, data):
items_set = set(endpoint.writeable_keys)
keys_set = set(data.keys())
assert items_set == keys_set, \
"The provided keys (%s) do not match the expected items (%s)" % \
(', '.join(keys_set), ', '.join(items_set))
self._update(endpoint, o, data)
def _register(self, obj_name, verb, func):
func.provide_automatic_options = False
route = '/%s/<path:path>' % obj_name
self._app.route(route,
methods=(verb,),
endpoint="%s:%s" % (verb, route))(func)
self._reg_options(verb, route)
if verb in ('OPTIONS', 'GET', 'POST'):
route = '/%s/' % obj_name
self._app.route(route,
methods=(verb,),
endpoint="%s:%s" % (verb, route),
defaults={'path': None})(func)
self._reg_options(verb, route)
def _reg_options(self, verb, route):
verbs = self._routes.get(route, [])
verbs.append(verb)
if verb == 'GET':
# Flask adds 'HEAD' for GET
verbs.append('HEAD')
self._routes[route] = verbs
class Endpoint(object):
"""
Base Endpoint object.
"""
def __init__(self, cls, id_key, writeable_keys):
"""
cls: Class of object being represented by this endpoint
id_key: Identifying key of an object
writeable_keys: A list of keys that may be written to on an object
"""
self.cls = cls
self.id_key = id_key
self.writeable_keys = writeable_keys
def create(self, path=None):
"""Create a new object"""
raise NotImplementedError()
def read(self, path):
"""Load an existing object"""
raise NotImplementedError()
def finalize(self, obj):
"""Save an object (if required)"""
raise NotImplementedError()
def delete(self, path):
"""Delete the data for the provided ID"""
raise NotImplementedError()
#
# SQLAlchemy Land
#
def row2dict(row):
"""
Convert a SQLAlchemy row/object to a dict, found on:
http://stackoverflow.com/questions/
1958219/convert-sqlalchemy-row-object-to-python-dict
"""
d = {}
for col_name in row.__table__.columns.keys():
d[col_name] = getattr(row, col_name)
return d
class SqlAlchemyEndpoint(Endpoint):
def __init__(self, db, cls, items):
from sqlalchemy.orm import class_mapper
self.db = db
self.pk = class_mapper(cls).primary_key[0]
super(SqlAlchemyEndpoint, self).__init__(cls, self.pk.name, items)
def create(self, path=None):
o = self.cls()
if path is not None:
setattr(o, self.id_key, path)
return o
def read(self, path):
        if path is None:
return [pk[0] for pk in \
self.db.session.query(self.pk).all()]
try:
return self.cls.query.filter(self.pk == path).all()[0]
except IndexError:
raise NotFoundError(self.cls, path)
def finalize(self, obj):
self.db.session.add(obj)
self.db.session.commit()
def delete(self, path):
o = self.read(path)
self.db.session.delete(o)
|
Today, when you order "Chameleon Pieces by Woody Aragon", you'll instantly be emailed a Penguin Magic gift certificate. You can spend it on anything you like at Penguin, just like cash. Just complete your order as normal, and within seconds you'll get an email with your gift certificate.
4 cards are chosen and lost in the deck. Woody pulls out a card, not one of the 4 selections, and what happens next has to be seen to be believed. Woody rips the card into 4 pieces, and with a magical blow, the pieces change to the first spectator's card. He then squeezes the pieces and they magically change to the second spectator's card. But it doesn't stop there!!! A gentle shake and the pieces morph into the third spectator's card. The pieces change one last time into the last spectator's card, and the pieces fuse together, making the card whole again so the spectators can examine it!
I can tell this guy doesn't work for anyone but convention crowds. It's impractical to buy a deck of gaffs and only be able to use 2 cards out of the 52, plus the lapping, plus having spectators be able to recall not one but four cards. Another wasted download. Not recommended for purchase unless you have some people who come over to your house and want to see your latest trick while they gather around the fireplace. |
import itertools
from pyramid.httpexceptions import HTTPSeeOther
from pyramid.response import Response
from weasyl import define, message
from weasyl.controllers.decorators import login_required, token_checked
"""Contains view callables dealing with notification messages."""
@login_required
@token_checked
def messages_remove_(request):
form = request.web_input(recall='', remove=[])
remove_all_before = form.get('remove-all-before')
if remove_all_before:
message.remove_all_before(request.userid, int(remove_all_before))
elif form.get('remove-all-submissions'):
message.remove_all_submissions(request.userid, define.get_int(form['remove-all-submissions']))
else:
message.remove(request.userid, list(map(int, form.remove)))
if form.recall:
raise HTTPSeeOther(location="/messages/submissions")
else:
raise HTTPSeeOther(location="/messages/notifications")
def tag_section(results, section):
for row in results:
row['section'] = section
return results
def sort_notifications(notifications):
return [
row
for key, group in itertools.groupby(
notifications, lambda row: message.notification_clusters.get(row['type']))
for row in sorted(group, key=lambda row: row['unixtime'], reverse=True)
]
@login_required
def messages_notifications_(request):
""" todo finish listing of message types in the template """
notifications = (
tag_section(message.select_site_updates(request.userid), 'notifications') +
tag_section(message.select_comments(request.userid), 'comments') +
tag_section(message.select_notifications(request.userid), 'notifications') +
tag_section(message.select_journals(request.userid), 'journals')
)
define._page_header_info.refresh(request.userid)
return Response(define.webpage(request.userid, "message/notifications.html", [
sort_notifications(notifications),
]))
@login_required
def messages_submissions_(request):
form = request.web_input(feature="", backtime=None, nexttime=None)
define._page_header_info.refresh(request.userid)
return Response(define.webpage(request.userid, "message/submissions_thumbnails.html", [
# Feature
form.feature,
# Submissions
message.select_submissions(request.userid, 66, include_tags=False,
backtime=define.get_int(form.backtime), nexttime=define.get_int(form.nexttime)),
]))
|
David Smith is a blogger, R evangelist, and Chief Community Officer at Revolution Analytics. Smith graduated with a degree in Statistics from the University of Adelaide, South Australia then spent four years researching statistical methodology at Lancaster University, UK. Smith recently shifted from the Emerald City to the Windy City...where we are sure he will find even more pinball machines to conquer. |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from itertools import chain
from odoo import api, fields, models, tools, _
from odoo.exceptions import UserError, ValidationError
import odoo.addons.decimal_precision as dp
class Pricelist(models.Model):
_name = "product.pricelist"
_description = "Pricelist"
_order = "sequence asc, id desc"
def _get_default_currency_id(self):
return self.env.user.company_id.currency_id.id
def _get_default_item_ids(self):
ProductPricelistItem = self.env['product.pricelist.item']
vals = ProductPricelistItem.default_get(ProductPricelistItem._fields.keys())
vals.update(compute_price='formula')
return [[0, False, vals]]
name = fields.Char('Pricelist Name', required=True, translate=True)
active = fields.Boolean('Active', default=True, help="If unchecked, it will allow you to hide the pricelist without removing it.")
item_ids = fields.One2many(
'product.pricelist.item', 'pricelist_id', 'Pricelist Items',
copy=True, default=_get_default_item_ids)
currency_id = fields.Many2one('res.currency', 'Currency', default=_get_default_currency_id, required=True)
company_id = fields.Many2one('res.company', 'Company')
sequence = fields.Integer(default=16)
country_group_ids = fields.Many2many('res.country.group', 'res_country_group_pricelist_rel',
'pricelist_id', 'res_country_group_id', string='Country Groups')
@api.multi
def name_get(self):
return [(pricelist.id, '%s (%s)' % (pricelist.name, pricelist.currency_id.name)) for pricelist in self]
@api.model
def name_search(self, name, args=None, operator='ilike', limit=100):
if name and operator == '=' and not args:
# search on the name of the pricelist and its currency, opposite of name_get(),
# Used by the magic context filter in the product search view.
query_args = {'name': name, 'limit': limit, 'lang': self._context.get('lang', 'en_US')}
query = """SELECT p.id
FROM ((
SELECT pr.id, pr.name
FROM product_pricelist pr JOIN
res_currency cur ON
(pr.currency_id = cur.id)
WHERE pr.name || ' (' || cur.name || ')' = %(name)s
)
UNION (
SELECT tr.res_id as id, tr.value as name
FROM ir_translation tr JOIN
product_pricelist pr ON (
pr.id = tr.res_id AND
tr.type = 'model' AND
tr.name = 'product.pricelist,name' AND
tr.lang = %(lang)s
) JOIN
res_currency cur ON
(pr.currency_id = cur.id)
WHERE tr.value || ' (' || cur.name || ')' = %(name)s
)
) p
ORDER BY p.name"""
if limit:
query += " LIMIT %(limit)s"
self._cr.execute(query, query_args)
ids = [r[0] for r in self._cr.fetchall()]
# regular search() to apply ACLs - may limit results below limit in some cases
pricelists = self.search([('id', 'in', ids)], limit=limit)
if pricelists:
return pricelists.name_get()
return super(Pricelist, self).name_search(name, args, operator=operator, limit=limit)
def _compute_price_rule_multi(self, products_qty_partner, date=False, uom_id=False):
""" Low-level method - Multi pricelist, multi products
Returns: dict{product_id: dict{pricelist_id: (price, suitable_rule)} }"""
if not self.ids:
pricelists = self.search([])
else:
pricelists = self
results = {}
for pricelist in pricelists:
subres = pricelist._compute_price_rule(products_qty_partner, date=date, uom_id=uom_id)
for product_id, price in subres.items():
results.setdefault(product_id, {})
results[product_id][pricelist.id] = price
return results
@api.multi
def _compute_price_rule(self, products_qty_partner, date=False, uom_id=False):
""" Low-level method - Mono pricelist, multi products
Returns: dict{product_id: (price, suitable_rule) for the given pricelist}
If date in context: Date of the pricelist (%Y-%m-%d)
        :param products_qty_partner: list of tuples (product, quantity, partner)
:param datetime date: validity date
:param ID uom_id: intermediate unit of measure
"""
self.ensure_one()
if not date:
date = self._context.get('date', fields.Date.today())
if not uom_id and self._context.get('uom'):
uom_id = self._context['uom']
if uom_id:
# rebrowse with uom if given
product_ids = [item[0].id for item in products_qty_partner]
products = self.env['product.product'].with_context(uom=uom_id).browse(product_ids)
products_qty_partner = [(products[index], data_struct[1], data_struct[2]) for index, data_struct in enumerate(products_qty_partner)]
else:
products = [item[0] for item in products_qty_partner]
if not products:
return {}
categ_ids = {}
for p in products:
categ = p.categ_id
while categ:
categ_ids[categ.id] = True
categ = categ.parent_id
categ_ids = categ_ids.keys()
is_product_template = products[0]._name == "product.template"
if is_product_template:
prod_tmpl_ids = [tmpl.id for tmpl in products]
# all variants of all products
prod_ids = [p.id for p in
list(chain.from_iterable([t.product_variant_ids for t in products]))]
else:
prod_ids = [product.id for product in products]
prod_tmpl_ids = [product.product_tmpl_id.id for product in products]
# Load all rules
self._cr.execute(
'SELECT item.id '
'FROM product_pricelist_item AS item '
'LEFT JOIN product_category AS categ '
'ON item.categ_id = categ.id '
'WHERE (item.product_tmpl_id IS NULL OR item.product_tmpl_id = any(%s))'
'AND (item.product_id IS NULL OR item.product_id = any(%s))'
'AND (item.categ_id IS NULL OR item.categ_id = any(%s)) '
'AND (item.pricelist_id = %s) '
'AND (item.date_start IS NULL OR item.date_start<=%s) '
'AND (item.date_end IS NULL OR item.date_end>=%s)'
'ORDER BY item.applied_on, item.min_quantity desc, categ.parent_left desc',
(prod_tmpl_ids, prod_ids, categ_ids, self.id, date, date))
item_ids = [x[0] for x in self._cr.fetchall()]
items = self.env['product.pricelist.item'].browse(item_ids)
results = {}
for product, qty, partner in products_qty_partner:
results[product.id] = 0.0
suitable_rule = False
# Final unit price is computed according to `qty` in the `qty_uom_id` UoM.
# An intermediary unit price may be computed according to a different UoM, in
# which case the price_uom_id contains that UoM.
# The final price will be converted to match `qty_uom_id`.
qty_uom_id = self._context.get('uom') or product.uom_id.id
price_uom_id = product.uom_id.id
qty_in_product_uom = qty
if qty_uom_id != product.uom_id.id:
try:
qty_in_product_uom = self.env['product.uom'].browse([self._context['uom']])._compute_quantity(qty, product.uom_id)
except UserError:
# Ignored - incompatible UoM in context, use default product UoM
pass
# if Public user try to access standard price from website sale, need to call price_compute.
# TDE SURPRISE: product can actually be a template
price = product.price_compute('list_price')[product.id]
price_uom = self.env['product.uom'].browse([qty_uom_id])
for rule in items:
if rule.min_quantity and qty_in_product_uom < rule.min_quantity:
continue
if is_product_template:
if rule.product_tmpl_id and product.id != rule.product_tmpl_id.id:
continue
if rule.product_id and not (product.product_variant_count == 1 and product.product_variant_id.id == rule.product_id.id):
# product rule acceptable on template if has only one variant
continue
else:
if rule.product_tmpl_id and product.product_tmpl_id.id != rule.product_tmpl_id.id:
continue
if rule.product_id and product.id != rule.product_id.id:
continue
if rule.categ_id:
cat = product.categ_id
while cat:
if cat.id == rule.categ_id.id:
break
cat = cat.parent_id
if not cat:
continue
if rule.base == 'pricelist' and rule.base_pricelist_id:
price_tmp = rule.base_pricelist_id._compute_price_rule([(product, qty, partner)])[product.id][0] # TDE: 0 = price, 1 = rule
price = rule.base_pricelist_id.currency_id.compute(price_tmp, self.currency_id, round=False)
else:
# if base option is public price take sale price else cost price of product
# price_compute returns the price in the context UoM, i.e. qty_uom_id
price = product.price_compute(rule.base)[product.id]
convert_to_price_uom = (lambda price: product.uom_id._compute_price(price, price_uom))
if price is not False:
if rule.compute_price == 'fixed':
price = convert_to_price_uom(rule.fixed_price)
elif rule.compute_price == 'percentage':
price = (price - (price * (rule.percent_price / 100))) or 0.0
else:
# complete formula
price_limit = price
price = (price - (price * (rule.price_discount / 100))) or 0.0
if rule.price_round:
price = tools.float_round(price, precision_rounding=rule.price_round)
if rule.price_surcharge:
price_surcharge = convert_to_price_uom(rule.price_surcharge)
price += price_surcharge
if rule.price_min_margin:
price_min_margin = convert_to_price_uom(rule.price_min_margin)
price = max(price, price_limit + price_min_margin)
if rule.price_max_margin:
price_max_margin = convert_to_price_uom(rule.price_max_margin)
price = min(price, price_limit + price_max_margin)
suitable_rule = rule
break
# Final price conversion into pricelist currency
if suitable_rule and suitable_rule.compute_price != 'fixed' and suitable_rule.base != 'pricelist':
price = product.currency_id.compute(price, self.currency_id, round=False)
results[product.id] = (price, suitable_rule and suitable_rule.id or False)
return results
# New methods: product based
def get_products_price(self, products, quantities, partners, date=False, uom_id=False):
""" For a given pricelist, return price for products
Returns: dict{product_id: product price}, in the given pricelist """
self.ensure_one()
return dict((product_id, res_tuple[0]) for product_id, res_tuple in self._compute_price_rule(zip(products, quantities, partners), date=date, uom_id=uom_id).iteritems())
def get_product_price(self, product, quantity, partner, date=False, uom_id=False):
""" For a given pricelist, return price for a given product """
self.ensure_one()
return self._compute_price_rule([(product, quantity, partner)], date=date, uom_id=uom_id)[product.id][0]
def get_product_price_rule(self, product, quantity, partner, date=False, uom_id=False):
""" For a given pricelist, return price and rule for a given product """
self.ensure_one()
return self._compute_price_rule([(product, quantity, partner)], date=date, uom_id=uom_id)[product.id]
# Compatibility to remove after v10 - DEPRECATED
@api.model
def _price_rule_get_multi(self, pricelist, products_by_qty_by_partner):
""" Low level method computing the result tuple for a given pricelist and multi products - return tuple """
return pricelist._compute_price_rule(products_by_qty_by_partner)
@api.multi
def price_get(self, prod_id, qty, partner=None):
""" Multi pricelist, mono product - returns price per pricelist """
return dict((key, price[0]) for key, price in self.price_rule_get(prod_id, qty, partner=partner).items())
@api.multi
def price_rule_get_multi(self, products_by_qty_by_partner):
""" Multi pricelist, multi product - return tuple """
return self._compute_price_rule_multi(products_by_qty_by_partner)
@api.multi
def price_rule_get(self, prod_id, qty, partner=None):
""" Multi pricelist, mono product - return tuple """
product = self.env['product.product'].browse([prod_id])
return self._compute_price_rule_multi([(product, qty, partner)])[prod_id]
@api.model
def _price_get_multi(self, pricelist, products_by_qty_by_partner):
""" Mono pricelist, multi product - return price per product """
        return pricelist.get_products_price(zip(*products_by_qty_by_partner))
def _get_partner_pricelist(self, partner_id, company_id=None):
""" Retrieve the applicable pricelist for a given partner in a given company.
:param company_id: if passed, used for looking up properties,
instead of current user's company
"""
Partner = self.env['res.partner']
Property = self.env['ir.property'].with_context(force_company=company_id or self.env.user.company_id.id)
p = Partner.browse(partner_id)
pl = Property.get('property_product_pricelist', Partner._name, '%s,%s' % (Partner._name, p.id))
if pl:
pl = pl[0].id
if not pl:
if p.country_id.code:
pls = self.env['product.pricelist'].search([('country_group_ids.country_ids.code', '=', p.country_id.code)], limit=1)
pl = pls and pls[0].id
if not pl:
# search pl where no country
pls = self.env['product.pricelist'].search([('country_group_ids', '=', False)], limit=1)
pl = pls and pls[0].id
if not pl:
prop = Property.get('property_product_pricelist', 'res.partner')
pl = prop and prop[0].id
if not pl:
pls = self.env['product.pricelist'].search([], limit=1)
pl = pls and pls[0].id
return pl
class ResCountryGroup(models.Model):
_inherit = 'res.country.group'
pricelist_ids = fields.Many2many('product.pricelist', 'res_country_group_pricelist_rel',
'res_country_group_id', 'pricelist_id', string='Pricelists')
class PricelistItem(models.Model):
_name = "product.pricelist.item"
_description = "Pricelist item"
_order = "applied_on, min_quantity desc, categ_id desc"
product_tmpl_id = fields.Many2one(
'product.template', 'Product Template', ondelete='cascade',
help="Specify a template if this rule only applies to one product template. Keep empty otherwise.")
product_id = fields.Many2one(
'product.product', 'Product', ondelete='cascade',
help="Specify a product if this rule only applies to one product. Keep empty otherwise.")
categ_id = fields.Many2one(
'product.category', 'Product Category', ondelete='cascade',
help="Specify a product category if this rule only applies to products belonging to this category or its children categories. Keep empty otherwise.")
min_quantity = fields.Integer(
'Min. Quantity', default=1,
help="For the rule to apply, bought/sold quantity must be greater "
"than or equal to the minimum quantity specified in this field.\n"
"Expressed in the default unit of measure of the product.")
applied_on = fields.Selection([
('3_global', 'Global'),
('2_product_category', ' Product Category'),
('1_product', 'Product'),
('0_product_variant', 'Product Variant')], "Apply On",
default='3_global', required=True,
help='Pricelist Item applicable on selected option')
sequence = fields.Integer(
'Sequence', default=5, required=True,
help="Gives the order in which the pricelist items will be checked. The evaluation gives highest priority to lowest sequence and stops as soon as a matching item is found.")
base = fields.Selection([
('list_price', 'Public Price'),
('standard_price', 'Cost'),
('pricelist', 'Other Pricelist')], "Based on",
default='list_price', required=True,
help='Base price for computation.\n'
'Public Price: The base price will be the Sale/public Price.\n'
'Cost Price : The base price will be the cost price.\n'
'Other Pricelist : Computation of the base price based on another Pricelist.')
base_pricelist_id = fields.Many2one('product.pricelist', 'Other Pricelist')
pricelist_id = fields.Many2one('product.pricelist', 'Pricelist', index=True, ondelete='cascade')
price_surcharge = fields.Float(
'Price Surcharge', digits=dp.get_precision('Product Price'),
        help='Specify the fixed amount to add or subtract (if negative) to the amount calculated with the discount.')
price_discount = fields.Float('Price Discount', default=0, digits=(16, 2))
price_round = fields.Float(
'Price Rounding', digits=dp.get_precision('Product Price'),
help="Sets the price so that it is a multiple of this value.\n"
"Rounding is applied after the discount and before the surcharge.\n"
"To have prices that end in 9.99, set rounding 10, surcharge -0.01")
price_min_margin = fields.Float(
'Min. Price Margin', digits=dp.get_precision('Product Price'),
help='Specify the minimum amount of margin over the base price.')
price_max_margin = fields.Float(
'Max. Price Margin', digits=dp.get_precision('Product Price'),
help='Specify the maximum amount of margin over the base price.')
company_id = fields.Many2one(
'res.company', 'Company',
readonly=True, related='pricelist_id.company_id', store=True)
currency_id = fields.Many2one(
'res.currency', 'Currency',
readonly=True, related='pricelist_id.currency_id', store=True)
date_start = fields.Date('Start Date', help="Starting date for the pricelist item validation")
    date_end = fields.Date('End Date', help="Ending date for the pricelist item validation")
compute_price = fields.Selection([
('fixed', 'Fix Price'),
('percentage', 'Percentage (discount)'),
('formula', 'Formula')], index=True, default='fixed')
fixed_price = fields.Float('Fixed Price', digits=dp.get_precision('Product Price'))
percent_price = fields.Float('Percentage Price')
# functional fields used for usability purposes
name = fields.Char(
'Name', compute='_get_pricelist_item_name_price',
help="Explicit rule name for this pricelist line.")
price = fields.Char(
'Price', compute='_get_pricelist_item_name_price',
help="Explicit rule name for this pricelist line.")
@api.constrains('base_pricelist_id', 'pricelist_id', 'base')
def _check_recursion(self):
if any(item.base == 'pricelist' and item.pricelist_id and item.pricelist_id == item.base_pricelist_id for item in self):
raise ValidationError(_('Error! You cannot assign the Main Pricelist as Other Pricelist in PriceList Item!'))
return True
@api.constrains('price_min_margin', 'price_max_margin')
def _check_margin(self):
if any(item.price_min_margin > item.price_max_margin for item in self):
raise ValidationError(_('Error! The minimum margin should be lower than the maximum margin.'))
return True
@api.one
@api.depends('categ_id', 'product_tmpl_id', 'product_id', 'compute_price', 'fixed_price', \
'pricelist_id', 'percent_price', 'price_discount', 'price_surcharge')
def _get_pricelist_item_name_price(self):
if self.categ_id:
self.name = _("Category: %s") % (self.categ_id.name)
elif self.product_tmpl_id:
self.name = self.product_tmpl_id.name
elif self.product_id:
self.name = self.product_id.display_name.replace('[%s]' % self.product_id.code, '')
else:
self.name = _("All Products")
if self.compute_price == 'fixed':
self.price = ("%s %s") % (self.fixed_price, self.pricelist_id.currency_id.name)
elif self.compute_price == 'percentage':
self.price = _("%s %% discount") % (self.percent_price)
else:
self.price = _("%s %% discount and %s surcharge") % (abs(self.price_discount), self.price_surcharge)
@api.onchange('applied_on')
def _onchange_applied_on(self):
if self.applied_on != '0_product_variant':
self.product_id = False
if self.applied_on != '1_product':
self.product_tmpl_id = False
if self.applied_on != '2_product_category':
self.categ_id = False
@api.onchange('compute_price')
def _onchange_compute_price(self):
if self.compute_price != 'fixed':
self.fixed_price = 0.0
if self.compute_price != 'percentage':
self.percent_price = 0.0
if self.compute_price != 'formula':
self.update({
'price_discount': 0.0,
'price_surcharge': 0.0,
'price_round': 0.0,
'price_min_margin': 0.0,
'price_max_margin': 0.0,
})
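# --- Hedged usage sketch (not part of the original module) -------------------
# Computing prices from a pricelist, e.g. in an Odoo shell session; the
# record IDs below are illustrative.
#
# pricelist = env['product.pricelist'].browse(1)
# product = env['product.product'].browse(42)
# partner = env['res.partner'].browse(7)
# price = pricelist.get_product_price(product, 5.0, partner)
# price, rule_id = pricelist.get_product_price_rule(product, 5.0, partner)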
|
When you've already got one of the best ranges of compact regulators in the market place, there's really only one way to go: even smaller! And that's exactly what the development engineers from automation control experts, SMC, have done with their new 14 mm wide compact manifold regulator, Series ARM5.
Designed initially for the automotive and electronics industries, Series ARM5 can be directly or DIN rail mounted, and uses many of the same key features as the larger models in the ARM range, such as a backflow function as standard, a single unit type and an optional built-in pressure gauge. However, this new, extremely compact series now includes a low 0.35 MPa pressure set option, and the removable one-touch fittings can be changed to accommodate tube sizes of 4, 6 or 8 mm.
With further options including centralised and individual supply types, this new compact ARM5 offers similar space, maintenance and time saving benefits as the existing ARM range. |
"""
${NAME}
"""
from __future__ import absolute_import, division, print_function
import collections
import logging
import weakref
from OpenGL import GL
import numpy
from mcedit2.rendering import cubes
from mcedit2.rendering.depths import DepthOffset
from mcedit2.util import profiler
from mcedit2.util.glutils import DisplayList, gl
log = logging.getLogger(__name__)
class RenderNode(object):
def __init__(self, sceneNode):
super(RenderNode, self).__init__()
self.children = []
self.childrenBySceneNode = {}
self.sceneNode = sceneNode
self.displayList = DisplayList() # Recompiled whenever this node's scenegraph node is dirty
# or node gains or loses children
self.childNeedsRecompile = True
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.sceneNode)
_parent = None
@property
def parent(self):
if self._parent:
return self._parent()
@parent.setter
def parent(self, value):
if value is not None:
self._parent = weakref.ref(value)
else:
self._parent = None
def addChild(self, node):
self.children.append(node)
self._addChild(node)
def _addChild(self, node):
self.childrenBySceneNode[node.sceneNode] = node
node.parent = self
self.displayList.invalidate()
self.childNeedsRecompile = True
if self.parent:
self.parent.touch()
def insertNode(self, index, node):
self.children.insert(index, node)
self._addChild(node)
def removeChild(self, node):
self.childrenBySceneNode.pop(node.sceneNode, None)
self.children.remove(node)
self.displayList.invalidate()
node.parent = None
self.childNeedsRecompile = True
if self.parent:
self.parent.touch()
def invalidate(self):
self.displayList.invalidate()
self.touch()
def touch(self):
node = self
while node:
node.childNeedsRecompile = True
node = node.parent
def getList(self):
return self.displayList.getList()
def callList(self):
self.displayList.call()
def compile(self):
if self.childNeedsRecompile:
for node in self.children:
if node.sceneNode.visible:
node.compile()
self.childNeedsRecompile = False
self.displayList.compile(self.draw)
def draw(self):
self.drawSelf()
self.drawChildren()
def drawChildren(self):
if len(self.children):
lists = [node.getList()
for node in self.children
if node.sceneNode.visible]
if len(lists):
lists = numpy.hstack(tuple(lists))
try:
GL.glCallLists(lists)
except GL.error as e:
log.exception("Error calling child lists: %s", e)
raise
def drawSelf(self):
pass
def destroy(self):
for child in self.children:
child.destroy()
self.displayList.destroy()
class RenderstateRenderNode(RenderNode):
def draw(self):
self.enter()
self.drawChildren()
self.exit()
def enter(self):
raise NotImplementedError
def exit(self):
raise NotImplementedError
class TextureAtlasRenderNode(RenderstateRenderNode):
def __init__(self, sceneNode):
super(TextureAtlasRenderNode, self).__init__(sceneNode)
self.sceneNode = sceneNode
def enter(self):
if self.sceneNode.textureAtlas is None:
return
GL.glColor(1., 1., 1., 1.)
textureAtlas = self.sceneNode.textureAtlas
GL.glActiveTexture(GL.GL_TEXTURE0)
GL.glEnable(GL.GL_TEXTURE_2D)
textureAtlas.bindTerrain()
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPushMatrix()
GL.glLoadIdentity()
GL.glScale(1. / textureAtlas.width, 1. / textureAtlas.height, 1.)
GL.glActiveTexture(GL.GL_TEXTURE1)
GL.glEnable(GL.GL_TEXTURE_2D)
textureAtlas.bindLight()
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPushMatrix()
GL.glLoadIdentity()
GL.glScale(1. / 16, 1. / 16, 1.)
GL.glActiveTexture(GL.GL_TEXTURE0)
GL.glEnable(GL.GL_CULL_FACE)
def exit(self):
if self.sceneNode.textureAtlas is None:
return
GL.glDisable(GL.GL_CULL_FACE)
GL.glActiveTexture(GL.GL_TEXTURE1)
GL.glBindTexture(GL.GL_TEXTURE_2D, 0)
GL.glDisable(GL.GL_TEXTURE_2D)
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPopMatrix()
GL.glActiveTexture(GL.GL_TEXTURE0)
GL.glDisable(GL.GL_TEXTURE_2D)
GL.glMatrixMode(GL.GL_TEXTURE)
GL.glPopMatrix()
class TranslateRenderNode(RenderstateRenderNode):
def __init__(self, sceneNode):
"""
:type sceneNode: TranslateNode
"""
super(TranslateRenderNode, self).__init__(sceneNode)
def __repr__(self):
return "TranslateRenderNode(%s)" % (self.sceneNode.translateOffset,)
def enter(self):
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glPushMatrix()
GL.glTranslate(*self.sceneNode.translateOffset)
def exit(self):
GL.glMatrixMode(GL.GL_MODELVIEW)
GL.glPopMatrix()
class VertexRenderNode(RenderNode):
def __init__(self, sceneNode):
"""
:type sceneNode: VertexNode
"""
super(VertexRenderNode, self).__init__(sceneNode)
self.didDraw = False
def invalidate(self):
if self.didDraw:
assert False
super(VertexRenderNode, self).invalidate()
def drawSelf(self):
self.didDraw = True
bare = []
withTex = []
withLights = []
for array in self.sceneNode.vertexArrays:
if array.lights:
withLights.append(array)
elif array.textures:
withTex.append(array)
else:
bare.append(array)
with gl.glPushAttrib(GL.GL_ENABLE_BIT):
GL.glDisable(GL.GL_TEXTURE_2D)
self.drawArrays(bare, False, False)
GL.glEnable(GL.GL_TEXTURE_2D)
self.drawArrays(withTex, True, False)
self.drawArrays(withLights, True, True)
def drawArrays(self, vertexArrays, textures, lights):
if textures:
GL.glClientActiveTexture(GL.GL_TEXTURE0)
GL.glEnableClientState(GL.GL_TEXTURE_COORD_ARRAY)
if lights:
GL.glClientActiveTexture(GL.GL_TEXTURE1)
GL.glEnableClientState(GL.GL_TEXTURE_COORD_ARRAY)
else:
GL.glMultiTexCoord2d(GL.GL_TEXTURE1, 15, 15)
GL.glEnableClientState(GL.GL_COLOR_ARRAY)
for array in vertexArrays:
if 0 == len(array.buffer):
continue
stride = 4 * array.elements
buf = array.buffer.ravel()
GL.glVertexPointer(3, GL.GL_FLOAT, stride, buf)
if textures:
GL.glClientActiveTexture(GL.GL_TEXTURE0)
GL.glTexCoordPointer(2, GL.GL_FLOAT, stride, (buf[array.texOffset:]))
if lights:
GL.glClientActiveTexture(GL.GL_TEXTURE1)
GL.glTexCoordPointer(2, GL.GL_FLOAT, stride, (buf[array.lightOffset:]))
GL.glColorPointer(4, GL.GL_UNSIGNED_BYTE, stride, (buf.view(dtype=numpy.uint8)[array.rgbaOffset*4:]))
vertexCount = int(array.buffer.size / array.elements)
GL.glDrawArrays(array.gl_type, 0, vertexCount)
GL.glDisableClientState(GL.GL_COLOR_ARRAY)
if lights:
GL.glDisableClientState(GL.GL_TEXTURE_COORD_ARRAY)
if textures:
GL.glClientActiveTexture(GL.GL_TEXTURE0)
GL.glDisableClientState(GL.GL_TEXTURE_COORD_ARRAY)
class OrthoRenderNode(RenderstateRenderNode):
def enter(self):
w, h = self.sceneNode.size
GL.glMatrixMode(GL.GL_PROJECTION)
GL.glPushMatrix()
GL.glLoadIdentity()
GL.glOrtho(0., w, 0., h, -200, 200)
def exit(self):
GL.glMatrixMode(GL.GL_PROJECTION)
GL.glPopMatrix()
class ClearRenderNode(RenderNode):
def drawSelf(self):
color = self.sceneNode.clearColor
if color is None:
GL.glClear(GL.GL_DEPTH_BUFFER_BIT)
else:
GL.glClearColor(*color)
GL.glClear(GL.GL_COLOR_BUFFER_BIT | GL.GL_DEPTH_BUFFER_BIT)
class DepthMaskRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_DEPTH_BUFFER_BIT)
GL.glDepthMask(self.sceneNode.mask)
def exit(self):
GL.glPopAttrib()
class BoxRenderNode(RenderNode):
def drawSelf(self):
box = self.sceneNode.box
color = self.sceneNode.color
GL.glPolygonOffset(DepthOffset.Selection, DepthOffset.Selection)
cubes.drawConstructionCube(box, color)
class BoxFaceRenderNode(RenderNode):
def drawBoxFace(self, box, face, color=(0.9, 0.6, 0.2, 0.5)):
GL.glEnable(GL.GL_BLEND)
GL.glColor(*color)
cubes.drawFace(box, face)
GL.glColor(0.9, 0.6, 0.2, 0.8)
GL.glLineWidth(2.0)
cubes.drawFace(box, face, elementType=GL.GL_LINE_STRIP)
GL.glDisable(GL.GL_BLEND)
class DepthOffsetRenderNode(RenderstateRenderNode):
def enter(self):
GL.glPushAttrib(GL.GL_POLYGON_BIT)
GL.glPolygonOffset(self.sceneNode.depthOffset, self.sceneNode.depthOffset)
GL.glEnable(GL.GL_POLYGON_OFFSET_FILL)
def exit(self):
GL.glPopAttrib()
def updateRenderNode(renderNode):
"""
:type renderNode: mcedit2.rendering.rendergraph.RenderNode
"""
sceneNode = renderNode.sceneNode
if sceneNode.dirty:
renderNode.invalidate()
sceneNode.dirty = False
if sceneNode.descendentChildrenChanged or sceneNode.childrenChanged:
updateChildren(renderNode)
sceneNode.descendentChildrenChanged = False
sceneNode.childrenChanged = False
def createRenderNode(sceneNode):
"""
:type sceneNode: Node
:rtype: mcedit2.rendering.rendergraph.RenderNode
"""
renderNode = sceneNode.RenderNodeClass(sceneNode)
updateChildren(renderNode)
return renderNode
def updateChildren(renderNode):
"""
:type renderNode: mcedit2.rendering.rendergraph.RenderNode
:return:
:rtype:
"""
sceneNode = renderNode.sceneNode
deadChildren = []
for renderChild in renderNode.children:
if renderChild.sceneNode.parent is None:
deadChildren.append(renderChild)
for dc in deadChildren:
renderNode.removeChild(dc)
dc.destroy()
for index, sceneChild in enumerate(sceneNode.children):
renderChild = renderNode.childrenBySceneNode.get(sceneChild)
if renderChild is None:
renderNode.insertNode(index, createRenderNode(sceneChild))
sceneChild.dirty = False
else:
updateRenderNode(renderChild)
def renderScene(renderNode):
with profiler.context("updateRenderNode"):
updateRenderNode(renderNode)
with profiler.context("renderNode.compile"):
renderNode.compile()
with profiler.context("renderNode.callList"):
renderNode.callList()
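# Hedged usage sketch (assuming a `rootSceneNode` scenegraph Node with a
# RenderNodeClass attribute, as createRenderNode expects): build the render
# graph once, then sync and draw it every frame with a current GL context.
#
#   renderNode = createRenderNode(rootSceneNode)
#   while running:
#       renderScene(renderNode)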
|
POW! The Walking Dead Season 6 Mid-Season Premiere Poster Revealed!
Check it out, Walking Dead fans! AMC released the official poster for the second half of The Walking Dead season 6, which features a collage of pix from the upcoming episodes! |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An implementation of PublicKeyVerify for ECDSA."""
__author__ = "quannguyen@google.com (Quan Nguyen)"
from cryptography import exceptions
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric import ec
import six
from .public_key_verify import PublicKeyVerify
# The verify() method below raises SecurityException, which this snippet never
# imported; the import below assumes it lives in this package's exceptions
# module.
from .exceptions import SecurityException
class EcdsaVerify(PublicKeyVerify):
"""ECDSA verifying with cryptography.io."""
def __init__(self, pub_key, algorithm):
"""Constructor for EcdsaVerify.
Args:
pub_key: ec.EllipticCurvePublicKey, the Ecdsa public key.
algorithm: string, Ecdsa algorithm as defined at
https://tools.ietf.org/html/rfc7518#section-3.1.
Raises:
TypeError: if the public key is not an instance of
ec.EllipticCurvePublicKey.
UnsupportedAlgorithm: if the algorithm is not supported.
"""
if not isinstance(pub_key, ec.EllipticCurvePublicKey):
raise TypeError(
"The public key must be an instance of ec.EllipticCurvePublicKey")
self.pub_key = pub_key
curve_name = ""
if algorithm == "ES256":
self.hash = hashes.SHA256()
curve_name = "secp256r1"
elif algorithm == "ES384":
self.hash = hashes.SHA384()
curve_name = "secp384r1"
elif algorithm == "ES512":
self.hash = hashes.SHA512()
curve_name = "secp521r1"
else:
raise exceptions.UnsupportedAlgorithm(
"Unknown algorithm : %s" % (algorithm))
# In Ecdsa, both the key and the algorithm define the curve. Therefore, we
# must cross check them to make sure they're the same.
if curve_name != pub_key.curve.name:
      raise exceptions.UnsupportedAlgorithm(
          "The curve in public key %s and in algorithm %s don't match" %
          (pub_key.curve.name, curve_name))
self.algorithm = algorithm
def verify(self, signature, data):
"""See base class."""
if not isinstance(signature, six.binary_type) or not isinstance(
data, six.binary_type):
raise SecurityException("Signature and data must be bytes")
    try:
      self.pub_key.verify(signature, data, ec.ECDSA(self.hash))
    except exceptions.InvalidSignature:
      # Only a failed verification is translated; programming errors propagate
      # instead of being swallowed by a bare except.
      raise SecurityException("Invalid signature")
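# Hedged usage sketch (not part of the original module): create a P-256 key
# with cryptography.io, sign a payload and check it with EcdsaVerify. Calling
# generate_private_key() without a backend argument assumes a recent version
# of the cryptography package.
if __name__ == "__main__":
    private_key = ec.generate_private_key(ec.SECP256R1())
    good_signature = private_key.sign(b"payload", ec.ECDSA(hashes.SHA256()))
    verifier = EcdsaVerify(private_key.public_key(), "ES256")
    verifier.verify(good_signature, b"payload")  # raises SecurityException if invalid
    print("signature verified")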
|
Evil Ash is a villain in the Evil Dead movies and is none other than Ash Williams himself, or, to be more accurate, the dark half of Ash's personality given corporeal form by the evil spirits that he has spent his life battling (known as Deadites in the series).
Evil Ash began his "life" as an entity known as Bad Ash, who appeared out of a mirror during the events of Evil Dead II and proceeded to torment his heroic counterpart, trying to drive him mad. Indeed, the whole Bad Ash scenario was suggestive of Ash beginning to lose his sanity, and could arguably be seen as Ash battling his inner demons on a literal level.
At any rate, after a confrontation between the two, Bad Ash is "killed" by the original Ash and is never heard of again until the plot of Army of Darkness, by which time Bad Ash has resurrected himself as the more powerful Evil Ash. This incarnation was completely immortal and made himself the leader of the Deadite armies in direct opposition to his heroic counterpart, who was reluctantly acting as a hero to a medieval army of his own (due to time-travel-related mishaps). |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
kallithea.bin.kallithea_config
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
configuration generator for Kallithea
This file was forked by the Kallithea project in July 2014.
Original author and date, and relevant copyright and licensing information is below:
:created_on: Jun 18, 2013
:author: marcink
:copyright: (c) 2013 RhodeCode GmbH, and others.
:license: GPLv3, see LICENSE.md for more details.
"""
from __future__ import with_statement
import os
import sys
import uuid
import argparse
from mako.template import Template
TMPL = 'template.ini.mako'
here = os.path.dirname(os.path.abspath(__file__))
def argparser(argv):
usage = (
"kallithea-config [-h] [--filename=FILENAME] [--template=TEMPLATE] \n"
"VARS optional specify extra template variable that will be available in "
"template. Use comma separated key=val format eg.\n"
"key1=val1,port=5000,host=127.0.0.1,elements='a\,b\,c'\n"
)
parser = argparse.ArgumentParser(
description='Kallithea CONFIG generator with variable replacement',
usage=usage
)
## config
group = parser.add_argument_group('CONFIG')
group.add_argument('--filename', help='Output ini filename.')
group.add_argument('--template', help='Mako template file to use instead of '
'the default builtin template')
group.add_argument('--raw', help='Store given mako template as raw without '
'parsing. Use this to create custom template '
'initially', action='store_true')
group.add_argument('--show-defaults', help='Show all default variables for '
'builtin template', action='store_true')
args, other = parser.parse_known_args()
return parser, args, other
def _escape_split(text, sep):
"""
Allows for escaping of the separator: e.g. arg='foo\, bar'
It should be noted that the way bash et. al. do command line parsing, those
single quotes are required. a shameless ripoff from fabric project.
"""
escaped_sep = r'\%s' % sep
if escaped_sep not in text:
return text.split(sep)
before, _, after = text.partition(escaped_sep)
startlist = before.split(sep) # a regular split is fine here
unfinished = startlist[-1]
startlist = startlist[:-1]
# recurse because there may be more escaped separators
endlist = _escape_split(after, sep)
# finish building the escaped value. we use endlist[0] becaue the first
# part of the string sent in recursion is the rest of the escaped value.
unfinished += sep + endlist[0]
return startlist + [unfinished] + endlist[1:] # put together all the parts
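# Hedged example of _escape_split behaviour (worked out from the code above):
#   _escape_split(r"key1=val1,elements=a\,b\,c", ',')
#   -> ['key1=val1', 'elements=a,b,c']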
def _run(argv):
parser, args, other = argparser(argv)
    if not len(argv) > 1:
        parser.print_help()
        sys.exit(0)
# defaults that can be overwritten by arguments
tmpl_stored_args = {
'http_server': 'waitress',
'lang': 'en',
'database_engine': 'sqlite',
'host': '127.0.0.1',
'port': 5000,
'error_aggregation_service': None,
}
if other:
# parse arguments, we assume only first is correct
kwargs = {}
for el in _escape_split(other[0], ','):
kv = _escape_split(el, '=')
if len(kv) == 2:
k, v = kv
kwargs[k] = v
# update our template stored args
tmpl_stored_args.update(kwargs)
# use default that cannot be replaced
tmpl_stored_args.update({
'uuid': lambda: uuid.uuid4().hex,
'here': os.path.abspath(os.curdir),
})
if args.show_defaults:
for k,v in tmpl_stored_args.iteritems():
print '%s=%s' % (k, v)
sys.exit(0)
try:
# built in template
tmpl_file = os.path.join(here, TMPL)
if args.template:
tmpl_file = args.template
with open(tmpl_file, 'rb') as f:
tmpl_data = f.read()
if args.raw:
tmpl = tmpl_data
else:
tmpl = Template(tmpl_data).render(**tmpl_stored_args)
with open(args.filename, 'wb') as f:
f.write(tmpl)
print 'Wrote new config file in %s' % (os.path.abspath(args.filename))
except Exception:
from mako import exceptions
print exceptions.text_error_template().render()
def main(argv=None):
"""
Main execution function for cli
:param argv:
"""
if argv is None:
argv = sys.argv
return _run(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
The Isle of Jersey belongs to the Channel Islands and lies off the coast of France, in the vicinity of Brittany and Normandy. Jersey has a moderate maritime climate with mild winters and not-too-warm summers. According to the Köppen climate classification, the Channel Isle of Jersey has a Cfb climate. Due to its situation off the French coast it benefits from the warm Gulf Stream in the winter months, which results in relatively warm sea water. A consequence of the warm sea water is that air temperatures in the winter months do not easily drop below freezing point. But the Isle is very sensitive to strong winds and even storms reaching the Channel from a western direction via the Atlantic, so heavy storms with wind force 8, 9 or even 10 occur at least a few times a year. In the summer months, however, the chance of these storms is small.
Jersey has an average annual 24-hour temperature of 11.6 degrees centigrade, comparable with the south coast of England. On a yearly basis, a little fewer than two thousand hours of sunshine are registered. The summer months in particular are reasonably sunny, with an average of 8 sunny hours a day; many tropical destinations do not reach this score. In the winter months it is a few degrees warmer than in the Netherlands. Compared with the French mainland, too, winters in Jersey pass with considerably less frost and snow. During the daytime the temperature seldom drops below freezing point, and at night it is often limited to slight night frost if it gets really cold. In the summer the daytime temperature is approximately twenty degrees: on average no super weather for a sunny holiday, but excellent weather to enjoy the Isle. During warmer periods summer temperatures can rise to about, or even a little above, 25 degrees. Tropical values of thirty degrees or more, however, are not registered that often. With 175 rainy days per year Jersey cannot be called distinctly wet or dry. In the winter months there is, on average, fairly much rain: in December, January and February more than a hundred millimeters per month are registered, based on multi-year averages. The summers, on the other hand, are rather dry. In the height of summer there is on average only some forty to fifty millimeters per month, which makes Jersey relatively dry.
The figures below are based on multi-year weather records and are an average for Jersey.
Remark: slight local deviations may occur.
Climate figures come in handy but do not offer a complete impression of the climate and the possible weather circumstances within a particular period. Figures often do not show how big the chance is of wintry weather, (extreme) heat or hurricanes. Therefore we offer useful extra climate information for each month. |
from libmt94x.remittance_info import AbstractRemittanceInfo
from libmt94x.transfer_failed_codes import TransferFailed
class InfoToAcccountOwnerSubField(object):
'''Abstract base class for all subfields of InformationToAcccountOwner'''
pass
class BeneficiaryParty(InfoToAcccountOwnerSubField):
tag = 'BENM'
def __init__(self, account_number=None, bic=None, name=None, city=None):
self.account_number = account_number
self.bic = bic
self.name = name
self.city = city
class BusinessPurpose(InfoToAcccountOwnerSubField):
tag = 'BUSP'
def __init__(self, id_code=None, sepa_transaction_type=None):
self.id_code = id_code
self.sepa_transaction_type = sepa_transaction_type
class Charges(InfoToAcccountOwnerSubField):
tag = 'CHGS'
def __init__(self, charges):
self.charges = charges
class ClientReference(InfoToAcccountOwnerSubField):
tag = 'CREF'
def __init__(self, client_reference):
self.client_reference = client_reference
class CounterPartyID(InfoToAcccountOwnerSubField):
'''NL term: Tegenpartij ID'''
tag = 'CNTP'
def __init__(self, account_number=None, bic=None, name=None, city=None):
self.account_number = account_number
self.bic = bic
self.name = name
self.city = city
class CounterPartyIdentification(InfoToAcccountOwnerSubField):
tag = 'ID'
def __init__(self, id_code):
self.id_code = id_code
class CreditorID(InfoToAcccountOwnerSubField):
'''NL term: Incassant ID'''
tag = 'CSID'
def __init__(self, creditor_id):
self.creditor_id = creditor_id
class EndToEndReference(InfoToAcccountOwnerSubField):
'''NL term: Uniek kenmerk'''
tag = 'EREF'
def __init__(self, end_to_end_reference):
self.end_to_end_reference = end_to_end_reference
class ExchangeRate(InfoToAcccountOwnerSubField):
tag = 'EXCH'
def __init__(self, exchange_rate):
self.exchange_rate = exchange_rate
class InstructionID(InfoToAcccountOwnerSubField):
tag = 'IREF'
def __init__(self, instruction_id):
self.instruction_id = instruction_id
class MandateReference(InfoToAcccountOwnerSubField):
'''NL term: Machtigingskenmerk'''
tag = 'MARF'
def __init__(self, mandate_reference):
self.mandate_reference = mandate_reference
class OrderingParty(InfoToAcccountOwnerSubField):
tag = 'ORDP'
def __init__(self, account_number=None, bic=None, name=None, city=None):
self.account_number = account_number
self.bic = bic
self.name = name
self.city = city
class PaymentInformationID(InfoToAcccountOwnerSubField):
'''NL term: Batch ID'''
tag = 'PREF'
def __init__(self, payment_information_id):
self.payment_information_id = payment_information_id
class PurposeCode(InfoToAcccountOwnerSubField):
'''NL term: Speciale verwerkingscode'''
tag = 'PURP'
def __init__(self, purpose_of_collection):
self.purpose_of_collection = purpose_of_collection
class RemittanceInformation(InfoToAcccountOwnerSubField):
'''NL term: Omschrijvingsregels'''
tag = 'REMI'
def __init__(self, remittance_info, code=None, issuer=None):
if not isinstance(remittance_info, AbstractRemittanceInfo):
raise ValueError(
"Value for `remittance_info` must be instance of AbstractRemittanceInfo")
self.remittance_info = remittance_info
# TODO: Are these two even used??? They are in the spec but do not
# appear in examples
self.code = code
self.issuer = issuer
class ReturnReason(InfoToAcccountOwnerSubField):
'''NL term: Uitval reden'''
tag = 'RTRN'
def __init__(self, reason_code):
'''NOTE: The ING IBP spec also mentions a legacy R-Type integer
parameter which has the following possible values:
1 - Reject (geweigerde)
2 - Return (retourbetaling)
3 - Refund (terugbetaling)
4 - Reversal (herroeping)
5 - Cancellation (annulering)
The R-Type is concatenated to the `reason_code`. We do not implement the R-Type,
we just mention it here for reference.'''
transfer_failed = TransferFailed.get_instance()
if not transfer_failed.code_is_valid(reason_code):
raise ValueError("Value `reason_code` is invalid: %s" % reason_code)
self.reason_code = reason_code
class UltimateBeneficiary(InfoToAcccountOwnerSubField):
tag = 'ULTB'
def __init__(self, name):
self.name = name
class UltimateCreditor(InfoToAcccountOwnerSubField):
'''NL term: Uiteindelijke incassant'''
tag = 'ULTC'
def __init__(self, name=None, id=None):
self.name = name
self.id = id
class UltimateDebtor(InfoToAcccountOwnerSubField):
'''NL term: Uiteindelijke geincasseerde'''
tag = 'ULTD'
def __init__(self, name=None, id=None):
self.name = name
self.id = id
class InfoToAcccountOwnerSubFieldOrder(object):
# This is the order in which the fields must be written
fields = (
ReturnReason,
BusinessPurpose,
ClientReference,
EndToEndReference,
PaymentInformationID,
InstructionID,
MandateReference,
CreditorID,
CounterPartyID,
BeneficiaryParty,
OrderingParty,
RemittanceInformation,
CounterPartyIdentification,
PurposeCode,
UltimateBeneficiary,
UltimateCreditor,
UltimateDebtor,
ExchangeRate,
Charges,
)
@classmethod
def get_field_classes(cls):
return cls.fields
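# Hedged usage sketch: order a mixed list of subfields into the canonical
# write order defined above (the helper name is illustrative, not part of the
# library).
def sort_subfields(subfields):
    order = InfoToAcccountOwnerSubFieldOrder.get_field_classes()
    return sorted(subfields, key=lambda field: order.index(field.__class__))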
|
Tim Montgomery had one goal in life: to be the fastest man in the world. And he was willing to do whatever it took to get there. “Actually, I would have sold my soul to the devil.” Watch to see what he did! |
# -*- coding: utf-8 -*-
from converter import Converter
class Evaluator(object):
"""----------------------------------------------------------------
This class is used for evaluating the validity of our estimations.
We return a dictionary entry as our evaluation result. See the
return statements in each function to see which attributes are
being reported.
----------------------------------------------------------------"""
def __init__(self, tonic_tolerance=20):
self.tonic_tolerance = tonic_tolerance
self.CENT_PER_OCTAVE = 1200
# '+' symbol corresponds to quarter tone higher
self.INTERVAL_SYMBOLS = [
('P1', 0, 25), ('P1+', 25, 75), ('m2', 75, 125), ('m2+', 125, 175),
('M2', 175, 225), ('M2+', 225, 275), ('m3', 275, 325),
('m3+', 325, 375), ('M3', 375, 425), ('M3+', 425, 475),
('P4', 475, 525), ('P4+', 525, 575), ('d5', 575, 625),
('d5+', 625, 675), ('P5', 675, 725), ('P5+', 725, 775),
('m6', 775, 825), ('m6+', 825, 875), ('M6', 875, 925),
('M6+', 925, 975), ('m7', 975, 1025), ('m7+', 1025, 1075),
('M7', 1075, 1125), ('M7+', 1125, 1175), ('P1', 1175, 1200)]
@staticmethod
def evaluate_mode(estimated, annotated, source=None):
mode_bool = annotated == estimated
return {'source': source, 'mode_eval': mode_bool,
'annotated_mode': annotated, 'estimated_mode': estimated}
def evaluate_tonic(self, estimated, annotated, source=None):
est_cent = Converter.hz_to_cent(estimated, annotated)
# octave wrapping
cent_diff = est_cent % self.CENT_PER_OCTAVE
# check if the tonic is found correct
bool_tonic = (min([cent_diff, self.CENT_PER_OCTAVE - cent_diff]) <
self.tonic_tolerance)
# convert the cent difference to symbolic interval (P5, m3 etc.)
interval = None
for i in self.INTERVAL_SYMBOLS:
if i[1] <= cent_diff < i[2]:
interval = i[0]
break
elif cent_diff == 1200:
interval = 'P1'
break
        # if they are in the same octave then the estimated and octave-wrapped
        # values should be the same (very close)
same_octave = (est_cent - cent_diff < 0.001)
return {'mbid': source, 'tonic_eval': bool_tonic,
'same_octave': same_octave, 'cent_diff': cent_diff,
'interval': interval, 'annotated_tonic': annotated,
'estimated_tonic': estimated}
def evaluate_joint(self, tonic_info, mode_info, source=None):
tonic_eval = self.evaluate_tonic(tonic_info[0], tonic_info[1], source)
mode_eval = self.evaluate_mode(mode_info[0], mode_info[1], source)
# merge the two evaluations
joint_eval = tonic_eval.copy()
joint_eval['mode_eval'] = mode_eval['mode_eval']
joint_eval['annotated_mode'] = mode_eval['annotated_mode']
joint_eval['estimated_mode'] = mode_eval['estimated_mode']
joint_eval['joint_eval'] = (joint_eval['tonic_eval'] and
joint_eval['mode_eval'])
return joint_eval
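# Hedged usage sketch: 297 Hz against an annotated tonic of 148.5 Hz is exactly
# one octave up, so the octave-wrapped difference is 0 cents and the estimate
# is accepted with interval 'P1' (assuming Converter.hz_to_cent is the standard
# 1200 * log2(hz / reference) conversion).
if __name__ == '__main__':
    evaluator = Evaluator(tonic_tolerance=20)
    result = evaluator.evaluate_tonic(297.0, 148.5, source='recording-001')
    print('tonic_eval=%s interval=%s' % (result['tonic_eval'], result['interval']))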
|
Henry was adopted on 11/17/11 by Kirsten Platt, age 7, who promised her mom, Dr. Jenni Doll, that she would scoop his litter box if he could come live with them. Kirsten has known hundreds of witty kitties in her short life, but she fell in love with Henry. Scooping and cuddling are both right on track!
Biography: Henry is a stray who had been cared for by the folks at Waverly Pet Rescue for about six months. He looks kind of like a Scottish Fold, but really isn't. Henry probably had a case of severe ear mites when he was young, and he permanently damaged his ears after scratching them too much. They were in such bad shape that he had to have surgery in November, 2009. He is now totally recovered, and only needs to have his ears cleaned occasionally with a cotton swab to make sure they stay clean. Henry was pretty shy when he was first rescued, but the extra TLC while recovering from his surgery really brought him out of his shell. He became more active and friendly, and is now a big sweetheart. Henry loves spending time outside, and sits high up on a shelf watching everything below him. In fact, he spends so much time outside we were concerned he wasn't eating (not that he doesn't have a pound or two to spare!!). But now that he's learned the routine, he knows to come inside for his canned food breakfast and some petting in the mornings. Henry would be the perfect lap cat for some lucky soul!! |
################################################################################
import pyvga
import blit
import buf
ss = buf.sym('sokoscreen')
pyvga.exittext()
pyvga.framebuffer[:len(ss)] = ss
################################################################################
import py8042
import keyb
import pybeep
# I think hz = 1193182 / qerf
qerf = [5424, 5424, 0, 0,
5424, 5424, 0, 0,
4058, 4058, 0, 0,
5424, 5424, 0, 0,
3616, 3616, 0, 0,
5424, 5424, 0, 0,
3224, 3224, 0, 0,
3410, 3410, 0, 0]
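# With hz = 1193182 / divisor, the melody above is roughly: 5424 -> 220 Hz (A3),
# 4058 -> 294 Hz (D4), 3616 -> 330 Hz (E4), 3224 -> 370 Hz (F#4),
# 3410 -> 350 Hz (F4); the 0 entries presumably mark rests.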
def kbd_work():
while 1:
if py8042.more_chars():
ch = keyb.translate_scancode(py8042.get_scancode())
if ch:
stack.swap(ch)
else:
stack.swap(None, idl_task)
dir = None
def clk_work():
while 1:
global dir
blit.fill(pyvga.framebuffer, 320, \
312, 0, 8, 8, (isr.ticker & 15) + 16)
pybeep.on(qerf[isr.ticker & 31])
if py8042.more_squeaks():
dx = dy = 0
while py8042.more_squeaks():
_,dx,dy = py8042.get_squeak()
if dx > 10: dir = 'l'
elif dy > 10: dir = 'k'
elif dx < -10: dir = 'h'
elif dy < -10: dir = 'j'
stack.swap(None, idl_task)
elif dir:
ch = dir; dir = None
stack.swap(ch)
else:
stack.swap(None, idl_task)
interrupts = []
def idl_work():
while 1:
if len(interrupts):
stack.swap(None, interrupts.pop(0))
################################################################################
import stack
import isr
kbd_task = buf.bss(0x400); stack.init(kbd_task, kbd_work)
clk_task = buf.bss(0x400); stack.init(clk_task, clk_work)
idl_task = buf.bss(0x400); stack.init(idl_task, idl_work)
def kbd_isr():
interrupts.append(kbd_task)
def clk_isr():
interrupts.append(clk_task)
################################################################################
#--test map--
map = list(' ##### # # # # ### ## # # ### # ## # ###### # # ## ##### ..# # .$ ..# ##### ### #@## ..# # ######### #######')
#--easier level--
#map = list(' ##### # # #$ # ### $## # $ $ # ### # ## # ###### # # ## ##### ..# # $ $ ..# ##### ### #@## ..# # ######### #######')
#--harder level--
#map = list(' ####### # ...# ##### ...# # . .# # ## ...# ## ## ...# ### ######## # $$$ ## ##### $ $ ##### ## #$ $ # # #@ $ $ $ $ # ###### $$ $ ##### # # ########')
tile_ndx = '@&$*#. '
tiles = [buf.sym('sokotile'), buf.sym('sokogoal'),
buf.sym('stonetile'), buf.sym('stonegoal'),
buf.sym('wall'), buf.sym('goal'), buf.sym('floor')]
blit.paste(pyvga.framebuffer,320, 267, 68, tiles[0], 8)
blit.paste(pyvga.framebuffer,320, 140, 136, tiles[2], 8)
blit.paste(pyvga.framebuffer,320, 140, 156, tiles[5], 8)
def disptile(off):
blit.paste(pyvga.framebuffer, 320,
(off % 20) << 3, (off / 20) << 3, # x, y
tiles[tile_ndx.find(map[off])], 8)
def dispall():
i = len(map)
eol = 0
while i > 0: # no for yet?
i = i - 1
if eol and map[i] != ' ':
eol = 0
if not eol:
disptile(i)
if (i % 20) == 0: # 'not i % 20' freezes on hardware?
eol = 1
def move(dir):
if map.count('@'): soko = map.index('@')
else: soko = map.index('&')
s = list('~~~')
s[0] = map[soko]
s[1] = map[soko+dir]
s[2] = map[soko+dir+dir]
if s[1] in ' .':
s[0] = leave(s[0])
s[1] = enter(s[1])
elif s[1] in '$*' and s[2] in ' .':
s[0] = leave(s[0])
s[1] = enter(s[1])
s[2] = slide(s[2])
map[soko] = s[0]
map[soko+dir] = s[1]
map[soko+dir+dir] = s[2]
disptile(soko)
disptile(soko+dir)
disptile(soko+dir+dir)
def leave(c):
if c == '@': return ' '
else: return '.'
def enter(c):
if c in ' $': return '@'
else: return '&'
def slide(c):
if c == ' ': return '$'
else: return '*'
dispall()
isr.setvec(clk_isr, kbd_isr)
while 1:
def loop(msg):
pyvga.cleartext()
pyvga.entertext()
while msg.count('\n'):
n = msg.index('\n')
print msg[:n]
msg = msg[n+1:]
while 1:
stack.swap(None, idl_task)
if not map.count('$'):
loop('''
#### ## #### ##
### ## ### #
### # ### #
### # ### # ##
### # ### # ####
## # ### # ##
### # ### #
### # ## # #
## # ### # #
### # ### ## # ###
### # #### ### ### ### ## # ### ### ###
## # ## # ### ### ### ## # ### #### ##
### ## ## ### ### ## ## # ### ### ##
### ## ## ### ### ### #### # ### ### ##
### ### ## ### ### ### #### # ### ### ##
### ### ## ### ### ## #### # ### ### ##
### ### ## ### ### ## ### # ### ### ##
### ### ## ### ### ### ### ### ### ##
### ### ## ### ### ### ### ### ### ##
### ## ## ### ### ### ### ### ### ##
### ## ## ### ### # # ### ### ##
### ## # ## #### # # ### ### ##
##### ##### ##### ## # # ######## ##
''')
bufchar = stack.swap(None, idl_task)
if bufchar == 'q': loop('Thanks for playing')
elif bufchar in 'hs': move(-1)
elif bufchar in 'jx': move(20)
elif bufchar in 'ke': move(-20)
elif bufchar in 'ld': move(1)
elif bufchar == 'p': dispall()
|
Nashville, Tenn., (Sept. 28, 2017) – Launch Tennessee (LaunchTN) has reformatted its annual internship program to offer 16 full-time, 14-week paid positions with Tennessee-based startups and Entrepreneur Centers.
Interns will receive hands-on startup experience and a stipend, and will learn entrepreneurship from some of the state's most successful entrepreneurs.
LaunchTN’s Specialist Program, launched in May 2014, has matched 100+ undergraduate and graduate students from across the country with internships at Tennessee’s six Entrepreneur Centers. Due to its success and feedback from interns, LaunchTN evolved the program to place interns directly with startups in need of tech talent, in addition to the regional Entrepreneur Centers.
“The LaunchTN internship program has been a high-impact tool to engage young talent with the Tennessee startup community,” said Brittany Burgess, entrepreneurship director at LaunchTN.
Applications are open until Feb. 19, 2018, and internships will run from May to August 2018, with flexible start and end dates depending on student schedules. Additional details are available at launchtn.org.
About Launch Tennessee – Launch Tennessee is a public-private partnership that fosters entrepreneurship, with the goal of making Tennessee the most startup-friendly state in the country. Partnering with six regional Entrepreneur Centers, Launch Tennessee provides access to technology skills development, capital formation, entrepreneurial resources and more for high-growth-potential startups. Visit LaunchTN.org to discover what makes Tennessee the best place for innovative tech companies. |
# This file is a part of the "SuMPF" package
# Copyright (C) 2018-2021 Jonas Schulte-Coerne
#
# This program is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option) any
# later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more
# details.
#
# You should have received a copy of the GNU Lesser General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""contains the class for :class:`~sumpf.Bands`-filter"""
import collections.abc
import numpy
import sumpf._internal as sumpf_internal
from ._base import Filter
__all__ = ("Bands",)
class Bands(Filter):
"""A filter, that is defined by supporting points and functions for interpolation
and extrapolation. Use cases for this filter include storing the result of an
n-th octave analysis or reading an equalization and applying it to a :class:`~sumpf.Signal`.
"""
interpolations = sumpf_internal.Interpolations #: an enumeration with flags for defining the interpolation and extrapolation functions
file_formats = sumpf_internal.filter_writers.BandsFormats #: an enumeration with file formats, whose flags can be passed to :meth:`~sumpf.Bands.save`
def __init__(self,
bands=({},),
interpolations=sumpf_internal.Interpolations.LOGARITHMIC,
extrapolations=sumpf_internal.Interpolations.STAIRS_LIN,
labels=("Bands",)):
"""
:param bands: a sequence of dictionaries, that map float frequency values
to complex values of the filter function. This can also be
a single dictionary, if the bands filter shall only have one
channel.
        :param interpolations: a sequence of flags from the :class:`sumpf.Bands.interpolations`
enumeration, that defines the function, with which
the interpolation between the samples given in the
``bands`` dictionary shall be computed. This can also
be a single flag, if the same interpolation shall
be used for all channels.
        :param extrapolations: a sequence of flags from the :class:`sumpf.Bands.interpolations`
enumeration, that defines the function, with which
the extrapolation outside the samples given in the
``bands`` dictionary shall be computed. This can also
be a single flag, if the same extrapolation shall
be used for all channels.
:param labels: a sequence of string labels for the channels.
"""
# make sure, that all data is in sequences with the correct length
if isinstance(bands, collections.abc.Mapping):
bands = (bands,)
if not isinstance(interpolations, collections.abc.Sequence):
interpolations = (interpolations,) * len(bands)
elif len(interpolations) < len(bands):
interpolations = tuple(interpolations) + (interpolations[-1],) * (len(bands) - len(interpolations))
if not isinstance(extrapolations, collections.abc.Sequence):
extrapolations = (extrapolations,) * len(bands)
elif len(extrapolations) < len(bands):
extrapolations = tuple(extrapolations) + (extrapolations[-1],) * (len(bands) - len(extrapolations))
if not isinstance(labels, collections.abc.Sequence):
labels = (labels,) * len(bands)
elif len(labels) < len(bands):
labels = tuple(labels) + (labels[0] if labels else "Bands",) * (len(bands) - len(labels))
# create the transfer functions
tfs = []
for b, i, e in zip(bands, interpolations, extrapolations):
fs = numpy.array(sorted(b.keys()))
tf = Bands.Bands(xs=fs,
ys=numpy.array([b[x] for x in fs]),
interpolation=i,
extrapolation=e)
tfs.append(tf)
# initialize the filter
Filter.__init__(self,
transfer_functions=tfs,
labels=labels)
# store the original data
self.__bands = bands
self.__interpolations = [int(i) for i in interpolations[0:len(tfs)]]
self.__extrapolations = [int(e) for e in extrapolations[0:len(tfs)]]
def __repr__(self):
"""Operator overload for using the built-in function :func:`repr` to generate
a string representation of the bands filter, that can be evaluated with :func:`eval`.
:returns: a potentially very long string
"""
return (f"{self.__class__.__name__}(bands={self.__bands!r}, "
f"interpolations={self.__interpolations}, "
f"extrapolations={self.__extrapolations}, "
f"labels={self.labels()})")
def save(self, path, file_format=file_formats.AUTO):
"""Saves the bands filter to a file. The file will be created if it does not exist.
:param path: the path to the file
:param file_format: an optional flag from the :attr:`sumpf.Bands.file_formats`
enumeration, that specifies the file format, in which
the bands filter shall be stored. If this parameter
is omitted or set to :attr:`~sumpf.Bands.file_formats`.\ ``AUTO``,
the format will be guessed from the ending of the filename.
:returns: self
"""
writer = sumpf_internal.get_writer(file_format=file_format,
writers=sumpf_internal.filter_writers.bands_writers,
writer_base_class=sumpf_internal.filter_writers.Writer)
writer(self, path)
return self
def to_db(self, reference=1.0, factor=20.0):
"""Computes a bands filter with the values of this filter converted to
decibels. It will use the same interpolation and extrapolation functions
as the original filter.
This method takes the values from the bands filter as they are, which might
        not make sense in case of complex or negative filter values. Consider
computing the magnitude of the filter by using the :func:`abs` function
before calling this method.
:param reference: the value, by which the filter's values are divided before
computing the logarithm. Usually, this is one, but for
example when converting a filter in Pascal to dB[SPL],
the reference must be set to 20e-6.
:param factor: the factor, with which the logarithm is multiplied. Use
20 for root-power quantities (if the bands' values are amplitudes)
and 10 for power quantities (if the bands' values are energies
or powers).
"""
return Bands(bands=[{f: factor * numpy.log10(y / reference) for f, y in b.items()} for b in self.__bands],
interpolations=self.__interpolations,
extrapolations=self.__extrapolations,
labels=self.labels())
def from_db(self, reference=1.0, factor=20.0):
"""Computes a bands filter with the values of this filter converted from
decibels to a linear representation. It will use the same interpolation
and extrapolation functions as the original filter.
:param reference: the value, by which the filter's values are divided before
computing the logarithm. Usually, this is one, but for
example when converting a filter in dB[SPL] to Pascal
the reference must be set to 20e-6.
:param factor: the factor, with which the logarithm is multiplied. Use
20 for root-power quantities (if the bands' values are amplitudes)
and 10 for power quantities (if the bands' values are energies
or powers).
"""
return Bands(bands=[{f: reference * 10.0 ** (y / factor) for f, y in b.items()} for b in self.__bands],
interpolations=self.__interpolations,
extrapolations=self.__extrapolations,
labels=self.labels())
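# Hedged usage sketch: a single-channel bands filter from three supporting
# points, converted to decibels and back (the values are magnitudes, hence the
# default root-power factor of 20).
if __name__ == "__main__":
    bands = Bands(bands={100.0: 1.0, 1000.0: 0.5, 10000.0: 0.25})
    in_db = bands.to_db()        # values 0.0, ~-6.02 and ~-12.04 dB
    restored = in_db.from_db()   # back to 1.0, 0.5 and 0.25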
|
Every day we are together is the greatest day of my life. I will always be yours.
Your Story is our world. |
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from rest_framework.decorators import api_view
from rest_framework.response import Response
from .forms import DistributionRequestForm
from .models import mail_request_to_managers
@api_view(['POST'])
def handle_post(request):
response = {'status': 'error'}
status_code = 400
data = request.DATA
data.update({
'name': request.user.get_full_name(),
'email': request.user.email,
'username': request.user.username,
})
data.setdefault('vidzios', None)
vidzios = data['vidzios']
del data['vidzios']
form = DistributionRequestForm(data=data)
if form.is_valid():
dr = form.save()
response['status'] = 'success'
status_code = 200
if vidzios:
ct = ContentType.objects.get_by_natural_key(app_label=settings.COLLECTFORM_RELATED_MODEL[0], model=settings.COLLECTFORM_RELATED_MODEL[1])
for vidzio_id in vidzios:
dr.vidzios.create(**{
'content_type': ct,
'object_id': vidzio_id,
})
mail_request_to_managers(sender=None, instance=dr, created=True)
else:
response['errors'] = form.errors
return Response(response, status=status_code)
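# Hedged usage sketch (DRF test-client style; `some_user`, the URL and the
# non-'vidzios' payload fields are assumptions, since DistributionRequestForm
# is not shown here):
#
#   from rest_framework.test import APIRequestFactory, force_authenticate
#   factory = APIRequestFactory()
#   request = factory.post('/distribution-requests/',
#                          {'vidzios': [12, 34]}, format='json')
#   force_authenticate(request, user=some_user)
#   response = handle_post(request)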
|
Enter the name of the port in the search function of the gallery!
If you know the number of the cancel, add it; that will shorten the search.
Some cancel numbers carry a capital in brackets. The capital in brackets enables a clear assignment of covers of one port with the same cancel number, e.g. Habana #2391 with eight different capitals (A-H).
Do not interpret the capitals in brackets as a scarcity rating of the catalogue!
Most covers show all information on the front side. When interesting cancels or information are on the reverse (date of use, arrival or transit cancels), you will find a short description below the picture.
If your search turns up a blank page, you can be the first to exhibit a nice piece of your own collection in that place.
It will be shown in the gallery a short time later. If necessary, attach a short comment.
Please don't forget to mark your entry with your signature, e.g. "Collection Jim Brown" or "Coll. J.B.".
If you prefer to keep your identity covered, use a nickname please.
Large diversity is desired in the gallery!
Covers with stamps of different countries, different dates of usage, variant colors of ink of the cancels and different ships or shipping companies as senders are wanted. |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import matplotlib.finance as mpf
import math
# Step 1: segment the time series
# The sub-interval length n is variable; for the regression analysis the time
# series has to be segmented. For example, a series of length 240 can be split
# into 4 equal sub-intervals of length 60, or 6 equal sub-intervals of length
# 40, and so on.
'''
Input: data length
Output: matrix of segmentation schemes
'''
def getSegmentationMatrix(dataLen):
segmentMatrix = []
end = math.floor(dataLen/4)
for i in range(4, int(end+1)):
if dataLen%i==0:
segmentMatrix.append([i, dataLen/i])
return segmentMatrix
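# Hedged example: every [m, n] pair with m * n == dataLen and 4 <= m <= dataLen/4
# is returned, e.g.
# getSegmentationMatrix(240) -> [[4, 60], [5, 48], [6, 40], [8, 30], ..., [60, 4]]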
# Step 2: compute the Hurst exponent
'''
Input: time series array
Output: Hurst exponent value
'''
# def computeHurst1(data):
# data = np.array(data).astype('float')
# dataLen = len(data)
# segmentMatrix = getSegmentationMatrix(dataLen)
# segMethod = len(segmentMatrix)  # number of segmentation schemes
# logRS = np.zeros(segMethod)
# logN = np.zeros(segMethod)
# for i in range(0, segMethod):
# dataMat = data.reshape(segmentMatrix[i])
# m = segmentMatrix[i][0]
# n = segmentMatrix[i][1]
# meanArr = dataMat.mean(axis=1)
# # cumulative deviation of each sub-interval (transposed)
# subMatTrans = dataMat.T-meanArr
# cumSubMat = subMatTrans.T.cumsum(axis=1)
# RVector = np.zeros(n*m).reshape(n, m)
# SVector = np.zeros(n*m).reshape(n, m)
# # compute (R/S)n
# for j in range(n):
# RVector[j] = cumSubMat[:,:j+1].max(axis=1)-cumSubMat[:,:j+1].min(axis=1)
# SVector[j] = dataMat[:,:j+1].std(axis=1)
# logRS[i] = math.log((RVector/SVector).T.mean(axis=1).mean())
# logN[i] = math.log(n)
# return np.polyfit(logN, logRS, 1)[0]
def computeHurst(data):
data = np.array(data).astype('float')
dataLen = len(data)
segmentMatrix = getSegmentationMatrix(dataLen)
    segMethod = len(segmentMatrix)  # number of segmentation schemes
logRS = np.zeros(segMethod)
logN = np.zeros(segMethod)
for i in range(0, segMethod):
dataMat = data.reshape(segmentMatrix[i])
m = segmentMatrix[i][0]
n = segmentMatrix[i][1]
meanArr = dataMat.mean(axis=1)
        # cumulative deviation of each sub-interval (transposed)
subMatTrans = dataMat.T-meanArr
cumSubMat = subMatTrans.T.cumsum(axis=1)
        # compute (R/S)n
RVector = cumSubMat.max(axis=1)-cumSubMat.min(axis=1)
SVector = dataMat.std(axis=1)
logRS[i] = math.log((RVector/SVector).mean())
logN[i] = math.log(n)
return np.polyfit(logN, logRS, 1)[0]
# Step 3: moving-average Hurst exponent
# E.g. to compute the Hurst exponent over 120 trading days, use the price data
# from [t-119, t]; "moving average" means that as t moves forward, the data
# window [t-119, t] used for the computation moves with it.
'''
Input: a Series indexed by time
Output: a Hurst Series indexed by time
'''
def computeMovingHurst(dataSeries, window=120):
dataLen = len(dataSeries)
if dataLen<window:
print 'window length is bigger than data length'
return
logPrices = np.log(dataSeries.values)
indexReturns = np.append([0], np.diff(logPrices))
hursts = np.zeros(dataLen)
hursts[0:window] = np.NaN
for i in range(dataLen-window):
hursts[window+i] = computeHurst(indexReturns[i:i+window])
return pd.Series(hursts, index=dataSeries.index)
# Compute E(H): use Peters' method to compute E[(R/S)n]
'''
Input: time series array
Output: expected value of the Hurst exponent
'''
def computeHurstExpecPeters(data):
dataLen = len(data)
segmentMatrix = getSegmentationMatrix(dataLen)
segMethod = len(segmentMatrix)#分段方案数
logERS = np.zeros(segMethod)
logN = np.zeros(segMethod)
for i in range(0, segMethod):
n = segmentMatrix[i][1]
# 用Peters方法计算E[(R/S)n]
tempSum = 0
for r in range(1, n):
            tempSum += math.sqrt(float(n - r) / r)  # Peters (1994): sum of sqrt((n-r)/r); float() avoids Python 2 integer division
ERS = (n-0.5)/n * math.pow(n*math.pi/2, -0.5) * tempSum
logERS[i] = math.log(ERS)
logN[i] = math.log(n)
return np.polyfit(logN, logERS, 1)[0]
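# Hedged usage sketch: white-noise returns should give a Hurst exponent near
# 0.5; a length such as 240 has many divisors, so getSegmentationMatrix yields
# enough segmentation schemes for the log-log regression.
# print "Hurst(white noise): %s" % computeHurst(np.random.randn(240))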
from numpy import cumsum, log, polyfit, sqrt, std, subtract
from numpy.random import randn
def hurst(ts):
"""Returns the Hurst Exponent of the time series vector ts"""
# Create the range of lag values
lags = range(2, 100)
# Calculate the array of the variances of the lagged differences
tau = [sqrt(std(subtract(ts[lag:], ts[:-lag]))) for lag in lags]
# Use a linear fit to estimate the Hurst Exponent
poly = polyfit(log(lags), log(tau), 1)
# Return the Hurst exponent from the polyfit output
return poly[0]*2.0
# Create a Geometric Brownian Motion, Mean-Reverting and Trending Series
gbm = log(cumsum(randn(100000))+1000)
mr = log(randn(100000)+1000)
tr = log(cumsum(randn(100000)+1)+1000)
# Output the Hurst Exponent for each of the above series
# and the price of Google (the Adjusted Close price) for
# the ADF test given above in the article
# print "Hurst(GBM): %s" % hurst(gbm)
# print "Hurst(MR): %s" % hurst(mr)
# print "Hurst(TR): %s" % hurst(tr)
# Hurst(GBM): 0.500606209426
# Hurst(MR): 0.000313348900533
# Hurst(TR): 0.947502376783 |
Even before a full review of the new Off Roader (8297), I decided to go for a detailed look on the new TECHNIC parts coming with it.
The first three, can also be acquired for spare with the new PF Motor Set (8293) which includes one Pole-Reverser one Lights set and two U-joints.
A fellow LEGO Ambassador (Didier Enjary, for FreeLUG) sent me some scan images from the new PF Pole-Reverser, inner details (images and comments below).
Most of the remaining images in this post were also sent by another TBs reader from UK (Mike Hatton, Parax), despite being also available at his BS folder.
This is the new PF generation of the former TECHNIC Pole-Reverser (6551).
The switch reverses C1/C2 but, of course, not Ground/9V (read about on the PF RC Protocol specification, here).
There is a neutral position (three cranks).
You can miss the fact it is in a centered position (neutral) and then you may think the switch is out of order.
One wire is now permanently attached.
There is no center anymore able to rotate and inverting polarity continuously (driven by a motor for instance, if no handle is in use). This feature made possible some special arrangements in the past.
Handle is now able to turn about 15º maximum, in each direction.
The PF lights constitute a great addition to the TECHNIC sets and will certainly find a wide range of applications.
Each cable includes two lights and assumes a 'Y' configuration.
At one end there is a PF connector (top and bottom) and at the other other side, two ends with one LED each.
The LEDs seem to be about medium brightness, so hopefully they shouldn't "eat" too much power from the battery.
The LEDs' form factor is also such that they fit into any TECHNIC pin hole, like those in the TECHNIC bricks and beams (thinner or thicker), any brick tube and the bottom of any 1x1 round plate for instance (however a bit loose in these last two cases).
In conjunction with any colored trans-clear part, these LEDs can produce a cool colored light effect.
Each cable is 400mm length which comprises 245mm (LEDs to middle brick), plus 155mm (middle brick to PF connector).
The new U-joint is a shorter version from the previous ones, which appears with several of the new 2008 TECHNIC sets (2x in 8293, 2x in 8294, 4x in 8297).
Below a photo with the three generations of U-joints, LEGO ever made.
I've already commented about the new U-joint at the Excavator (8294) review, anyway I'll copy it here too.
"...the new U-joints (3rd generation), whose main characteristic is the fact they got a shorter length. 3L instead of the 4L in the former version (already 31 years old). This shorter version will allow to use them, where it was impossible to do before and is of special interest to route the driving axles into the new LAs. One of the main concerns about this new U-joints was their potential fragility, which was already the main problem from the former version, as they are know to easily break under high torque conditions. Hopefully TLG may have improved this fact as they look a bit brighter than usual, which may suggest a different kind of ABS being now used also for this part. I can't be sure, so we will see once the users reports will start to come."
The new Off Roader also comes with a differential of a new design.
This new design is also the 3rd generation of the LEGO TECHNIC differentials, as illustrated below.
On the left, the new design (2008).
Again the new differential is smaller than its predecessor (3L wide as opposed to the former 4L version); however, it uses the same 12-tooth bevel gears inside as before (only the 1st generation used different ones).
Instead, it got a 28 teeth bevel without clutch.
The other end has no gear, but its design with four inner tabs suggests there is still hope for a future clutch mechanism, although it is incompatible with the current Driving Ring and Driving Ring Extension. Let's see if the LEGO designers have a new Driving Ring on the way to address this issue.
The differential 28 teeth gear, has a few shorter teeth (easily seen at the 2nd image above) because of a mystery slot... the wider (shallow) slot is for the present bevel gears but the deeper narrow slot is for something else... (it is too narrow for a linkarm). Because of the slot, the bevel gears only go in from one side and don't drop through!
As bevels tend to slip quite easily, the shorter teeth can make this even worse... Let's wait and see what other users will report about this new part.
It only interfaces with other bevel gears, which forces the transmission to be perpendicular.
Can't be used with a chain.
Can't be used with standard or worm gears.
As AVCampos wrote in a comment to another post, let's hope "this new differential won't completely replace its predecessor, but rather supplement it where size matters".
This new differential easily meshes with a 20 teeth double bevel, as in the present Off Roader design.
In the central part of the differential, we can see a block that prevents internal bevel gears from bending, which used to make them slip and lead to breaking gears.
And at last there are also new and smaller compact steering arms (towball couplings).
TECHNIC designers have used several versions of similar parts in the past, to design many vehicles steerings and suspensions, whose details have evolved along the time.
1L shorter ('5.5 x 2' against the previous '6.5 x 2'), which is in keeping with the also-shortened U-joints.
A mix of pin-hole and axle-hole on the sides, for greater flexibility and in accordance with new design practices like in the Linear Actuators bracket design.
The new part on the left and a comparative with an older one, on the right.
Can't wait for the full review and the building myself!
That explains the size. Good finding!
That explains a lot! The LEDs are not 9V, and they work in both directions.
Wonder if anyone can fit a timer in that brick for flashing or constant lights.
Changing the LEDs would be easier but won't look so nice, as you need to cut the actual ones and lose their "casing".
However there are probably some from those indicated by Philo, which match the TECHNIC hole diameter!?
Still you get the not so nice looking from the thermo retractile sleeve to cover the wires and soldering..
From what I can see in the pictures, the new LED pieces are a lost opportunity for LEGO: the 2x2 block reminds me immediately of the "old" 9V wires, with a 2x2 brick at each end to connect to other 9V parts. So, I guess it wouldn't be that hard for LEGO to put 9V contacts in that block: that way, the LED piece could act as an 8886 Power Functions Extension Wire, or be supplied both from PF sources and 9V sources.
Also, I wonder if the LEDs can be "extended" with Exo-Force optical fibres... theoretically, all that would be needed would be a 1x1 round brick joining the LED and the fibre. Or one could replace the fibre with a flame piece to illuminate a City layout (I tried this with standard 9V bulbs, but they were too weak).
As for the differential, indeed I didn't notice that smaller inner gear slot until I read the post (but I guess I can be excused: I don't have the part myself)! That and the central block, which presently don't appear to have any function, shall be an interesting source of speculation over at least a few months.
What I don't understand is why didn't LEGO design the new pole reverser like the old model, with an axlehole for an optional handle: like Conchas wrote, this impossibility of 360º rotation is a big limitation.
Finally, regarding U-joints' strength, I think the most obvious way to improve it would be to increase the central pins' thickness. But that would require also a thicker axle, which would make LEGO piece compatibility very difficult. So, I won't be surprised if LEGO indeed makes the new joint in a different, harder plastic.
I'm buying the Off Roader pretty soon and I'll have a review posted on my website (http://tubtech.ning.com) soon after that. I will also post detailed pictures of the mechanisms as well as pictures of a modified version (aka IR control, drive (possibly 4WD), and motorized steering added). |
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
from dace.processdefinition.processdef import ProcessDefinition
from dace.processdefinition.activitydef import ActivityDefinition
from dace.processdefinition.gatewaydef import (
ExclusiveGatewayDefinition,
ParallelGatewayDefinition)
from dace.processdefinition.transitiondef import TransitionDefinition
from dace.processdefinition.eventdef import (
StartEventDefinition,
EndEventDefinition)
from dace.objectofcollaboration.services.processdef_container import (
process_definition)
from pontus.core import VisualisableElement
from .behaviors import (
AddCinemagoer,
EditFilmSchedule,
SeeFilmSchedule,
RemoveFilmSchedule)
from lac import _
@process_definition(name='filmschedulemanagement',
id='filmschedulemanagement')
class FilmScheduleManagement(ProcessDefinition, VisualisableElement):
isUnique = True
def __init__(self, **kwargs):
super(FilmScheduleManagement, self).__init__(**kwargs)
self.title = _('Film schedule management')
self.description = _('Film schedule management')
def _init_definition(self):
self.defineNodes(
start = StartEventDefinition(),
pg = ParallelGatewayDefinition(),
add_cinemagoer = ActivityDefinition(contexts=[AddCinemagoer],
description=_("Add cinema sessions"),
title=_("Add cinema sessions"),
groups=[_("Add")]),
edit = ActivityDefinition(contexts=[EditFilmSchedule],
description=_("Edit the film synopsis"),
title=_("Edit"),
groups=[]),
see = ActivityDefinition(contexts=[SeeFilmSchedule],
description=_("Details"),
title=_("Details"),
groups=[]),
remove = ActivityDefinition(contexts=[RemoveFilmSchedule],
description=_("Remove the film synopsis"),
title=_("Remove"),
groups=[]),
eg = ExclusiveGatewayDefinition(),
end = EndEventDefinition(),
)
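        # Wire up the graph: the start event feeds a parallel gateway that
        # fans out to the four activities; each activity converges on the
        # exclusive gateway, which leads to the end event.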
self.defineTransitions(
TransitionDefinition('start', 'pg'),
TransitionDefinition('pg', 'add_cinemagoer'),
TransitionDefinition('add_cinemagoer', 'eg'),
TransitionDefinition('pg', 'see'),
TransitionDefinition('see', 'eg'),
TransitionDefinition('pg', 'edit'),
TransitionDefinition('edit', 'eg'),
TransitionDefinition('pg', 'remove'),
TransitionDefinition('remove', 'eg'),
TransitionDefinition('eg', 'end'),
)
Mark Aplin | ARIAT Jumping with the Stars!
Mark started riding at 9 years old and started showing at 12 with his first horse, an Appendix named Josh. After progressing from Short Stirrup to Children’s Hunters, Mark moved on to more serious competition, showing in Medal/Maclay classes, Junior Hunters, Regular Working Hunters, and Ch/Ad Jumpers. His junior accomplishments included placing 2nd at the Zone 2 Medal Finals twice, qualifying for the USET Finals, and winning the 1993 Syracuse PHA Junior Rider Sportsmanship Award. When he was 15, he started buying horses and schooling them from the Pre-Greens to Training Jumpers before selling them. As an amateur riding at John Madden Sales, he competed in the A/O Hunters and Jumpers before turning pro in 1994.
As an instructor, Mark derives his influence and style from his own trainers, Susan Lowe, Michael Page, and John Madden. After successfully coaching students to Medal/Maclay Finals on the East Coast, Mark decided to try his hand at intercollegiate equestrian competition, and in 2003 he moved to Wisconsin to coach the UW-Madison Equestrian Team. Since he became head coach, UWET has had two individual National Champions, won seven Regional Championships and one Reserve Championship, and made six IHSA Nationals appearances, placing in the top ten four times. Mark also coaches students privately, assisting them at A, AA, and NIHJA shows. His riders have earned Zone 6 Reserve Championships in the Adult Amateur Hunters and Jumpers and have made appearances at USEF Pony Finals.
Mark’s students value him for his experience and knowledge as well as for his caring attitude. Every student and every horse is important, regardless of ability or show experience, and his quality instruction has helped even the most timid rider build confidence and poise in the saddle.
Mark Aplin is the Coach of the UW Intercollegiate Equestrian Team.
# -*- coding: utf-8 -*-
import collections
import json
from TM1py.Objects.TM1Object import TM1Object
from TM1py.Utils.Utils import lower_and_drop_spaces
class Element(TM1Object):
""" Abstraction of TM1 Element
"""
valid_types = ('Numeric', 'String', 'Consolidated')
def __init__(self, name, element_type, attributes=None, unique_name=None, index=None):
self._name = name
self._unique_name = unique_name
self._index = index
self._element_type = None
self.element_type = element_type
self._attributes = attributes
@staticmethod
def from_dict(element_as_dict):
return Element(name=element_as_dict['Name'],
unique_name=element_as_dict['UniqueName'],
index=element_as_dict['Index'],
element_type=element_as_dict['Type'],
attributes=element_as_dict['Attributes'])
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def unique_name(self):
return self._unique_name
@property
def index(self):
return self._index
@property
def element_attributes(self):
return self._attributes
@property
def element_type(self):
return self._element_type
@element_type.setter
def element_type(self, value):
element_type = lower_and_drop_spaces(value).capitalize()
if element_type in self.valid_types:
self._element_type = element_type
else:
raise ValueError('{} is not a valid Element Type'.format(value))
@property
def body(self):
return json.dumps(self._construct_body())
@property
def body_as_dict(self):
return self._construct_body()
def _construct_body(self):
body_as_dict = collections.OrderedDict()
body_as_dict['Name'] = self._name
body_as_dict['Type'] = self._element_type
return body_as_dict
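# A minimal usage sketch (illustrative only, not part of the library):
# build an element and inspect the JSON body that would be sent to the
# TM1 REST API.
#
#     year = Element(name='2023', element_type='numeric')
#     year.body  # -> '{"Name": "2023", "Type": "Numeric"}'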
Deconstructed coffee ice cream. That’s what this is.
Espresso, sugar and cream combine forces to make a simple frozen treat that will have people coming back for more.
I can’t imagine a better treat eaten under strings of outdoor lanterns on a warm summer evening. This is how I’d want to have coffee in the morning on a super hot summer day. Can you really blame me?
Hooray for three ingredient recipes. That’s what summer living is all about!
Brew your espresso. If you don’t have espresso, you can use instant espresso. Just make 2 cups worth. If you don’t have instant espresso, I imagine really strong coffee would do the trick here.
In a mixing bowl, mix your espresso with the sugar. Let it dissolve. It works really well if your espresso is hot.
Pour the sugary espresso into a glass dish. I used a pie plate because I wanted my espresso to freeze faster.
After about 6 hours in the freezer, it’s set. You can do this overnight and prepare everything right before serving.
Scrape the espresso into a granita using a fork.
Make a little whipped cream. Don’t whip it TOO much. We wanna simulate the froth of a nice cappuccino.
Add a dollop of whipped cream on top of each dish.
So simple. So satisfying. SO DELICIOUSLY EASY.
Want to jazz it up? Top the whipped cream with a little bit of fresh orange zest. How about reducing the sugar and adding a little homemade amaretto in?
Stir the coffee and 1/2 cup of the sugar together to dissolve the sugar, and pour the mixture into a Pyrex baking dish or other flat pan. Taste for sweetness. It should be rather sweet; add more sugar if necessary. Cover and put in the freezer for at least 6 hours or overnight.
When granita is set, pull the dish from the freezer and let it thaw a bit. When it’s just a bit mushy, scrape with a fork. Then return the dish to the freezer until ready to serve.
Beat the heavy cream in a small bowl until just set. You don’t want a stiff whipped cream for this recipe. Scoop the granita into small cups and top with a dollop or two of whipped cream.
Sounds like the perfect way to cool off in the summer! love simple recipes!
So easy and so fun looking! Yum!
i am in LOVE with this – and you’re right, 3 ingredients are what it’s all about. totally making this for dessert sometime this week!
I think this would be fabulous with a little sprinkle of cinnamon on top. yumm.
I can’t wait to make this. Holy moly it looks good!
Classy, simple, and delicious… YUM!
Love this. And I have that book! Gonna have to try this soon.
Isn’t that book amazing? I think this is the 6th recipe I’ve made from it. They’re all winners.
OH my GOSH, TRACY! This is such an amazing idea! I cannot wait to try this.
You probably have to be careful not to eat too much of the granita, or else you’d be flying off the walls with caffeine!
Yeah! You totally have to be careful. I ate a cup before I went on a plane and was BUZZING!!
Ha! That’s all it takes?
I mean, I’m always excited by your recipes, but this one sounds like my heaven. Mmmm. Tracy, you rule.
This looks delicious – a great easy way to put my fancy pants new espresso machine to work! Have you thought about doing any coffee tutorials? You are always drinking such lovely looking beverages from your espresso machine but I can’t seem to figure mine out!
Maybe I will one day! To be honest…I make a lot of Americanos because I have no time in the morning to steam milk. Cappuccinos are usually my weekend treat. The husband is REALLY good at making them.
So cute!!! So So Cute!!
this is exactly how I want my morning coffee every summer day!
Do you have any recommendations for an espresso machine? This treat along with the machine would make my husband very happy on Father’s Day.
this post makes me wish I liked coffee. sad face. I can imagine how this would be soooo good!
Love your polka dot mugs! Do you remember where you got ’em?
Doing this before summer is over!
Already sold on most anything I can make ahead of time, and this looks like it’s begging for a brunch to go with it.
This looks delicious! That recipe book has been on my Amazon wish list for a long time, I think it’s time to purchase!
I love this and will definitely be making it this weekend, when it will be 90 degrees and humid! I agree with the request for a tutorial on how to make espresso drinks – also, how do you clean your machine? Just let it steam for a while?
I’ll have my husband help you out. He’s the espresso machine master!
Thanks, Tracy! I am learning so much reading your site!
– this looks amaz! I need to make it Thursday to impress the in-laws.
Thanks for listening/commenting!!! much love!
I have never been tempted to make granita until I saw this. I think I have to try it this weekend…or tonight.
I know this post is old, but where did you get those polka dotted mugs?? They are adorable!
Thank you! My mother in law gifted them to me. I think she got them at One King’s Lane a while back!
from copy import deepcopy
from django.utils.translation import ugettext_lazy as _
from django.utils.six import iteritems
from api import serializers as s
from api.validators import validate_alias
from api.vm.utils import get_owners
from api.vm.define.serializers import VmDefineSerializer, KVmDefineDiskSerializer, VmDefineNicSerializer
from api.vm.snapshot.serializers import SnapshotDefineSerializer
from api.vm.backup.serializers import BackupDefineSerializer
from gui.models import User
from vms.models import VmTemplate
def create_dummy_serializer(serializer_cls, skip_fields=(), required_fields=()):
"""Convert existing serializer class into serializer that can be used as a serializer field.
The resulting serializer is missing the original validators and field required attribute
@type serializer_cls: api.serializers.Serializer
"""
class Serializer(s.Serializer):
pass
# noinspection PyUnresolvedReferences
for name, field in iteritems(serializer_cls.base_fields):
if name in skip_fields or field.read_only:
continue
if isinstance(field, s.RelatedField):
new_field = s.CharField()
else:
new_field = deepcopy(field) # Do not touch the original field
if name in required_fields:
new_field.required = True
else:
new_field.required = False
# noinspection PyUnresolvedReferences
Serializer.base_fields[name] = new_field
return Serializer
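# Note: the *Field classes below wrap these dummy serializers so that whole
# vm_define* structures can be validated as plain dict (or list-of-dict)
# fields inside TemplateSerializer, free of the originals' required and
# related-field constraints.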
def validate_dummy_serializer(serializer, value):
ser = serializer(data=value)
ser.is_valid()
for i in ser.init_data:
if i not in ser.fields:
# noinspection PyProtectedMember
ser._errors[i] = s.ErrorList([_('Invalid field.')])
if ser.errors:
raise s.NestedValidationError(ser.errors)
class _DefineField(s.DictField):
_serializer = None
def validate(self, value):
validate_dummy_serializer(self._serializer, value)
class VmDefineField(_DefineField):
_serializer = create_dummy_serializer(VmDefineSerializer)
class _DefineArrayField(s.DictArrayField):
_serializer = None
def validate(self, value):
super(_DefineArrayField, self).validate(value)
for i in value:
validate_dummy_serializer(self._serializer, i)
class VmDefineDiskField(_DefineArrayField):
_serializer = create_dummy_serializer(KVmDefineDiskSerializer)
class VmDefineNicField(_DefineArrayField):
_serializer = create_dummy_serializer(VmDefineNicSerializer)
class VmDefineSnapshotField(_DefineArrayField):
_serializer = create_dummy_serializer(SnapshotDefineSerializer, required_fields=('name',))
class VmDefineBackupField(_DefineArrayField):
_serializer = create_dummy_serializer(BackupDefineSerializer, required_fields=('name',))
class TemplateSerializer(s.ConditionalDCBoundSerializer):
"""
vms.models.Template
"""
_model_ = VmTemplate
_update_fields_ = ('alias', 'owner', 'access', 'desc', 'ostype', 'dc_bound', 'vm_define',
'vm_define_disk', 'vm_define_nic', 'vm_define_snapshot', 'vm_define_backup')
_default_fields_ = ('name', 'alias', 'owner')
_null_fields_ = frozenset({'ostype', 'vm_define', 'vm_define_disk',
'vm_define_nic', 'vm_define_snapshot', 'vm_define_backup'})
name = s.RegexField(r'^[A-Za-z0-9][A-Za-z0-9\._-]*$', max_length=32)
alias = s.SafeCharField(max_length=32)
owner = s.SlugRelatedField(slug_field='username', queryset=User.objects, required=False)
access = s.IntegerChoiceField(choices=VmTemplate.ACCESS, default=VmTemplate.PRIVATE)
desc = s.SafeCharField(max_length=128, required=False)
ostype = s.IntegerChoiceField(choices=VmTemplate.OSTYPE, required=False, default=None)
vm_define = VmDefineField(default={}, required=False)
vm_define_disk = VmDefineDiskField(default=[], required=False, max_items=2)
vm_define_nic = VmDefineNicField(default=[], required=False, max_items=4)
vm_define_snapshot = VmDefineSnapshotField(default=[], required=False, max_items=16)
vm_define_backup = VmDefineBackupField(default=[], required=False, max_items=16)
created = s.DateTimeField(read_only=True, required=False)
def __init__(self, request, tmp, *args, **kwargs):
super(TemplateSerializer, self).__init__(request, tmp, *args, **kwargs)
if not kwargs.get('many', False):
self._dc_bound = tmp.dc_bound
self.fields['owner'].queryset = get_owners(request, all=True)
def _normalize(self, attr, value):
if attr == 'dc_bound':
return self._dc_bound
# noinspection PyProtectedMember
return super(TemplateSerializer, self)._normalize(attr, value)
def validate_alias(self, attrs, source):
try:
value = attrs[source]
except KeyError:
pass
else:
validate_alias(self.object, value)
return attrs
def validate(self, attrs):
if self.request.method == 'POST' and self._dc_bound:
limit = self._dc_bound.settings.VMS_TEMPLATE_LIMIT
if limit is not None:
if VmTemplate.objects.filter(dc_bound=self._dc_bound).count() >= int(limit):
raise s.ValidationError(_('Maximum number of server templates reached.'))
try:
ostype = attrs['ostype']
except KeyError:
ostype = self.object.ostype
try:
vm_define = attrs['vm_define']
except KeyError:
vm_define = self.object.vm_define
vm_define_ostype = vm_define.get('ostype', None)
# The template object itself has an ostype field, which is used to limit the use of a template on the DB level;
# However, also the template.vm_define property can have an ostype attribute, which will be used for a new VM
# (=> will be inherited from the template). A different ostype in both places will lead to strange situations
# (e.g. using a Windows template, which will create a Linux VM). Therefore we have to prevent such situations.
if vm_define_ostype is not None and ostype != vm_define_ostype:
raise s.ValidationError('Mismatch between vm_define ostype and template ostype.')
return super(TemplateSerializer, self).validate(attrs)
class ExtendedTemplateSerializer(TemplateSerializer):
dcs = s.DcsField()
For all those that attended the last event at the Bartley Lofts – you’re fully aware of the growth F.Y.P. has been going through, and hopefully you’re seeing all the great things F.Y.P. can offer. We’ve promised some great events for this year and I’m so excited to announce this next event. We have an absolutely gorgeous downtown waterfront, and it is time we begin to take advantage of it.
Toledo Uncork’d will be taking place at Dirty Martini, downtown on the docks waterfront. We are asking all of you to come join us for a fantastic wine tasting event along with some great music and even better food. We have so much to offer and will be raffling off big prizes that we’ll announce soon! We’ll also be giving away free bottles of wine (and more!) to FYP members as part of a raffle. Get there early!
Focus on Young People was created by Derek Feniger with one thing in mind: to create a social network in Toledo and surrounding areas that every young person felt comfortable enough to take part in. The idea was also to create a group that met consistently every month, giving each member a chance to make an event, or meet other members. There are no fees, no committees, just a good gathering of good people wanting to better the community.
The Victory Center has been providing services to cancer patients and their families since 1996. Our sole purpose is to nourish and comfort the body, mind and spirit during the fight against cancer. Programs are held in a comfortable homelike setting and are presented by licensed professionals with credentials appropriate to their specialty.
from zope.interface import implements
from twisted.python import usage
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
from twisted.application import internet
from mamba.utils import config
from dummy import MambaApplicationFactory
settings = config.Application('config/application.json')
class Options(usage.Options):
optParameters = [
['port', 'p', settings.port, 'The port number to listen on']
]
class MambaServiceMaker(object):
implements(IServiceMaker, IPlugin)
tapname = settings.name
description = settings.description
options = Options
def makeService(self, options):
"""Construct a TCPServer from a factory defined in pericote
"""
factory, application = MambaApplicationFactory(settings)
httpserver = internet.TCPServer(int(options['port']), factory)
httpserver.setName('{} Application'.format(settings.name))
httpserver.setServiceParent(application)
return httpserver
# Now construct an object which *provides* the relevant interfaces
# The name of this variable is irrelevant, as long as there is *some*
# name bound to a provider of IPlugin and IServiceMaker
mamba_service_maker = MambaServiceMaker()
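# Usage note (assuming the standard Twisted plugin layout): with this module
# placed in a twisted/plugins/ directory, the service can be started with
# e.g. `twistd -n <name> --port=1936`, where <name> is the application name
# read from config/application.json (the port value is just an example).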
What type of power adapter should I use for the MPK25?
Can I connect the Force to Pioneer DJ equipment?
Song mode for Akai Pro Force (standalone)?
How do I import a VIP user map into another PC after exporting it from the first PC?
Akai MPK261 knobs and faders in Logic?
Region View: why do I need to go through the browser before editing my clip?
"""
File: logging.py
Author: Ulf Krumnack
Email: krumnack@uni-osnabrueck.de
Github: https://github.com/krumnack
"""
# Qt imports
from PyQt5.QtWidgets import (QPushButton,
                             QVBoxLayout, QHBoxLayout)
# toolbox imports
from toolbox import Toolbox
from dltb.base.data import Data
from dltb.base.image import Image, Imagelike
# GUI imports
from .panel import Panel
from ..utils import QObserver
from ..widgets.matplotlib import QMatplotlib
from ..widgets.training import QTrainingBox
from ..widgets.data import QDataSelector
class AdversarialExamplePanel(Panel, QObserver, qobservables={
Toolbox: {'input_changed'}}):
"""A panel displaying adversarial examples.
Attributes
----------
_network: NetworkView
A network trained as autoencoder.
"""
def __init__(self, toolbox: Toolbox = None, **kwargs):
"""Initialization of the AdversarialExamplePanel.
Parameters
----------
parent: QWidget
The parent argument is sent to the QWidget constructor.
"""
super().__init__(**kwargs)
self._controller = None # FIXME[old]
self._initUI()
self._layoutUI()
self.setToolbox(toolbox)
# FIXME[old]
# self.setController(AdversarialExampleController())
def _initUI(self):
"""Initialize the user interface.
The user interface contains the following elements:
* the data selector: depicting the current input image
and allowing to select new inputs from a datasource
        * output: adversarial example
        * output: adversarial perturbation
        * output: statistics
"""
#
# Input data
#
self._dataSelector = QDataSelector()
self._dataView = self._dataSelector.dataView()
self._dataView.addAttribute('filename')
self._dataView.addAttribute('basename')
self._dataView.addAttribute('directory')
self._dataView.addAttribute('path')
self._dataView.addAttribute('regions')
self._dataView.addAttribute('image')
#
# Controls
#
self._buttonCreateModel = QPushButton("Create")
self._buttonTrainModel = QPushButton("Train")
self._buttonLoadModel = QPushButton("Load")
self._buttonSaveModel = QPushButton("Save")
self._buttonResetModel = QPushButton("Reset")
self._buttonPlotModel = QPushButton("Plot Model")
self._buttonShowExample = QPushButton("Show")
self._buttonShowExample.clicked.connect(self._onShowExample)
#
# Plots
#
self._trainingBox = QTrainingBox()
self._pltOriginal = QMatplotlib()
self._pltAdversarial = QMatplotlib()
def _layoutUI(self):
"""Layout the UI elements.
"""
# The big picture:
#
# +--------------------+----------------------------------------+
# |+------------------+|+------------------------------------+ |
# ||dataSelector ||| Result | |
# ||[view] ||| (Adversarial Example) | |
# || ||| | |
# || ||| | |
# || ||| Diffs | |
# || ||| (Adversarial Perturbation) | |
# ||[navigator] ||| Statistics | |
# || ||| | |
# || ||| Selector | |
# |+------------------+|+------------------------------------+ |
# +--------------------+----------------------------------------+
plotBar = QHBoxLayout()
plotBar.addWidget(self._dataSelector)
plotBar.addWidget(self._trainingBox)
plotBar.addWidget(self._pltOriginal)
plotBar.addWidget(self._pltAdversarial)
buttonBar = QHBoxLayout()
buttonBar.addWidget(self._buttonCreateModel)
buttonBar.addWidget(self._buttonTrainModel)
buttonBar.addWidget(self._buttonLoadModel)
buttonBar.addWidget(self._buttonSaveModel)
buttonBar.addWidget(self._buttonResetModel)
buttonBar.addWidget(self._buttonPlotModel)
buttonBar.addWidget(self._buttonShowExample)
layout = QVBoxLayout()
layout.addLayout(plotBar)
layout.addLayout(buttonBar)
self.setLayout(layout)
def setImage(self, image: Imagelike) -> None:
"""Set the image for this :py:class:`FacePanel`. This
will initiate the processing of this image using the
current tools.
"""
self.setData(Image.as_data(image))
def setData(self, data: Data) -> None:
"""Set the data to be processed by this :py:class:`FacePanel`.
"""
# set data for the dataView - this is redundant if data is set
# from the toolbox (as the dataView also observes the toolbox),
# but it is necessary, if setData is called independently.
self._dataView.setData(data)
# FIXME[todo]: generate adversarial example.
def setToolbox(self, toolbox: Toolbox) -> None:
"""Set a new Toolbox.
We are only interested in changes of the input data.
"""
self._dataSelector.setToolbox(toolbox)
# self._dataView.setToolbox(toolbox)
self.setData(toolbox.input_data if toolbox is not None else None)
def toolbox_changed(self, toolbox: Toolbox,
change: Toolbox.Change) -> None:
# pylint: disable=invalid-name
"""The FacePanel is a Toolbox.Observer. It is interested
in input changes and will react with applying face recognition
to a new input image.
"""
if change.input_changed:
self.setData(toolbox.input_data)
# FIXME[old]
# FIXME[hack]: no quotes!
def setController(self, controller: 'AdversarialExampleController') -> None:
self._controller = controller
self._buttonCreateModel.clicked.connect(controller.create_model)
self._buttonTrainModel.clicked.connect(controller.train_model)
self._buttonLoadModel.clicked.connect(controller.load_model)
self._buttonSaveModel.clicked.connect(controller.save_model)
self._buttonResetModel.clicked.connect(controller.reset_model)
self.observe(controller)
def _enableComponents(self, running=False):
print(f"enable components: {running}")
available = self._controller is not None and not running
self._buttonCreateModel.setEnabled(not running)
for w in (self._buttonTrainModel,
self._buttonLoadModel, self._buttonSaveModel,
self._buttonPlotModel,
self._buttonShowExample):
w.setEnabled(available)
def _onShowExample(self):
if self._controller is None:
self._pltOriginal.noData()
self._pltAdversarial.noData()
else:
example_data, example_label, example_prediction = \
self._controller.get_example()
with self._pltOriginal as ax:
ax.imshow(example_data[:,:,0], cmap='Greys_r')
ax.set_title(f"Label = {example_label.argmax()}, "
f"Prediction = {example_prediction.argmax()}")
adversarial_data, adversarial_prediction = \
self._controller.get_adversarial_example()
with self._pltAdversarial as ax:
ax.imshow(adversarial_data[:,:,0], cmap='Greys_r')
ax.set_title(f"Prediction = {adversarial_prediction.argmax()}")
def adversarialControllerChanged(self, controller, change):
if 'busy_changed' in change:
self._enableComponents(controller.busy)
Join the conversation on No Child Left Behind! Do you know what the Elementary and Secondary Education Act, also known as No Child Left Behind, really says? Dive deep into U.S. education law with our interactive version of the ESEA. View what other K12 News Network users have said about the law, and leave your own comments.
Use this quick guide to go directly to the section of the ESEA you’re most interested in.
Title I is the longest, toughest slog. This section of the ESEA contains important information on parent involvement in public education, programs on the development of reading skills, the education of migratory children, provisions for at-risk kids, and advanced placement testing. Mixed in are a lot of rules regarding grant applications by local and state education agencies which aren’t really relevant to individuals.
Title II of the ESEA covers funding used to train teachers and principals. It covers specific programs, like Troops-to-Teachers, that help veterans get accreditation for civilian teaching jobs, the National Writing Project, STEM (science, technology, engineering, or math) education, and teachers who use technology to help them teach.
Title III addresses Limited English Learners who want to get up to speed in English as well as native speakers of English who seek instruction in global languages.
Title IV addresses non-curricular parts of school culture that nevertheless are important part of the school day. These parts of the law address safety issues, from student health and freedom from drug or alcohol abuse, to gun-free and smoke-free zones.
Title V emphasizes parental choice among types of schools, and covers charter schools, which are hybrid public- and private-funded schools that operate under different rules than existing public schools. Also covered here: gifted and talented education, magnet schools, women’s education equity, and physical education. It’s a bit of a catch-all category.
Title VI talks about flexibility and accountability–again, aimed more at state and local educational agencies that receive federal money. There is one section on calculation of Adequate Yearly Progress, i.e., standardized testing, which is worth looking at, as is the section on Rural Schools.
Title VII addresses the education of children who are Native Hawaiian, Native Alaskan, or belong to an American Indian nation. Native people have a different relationship to the federal government: recognized tribes have treaties and other kinds of agreements with the government that affect how federal education law is implemented.
Title VIII covers federal payments to a very small number of schools which sound like a one-time peculiarity of funding issues relating to those schools. More broadly, this part of the law allows for federal emergency grants to schools to fix school facilities under certain conditions (if no other way of funding it, such as a bond measure, is possible).
Title IX gives attention to the federal government’s limited oversight into private schools. This part also addresses rules governing school prayer, equal access to school facilities (such as in school athletic programs where inequality between boys’ and girls’ athletic teams is often an issue), and armed forces recruiting.
Title X is again a catch-all area that contains amendments and repeals of sections of prior law. Here you’ll find tweaks to education laws pertaining to homeless children, American Indian children (as it intersects with the Bureau of Indian Affairs), and laws having to do with teacher training.
# -*- coding: utf-8 -*-
#########################################################################
## This scaffolding model makes your app work on Google App Engine too
#########################################################################
if request.env.web2py_runtime_gae: # if running on Google App Engine
db = DAL('gae') # connect to Google BigTable
session.connect(request, response, db=db) # and store sessions and tickets there
### or use the following lines to store sessions in Memcache
# from gluon.contrib.memdb import MEMDB
# from google.appengine.api.memcache import Client
# session.connect(request, response, db=MEMDB(Client()))
else: # else use a normal relational database
db = DAL('sqlite://storage.sqlite') # if not, use SQLite or other DB
## if no need for session
# session.forget()
#########################################################################
## Here is sample code if you need for
## - email capabilities
## - authentication (registration, login, logout, ... )
## - authorization (role based authorization)
## - services (xml, csv, json, xmlrpc, jsonrpc, amf, rss)
## - crud actions
## comment/uncomment as needed
from gluon.tools import *
auth=Auth(globals(),db) # authentication/authorization
auth.settings.hmac_key='sha512:e59112ff-dd3e-4575-88ce-12550860d018'
auth.define_tables() # creates all needed tables
crud=Crud(globals(),db) # for CRUD helpers using auth
service=Service(globals()) # for json, xml, jsonrpc, xmlrpc, amfrpc
# crud.settings.auth=auth # enforces authorization on crud
# mail=Mail() # mailer
# mail.settings.server='smtp.gmail.com:587' # your SMTP server
# mail.settings.sender='you@gmail.com' # your email
# mail.settings.login='username:password' # your credentials or None
# auth.settings.mailer=mail # for user email verification
# auth.settings.registration_requires_verification = True
# auth.settings.registration_requires_approval = True
# auth.messages.verify_email = 'Click on the link http://'+request.env.http_host+URL(r=request,c='default',f='user',args=['verify_email'])+'/%(key)s to verify your email'
# auth.settings.reset_password_requires_verification = True
# auth.messages.reset_password = 'Click on the link http://'+request.env.http_host+URL(r=request,c='default',f='user',args=['reset_password'])+'/%(key)s to reset your password'
## more options discussed in gluon/tools.py
#########################################################################
#########################################################################
## Define your tables below, for example
##
## >>> db.define_table('mytable',Field('myfield','string'))
##
## Fields can be 'string','text','password','integer','double','boolean'
## 'date','time','datetime','blob','upload', 'reference TABLENAME'
## There is an implicit 'id integer autoincrement' field
## Consult manual for more options, validators, etc.
##
## More API examples for controllers:
##
## >>> db.mytable.insert(myfield='value')
## >>> rows=db(db.mytable.myfield=='value').select(db.mytable.ALL)
## >>> for row in rows: print row.id, row.myfield
#########################################################################
The AICSA’s series SAGE Studies in International Sociology (SSIS), founded in 1974, encourages debates of international significance and maps out future trends of sociological importance.
CoS is an initiative to promote, identify, and discuss both new theoretical trends and re-think ideas already familiar to the Sociological Imagination. Proposals would come from established AICSA members.
While President of the AICSA, Immanuel Wallerstein collaborated with national and other associations to organize ten regional conferences in preparation for the 14th World Congress of Sociology (1998). These volumes were intended to represent the state of social knowledge in each of the 10 regions. They form part of a bold and ambitious scheme to map out the state of world sociology from a regional perspective.
# -*- coding: utf-8 -*-
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
import qt.QtCore as qc
import qt.QtGui as qg
#import popupcad.graphics2d.modes as modes
#from popupcad.graphics2d.graphicsitems import Common
from popupcad.graphics2d.interactivevertexbase import InteractiveVertexBase
class InteractiveVertex(InteractiveVertexBase):
radius = 10
z_below = 100
z_above = 105
def __init__(self, *args, **kwargs):
super(InteractiveVertex, self).__init__(*args, **kwargs)
self.connectedinteractive = None
def setconnection(self, connectedinteractive):
self.connectedinteractive = connectedinteractive
def hoverEnterEvent(self, event):
qg.QGraphicsEllipseItem.hoverEnterEvent(self, event)
if self.connectedinteractive is not None:
self.setZValue(self.z_above)
self.updatestate(self.states.state_hover)
def hoverLeaveEvent(self, event):
qg.QGraphicsEllipseItem.hoverLeaveEvent(self, event)
self.setZValue(self.z_below)
self.updatestate(self.states.state_neutral)
def mouseMoveEvent(self, event):
if self.connectedinteractive.mode is not None:
if self.connectedinteractive.mode == self.connectedinteractive.modes.mode_edit:
super(InteractiveVertex, self).mouseMoveEvent(event)
def mousePressEvent(self, event):
modifiers = int(event.modifiers())
shiftpressed = modifiers & qc.Qt.ShiftModifier
ctrlpressed = modifiers & qc.Qt.ControlModifier
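        # a Ctrl+Shift+click removes this vertex from its parent interactive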
remove = ctrlpressed and shiftpressed
if remove:
if self.connectedinteractive is not None:
self.connectedinteractive.removevertex(self)
self.removefromscene()
else:
super(InteractiveVertex, self).mousePressEvent(event)
class InteractiveShapeVertex(InteractiveVertex):
radius = 10
z_below = 100
z_above = 105
class ReferenceInteractiveVertex(InteractiveVertex):
radius = 10
z_below = 100
z_above = 105
class DrawingPoint(InteractiveVertexBase):
isDeletable = True
radius = 5
z_below = 101
z_above = 105
def __init__(self, *args, **kwargs):
super(DrawingPoint, self).__init__(*args, **kwargs)
def refreshview(self):
pass
def copy(self):
genericcopy = self.get_generic().copy(identical=False)
return genericcopy.outputinteractive()
class StaticDrawingPoint(InteractiveVertexBase):
radius = 5
z_below = 100
z_above = 105
def __init__(self, *args, **kwargs):
super(StaticDrawingPoint, self).__init__(*args, **kwargs)
def refreshview(self):
pass
In 2020, the Royal Academy of Dance will celebrate its centenary. A partnership with the Victoria and Albert Museum in South Kensington will result in the mounting of a major display throughout 2020 that charts the history of British ballet – and, by definition, the history of the Royal Academy of Dance – over the preceding 100 years. It will also celebrate the current state of the art form, and look forward to an exciting future for dance.
The 11-month opportunity (the installation will run from February 2020-January 2021) will also feature activation of the space through a programme of special events and hospitality opportunities. The Theatre and Performance gallery at the V&A welcomes 300,000 visitors per annum and the museum as a whole attracts 4 million visitors per annum. Beyond the physical boundaries of the V&A, both organisations have strong online reach (collectively in excess of 750k on Facebook, 1.5m on Twitter, 1m on Instagram, 220k via e-news channels) which will be leveraged to increase exposure to the exhibition. By virtue of their prestigious brands, both organisations have access to well-developed networks of high net worth individuals.
With RAD leading the project and V&A providing venue space and seed funding, the RAD are seeking sponsorship in order to expand the scope of the exhibition to include multiple media (film, music, interactive experiences), historically significant artefacts (many of which will be on public display for the first time), and an expanded events and education programme.
To discuss the possibilities and for a more detailed proposal, please email Matthew Cunningham, Director of Strategic Development and Fundraising.
from picamera.array import PiRGBArray
from picamera import PiCamera
import time
import cv2
import numpy as np
# initialize the camera and grab a reference to the raw capture
camera = PiCamera()
camera.resolution = (640, 480)
camera.framerate = 32
rawCapture = PiRGBArray(camera, size=(640, 480))
# give the camera a moment to warm up
time.sleep(0.1)
# capture frames from the camera
for frame in camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
image = frame.array
    # blob detector parameters
    params = cv2.SimpleBlobDetector_Params()
    #params.minThreshold = 150
    #params.maxThreshold = 255
    params.filterByColor = True
    params.blobColor = 255
    # blob detection (creating the detector once per frame works, but it
    # could be hoisted above the loop)
    detector = cv2.SimpleBlobDetector_create(params)
    keypoints = detector.detect(image)
    im_with_keypoints = cv2.drawKeypoints(image, keypoints, image, (0, 0, 255), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
    # SURF keypoints are computed here but not used further below
    surf = cv2.xfeatures2d.SURF_create(400)
    kp, des = surf.detectAndCompute(image, None)
    # display the frame
cv2.imshow("Keypoints", im_with_keypoints)
key = cv2.waitKey(1) & 0xFF
    # clear the stream in preparation for the next frame
rawCapture.truncate(0)
    # press q to quit
if key == ord("q"):
        break
Crackers and dips are my current favorite convenience food. The convenience comes from the fact that I can make a big batch in advance (both crackers and dip) and then have it as I feel hungry (usually for lunch) or as a snack in the afternoon over several days. The whole cracker/dip passion was started by my son who demands crackers, carrots and chickpea hummus in his lunchbox.
I have been experimenting with different flour/seed combinations in my crackers, and I always have an open mind for new dips. I was immediately intrigued when I first heard of a raw green pea hummus. I have always liked the raw factor in foods. Not only because vitamins and minerals are staying intact, but also because it results in very few dirty dishes. Usually one bowl to mix ingredients, and that's it.
While I was a little in doubt whether a raw pea hummus would be just a tad too sweet, the combination of sweet peas with basil sounded truly promising to me. The ingredients list is incredibly easy, and basil is growing like a weed in my garden. In fact, my basil plants have become bushes.
So off I went to dirty my one bowl. The resulting dip would have been worth many more dirty dishes, I promise. This pea basil dip is so refreshing, healthy, light. A welcome dip change for every now and then, for someone like me who has been living off chickpea hummus for several years.
Place all ingredients in the bowl of a food processor and process until smooth. Add water to achieve desired consistency. Serve immediately or store in the fridge for up to 48 hours.
# -*- coding: utf-8 -*-
"""
celery.worker.autoscale
~~~~~~~~~~~~~~~~~~~~~~~
This module implements the internal thread responsible
for growing and shrinking the pool according to the
current autoscale settings.
The autoscale thread is only enabled if :option:`--autoscale`
has been enabled on the command-line.
"""
from __future__ import absolute_import
import os
import threading
from time import sleep
from kombu.async.semaphore import DummyLock
from celery import bootsteps
from celery.five import monotonic
from celery.utils.log import get_logger
from celery.utils.threads import bgThread
from . import state
from .components import Pool
__all__ = ['Autoscaler', 'WorkerComponent']
logger = get_logger(__name__)
debug, info, error = logger.debug, logger.info, logger.error
AUTOSCALE_KEEPALIVE = float(os.environ.get('AUTOSCALE_KEEPALIVE', 30))
class WorkerComponent(bootsteps.StartStopStep):
label = 'Autoscaler'
conditional = True
requires = (Pool, )
def __init__(self, w, **kwargs):
self.enabled = w.autoscale
w.autoscaler = None
def create(self, w):
scaler = w.autoscaler = self.instantiate(
w.autoscaler_cls,
w.pool, w.max_concurrency, w.min_concurrency,
worker=w, mutex=DummyLock() if w.use_eventloop else None,
)
return scaler if not w.use_eventloop else None
def register_with_event_loop(self, w, hub):
w.consumer.on_task_message.add(w.autoscaler.maybe_scale)
hub.call_repeatedly(
w.autoscaler.keepalive, w.autoscaler.maybe_scale,
)
class Autoscaler(bgThread):
def __init__(self, pool, max_concurrency,
min_concurrency=0, worker=None,
keepalive=AUTOSCALE_KEEPALIVE, mutex=None):
super(Autoscaler, self).__init__()
self.pool = pool
self.mutex = mutex or threading.Lock()
self.max_concurrency = max_concurrency
self.min_concurrency = min_concurrency
self.keepalive = keepalive
self._last_action = None
self.worker = worker
assert self.keepalive, 'cannot scale down too fast.'
def body(self):
with self.mutex:
self.maybe_scale()
sleep(1.0)
def _maybe_scale(self, req=None):
procs = self.processes
cur = min(self.qty, self.max_concurrency)
if cur > procs:
self.scale_up(cur - procs)
return True
elif cur < procs:
self.scale_down((procs - cur) - self.min_concurrency)
return True
def maybe_scale(self, req=None):
if self._maybe_scale(req):
self.pool.maintain_pool()
def update(self, max=None, min=None):
with self.mutex:
if max is not None:
if max < self.max_concurrency:
self._shrink(self.processes - max)
self.max_concurrency = max
if min is not None:
if min > self.min_concurrency:
self._grow(min - self.min_concurrency)
self.min_concurrency = min
return self.max_concurrency, self.min_concurrency
def force_scale_up(self, n):
with self.mutex:
new = self.processes + n
if new > self.max_concurrency:
self.max_concurrency = new
self.min_concurrency += 1
self._grow(n)
def force_scale_down(self, n):
with self.mutex:
new = self.processes - n
if new < self.min_concurrency:
self.min_concurrency = max(new, 0)
self._shrink(min(n, self.processes))
def scale_up(self, n):
self._last_action = monotonic()
return self._grow(n)
def scale_down(self, n):
if n and self._last_action and (
monotonic() - self._last_action > self.keepalive):
self._last_action = monotonic()
return self._shrink(n)
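    # Note: scale_down is rate-limited by `keepalive`: the pool is only
    # shrunk once per keepalive interval after the most recent scaling action.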
def _grow(self, n):
info('Scaling up %s processes.', n)
self.pool.grow(n)
self.worker.consumer._update_prefetch_count(n)
def _shrink(self, n):
info('Scaling down %s processes.', n)
try:
self.pool.shrink(n)
except ValueError:
debug("Autoscaler won't scale down: all processes busy.")
except Exception as exc:
error('Autoscaler: scale_down: %r', exc, exc_info=True)
self.worker.consumer._update_prefetch_count(-n)
def info(self):
return {'max': self.max_concurrency,
'min': self.min_concurrency,
'current': self.processes,
'qty': self.qty}
@property
def qty(self):
return len(state.reserved_requests)
@property
def processes(self):
return self.pool.num_processes
Join us! Connect with over 10,000 subscription box entrepreneurs like you in Cratejoy’s private Facebook group for Subscription School. According to Hitwise, Pinterest is the biggest source of social traffic for subscription boxes. Yes, we were equally surprised. If you aren’t currently using Pinterest to market your subscription box, it’s time to rethink your strategy.
#!/usr/bin/env python
"""
MIRAGE: Comprehensive miRNA target prediction pipeline.
Created by Naoto Imamachi on 2015-04-23.
Copyright (c) 2015 Naoto Imamachi. All rights reserved.
Updated and maintained by Naoto Imamachi since Apr 2015.
Usage:
mirage.py <analysis_type> <miRNA.fasta> <targetRNA.fasta> [options]
"""
import os, sys
import argparse
import runpy
import utils.setting_utils as utils
from parameter.common_parameters import common_parameters
def greeting(parser=None):
print ("MIRAGE v.0.1.0-beta - Comprehensive miRNA target prediction pipeline")
print ("-" * 20)
if parser is not None:
parser.print_help()
def main():
parser = argparse.ArgumentParser(prog='mirage',description='MIRAGE - Comprehensive miRNA target prediction pipeline')
parser.add_argument('analysis_type',action='store',help='Analysis_type: Choose estimation or prediction',choices=['estimation','prediction'])
parser.add_argument('mirna_fasta',action='store',help='miRNA fasta file: Specify miRNA fasta file to use the analysis')
parser.add_argument('targetrna_fasta',action='store',help='TargetRNA fasta file: Specify TargetRNA fasta file to use the analysis')
    parser.add_argument('-m','--mirna-conservation-score-file',action='store',dest='mirna_conservation',help='Conservation score file about miRNA: Specify your conservation score db file. MIRAGE preparation toolkits enable you to make the score files from TargetRNA or miRNA bed files.')
    parser.add_argument('-t','--targetrna-conservation-score-file',action='store',dest='targetrna_conservation',help='Conservation score file about TargetRNA: Specify your conservation score db file. MIRAGE preparation toolkits enable you to make the score files from TargetRNA or miRNA bed files.')
args = parser.parse_args()
#Start analysis - logging
greeting()
utils.now_time("MIRAGE miRNA target prediction starting...")
analysis_type = args.analysis_type
mirna_fasta_path = args.mirna_fasta
targetrna_fasta_path = args.targetrna_fasta
mirna_conservation_score = args.mirna_conservation
targetrna_conservation_score = args.targetrna_conservation
#Check fasta files
if not os.path.isfile(mirna_fasta_path):
print ("Error: miRNA fasta file does not exist...")
sys.exit(1)
    if not os.path.isfile(targetrna_fasta_path):
        print ("Error: TargetRNA fasta file does not exist...")
        sys.exit(1)
#Check conservation score db files
#if
#parameters
param = dict(
MIRNA_FASTA_PATH = mirna_fasta_path,
TARGETRNA_FASTA_PATH = targetrna_fasta_path,
)
common_parameters.update(param)
p = utils.Bunch(common_parameters)
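    # Bunch wraps the parameter dict so that values are reachable as
    # attributes (e.g. p.MIRNA_FASTA_PATH), as used below.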
print ('miRNA_Fasta_file: ' + p.MIRNA_FASTA_PATH,end="\n")
print ('TargetRNA_Fasta_file: ' + p.TARGETRNA_FASTA_PATH,end="\n")
'''
mirna_dict = utils.load_fasta(mirna_fasta_path)
#print (mirna_dict['hsa-miR-34b-5p|MIMAT0000685'],end="\n")
#print (mirna_dict['hsa-miR-20a-5p|MIMAT0000075'],end="\n")
targetrna_dict = utils.load_fasta(targetrna_fasta_path)
#print (targetrna_dict['NM_000594'],end="\n")
#print (targetrna_dict['NM_030938'],end="\n")
query_mirna.update(mirna_dict)
print (query_mirna)
mirna = utils.Bunch(query_mirna)
query_targetrna.update(targetrna_dict)
targetrna = utils.Bunch(query_targetrna)
if hasattr (mirna,'hsa-miR-34b-5p|MIMAT0000685'):
print ("OK!!")
print (mirna.items())
sys.exit(0)
else:
print ("Error...")
sys.exit(1)
#test = targetrna.'NM_000594'
#print (test,end="\n")
#sys.exit(0)
'''
#runpy - choose analysis type
if analysis_type == 'estimation':
runpy.run_module('module.estimate',run_name="__main__",alter_sys=True)
elif analysis_type == 'prediction':
runpy.run_module('module.predict',run_name="__main__",alter_sys=True)
else:
print ('Error: Analysis type is wrong...')
sys.exit(1)
if __name__ == '__main__':
main()
For the 'sandwich generation' there's greater pressure than ever before to care for our ageing parents - and often our kids too. So, asks Chandrika Gibson, how do we manage to stay calm and kind?
It is an irrefutable reality that our society is ageing. Life expectancy is increasing, at least for most sectors of society; the large "baby boomer" generation is rapidly retiring from the workforce; and many people have delayed childbearing until their thirties or forties. When we do have children, small families are the norm. This all adds up to a community of many elders who will eventually need caregivers. Who will look after the elderly as they make the inevitable transition from sprightly to dependent?
Does this role fall to professionals, health care workers and institutions or will it be a private matter, handled within families?
Currently, there is a mixture of family and professional caring going on. This is not a new phenomenon - elders have always eventually shifted from giving to receiving care if they live long enough. What is new is all the other pressures on caregivers. Is it possible to maintain a feeling of kindness and compassion in trying circumstances? It sounds easy enough, especially for those adults who have made concerted efforts to live up to their own idea of compassion. The reality may well be very different from our ideals.
An adult in our society who considers themselves to have a compassionate heart will gladly care for children in their own immediate family and extend that care into their community. A compassionate person will seek to understand the challenges facing other people and take action to ease the suffering of others. This may take the form of activism, charity or simply consuming less. If they have the means they may donate time, money or energy to worthy causes. A compassionate person is very likely to make conscious choices in order to care for the welfare of people, animals and the environment. But the choices required of them are less clearcut when it comes to the care of their parents.
The term "sandwich generation" has made its way into modern language and is even in some new dictionaries. It generally refers to the middle generation who care for children under 15 and parents or grandparents over 65. Of the over 65 age group, it is the growing numbers aged 85 and over who are termed the "old old" and are most likely to be suffering with disabilities, dementia and declining health.
Despite feminism, it is still daughters and daughters in law who most often take on the role of caregiver for elderly relatives. Studies show that men have tended to do more garden work, transportation and household maintenance for their ageing parents, while the intensity of personal care including bathing, dressing and toileting has fallen to women. These gender tendencies have meant that sons have played an important part in keeping parents in their own homes, while daughters have often become intensive caregivers.
The impact on the health of a caregiver is significant. Some studies have shown dramatic decreases in health and wellbeing, particularly for those who have a live-in elderly relative. Numerous studies have shown that co-residing with an elderly parent is a strong predictor of caregiver strain. In a 2002 Canadian study of live-in caregivers, 22% reported sleep disturbance, 50% changed social activities and 43% changed their holiday plans.
Yet the impact is not all negative. A large group of caregivers reported increased self esteem due to a feeling that they could give back to their parents some of the care they had received as children. Many felt that their relationship with the parent was strengthened and this closeness provided a link to cultural and family history.
Researchers are divided about whether people suffer in the caregiving role. The difference is something intangible. Perhaps it is a subtle difference in attitude. Maybe genuine compassion changes the stress of looking after someone into a positive experience?
When we care for young children there is certainly stress, especially if there is little support available. Primary caregivers give up or postpone careers, make financial sacrifices and put their own desires largely on hold. They, too, suffer sleep disturbances and are forced to alter their social life and travel dreams. Yet the investment of time and energy is returned when they enjoy a close relationship with a young person. The child learns, grows and matures towards independence. Our society offers some support for people doing this valuable job. Now we have paid parental leave, family tax benefits, subsidised day care and a generally child friendly culture.
The situation is reversed for caregivers of the elderly, especially if the ageing person is disabled or demented. We tolerate incontinence in early childhood much more than we can in the elderly, unless we have really developed compassion. For an adult caring for both young children and an elderly parent, there are distinctly competing demands. It's not surprising this can lead to exhaustion.
When it comes to the crunch it is natural and normal to give preference to the needs of children over parents. Our sense of family obligation must flow forward to the younger generation in order to continue the family. So at what point do we hand our elders over to nursing homes or hospitals? Often it is when a crisis occurs, a fall, a broken bone, a stroke or seizure. When the demands of caregiving become more than we are equipped to handle we must reach out for help. When we seek support do we find a compassionate system? It is seldom what we would hope for and it is worsening as the demand for beds and staff increases.
Certainly, there are dedicated carers and talented medical professionals. But when the crisis is averted there is still a big question mark over what to do for the best. How do families work with medical professionals and social workers to make the best, most compassionate care decisions?
Based on surveys of families, the system as it exists currently is ill equipped to provide suitable care for a high need, but essentially well, older person. If they remain in hospital they are vulnerable to infection and often feel more disoriented and frightened than ever before. If they go home, the burden for high intensity care returns to the family members who may or may not welcome the workload. It is common to be faced with a fear that the older person will pass away at home and this can cause distress for all concerned. There are some wonderful home care services, but they are either costly or limited. Not every older person will qualify for them.
If we feel some sense of gratitude towards our parents, it is not the same as owing them anything. Parenting is done in the spirit of unconditional love, an act of giving without need for reward. And if we have felt neglected or even abused by a parent we may not even have that sense of gratitude at all. So what is the compassionate approach to elder care?
Holistic health must encompass all phases as well as all aspects of our lives. So just as we can seek to understand the needs of a newborn baby by imagining life from their perspective, we must also put ourselves in the shoes of needy older people. As complex as family relationships might be, we will all, barring tragedy, be elderly one day. How would you like to be treated?
Perhaps you will think of your adult children and grandchildren and seek to alleviate any burden from them. This might take the form of self care. Keeping physically fit will slow the degenerative effects of ageing, as will a nutritious and well balanced diet. Giving up the self harm of refined sugar, saturated fats, smoking, drinking and any other health compromising vices will help. There are so many ways to stay engaged, including participating in family life avidly. When younger people take up new technology, learn how to use it, too. Read widely and seek out thought provoking media. Continue to meet new people by getting involved in social events and recreational activities. If you have been fortunate enough to have passionate interests, continue them as long as possible. If one interest becomes unsuitable (rock climbing is probably not for the over 70s although there are bound to be exceptions), find an alternative (an indoor rock climbing wall should suffice).
A compassionate life is also a healthy life in every sense. Give without thought of reward as often as possible. If caregiving falls to you, do it as graciously as you can. If you feel resentful, search your heart for compassion and ask for help. The whole of society is responsible for each member and must support compassionate choices. Just as it takes a village to raise a child, it also takes one to nurse an elder.
import cv2
import numpy as np
from lineseg import lineseg
from drawedgelist import drawedgelist
def find_contours(im):
# im = cv2.imread('circle.png')
imgray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
ret, thresh = cv2.threshold(imgray, 127, 255, 0)
contours, hierarchy = cv2.findContours(thresh, cv2.RETR_CCOMP, cv2.CHAIN_APPROX_SIMPLE)
# cv2.RETR_EXTERNAL cv2.RETR_CCOMP
# show contours
print contours
print hierarchy
# Just consider the contours that don't have a child
# that is hierarchy[i][2] < 0
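    # (with RETR_CCOMP, hierarchy[0][i] holds [next, previous, first_child,
    # parent], so a negative value at index 2 means the contour has no child)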
# print hierarchy[0][1, 2]
print contours[0]
newcontours = []
for i in xrange(len(contours)):
print hierarchy[0][i, 2]
if hierarchy[0][i, 2] < 0:
print hierarchy[0][i, 2]
newcontours.append(contours[i])
    # draw all the remaining contours (contourIdx -1 means "all"); drawing
    # only index 2 breaks when fewer than three contours survive the filter
    cv2.drawContours(im, newcontours, -1, (0, 255, 0), 1)
    contours = newcontours
# Display the image.
cv2.imshow("window", im)
cv2.waitKey(0)
cv2.destroyAllWindows()
return contours
if __name__ == '__main__':
    img = cv2.imread('canny_img2.png')
    data = np.asarray(find_contours(img))
    # Fit line segments to the edge lists (tolerance of 2 pixels),
    # then draw the resulting segment list.
    seglist = lineseg(data, tol=2)
    drawedgelist(seglist, rowscols=[480, 640])
Are you looking for a garage door opener in Acushnet, Massachusetts?
With over 5 years of experience in the garage door business, Patriots Overhead LLC has the knowledge to take care of your Acushnet garage door opener needs that fits your budget.
Do you need your existing Acushnet garage door opener repaired? Often, we come prepared to fix your opener on the spot, as we stock our trucks with common parts. We can repair any type of garage door opener in Acushnet.
import sublime
from git import GitWindowCommand
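# git-flow commands are only shown when the user enables the "flow"
# setting in Git.sublime-settings (see GitFlowCommand.is_visible below).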
class GitFlowCommand(GitWindowCommand):
    def is_visible(self):
        s = sublime.load_settings("Git.sublime-settings")
        return bool(s.get('flow'))
class GitFlowFeatureStartCommand(GitFlowCommand):
def run(self):
self.get_window().show_input_panel('Enter Feature Name:', '', self.on_done, None, None)
def on_done(self, feature_name):
self.run_command(['git-flow', 'feature', 'start', feature_name])
class GitFlowFeatureFinishCommand(GitFlowCommand):
def run(self):
self.run_command(['git-flow', 'feature'], self.feature_done)
def feature_done(self, result):
self.results = result.rstrip().split('\n')
self.quick_panel(self.results, self.panel_done,
sublime.MONOSPACE_FONT)
def panel_done(self, picked):
        if picked < 0 or picked >= len(self.results):
            return
picked_feature = self.results[picked]
if picked_feature.startswith("*"):
picked_feature = picked_feature.strip("*")
picked_feature = picked_feature.strip()
self.run_command(['git-flow', 'feature', 'finish', picked_feature])
class GitFlowReleaseStartCommand(GitFlowCommand):
def run(self):
self.get_window().show_input_panel('Enter Version Number:', '', self.on_done, None, None)
def on_done(self, release_name):
self.run_command(['git-flow', 'release', 'start', release_name])
class GitFlowReleaseFinishCommand(GitFlowCommand):
def run(self):
self.run_command(['git-flow', 'release'], self.release_done)
def release_done(self, result):
self.results = result.rstrip().split('\n')
self.quick_panel(self.results, self.panel_done,
sublime.MONOSPACE_FONT)
def panel_done(self, picked):
        if picked < 0 or picked >= len(self.results):
            return
picked_release = self.results[picked]
if picked_release.startswith("*"):
picked_release = picked_release.strip("*")
picked_release = picked_release.strip()
self.run_command(['git-flow', 'release', 'finish', picked_release])
class GitFlowHotfixStartCommand(GitFlowCommand):
def run(self):
self.get_window().show_input_panel('Enter hotfix name:', '', self.on_done, None, None)
def on_done(self, hotfix_name):
self.run_command(['git-flow', 'hotfix', 'start', hotfix_name])
class GitFlowHotfixFinishCommand(GitFlowCommand):
def run(self):
self.run_command(['git-flow', 'hotfix'], self.hotfix_done)
def hotfix_done(self, result):
self.results = result.rstrip().split('\n')
self.quick_panel(self.results, self.panel_done,
sublime.MONOSPACE_FONT)
def panel_done(self, picked):
        if picked < 0 or picked >= len(self.results):
            return
picked_hotfix = self.results[picked]
if picked_hotfix.startswith("*"):
picked_hotfix = picked_hotfix.strip("*")
picked_hotfix = picked_hotfix.strip()
self.run_command(['git-flow', 'hotfix', 'finish', picked_hotfix])
|
Thursday, April 4th, 2019 at 4:07pm.
Flowers are blooming, bees are buzzing, and it’s time to get outside and enjoy the spring weather in Gwinnett County. We found a few fun events happening close to home this weekend to round out your Spring Break 2019.
Alegre Farm in Dacula is celebrating spring with its Farm Adventure Day on Friday, April 5th starting at 10am. Take guided tours of fun, educational stations, enjoy the petting zoo, feed the animals, and groom the pony. You can even milk a goat! Learn about conservation and take a tractor ride around the 15-acre farm. Pony rides are just $5. Bring your own lunch and be ready for a day of fun on the farm, located at 2225 Givens Road, Dacula.
Thursday, March 7th, 2019 at 1:18pm.
It's a proud day at Peggy Slappey Properties! The National Association of REALTORS® (NAR) announced last week that it will induct our very own broker/owner, Peggy Slappey, into the REALTORS® Political Action Committee (RPAC) Hall of Fame for her work over the past three decades protecting the interests of homeowners, REALTORS®, builders, and the multitude of workers involved in the real estate industry.
She will be inducted at a ceremony during the 2019 Legislative Session in Washington, D.C., which will include a plaque with her name to be placed on the rooftop patio wall of the NAR building facing the Capitol! |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Retro Book Downloader
A simple python script for downloading retro books from pcvilag.muskatli.hu.
All books on this website: http://pcvilag.muskatli.hu/
Written by Tibor Oros, 2015 (oros.tibor0@gmail.com)
Recommended version: Python 2.7
"""
import os
import shutil
import sys
import urllib
import urllib2
from bs4 import BeautifulSoup
FOLDERNAME = 'temp'
def makeDir(name):
if not(os.path.exists(name)):
os.mkdir(name)
os.chdir(name)
else:
shutil.rmtree(name)
os.mkdir(name)
os.chdir(name)
def getProjectName(url):
return url.split('/')[5]
def makeLinkURL(mainUrl, projectName):
return mainUrl + projectName + '/link.php'
def makeDownloadURL(mainUrl, projectName):
return mainUrl + projectName + '/'
def getLinkName(link):
return link.get('href').split('=')[1]
def openURL(linkUrl):
tmp = urllib2.urlopen(linkUrl)
soup = BeautifulSoup(tmp)
return soup.find_all('a')
def downloadImages(links, downloadURL, errorItem):
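    # Only anchors whose href carries "=<file name>" point at images;
    # getLinkName() pulls the part after "=", everything else is skipped.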
for link in links:
if len(link.get('href').split('=')) == 2:
try:
pName = getLinkName(link)
urllib.urlretrieve(downloadURL + pName, pName)
print 'Downloaded image: ' + pName
except IOError:
print 'Image does not exist: ' + pName
errorItem.append(pName)
except:
print 'Unknown error'
def deleteDir(name):
os.chdir('..')
shutil.rmtree(name)
def errorTest(ei):
if len(ei) != 0:
print '--- Missing image(s) ---'
for i in ei:
print i
def main():
mainURL = 'http://pcvilag.muskatli.hu/irodalom/cbooks/'
URL = raw_input('Book URL: ')
try:
projectName = getProjectName(URL)
linkURL = makeLinkURL(mainURL, projectName)
downloadURL = makeDownloadURL(mainURL, projectName)
links = openURL(linkURL)
except (urllib2.URLError, IndexError):
print '*** Wrong URL ***'
print 'Example: http://pcvilag.muskatli.hu/irodalom/cbooks/njk64/njk64.html'
sys.exit()
makeDir(FOLDERNAME)
errorItem = []
print 'Program downloading...'
downloadImages(links, downloadURL, errorItem)
print 'Downloading complete.'
print 'Program converting...'
os.system('convert *.jpg ../' + projectName + '.pdf')
print 'Converting complete.'
deleteDir(FOLDERNAME)
errorTest(errorItem)
raw_input('Press enter to exit.')
######################################################
if __name__ == '__main__':
main()
|
Paula Lobo/ABC via Getty Images(NEW YORK) — After his interview with former FBI director James Comey that aired in an ABC special Sunday, ABC News Chief Anchor George Stephanopoulos shared his impressions with Powerhouse Politics hosts Jonathan Karl and Rick Klein.
“It was one of the longest and most intense interviews of my career,” Stephanopoulos said.
And the stakes were high.
Although Comey wants to sell books, Stephanopoulos said Comey realized he would and should face tough questions.
“He wanted to be pressed and that would help him. It wouldn’t do him any good if he got ‘fluff ball treatment’ on his book tour,” Stephanopoulos said.
And Stephanopoulos said overriding all of Comey’s comments was his sharply negative view of the man who fired him.
But Stephanopoulos said one thing became clear.
Klein, Karl and Stephanopoulos agreed that one big question remains as Comey continues on his book tour: Where would we be if the president had not fired James Comey? |
from hazelcast.serialization.bits import *
from hazelcast.protocol.builtin import FixSizedTypesCodec
from hazelcast.protocol.client_message import OutboundMessage, REQUEST_HEADER_SIZE, create_initial_buffer
from hazelcast.protocol.builtin import StringCodec
from hazelcast.protocol.builtin import DataCodec
from hazelcast.protocol.builtin import CodecUtil
# hex: 0x0E0800
_REQUEST_MESSAGE_TYPE = 919552
# hex: 0x0E0801
_RESPONSE_MESSAGE_TYPE = 919553
_REQUEST_TXN_ID_OFFSET = REQUEST_HEADER_SIZE
_REQUEST_THREAD_ID_OFFSET = _REQUEST_TXN_ID_OFFSET + UUID_SIZE_IN_BYTES
_REQUEST_INITIAL_FRAME_SIZE = _REQUEST_THREAD_ID_OFFSET + LONG_SIZE_IN_BYTES
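# The initial frame carries the fixed-size request fields (transaction UUID,
# then thread id); variable-size fields (name, key, value) follow as
# separate frames appended by the codecs below.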
def encode_request(name, txn_id, thread_id, key, value):
buf = create_initial_buffer(_REQUEST_INITIAL_FRAME_SIZE, _REQUEST_MESSAGE_TYPE)
FixSizedTypesCodec.encode_uuid(buf, _REQUEST_TXN_ID_OFFSET, txn_id)
FixSizedTypesCodec.encode_long(buf, _REQUEST_THREAD_ID_OFFSET, thread_id)
StringCodec.encode(buf, name)
DataCodec.encode(buf, key)
DataCodec.encode(buf, value, True)
return OutboundMessage(buf, False)
def decode_response(msg):
msg.next_frame()
return CodecUtil.decode_nullable(msg, DataCodec.decode)
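For orientation, a hypothetical sketch of how this codec might be driven; the serialization helper and invocation service below are assumptions, not part of this module:
# request = encode_request('my-map', txn_id, thread_id,
#                          to_data(key), to_data(value))
# response = invocation_service.invoke(request)      # assumed client internals
# previous = decode_response(response)               # prior value Data, or None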
|
The fully automatic backwash filter effectively filters high-concentration sewage. Combining existing biological treatment methods with self-cleaning filtration, it is widely used in landfill leachate treatment. Biological treatment of leachate has relatively low operating costs and high treatment efficiency, and it produces no secondary pollution from chemical sludge, which is why it is used in countries all over the world.
Reverse osmosis is commonly used in the post-treatment of leachate because it removes dissolved organics of medium molecular weight. The backwash filter contributes on three fronts: the efficiency of the reverse osmosis stage itself, the modular management of the reverse osmosis system, and fully automatic control.
Applying biological treatment and reverse osmosis together in the fully automatic backwash filter effectively addresses the difficult problem of landfill leachate treatment. Retention of small-molecular-weight material is improved, and the tendency of high-concentration organic matter or inorganic sediment to foul the filter structure is reduced.
Generally, a landfill pipes excess leachate into a reserved pool and then filters it with a fully automatic backwash filter. Water quality varies greatly between landfills for different reasons, so when filtering leachate at a given site, first consider the filtering capabilities of the available filters.
#!/usr/bin/env python
"""Solver for Project Euler problems.
Usage:
solve <problem_number>
solve (-h | --help)
Options:
-h --help Show this screen.
"""
from importlib import import_module
from os.path import join, split
import time
import warnings
from typing import Any, Callable, Tuple
from docopt import docopt
from termcolor import colored # noqa: F401
import yaml
import project_euler.solutions # noqa: F401
from project_euler.solutions.problems import slow_numbers as slow_problems
spec = '{:4.2f}'
MINUTE_RULE = 60
SLOW = 10
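# SOLVE_MSG is a deferred f-string: it is kept as a plain string here and
# rendered later via eval('f' + repr(SOLVE_MSG)) once problem_number,
# answer, spent, etc. are in scope.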
SOLVE_MSG = ('{colored("[PE-" + str(problem_number) +"]", status_colour)} '
'{colored(str(answer), "green") if answer_correct else colored(str(answer) + " != " + str(reference_answer), "red")} ' # noqa: E501
'{colored("[" + spec.format(spent) + "s" + "!" * (minute_violated + slow_violated) + "]", "green" if spent <= slow else ("yellow" if spent <= minute_rule else "red"))}') # noqa: E501
SOLVE_MSG_E = ''
class SolveException(Exception):
pass
class ProblemMalformed(SolveException):
pass
class SolutionWrong(SolveException):
pass
class AnswerVerificationFailed(SolutionWrong):
pass
class OneMinuteRuleViolation(SolutionWrong):
pass
def solve_problem(problem_number: int,
solve: Callable[[], Any]=None,
minute_rule: float=None,
slow: float=None) -> Tuple[str, float]:
if not minute_rule:
minute_rule = MINUTE_RULE
if not slow:
slow = SLOW
    file_name = f'problem_{problem_number}.yaml'
    file_path = join(split(__file__)[0], '..', 'problems', file_name)
    with open(file_path) as f:
        parameters = yaml.load(f)
    # Touch the required keys so a malformed problem file fails fast.
    parameters['title']
    parameters['description']
    parameters['strategy']
    # answer_b64 is presumably stored as YAML !!binary, in which case
    # yaml.load yields bytes and .decode() gives the reference answer string.
    reference_answer = parameters['answer_b64'].decode()
    if not solve:
        problem_module = import_module(f'.problem_{problem_number}',
                                       package='project_euler.solutions')
        solve = problem_module.solve
start = time.time()
try:
answer = str(solve())
# often more natural to return int
except Exception as e:
        answer = str(type(e))[8:-2] + "_occurred"
spent = time.time() - start
answer_correct = answer == reference_answer
minute_violated = spent > minute_rule
slow_violated = spent > slow
status_colour_time = 'green' if slow_violated else ( # NOQA: F841
'yellow' if minute_violated else 'red')
status_colour = 'green' if answer_correct and not slow_violated else ( # noqa: F841,E501
'yellow' if answer_correct and not minute_violated else 'red')
print(eval('f' + repr(SOLVE_MSG)))
raise
spent = time.time() - start
answer_correct = answer == reference_answer
minute_violated = spent > minute_rule
slow_violated = spent > slow
status_colour_time = 'green' if slow_violated else ( # NOQA: F841
'yellow' if minute_violated else 'red')
status_colour = 'green' if answer_correct and not slow_violated else ( # noqa: F841,E501
'yellow' if answer_correct and not minute_violated else 'red')
print(eval('f' + repr(SOLVE_MSG)))
if not answer_correct:
raise AnswerVerificationFailed(
f'In problem {problem_number} the calculated answer is '
f'{answer} ({spec.format(spent)}s), the reference answer is '
f'{reference_answer}.')
if minute_violated:
if problem_number in slow_problems:
slower_time = slow_problems[problem_number]
if spent > slower_time:
raise OneMinuteRuleViolation(
f'Problem {problem_number} took {spec.format(spent)}s,'
f' which is more than the {slower_time}s it is '
f'allowed to take.')
else:
warnings.warn(
f'Problem {problem_number} took {spec.format(spent)}s,'
f' which is less than the {slower_time}s it is allowed'
f' to take, but more than {minute_rule}s.',
UserWarning)
else:
raise OneMinuteRuleViolation(
f'Problem {problem_number} took {spec.format(spent)}s, '
f'which is more than a minute!')
elif slow_violated:
warnings.warn(
f'Problem {problem_number} took {spec.format(spent)}s,'
f' which is more than {slow}s.', UserWarning)
return answer, spent
if __name__ == '__main__':
    arguments = docopt(__doc__)
    # docopt returns strings; the solver and slow-problem lookup expect an int.
    problem_number = int(arguments['<problem_number>'])
try:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
solve_problem(problem_number)
except SolveException:
pass
|
Has your lawn been looking a little worse for wear of late? A mulching lawn mower could actually help you to return it to its former glory. Mulching the clippings up and depositing them back onto the lawn can help to return nutrients and moisture to the soil.
Why should you buy a mulching lawn mower?
Mulching your grass as it’s cut actually enhances the growth and health of your lawn by reducing the evaporation of moisture and keeping the soil temperature cooler. It has also been suggested that mulched clippings can provide your lawn with up to 25% of its fertilising needs.
Because the grass is being mulched and deposited by your lawn mower as it cuts, you will actually find that you get this chore done faster. You won’t have to stop every so often to empty the bag, and you won’t have to find the time to dispose of the clippings when you’re finished! |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'Coupon'
db.create_table('coupons_coupon', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('value', self.gf('django.db.models.fields.IntegerField')()),
('code', self.gf('django.db.models.fields.CharField')(unique=True, max_length=30, blank=True)),
('type', self.gf('django.db.models.fields.CharField')(max_length=20)),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True, blank=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, blank=True)),
('redeemed_at', self.gf('django.db.models.fields.DateTimeField')(null=True, blank=True)),
))
db.send_create_signal('coupons', ['Coupon'])
def backwards(self, orm):
# Deleting model 'Coupon'
db.delete_table('coupons_coupon')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'coupons.coupon': {
'Meta': {'ordering': "['created_at']", 'object_name': 'Coupon'},
'code': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'redeemed_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.IntegerField', [], {})
}
}
complete_apps = ['coupons'] |
Testing of embedded software, empirical studies of software testing, test automation, model-based testing.
The Software Testing Laboratory (STL) focuses on contemporary and future challenges in testing of embedded software systems, primarily in research projects conducted in close collaboration with industrial partners.
With an emphasis on method and tool development, as well as industrial and practical real life case studies, our research focus includes (but is not limited to) test design, model-based testing, search-based software testing, decision-support for software testing, and test automation. In short, we develop, refine, and evaluate methods, theories and tools for testing of industrial software systems. |
#!/usr/bin/env python
"""
Copyright 2012 GroupDocs.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class JobOutputDocument:
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually."""
def __init__(self):
self.swaggerTypes = {
'error': 'str',
'name': 'str',
'version': 'int',
'size': 'int',
'type': 'str',
'type_str': 'str',
'file_type_str': 'str',
'document_path': 'str',
'access': 'str',
'url': 'str',
'file_type': 'str',
'id': 'float',
'guid': 'str'
}
self.error = None # str
self.name = None # str
self.version = None # int
self.size = None # int
self.type = None # str
self.type_str = None # str
self.file_type_str = None # str
self.document_path = None # str
self.access = None # str
self.url = None # str
self.file_type = None # str
self.id = None # float
self.guid = None # str
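# A hypothetical note on usage: generated Swagger clients typically use
# swaggerTypes as the field-to-type map when populating an instance from a
# decoded JSON response, setting attributes such as name, size and url one
# by one.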
|
For some reason, out of the hundreds of times that I’ve heard the phrase “dead white men” in the context of the problematic amount of space they take up in the canon and in public consciousness, it struck me differently when I heard it today. Of what use, I’m wondering now, is the word “dead” in there?
I can get behind the spirit of the challenge. Let’s read women! Let’s read people of color! Your reading project is yours alone, and after all, we each only get so much reading time on this earth. Still, I cannot imagine a world in which I could bring myself to read the living instead of the dead.
My dissertation advisor recently reminded me that “tradition” and “metaphor,” from Latin and from Greek respectively, have the same literal sense, a carrying-over. “Tradition” comes to mean a carrying over of the things of our ancestors to the younger generations, whereas “metaphor” is used to describe the carrying-over, or transference, of meaning from one idea to another. But even in these conventional uses, surely there is still a relationship between the concepts: on what can our metaphors subsist without the tradition? How quickly would our living art pale and wither without its inheritances from the art of the dead?
Of course, I would say this; my work focuses almost entirely on the dead, and one of my most pressing research questions is about the terms of ethical engagement between the reader and the long-dead creators of texts. And of course I see, from a practical point of view, why the word gets lumped in with “white men”: of the texts published and preserved and made available from earlier eras in western literature (not to even begin to touch on the complex cultural history of translation), a disproportionate number were authored by white men.
At the same time, the word implies a hope that lives in those who deploy it: that this will no longer be the case in the texts currently being produced by the living, that a new era is upon us, and from now on, the canon will look different. It’s a lot of hope to place on our canon, the canon of those living now, and it is, by now, a hope that has been shared by many who are no longer with us, who have joined the dead. So are they no longer part of our project? Won’t we count them among our number?
I believe in keeping company with the dead, and will continue to do so, not apologetically or as a concession, but as a point of primary interest and ethical concern for the living.
To complicate the dichotomy that the ladies at Persephone set up, between holiday preparations and quiet time to read, I have also often enjoyed baking on St. Stephen’s Day, but specifically because the pressure is off, and it feels like the first day when it is possible to bake on my own terms again (clean kitchen, music at whim, no time obligations whatsoever). But first and foremost, the 26th is a day for reading: everyone needs some quiet that day, all of the expectations of constant socializing must be dropped, and hopefully, one has at least one new book that's been tugging at one from the edge of all the socializing. So, perhaps a good day to bake something yeasty, and fill the long proofing periods with reading.
Still, before we get there, the solstice this week is the first of my December holiday trinity: this is the quiet one, spent in exactly my own way, sparkling between cool air and warm light, and hopefully out-of-doors and champagne and eating meat again for the first time in Advent (though I had some early solstice celebrations away from home this year with family—I, like many of my contemporaries, believe myself an inventor of flexibility in ritual).
The solstice has been special to me for a number of years, and it’s delightful to see it coming forward in collective attention recently thanks to the astrological renaissance underway on the internet and in millennial havens nowadays. My social circles include representatives of both sides of the polarization that astrology brings out, a conflict that’s amusing if you keep in mind how recently (half a millennium or so ago) there was no line as such between astrology and astronomy. For this, I see the rationalists and the astrologues as the conflict of the schism: the skeksis and the mystics of the world I inhabit. I have a guess, however, at which side would be happier to hear me describe the conflict as illusory.
Still, I feel that any version of modernity currently practiced will not prevent any of us from appreciating the turning of the year, the hope for the returning of the sun. In my life, too, this December moment is a symbolic one: the promise that eventually, later on in January, I will start to get some sunlight back in the morning, the only time I really care about it. Many people I know are delighted to start gaining light in the afternoon, since, I suppose, they find it gloomy rather than magical to leave work in the dark. For me, it’s still another month to wait for relief, but the solstice encourages: brighter times ahead.
Given that it's crunch time, I really shouldn't have any time to read things that are not strictly "for" my dissertation. But given that reading is a compulsive behavior for me, reading off-topic is the only way to relax my brain - I tell myself it's a kind of recovery, or stretching.
Anyway, things collide interestingly for me sometimes. Also, I hate/love the look of writing bleeding through paper. |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-02-22 11:39
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('catalogue', '__first__'),
]
operations = [
migrations.CreateModel(
name='Cart',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_created', models.DateField(auto_now_add=True)),
('date_updated', models.DateField(auto_now=True)),
('cart_comment', models.TextField(blank=True, default=b'', null=True)),
('cart_username', models.CharField(blank=True, default=b'', max_length=60, null=True)),
('cart_useremail', models.CharField(blank=True, default=b'', max_length=60, null=True)),
('cart_userphone', models.CharField(blank=True, default=b'', max_length=60, null=True)),
],
options={
'verbose_name': 'Shopping Cart',
'verbose_name_plural': 'Shopping Carts',
},
),
migrations.CreateModel(
name='CartItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField(verbose_name=b'Quantity')),
('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cart.Cart', verbose_name=b'Cart')),
('cart_bundle', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='bundle_owner', to='cart.Cart')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='catalogue.Item', verbose_name=b'Catalogue Item')),
],
options={
'verbose_name': 'Cart Item',
'verbose_name_plural': 'Cart Items',
},
),
migrations.CreateModel(
name='CartVirtualShopItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=140)),
('quantity', models.IntegerField(verbose_name=b'Quantity')),
('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cart.Cart', verbose_name=b'Cart')),
],
),
migrations.CreateModel(
name='CartWriteinItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=140)),
('code', models.CharField(max_length=20)),
('cart', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='cart.Cart', verbose_name=b'Cart')),
],
),
]
|
Luke Shaw, Nemanja Matic, Anthony Martial, Marcus Rashford and Romelu Lukaku have all been included in the provisional Manchester United squad for the Premier League visit of Watford.
The quintet all withdrew from international duty over the last fortnight as just six first-team squad members represented their nations. David de Gea, Antonio Valencia, Eric Bailly, Scott McTominay and Paul Pogba were the only players who appeared for their senior national sides, while Diogo Dalot featured for Portugal Under-20s.
Sources close to Lukaku told the MEN in midweek the Belgian needed time to recover from a foot injury that ruled him out of United’s FA Cup defeat to Wolves two weeks ago, as well as Belgium’s European Championship qualifiers against Russia and Cyprus. Lukaku’s camp were reluctant to earmark the round-of-16 Champions League first leg with Barcelona on April 10 as a possible comeback fixture, since United travel to Wolves before then on Tuesday.
Lukaku only returned to United’s Carrington training complex on Friday, having undergone treatment in Belgium, but still checked in with the rest of his teammates at The Lowry Hotel on Friday night. Martial (knee), Shaw, Matic and Rashford (ankle) were also present. United manager Ole Gunnar Solskjaer suggested certain players would have late fitness tests.
See the full squad here. |
import logging
import json
from pylons import config
from ckan import logic
import ckan.plugins as p
import ckan.lib.helpers as h
import ckan.logic.action.update as update_core
import ckan.logic.action.get as get_core
from ckanext.orgdashboards.helpers import (_get_action,
orgdashboards_get_geojson_properties)
log = logging.getLogger(__name__)
@p.toolkit.side_effect_free
def orgdashboards_show_datasets(context, data_dict):
dd = data_dict.copy()
dd.update({'include_datasets': True})
data = _get_action('organization_show', context.copy(), dd)
return data.pop('packages', [])
@p.toolkit.side_effect_free
def orgdashboards_dataset_show_resources(context, data_dict):
data = _get_action('package_show', context.copy(), data_dict)
return data.pop('resources', [])
@p.toolkit.side_effect_free
def orgdashboards_resource_show_resource_views(context, data_dict):
data = _get_action('resource_view_list', context.copy(), data_dict)
data = filter(lambda i: i['view_type'] == data_dict['view_type'], data)
return data
@p.toolkit.side_effect_free
def orgdashboards_resource_show_map_properties(context, data_dict):
return orgdashboards_get_geojson_properties(data_dict.get('id'))
@p.toolkit.side_effect_free
def orgdashboards_get_map_main_property(context, data_dict):
dd = data_dict.copy()
dd.update({'include_datasets': True})
data = _get_action('organization_show', context.copy(), dd)
if 'orgdashboards_map_main_property' in data:
return data['orgdashboards_map_main_property']
else:
return None
def organization_update(context, data_dict):
# Keep data for charts in one key (orgdashboards_charts) so that many
# charts can be stored
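    # e.g. form fields 'orgdashboards_chart_1' (a resource view id) and
    # 'orgdashboards_chart_1_subheader' collapse into one chart entry
    # with order == 1.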
charts = []
for k, v in data_dict.items():
item = {}
if k.startswith('orgdashboards_chart_') and \
not k.endswith('subheader'):
id = k.split('_')[-1]
item['order'] = int(id)
item['resource_view_id'] = v
item['subheader'] = data_dict.get(
'orgdashboards_chart_{}_subheader'.format(id)
)
charts.append(item)
if charts:
data_dict['orgdashboards_charts'] = json.dumps(charts)
return update_core.organization_update(context, data_dict)
@p.toolkit.side_effect_free
def organization_show(context, data_dict):
data = get_core.organization_show(context, data_dict)
charts = data.get('orgdashboards_charts')
if charts:
data['orgdashboards_charts'] = json.loads(charts)
        data['orgdashboards_charts'].sort(key=lambda k: k['order'])
return data
|
Unwanted insects can manifest for numerous reasons. Location and climatic conditions play a significant part in building up pests. However, anybody can stop them from festering by taking pest elimination measures. Here we are going to tell you about the various methods to be implemented, along with the reasons why you should undertake these measures.
Your residence is going to be seriously wrecked if pest infestation is ignored. This includes not only insects but wildlife as well. These spread allergies and also contaminate your food, which can be extremely harmful to your health. Keeping all your things and foodstuff covered at all times is quite hard while you are at your house. It is smart to treat the problem from its origin, which can also prevent its reoccurrence. And the source of the difficulty is freely lurking pests.
The cleanliness of the house as well as the health and wellness of family members is in danger. No matter how tidy your surroundings are, the entire cleaning plan is a waste if you have insects at home. It will also create a negative impression of you, as no neighbor wants a pest-infested home next door.
Most of the time, you might need pest management services.
Pest Control Ottawa: These are specialized professionals who have intensive knowledge about why unwanted pests fester and how they can be controlled. They can help your home get rid of pests for the long term. This can solve your difficulty in a way that you won't need to worry about pests growing back for a long period to come.
Experienced pest control services have professional experts who use various solutions to control the pest situation. The experts can eliminate every type of pest in your house by making use of different strategies. Even the most difficult pest infestation is totally treated, thanks to their years of experience and great knowledge.
If your home is affected by unwanted pests, always use our pest control services. You should get in touch with us as soon as you notice a pest infestation. If pest control procedures are adopted on time, most pest problems can be resolved easily. If a pest infestation spreads to large areas, it will require more time and effort to eradicate entirely.
We, an experienced pest control service provider, are at your service anytime you need us. Our years' worth of work taught us the best ways to treat whatever issue you are faced with. It doesn't matter how old your home is or how extensive the pest infestation, as we have the right solution for any pest issue. Moreover, we also ensure that our treatment solutions show positive results immediately. As we offer a quality pest control service, it is better to choose us.
"""
This script goes through the marginalized detection rate files,
and gets the average detection rate as a function of temperature.
"""
from __future__ import print_function, division
import pandas as pd
import numpy as np
import sys
import os
import datetime
import matplotlib.pyplot as plt
import seaborn as sns
import plottools
sns.set_context('paper', font_scale=2.0)
sns.set_style('white')
sns.set_style('ticks')
import get_parameters
# Some constants
SAMPLE_HIP = [1366, 3300, 12719, 13165, 15338, 17563, 22840, 22958, 24902,
26063, 26563, 28691, 33372, 44127, 58590, 65477, 76267, 77516,
77858, 79199, 79404, 81641, 84606, 85385, 88290, 89156, 91118,
92027, 92728, 98055, 100221, 106786, 113788, 116247, 116611]
SAMPLE_STARS = ['HIP {}'.format(hip) for hip in SAMPLE_HIP]
BASE_DIR = '{}/School/Research'.format(os.environ['HOME'])
INSTRUMENT_DIRS = dict(TS23='{}/McDonaldData/'.format(BASE_DIR),
HRS='{}/HET_data/'.format(BASE_DIR),
CHIRON='{}/CHIRON_data/'.format(BASE_DIR),
IGRINS='{}/IGRINS_data/'.format(BASE_DIR))
def get_undetected_stars(star_list=SAMPLE_STARS):
"""
Get the undetected stars from my sample.
"""
full_sample = get_parameters.read_full_sample()
full_sample['Parsed_date'] = full_sample.Date.map(get_parameters.convert_dates)
undetected = full_sample.loc[full_sample.Temperature.isnull()]
matches = undetected.loc[undetected.identifier.isin(star_list)][['identifier', 'Instrument', 'Parsed_date']].copy()
return matches
def decrement_day(date):
year, month, day = date.split('-')
t = datetime.datetime(int(year), int(month), int(day)) - datetime.timedelta(1)
return t.isoformat().split('T')[0]
def get_detection_rate(instrument, starname, date):
"""
Read in the detection rate as a function of temperature for the given parameters.
"""
directory = INSTRUMENT_DIRS[instrument]
fname = '{}{}_{}_simple.csv'.format(directory, starname.replace(' ', ''), date)
try:
df = pd.read_csv(fname)
except IOError:
try:
fname = '{}{}_{}_simple.csv'.format(directory, starname.replace(' ', ''), decrement_day(date))
df = pd.read_csv(fname)
except IOError:
print('File {} does not exist! Skipping!'.format(fname))
df = pd.DataFrame(columns=['temperature', 'detection rate', 'mean vsini'])
df['star'] = [starname] * len(df)
return df
def get_stats(df):
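    # For one temperature bin: median detection rate across stars plus the
    # 16th/84th percentiles (a one-sigma-like spread).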
Teff = df.temperature.values[0]
low, med, high = np.percentile(df['detection rate'], [16, 50, 84])
return pd.Series(dict(temperature=Teff, middle=med, low_pct=low, high_pct=high))
def make_plot(stats_df):
""" Make a plot showing the average and spread of the detection rates
"""
fig, ax = plt.subplots()
fig.subplots_adjust(left=0.15, bottom=0.18, right=0.95, top=0.85)
ax.plot(stats_df.temperature, stats_df.middle, 'r--', lw=2, label='Median')
ax.fill_between(stats_df.index, stats_df.high_pct, stats_df.low_pct, alpha=0.4, color='blue')
p = plt.Rectangle((0, 0), 0, 0, color='blue', alpha=0.4, label='16th-84th Percentile')
ax.add_patch(p)
plottools.add_spt_axis(ax, spt_values=('M5', 'M0', 'K5', 'K0', 'G5', 'G0', 'F5'))
leg = ax.legend(loc=4, fancybox=True)
ax.set_xlabel('Temperature (K)')
ax.set_ylabel('Detection Rate')
ax.set_xlim((3000, 6550))
ax.set_ylim((0.0, 1.05))
return ax
if __name__ == '__main__':
sample = get_undetected_stars()
print(sample)
summary = pd.concat([get_detection_rate(inst, star, d) for inst, star, d in zip(sample.Instrument,
sample.identifier,
sample.Parsed_date)])
# Get statistics on the summary, averaging over the stars
stats = summary[['temperature', 'detection rate']].groupby('temperature').apply(get_stats)
make_plot(stats)
plt.savefig('../Figures/DetectionRate.pdf')
plt.show() |
So it’s been ten years this Sunday since I signed my first contract with Gollancz. Which remains somewhat astonishing to me, mostly that I’m still allowed to make stuff up when I should be working a proper job… But anyway I was going to do a giveaway of all my books to celebrate when it occurred to me that I’ve done a few of those over the years and it was maybe time to do something different.
Instead, I want your money. Or rather, I want it to go to my local charity. I’m looking for bids on a set of all my published books (that’s the entire Twilight Reign plus the two Empire of a Hundred Houses books, seven novels and one collection of short stories – plus, if someone proves particularly generous, perhaps also a promise for copies of the novella and novel I’ve written but haven’t come out yet).
The charity is Helen and Douglas House, a hospice for children and young adults with life-shortening conditions. http://www.helenanddouglas.org.uk/ – and in case you want to just throw money their way right now, the best place for that is via their website or here - https://www.justgiving.com/hhadh/ – they do brilliant work and are lovely people, so well deserving of your generosity.
So put your bids in the comments here, as replies on Facebook or Twitter, or emailed through my website even – I don’t care where, I’ll do my best to collate them and make it clear what the leading bid is. I’ll be covering postage (so kinda hoping for a UK-based winner ;0) ) and I’m open to negotiation when it comes to format, depending on the size of the winning bid – I think I’ve got most of the ones printed – and will sign/dedicate/first-line as requested so long as it’s vaguely reasonable.
I’ll leave this to run until Monday morning, get bidding! |
#!/usr/bin/env python2.7
from __future__ import print_function
import sys
import time
from threading import Thread
from pymesos import MesosExecutorDriver, Executor, decode_data
class JupyterHubExecutor(Executor):
"""
May not be necessary
"""
def launchTask(self, driver, task):
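        # Run the task body on a worker thread so the executor driver's
        # event loop stays responsive while the task runs.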
def run_task(task):
update = {
'task_id': {
"value": task['task_id']['value']
},
'state': 'TASK_RUNNING',
'timestamp': time.time()
}
driver.sendStatusUpdate(update)
print(decode_data(task['data']), file=sys.stderr)
time.sleep(30)
update = {
'task_id': {
'value': task['task_id']['value']
},
'state': 'TASK_FINISHED',
'timestamp': time.time()
}
driver.sendStatusUpdate(update)
thread = Thread(target=run_task, args=(task,))
thread.start()
|
7. As the popularity of NBC Sports Live Extra has increased during the season with more soccer fans watching live games on it, the video quality has been an issue particularly on 10am ET games when some of the top teams are playing in the same window. Even with a hard-wired Internet connection, buffering happens more than it should. And when games are watched in full-screen mode, the video quality is often not good enough.
8. Android users experienced a lot of issues earlier in the season with the NBC Sports Live Extra app for Android devices being practically unusable. NBC, to their credit, upgraded the Android app to improve the viewing experience. Technically, however, it’s a difficult job to keep all Android users happy as the app works well on most of the popular Android devices, not all. It’s the nature of the beast, unfortunately.
NBC launched a brand-new feature for its first year of Premier League TV coverage, which is Premier League Extra Time. The feature is actually overflow channels that most TV providers offer to its subscribers, so you can watch live games that are not being shown live on NBC or NBCSN. Plus many of the games are available as video on-demand for later viewing.
1. If your TV provider offers Premier League Extra Time, consider yourself fortunate. There are still a small number of TV providers who are not providing the service. Or, for example, Tivo users who have Xfinity Comcast are not able to access the Premier League Extra Time features via on-demand.
1. The only complaint is that some TV providers don’t make Premier League Extra Time available, but that’s no fault of NBC Sports. That’s a decision for each TV provider. NBC is making the overflow channels available free-of-charge.
Production, analysts and commentary are key ingredients in making a successful launch, but the marketing and promotion to let people know that Premier League matches are now on NBC Sports is just as crucial.
1. NBC scored a home run with their Ted Lasso viral videos starring Jason Sudeikis. The NBC network gained instant credibility on both sides of the Atlantic, making sure that the words “NBC” and “EPL” were on the tips of the tongues of all US soccer fans.
2. NBC scored another hit with the advertising it did on the NBC subway cars in New York City. The subway cars were so popular that fans of some of the clubs made special pilgrimages to see them in person, and to share the photos on social media for everyone else to see. |
# jsb/persist.py
#
#
"""
allow data to be written to disk or BigTable in JSON format. creating
the persisted object restores data.
"""
## jsb imports
from jsb.utils.trace import whichmodule, calledfrom, callstack, where
from jsb.utils.lazydict import LazyDict
from jsb.utils.exception import handle_exception
from jsb.utils.name import stripname
from jsb.utils.locking import lockdec
from jsb.utils.timeutils import elapsedstring
from jsb.lib.callbacks import callbacks
from jsb.lib.errors import MemcachedCounterError, JSONParseError, WrongFileName
from datadir import getdatadir
## simplejson imports
from jsb.imports import getjson
json = getjson()
## basic imports
from collections import deque
import thread
import logging
import os
import os.path
import types
import copy
import sys
import time
## defines
cpy = copy.deepcopy
## locks
persistlock = thread.allocate_lock()
persistlocked = lockdec(persistlock)
## global list to keeptrack of what persist objects need to be saved
needsaving = deque()
def cleanup(bot=None, event=None):
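    """ retry saving any Persist objects whose earlier save failed. """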
global needsaving
#todo = cpy(needsaving)
r = []
for p in needsaving:
try: p.dosave() ; r.append(p) ; logging.warn("saved on retry - %s" % p.fn)
except (OSError, IOError), ex: logging.error("failed to save %s - %s" % (p, str(ex)))
for p in r:
try: needsaving.remove(p)
except ValueError: pass
return needsaving
got = False
from jsb.memcached import getmc
mc = getmc()
if mc:
status = mc.get_stats()
if status:
logging.warn("memcached uptime is %s" % elapsedstring(status[0][1]['uptime']))
got = True
else: logging.debug("no memcached found - using own cache")
from cache import get, set, delete
import fcntl
## classes
class Persist(object):
""" persist data attribute to JSON file. """
def __init__(self, filename, default=None, init=True, postfix=None, needexist=False):
""" Persist constructor """
if postfix: self.fn = str(filename.strip()) + str("-%s" % postfix)
else: self.fn = str(filename.strip())
if needexist and not os.path.exists(self.fn): raise WrongFileName(self.fn)
        self.lock = thread.allocate_lock() # lock used when saving
self.data = LazyDict(default=default) # attribute to hold the data
self.setlogname()
self.countername = self.fn + "_" + "counter"
if got:
count = mc.get(self.countername)
try:
self.mcounter = self.counter = int(count)
except (ValueError, TypeError):
self.mcounter = self.counter = mc.set(self.countername, "1") or 0
else:
self.mcounter = self.counter = 0
self.ssize = 0
self.jsontxt = ""
self.dontsave = False
if init:
self.init(default)
if default == None: default = LazyDict()
self.setlogname()
def setlogname(self):
try:
res = []
target = getdatadir().split(os.sep)
if not target[-1]: target = target[-2]
else: target = target[-1]
for i in self.fn.split(os.sep)[::-1]:
if target in i: break
if i.endswith(os.sep): i = i[:-1]
res.append(i)
self.logname = os.sep.join(res[::-1])
if not self.logname: self.logname = self.fn
except: handle_exception() ; self.logname = self.fn
def size(self):
return "%s (%s)" % (len(self.data), len(self.jsontxt))
def init(self, default={}, filename=None):
""" initialize the data. """
gotcache = False
cachetype = "cache"
try:
logging.debug("using name %s" % self.fn)
a = get(self.fn)
if a: self.data = a
else: self.data = None
if self.data != None:
logging.debug("got data from local cache")
return self
if got: self.jsontxt = mc.get(self.fn) ; cachetype = "cache"
if not self.jsontxt:
datafile = open(self.fn, 'r')
self.jsontxt = datafile.read()
datafile.close()
self.ssize = len(self.jsontxt)
cachetype = "file"
if got: mc.set(self.fn, self.jsontxt)
except IOError, ex:
if not 'No such file' in str(ex):
logging.error('failed to read %s: %s' % (self.fn, str(ex)))
raise
else:
logging.debug("%s doesn't exist yet" % self.fn)
self.jsontxt = json.dumps(default)
try:
if self.jsontxt:
logging.debug(u"loading: %s" % type(self.jsontxt))
try: self.data = json.loads(str(self.jsontxt))
except Exception, ex: logging.error("couldn't parse %s in the %s file" % (self.jsontxt, self.fn)) ; self.data = None ; self.dontsave = True
if not self.data: self.data = LazyDict()
elif type(self.data) == types.DictType:
logging.debug("converting dict to LazyDict")
d = LazyDict()
d.update(self.data)
self.data = d
set(self.fn, self.data)
logging.debug("loaded %s - %s" % (self.logname, cachetype))
except Exception, ex:
logging.error('ERROR: %s' % self.fn)
raise
def upgrade(self, filename):
self.init(self.data, filename=filename)
self.save(filename)
def get(self):
logging.debug("getting %s from local cache" % self.fn)
a = get(self.fn)
logging.debug("got %s from local cache" % type(a))
return a
def sync(self):
logging.debug("syncing %s" % self.fn)
if got: mc.set(self.fn, json.dumps(self.data))
set(self.fn, self.data)
return self
def save(self):
cleanup()
global needsaving
try: self.dosave()
except (IOError, OSError):
self.sync()
if self not in needsaving: needsaving.append(self)
@persistlocked
def dosave(self):
""" persist data attribute. """
try:
if self.dontsave: logging.error("dontsave is set on %s - not saving" % self.fn) ; return
fn = self.fn
if got: self.mcounter = int(mc.incr(self.countername))
if got and (self.mcounter - self.counter) > 1:
tmp = json.loads(mc.get(fn))
if tmp:
try: tmp.update(self.data) ; self.data = LazyDict(tmp) ; logging.warn("updated %s" % fn)
except AttributeError: pass
self.counter = self.mcounter
d = []
if fn.startswith(os.sep): d = [os.sep,]
for p in fn.split(os.sep)[:-1]:
if not p: continue
d.append(p)
pp = os.sep.join(d)
if not os.path.isdir(pp):
logging.warn("creating %s dir" % pp)
os.mkdir(pp)
tmp = fn + '.tmp' # tmp file to save to
datafile = open(tmp, 'w')
fcntl.flock(datafile, fcntl.LOCK_EX | fcntl.LOCK_NB)
json.dump(self.data, datafile, indent=True)
fcntl.flock(datafile, fcntl.LOCK_UN)
datafile.close()
try: os.rename(tmp, fn)
except (IOError, OSError):
os.remove(fn)
os.rename(tmp, fn)
jsontxt = json.dumps(self.data)
logging.debug("setting cache %s - %s" % (fn, jsontxt))
self.jsontxt = jsontxt
set(fn, self.data)
if got: mc.set(fn, jsontxt)
if 'sleeptime' in self.fn: logging.info('%s saved' % self.logname)
else: logging.warn('%s saved' % self.logname)
except IOError, ex: logging.error("not saving %s: %s" % (self.fn, str(ex))) ; raise
except: raise
finally: pass
## findfilenames function
def findfilenames(target, filter=[], skip=[]):
logging.debug("finding files in %s - filter: %s - skip: %s" % (target, filter, skip))
res = []
result = []
if not os.path.isdir(target): return res
if not target.endswith(os.sep): target += os.sep
for f in os.listdir(target):
        skipped = False
        for s in skip:
            if s in f: skipped = True ; break
        if skipped: continue
fname = target + f
if os.path.isdir(fname): res.extend(findfilenames(fname, filter, skip))
go = True
for fil in filter:
if fil.lower() not in fname.lower(): go = False ; break
if not go: continue
res.append(fname)
return res
def findnames(target, filter=[], skip=[]):
res = []
for f in findfilenames(target, filter, skip):
res.append(f.split(os.sep)[-1])
return res
class PlugPersist(Persist):
""" persist plug related data. data is stored in jsondata/plugs/{plugname}/{filename}. """
def __init__(self, filename, default={}, *args, **kwargs):
plugname = calledfrom(sys._getframe())
Persist.__init__(self, getdatadir() + os.sep + 'plugs' + os.sep + stripname(plugname) + os.sep + stripname(filename), default=default, *args, **kwargs)
class GlobalPersist(Persist):
""" persist plug related data. data is stored in jsondata/plugs/{plugname}/{filename}. """
def __init__(self, filename, default={}, *args, **kwargs):
if not filename: raise Exception("filename not set in GlobalPersist")
logging.warn("filename is %s" % filename)
Persist.__init__(self, getdatadir() + os.sep + 'globals' + os.sep + stripname(filename), default=default, *args, **kwargs)
## PersistCollection class
class PersistCollection(object):
""" maintain a collection of Persist objects. """
def __init__(self, path, *args, **kwargs):
assert path
self.path = path
d = [os.sep, ]
logging.warn("path is %s" % self.path)
for p in path.split(os.sep):
if not p: continue
d.append(p)
pp = os.sep.join(d)
try:
os.mkdir(pp)
logging.warn("creating %s dir" % pp)
except OSError, ex:
if 'Errno 13' in str(ex) or 'Errno 2' in str(ex) or "Errno 17" in str(ex): continue
logging.warn("can't make %s - %s" % (pp,str(ex))) ; continue
def filenames(self, filter=[], path=None, skip=[], result=[]):
target = path or self.path
res = findfilenames(target, filter, skip)
logging.warn("filenames are %s" % str(res))
return res
def names(self, filter=[], path=None, skip=[], result=[]):
target = path or self.path
res = findnames(target, filter, skip)
return res
def search(self, field, target):
res = []
for obj in self.objects().values():
try: item = getattr(obj.data, field)
except AttributeError: handle_exception() ; continue
if not item: continue
if target in item: res.append(obj)
return res
def objects(self, filter=[], path=None):
if type(filter) != types.ListType: filter = [filter, ]
res = {}
target = path or self.path
for f in self.filenames(filter, target):
res[f] = Persist(f)
return res
## PlugPersistCollection class
class PlugPersistCollection(PersistCollection):
def __init__(self):
plugname = calledfrom(sys._getframe())
logging.warn("plugin is %s" % plugname)
self.path = getdatadir() + os.sep + 'plugs' + os.sep + stripname(plugname) + os.sep
PersistCollection.__init__(self, self.path)
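    ## example usage (illustrative sketch, not part of the original module):
    ## coll = PlugPersistCollection()     # collection rooted at this plugin's data dir
    ## objs = coll.objects()              # {filename: Persist object}
    ## hits = coll.search('owner', 'someuser')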
## GlobalPersistCollection class
class GlobalPersistCollection(PersistCollection):
def __init__(self):
self.path = getdatadir() + os.sep + 'globals'
        PersistCollection.__init__(self, self.path)
callbacks.add("TICK60", cleanup)
|
Tuesday Roundup: Why, Helllllllo Harley!!!
Ah, Krypton… Maybe it was a nice place to visit, but if you’re a human, you probably wouldn’t want to live there. And you definitely don’t want it returning and destroying all things we hold dear. |
# Set the QT API to PyQt4
import os
import pkg_resources
os.environ['QT_API'] = 'pyqt'
import sip
sip.setapi("QString", 2)
sip.setapi("QVariant", 2)
from PyQt4 import QtGui,QtCore
import sys
import functools,random
from templates import template_exp
import time
import custom_widgets as Widgets
import numpy as np
class ConvenienceClass():
"""
This class contains methods that simplify setting up and running
an experiment.
    The :func:`arbitFit` method accepts two arrays, the fitting function,
    and a keyword argument 'guess' that is an array containing
    guess values for the various fitting parameters.
    Guess values can be obtained via :func:`getGuessValues`, based on
    a keyword argument 'func' which can currently be either 'sine'
    or 'damped sine'.
"""
timers=[]
def __init__(self):
print 'initializing convenience class'
try:
import scipy.optimize as optimize
import scipy.fftpack as fftpack
except ImportError:
print 'imports failed for scipy.optimize,scipy.fftpack'
self.optimize = None;self.fftpack=None
else:
self.optimize = optimize;self.fftpack=fftpack
self.timers=[]
def loopTask(self,interval,func,*args):
"""
Creates a QTimer that executes 'func' every 'interval' milliseconds
all additional arguments passed to this function are passed on as
arguments to func
Refer to the source code for experiments such as diodeIV, Bandpass filter etc.
"""
timer = QtCore.QTimer()
timerCallback = functools.partial(func,*args)
timer.timeout.connect(timerCallback)
timer.start(interval)
self.timers.append(timer)
return timer
def delayedTask(self,interval,func,*args):
"""
Creates a QTimer that executes 'func' once after 'interval' milliseconds.
all additional arguments passed to this function are passed on as
arguments to func
"""
timer = QtCore.QTimer()
timerCallback = functools.partial(func,*args)
timer.singleShot(interval,timerCallback)
self.timers.append(timer)
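    ## illustrative sketch (assumed usage, not part of the original module):
    ## self.loopTask(500, self.acquire, 'CH1')   # 'acquire' is a hypothetical callback
    ## self.delayedTask(1000, self.showHelp)     # one-shot call after 1 second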
def random_color(self):
c=QtGui.QColor(random.randint(20,255),random.randint(20,255),random.randint(20,255))
if np.average(c.getRgb())<150:
c=self.random_color()
return c
    def displayObjectContents(self,d):
        """
        The contents of the dictionary 'd' are displayed in a new QWindow.
        Requires pyqtgraph (self.pg), which is set up by the Experiment class.
        """
self.tree = self.pg.DataTreeWidget(data=d)
self.tree.show()
self.tree.setWindowTitle('Data')
self.tree.resize(600,600)
def dampedSine(self,x, amp, freq, phase,offset,damp):
"""
A damped sine wave function
"""
return offset + amp*np.exp(-damp*x)*np.sin(abs(freq)*x + phase)
def fitData(self,xReal,yReal,**args):
def mysine(x, a1, a2, a3,a4):
return a4 + a1*np.sin(abs(a2)*x + a3)
N=len(xReal)
yhat = self.fftpack.rfft(yReal)
idx = (yhat**2).argmax()
freqs = self.fftpack.rfftfreq(N, d = (xReal[1]-xReal[0])/(2*np.pi))
frequency = freqs[idx]
amplitude = (yReal.max()-yReal.min())/2.0
offset = yReal.max()-yReal.min()
frequency=args.get('frequency',1e6*abs(frequency)/(2*np.pi))*(2*np.pi)/1e6
phase=args.get('phase',0.)
guess = [amplitude, frequency, phase,offset]
try:
(amplitude, frequency, phase,offset), pcov = self.optimize.curve_fit(mysine, xReal, yReal, guess)
ph = ((phase)*180/(np.pi))
if(frequency<0):
#print 'negative frq'
return 0,0,0,0,pcov
if(amplitude<0):
#print 'AMP<0'
ph-=180
if(ph<-90):ph+=360
if(ph>360):ph-=360
freq=1e6*abs(frequency)/(2*np.pi)
amp=abs(amplitude)
if(frequency): period = 1./frequency
else: period = 0
pcov[0]*=1e6
return amp,freq,ph,offset,pcov
except:
return 0,0,0,0,[[]]
def getGuessValues(self,xReal,yReal,func='sine'):
if(func=='sine' or func=='damped sine'):
N=len(xReal)
offset = np.average(yReal)
yhat = self.fftpack.rfft(yReal-offset)
idx = (yhat**2).argmax()
freqs = self.fftpack.rfftfreq(N, d = (xReal[1]-xReal[0])/(2*np.pi))
frequency = freqs[idx]
amplitude = (yReal.max()-yReal.min())/2.0
phase=0.
if func=='sine':
return amplitude, frequency, phase,offset
if func=='damped sine':
return amplitude, frequency, phase,offset,0
def arbitFit(self,xReal,yReal,func,**args):
N=len(xReal)
guess=args.get('guess',[])
try:
results, pcov = self.optimize.curve_fit(func, xReal, yReal,guess)
pcov[0]*=1e6
return True,results,pcov
except:
return False,[],[]
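    ## illustrative sketch (assumed usage, not part of the original module):
    ## guess = list(self.getGuessValues(x, y, func='damped sine'))   # amp, freq, phase, offset, damp
    ## ok, results, pcov = self.arbitFit(x, y, self.dampedSine, guess=guess)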
class Experiment(QtGui.QMainWindow,template_exp.Ui_MainWindow,Widgets.CustomWidgets):
timers=[]
def __init__(self,**args):
        self.qt_app = args.get('qt_app') or QtGui.QApplication(sys.argv)   # reuse a passed-in QApplication; the old default argument created a second one eagerly
self.showSplash()
super(Experiment, self).__init__(args.get('parent',None))
self.updateSplash(10)
try:
import pyqtgraph as pg
import pyqtgraph.opengl as gl
except ImportError:
self.pg = None;self.gl=None
else:
self.pg = pg
self.gl=gl
self.updateSplash(10)
self.setupUi(self)
Widgets.CustomWidgets.__init__(self);self.updateSplash(10)
self.I = args.get('I',None)
self.graphContainer2_enabled=False
self.graphContainer1_enabled=False
self.console_enabled=False
self.output_enabled=False
self.viewBoxes=[]
self.plot_areas=[]
self.plots3D=[]
self.plots2D=[]
self.axisItems=[]
self.total_plot_areas=0
self.widgetBay = False
self.help_url = pkg_resources.resource_filename(__name__, os.path.join('helpfiles','interface.html'))
#self.additional_handle = QSplitterHandle(Qt.Horizontal,self.graph_splitter)
#self.graph_splitter.addWidget(self.additional_handle)
if(args.get('showresult',True)):
dock = QtGui.QDockWidget()
dock.setFeatures(QtGui.QDockWidget.DockWidgetMovable|QtGui.QDockWidget.DockWidgetFloatable)#|QDockWidget.DockWidgetVerticalTitleBar)
dock.setWindowTitle("Results")
self.output_text = QtGui.QTextEdit()
self.output_text.setReadOnly(True)
fr = QtGui.QFrame()
plt = QtGui.QGridLayout(fr)
plt.setMargin(0)
plt.addWidget(self.output_text)
self.output_enabled=True
sys.stdout = self.relay_to_console(self.output_text)
dock.setWidget(fr)
self.result_dock=dock
self.output_text.setStyleSheet("color: rgb(255, 255, 255);")
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, dock)
else:
self.result_dock=False
self.output_enabled=False
self.updateSplash(10)
if(args.get('handler',False)):
self.addHandler(args.get('handler'))
while(self.progressBar.value()<100):
self.updateSplash(1)
time.sleep(0.01)
def updateSplash(self,x,txt=''):
self.progressBar.setValue(self.progressBar.value()+x)
if(len(txt)):self.splashMsg.setText(' '+txt)
self.qt_app.processEvents()
self.splash.repaint()
def showSplash(self):
import pkg_resources
splash_pix = QtGui.QPixmap(pkg_resources.resource_filename('v0.stylesheets', "splash3.png"))
self.splash = QtGui.QSplashScreen(splash_pix, QtCore.Qt.WindowStaysOnTopHint)
# adding progress bar
self.progressBar = QtGui.QProgressBar(self.splash)
self.progressBar.resize(self.splash.width(),20)
css = pkg_resources.resource_string('v0', "stylesheets/splash.css")
if css:
self.splash.setStyleSheet(css)
self.splashMsg = QtGui.QLabel(self.splash);self.splashMsg.setStyleSheet("font-weight:bold;color:purple")
self.splash.setMask(splash_pix.mask())
self.splashMsg.setText('Loading....');self.splashMsg.resize(self.progressBar.width(),20)
self.splash.show()
self.splash.repaint()
def run(self):
def __resizeHack__():
if self.result_dock:
self.result_dock.setMaximumHeight(100)
self.result_dock.setMaximumHeight(2500)
self.delayedTask(0,__resizeHack__)
self.show()
self.splash.finish(self)
self.qt_app.exec_()
def addPlotArea(self):
fr = QtGui.QFrame(self.graph_splitter)
fr.setFrameShape(QtGui.QFrame.StyledPanel)
fr.setFrameShadow(QtGui.QFrame.Raised)
fr.setMinimumHeight(250)
self.total_plot_areas+=1
fr.setObjectName("plot"+str(self.total_plot_areas))
plt = QtGui.QGridLayout(fr)
plt.setMargin(0)
self.plot_areas.append(plt)
return len(self.plot_areas)-1
def add3DPlot(self):
plot3d = self.gl.GLViewWidget()
#gx = gl.GLGridItem();gx.rotate(90, 0, 1, 0);gx.translate(-10, 0, 0);self.plot.addItem(gx)
#gy = gl.GLGridItem();gy.rotate(90, 1, 0, 0);gy.translate(0, -10, 0);self.plot.addItem(gy)
gz = self.gl.GLGridItem();#gz.translate(0, 0, -10);
plot3d.addItem(gz);
plot3d.opts['distance'] = 40
plot3d.opts['elevation'] = 5
plot3d.opts['azimuth'] = 20
pos=self.addPlotArea()
self.plot_areas[pos].addWidget(plot3d)
self.plots3D.append(plot3d)
plot3d.plotLines3D=[]
return plot3d
def add2DPlot(self):
plot=self.pg.PlotWidget()
pos=self.addPlotArea()
self.plot_areas[pos].addWidget(plot)
plot.viewBoxes=[]
self.plotLegend=plot.addLegend(offset=(-1,1))
self.plots2D.append(plot)
return plot
def add2DPlots(self,num):
for a in range(num):yield self.add2DPlot()
def add3DPlots(self,num):
for a in range(num):yield self.add3DPlot()
def addAxis(self,plot,**args):
p3 = self.pg.ViewBox()
ax3 = self.pg.AxisItem('right')
plot.plotItem.layout.addItem(ax3, 2, 3+len(self.axisItems))
plot.plotItem.scene().addItem(p3)
ax3.linkToView(p3)
p3.setXLink(plot.plotItem)
ax3.setZValue(-10000)
if args.get('label',False):
ax3.setLabel(args.get('label',False), color=args.get('color','#ffffff'))
plot.viewBoxes.append(p3)
p3.setGeometry(plot.plotItem.vb.sceneBoundingRect())
p3.linkedViewChanged(plot.plotItem.vb, p3.XAxis)
## Handle view resizing
Callback = functools.partial(self.updateViews,plot)
plot.getViewBox().sigStateChanged.connect(Callback)
self.axisItems.append(ax3)
return p3
def enableRightAxis(self,plot):
p = self.pg.ViewBox()
plot.showAxis('right')
plot.setMenuEnabled(False)
plot.scene().addItem(p)
plot.getAxis('right').linkToView(p)
p.setXLink(plot)
plot.viewBoxes.append(p)
Callback = functools.partial(self.updateViews,plot)
plot.getViewBox().sigStateChanged.connect(Callback)
return p
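    ## illustrative sketch (assumed usage): attach a curve to the second Y axis.
    ## p2 = self.enableRightAxis(plot)
    ## curve = self.pg.PlotCurveItem(pen='y'); p2.addItem(curve)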
def updateViews(self,plot):
for a in plot.viewBoxes:
a.setGeometry(plot.getViewBox().sceneBoundingRect())
a.linkedViewChanged(plot.plotItem.vb, a.XAxis)
def configureWidgetBay(self,name='controls'):
if(self.widgetBay):return
dock = QtGui.QDockWidget()
dock.setFeatures(QtGui.QDockWidget.DockWidgetMovable|QtGui.QDockWidget.DockWidgetFloatable)#|QDockWidget.DockWidgetVerticalTitleBar)
dock.setWindowTitle(name)
fr = QtGui.QFrame()
fr.setStyleSheet("QLineEdit {color: rgb(0,0,0);}QPushButton, QLabel ,QComboBox{color: rgb(255, 255, 255);}")
dock.setWidget(fr)
self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, dock)
self.frame_area = QtGui.QVBoxLayout(fr)
self.frame_area.setMargin(0)
self.widgetBay = True
def updateWidgetBay(self,obj):
self.configureWidgetBay()
self.frame_area.addWidget(obj)
def addHandler(self,handler,name = 'Controls'):
'''
Add handler instance(subclass of QFrame) to the left side of the window.
The contents of the handler are QWidgets which control various aspects
of the experiment that the handler has been designed for.
'''
self.configureWidgetBay(name)
self.frame=handler
self.updateWidgetBay(self.frame)
#self.updateWidgetBay(self.frame)
try:
self.I = handler.I
if(self.console_enabled):
self.ipyConsole.pushVariables({"I":self.I})
self.ipyConsole.printText("Access hardware using the Instance 'I'. e.g. I.get_average_voltage(0)")
except:
print 'Device Not Connected.'
def addConsole(self,**args):
try:
#read arguments
self.I = args.get('I',self.I)
self.showSplash();self.updateSplash(10,'Importing iPython Widgets...')
from iPythonEmbed import QIPythonWidget;self.updateSplash(10,'Creating Dock Widget...')
#-------create an area for it to sit------
dock = QtGui.QDockWidget()
dock.setFeatures(QtGui.QDockWidget.DockWidgetMovable|QtGui.QDockWidget.DockWidgetFloatable)#|QDockWidget.DockWidgetVerticalTitleBar)
dock.setWindowTitle("Interactive Python Console")
fr = QtGui.QFrame();self.updateSplash(10)
dock.setWidget(fr)
self.addDockWidget(QtCore.Qt.BottomDockWidgetArea, dock)
fr.setFrameShape(QtGui.QFrame.StyledPanel)
fr.setFrameShadow(QtGui.QFrame.Raised);self.updateSplash(10,'Embedding IPython Widget...')
#--------instantiate the iPython class-------
self.ipyConsole = QIPythonWidget(customBanner="An interactive Python Console!\n");self.updateSplash(10)
layout = QtGui.QVBoxLayout(fr)
layout.setMargin(0)
layout.addWidget(self.ipyConsole);self.updateSplash(10,'Preparing default command dictionary...')
cmdDict = {"delayedTask":self.delayedTask,"loopTask":self.loopTask,"addWidget":self.addWidget,"setCommand":self.setCommand,"Widgets":Widgets}
#if self.graphContainer1_enabled:cmdDict["graph"]=self.graph
if self.I :
cmdDict["I"]=self.I
self.ipyConsole.printText("Access hardware using the Instance 'I'. e.g. I.get_average_voltage('CH1')")
self.ipyConsole.pushVariables(cmdDict);self.updateSplash(10,'Winding up...')
self.console_enabled=True
self.splash.finish(dock);self.updateSplash(10)
dock.widget().setMaximumSize(QtCore.QSize(self.width(), self.height()/3))
dock.widget().setMinimumSize(QtCore.QSize(self.width(), self.height()/3))
print dock.width(),dock.height()
def dockResize():
dock.widget().setMaximumSize(65535,65535)
dock.widget().setMinimumSize(60,60)
self.delayedTask(0,dockResize)
return self.ipyConsole
except:
self.splash.finish(self);self.updateSplash(10)
errbox = QtGui.QMessageBox()
errbox.setStyleSheet('background:#fff;')
print errbox.styleSheet()
errbox.about(self, "Error", "iPython-qtconsole not found.\n Please Install the module")
def showHelp(self):
from PyQt4 import QtWebKit
dock = QtGui.QMainWindow()
self.helpView = QtWebKit.QWebView()
dock.setCentralWidget(self.helpView)
dock.setWindowTitle("Help window")
dock.show()
self.helpView.setUrl(QtCore.QUrl(self.help_url))
self.helpWindow = dock
def showFullHelp(self):
from PyQt4 import QtWebKit
dock = QtGui.QMainWindow()
self.helpView = QtWebKit.QWebView()
dock.setCentralWidget(self.helpView)
dock.setWindowTitle("Help window")
dock.show()
URL = pkg_resources.resource_filename(__name__, os.path.join('helpfiles','interface.html'))
self.helpView.setUrl(QtCore.QUrl(URL))
self.fullHelpWindow = dock
def showImageMap(self):
from PyQt4 import QtWebKit
dock = QtGui.QMainWindow()
self.helpView = QtWebKit.QWebView()
dock.setCentralWidget(self.helpView)
dock.setWindowTitle("Help window")
dock.show()
URL = pkg_resources.resource_filename(__name__, os.path.join('helpfiles','imagemap.html'))
self.helpView.setUrl(QtCore.QUrl(URL))
self.imageMapHelp = dock
def setHelpUrl(self,url):
if 'http' in url:
self.help_url = url
else:
self.help_url = pkg_resources.resource_filename(__name__, os.path.join('helpfiles',url))
def new3dSurface(self,plot,**args):
import scipy.ndimage as ndi
surface3d = self.gl.GLSurfacePlotItem(z=np.array([[0.1,0.1],[0.1,0.1]]), **args)
#surface3d.shader()['colorMap']=self.pg.ColorMap(np.array([0.2,0.4,0.6]),np.array([[255,0,0,255],[0,255,0,255],[0,255,255,255]])).getLookupTable()
#surface3d.shader()['colorMap'] = np.array([0.2, 2, 0.5, 0.2, 1, 1, 0.2, 0, 2])
plot.addItem(surface3d)
return surface3d
def setSurfaceData(self,surf,z):
surf.setData(z=np.array(z))
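    ## illustrative sketch (assumed usage, not part of the original module):
    ## plot3d = self.add3DPlot()
    ## surf = self.new3dSurface(plot3d, shader='heightColor', smooth=True)
    ## self.setSurfaceData(surf, np.random.random((50,50)))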
def draw3dLine(self,plot,x,y,z,color=(100,100,100)):
pts = np.vstack([x,y,z]).transpose()
plt = self.gl.GLLinePlotItem(pos=pts, color=self.pg.glColor(color),width=2)
plot.addItem(plt)
plot.plotLines3D.append(plt)
return plt
def clearLinesOnPlane(self,plot):
for a in plot.plotLines3D:
plot.removeItem(a)# a.setData(pos=[[0,0,0]])
plot.plotLines3D=[]
    class relay_to_console():
        def __init__(self,console):
            self.console = console
            self.cursor = self.console.textCursor()
            self.scroll=self.console.verticalScrollBar()
        def write(self,arg):
            self.cursor.movePosition(QtGui.QTextCursor.End)
            self.console.setTextCursor(self.cursor)
            self.console.insertPlainText(arg)
            #self.scroll.setValue(self.scroll.maximum())
            with open('b.txt','at') as f: f.write(arg)   # mirror console output to a debug log without leaking file handles
        def flush(self):
            pass
def graph(self,x,y):
if(self.graphContainer1_enabled): self.reserved_curve.setData(x,y)
def setRange(self,plot,x,y,width,height):
plot.setRange(QtCore.QRectF(x,y,width,height))
def addCurve(self,plot,name='',col=(255,255,255),axis='left'):
#if(len(name)):curve = plot.plot(name=name)
#else:curve = plot.plot()
if(len(name)):curve = self.pg.PlotCurveItem(name=name)
else:curve = self.pg.PlotCurveItem()
plot.addItem(curve)
curve.setPen(color=col, width=1)
return curve
def rebuildLegend(self,plot):
self.plotLegend = plot.addLegend(offset=(-10,30))
def loopTask(self,interval,func,*args):
timer = QtCore.QTimer()
timerCallback = functools.partial(func,*args)
timer.timeout.connect(timerCallback)
timer.start(interval)
self.timers.append(timer)
return timer
def delayedTask(self,interval,func,*args):
timer = QtCore.QTimer()
timerCallback = functools.partial(func,*args)
timer.singleShot(interval,timerCallback)
self.timers.append(timer)
def addButton(self,name,command,*args):
b=QtGui.QPushButton(None)
b.setText(name)
self.updateWidgetBay(b)
self.setCommand(b,"clicked()",command,*args)
return b
def addWidget(self,widget_type,**args):
b=widget_type(**args)
if(args.has_key('object_name')): b.setObjectName(args.get('object_name'))
if(args.has_key('text')): b.setText(args.get('text'))
if(args.has_key('items')):
for a in args.get('items'): b.addItem(a)
self.updateWidgetBay(b)
return b
    def setCommand(self,widget,signal,slot,*args):
        buttonCallback = functools.partial(slot,*args)
        QtCore.QObject.connect(widget, QtCore.SIGNAL(signal), buttonCallback)   # QObject and SIGNAL live in QtCore
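    ## illustrative sketch (assumed usage, not part of the original module):
    ## b = self.addButton('Capture', self.capture)   # 'capture' is a hypothetical handler
    ## box = self.addWidget(QtGui.QComboBox, items=['CH1','CH2'], object_name='chanBox')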
'''
class WorkThread(QtCore.QThread):
punched = QtCore.pyqtSignal()
def __init__(self):
QtCore.QThread.__init__(self)
def __del__(self):
self.wait()
def run(self):
for i in range(11):
time.sleep(0.5)
self.punched.emit()
self.terminate()
progress = QtGui.QProgressDialog("Copying...", "Cancel", 0, 10)
progress.show()
T = self.WorkThread()
T.punched.connect(lambda: progress.setValue(progress.value()+1))
T.start()
'''
|
The reindeer are coming to the camp!
This weekend, we are moving the herd of reindeer to the camp area for their winter stay here in Tromsø.
Looking forward to having them around again, and hope you will find the chance to visit SamiCamp when you are in Tromsø. |
#!/usr/bin/env python
import envoy
import subprocess
import sys
import time
import logging
from watchdog.observers import Observer
from watchdog.events import FileSystemEventHandler
try:
    WATCH = sys.argv[1]
except IndexError:
    WATCH = '001-initial.py'
LASTRUN = 0
class watcher(FileSystemEventHandler):
def on_modified(self, event):
global WATCH, LASTRUN
what = 'directory' if event.is_directory else 'file'
if what == 'file' and event.src_path.startswith('./%s' % WATCH[:3]) and time.time()-LASTRUN > 2.0:
LASTRUN = time.time()
logging.info("Modified %s: %s", what, event.src_path)
# Record the active window
r = envoy.run('xdotool getactivewindow')
window_id = r.std_out.strip()
envoy.run('pkill -x -f "python %s"' % WATCH)
            proc = subprocess.Popen('python %s' % WATCH, shell=True, stdin=None, stdout=None, stderr=None, close_fds=True)   # shell=True expects a string, not a list
# Restore the active window
time.sleep(1.0)
envoy.run('xdotool windowactivate %s' % window_id)
if __name__ == "__main__":
logging.basicConfig(level=logging.INFO,
format='%(asctime)s - %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
# path = sys.argv[1] if len(sys.argv) > 1 else '.'
path = '.'
observer = Observer()
observer.schedule(watcher(), path, recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
observer.join()
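# Illustrative usage (assumed; the script filename is hypothetical):
#   python rerun_on_change.py 002-example.py
# The watched script is killed and re-run on every modification, and the
# previously focused window is restored via xdotool.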
|
You have probably heard of braces being used to hold irregular teeth in place and give your mouth a proper shape. The trouble is that braces look odd whenever you smile. Now you can get rid of braces and still maintain your oral health. For that, contact us at Cosmetic Dentistry, serving USA clients with answers on What Is Invisalign Rancho Palos Verdes Ca. You have the best answer right here, from our team of leading experts.
Now, you might have this question in mind: Is Invisalign Effective Rancho Palos Verdes Ca? Well, it is, and the results become clear once the treatment has run its course.
from axelrod import Player
class Grumpy(Player):
"""A player that defects after a ceratin level of grumpiness. Grumpiness increases when the opponent defects and decreases when the opponent co-operates."""
name = 'Grumpy'
def __init__(self, starting_state = 'Nice', grumpy_threshold = 10, nice_threshold = -10):
"""Player starts of nice be default with set thresholds"""
super(Grumpy, self).__init__()
self.history = []
self.score = 0
self.state = starting_state
self.starting_state = starting_state
self.grumpy_threshold = grumpy_threshold
self.nice_threshold = nice_threshold
def strategy(self, opponent):
"""A player that gets grumpier the more the opposition defects, and nicer the more they cooperate.
Starts off Nice, but becomes grumpy once the grumpiness threshold is hit.
Won't become nice once that grumpy threshold is hit, but must reach a much lower threshold before it becomes nice again.
"""
self.grumpiness = sum(play=='D' for play in opponent.history) - sum(play=='C' for play in opponent.history)
if self.state == 'Nice':
if self.grumpiness > self.grumpy_threshold:
self.state = 'Grumpy'
return 'D'
return 'C'
if self.state == 'Grumpy':
if self.grumpiness < self.nice_threshold:
self.state = 'Nice'
return 'C'
return 'D'
def reset(self):
"""Resets score, history and state for the next round of the tournement."""
self.history = []
self.state = self.starting_state
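    ## illustrative sketch (assumed usage, not part of the original module):
    ## p = Grumpy(starting_state='Nice', grumpy_threshold=5, nice_threshold=-5)
    ## p.strategy(opponent)   # 'C' while nice, 'D' once grumpiness passes the threshold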
|
Get FREE high-speed Internet access at this Fairfield Inn. Always fresh, always clean, our guest rooms are full of thoughtful amenities, including a well-lit work desk and a remote-controlled TV with free cable. We'll get you started each day with a complimentary continental breakfast that includes hot, freshly brewed coffee. |
def init_actions_(service, args):
"""
this needs to returns an array of actions representing the depencies between actions.
Looks at ACTION_DEPS in this module for an example of what is expected
"""
# some default logic for simple actions
return {
'test': ['install']
}
def test(job):
"""
Test the created directory structure is corrected after ays blueprint on a test repo
"""
import sys
RESULT_OK = 'OK : %s'
RESULT_FAILED = 'FAILED : %s'
RESULT_ERROR = 'ERROR : %s %%s' % job.service.name
model = job.service.model
model.data.result = RESULT_OK % job.service.name
failures = []
blueprints = {
'bp_args_with_dot.yaml': True,
'bp_args_with_underscore.yaml': True,
'bp_valid_args.yaml': True,
'bp_non_exists_args.yaml': False,
}
    repo = None
    cl = None
    try:
        repo = 'sample_repo4'
        cl = j.clients.atyourservice.get().api.ays
for bp_name, should_success in blueprints.items():
try:
bp_resp = cl.executeBlueprint(data=None, repository=repo, blueprint=bp_name)
if not should_success and bp_resp.status_code == 200:
failures.append("blueprint %s should have failed" % bp_name)
except Exception as e:
if should_success:
failures.append("blueprint %s should have succeded : %s" % (bp_name, str(e)))
if failures:
model.data.result = RESULT_FAILED % '\n'.join(failures)
except:
model.data.result = RESULT_ERROR % str(sys.exc_info()[:2])
finally:
job.service.save()
        if repo and cl:
            cl.destroyRepository(data=None, repository=repo)
|
The Identity API service enables developers to manage authentication and authorization services for Rackspace services through a simple Representational State Transfer (REST) web service interface.
The Identity service v2.0 is an implementation of OpenStack Keystone Service v2.0 that provides common, token-based authentication and authorization services that allow seamless access to Rackspace products and services.
Authentication generates a token in response to valid credentials submitted by a user, process, or client. Then, that token is used to confirm identity when requesting access to systems or resources defined in the Identity service catalog.
Authorization determines the resources and actions available to a user, client, or process based on access control capabilities that can be defined and managed by using the Identity service.
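For example, a minimal authentication call might look like the sketch below (the endpoint URL and JSON field names are assumptions based on the Identity v2.0 conventions described above, and the credentials are placeholders):
import requests
# Hypothetical sketch, not taken from the reference documentation: obtain a
# token from an Identity v2.0 endpoint and reuse it on later requests.
IDENTITY_URL = 'https://identity.api.rackspacecloud.com/v2.0/tokens'
payload = {'auth': {'passwordCredentials': {'username': 'your-username', 'password': 'your-password'}}}
resp = requests.post(IDENTITY_URL, json=payload)
resp.raise_for_status()
token = resp.json()['access']['token']['id']
# The token confirms identity on subsequent calls to services listed in the
# service catalog, typically via the X-Auth-Token header.
headers = {'X-Auth-Token': token}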
This guide is intended to assist software developers who want to develop applications by using the REST application programming interface (API) for the Identity service.
Use the following links to get user and reference information for using the Identity service REST API.
You can also use Identity from the Cloud Control Panel. |
import sys
import os
import shutil
from zope.interface.verify import verifyObject
from twisted.trial import unittest
# ugly hack to avoid cyclic imports of pyrake.spider when running this test
# alone
from pyrake.interfaces import ISpiderManager
from pyrake.spidermanager import SpiderManager
from pyrake.http import Request
module_dir = os.path.dirname(os.path.abspath(__file__))
class SpiderManagerTest(unittest.TestCase):
def setUp(self):
orig_spiders_dir = os.path.join(module_dir, 'test_spiders')
self.tmpdir = self.mktemp()
os.mkdir(self.tmpdir)
self.spiders_dir = os.path.join(self.tmpdir, 'test_spiders_xxx')
shutil.copytree(orig_spiders_dir, self.spiders_dir)
sys.path.append(self.tmpdir)
self.spiderman = SpiderManager(['test_spiders_xxx'])
def tearDown(self):
del self.spiderman
del sys.modules['test_spiders_xxx']
sys.path.remove(self.tmpdir)
def test_interface(self):
verifyObject(ISpiderManager, self.spiderman)
def test_list(self):
self.assertEqual(set(self.spiderman.list()),
set(['spider1', 'spider2', 'spider3', 'spider4']))
def test_create(self):
spider1 = self.spiderman.create("spider1")
self.assertEqual(spider1.__class__.__name__, 'Spider1')
spider2 = self.spiderman.create("spider2", foo="bar")
self.assertEqual(spider2.__class__.__name__, 'Spider2')
self.assertEqual(spider2.foo, 'bar')
def test_find_by_request(self):
self.assertEqual(self.spiderman.find_by_request(Request('http://pyrake1.org/test')),
['spider1'])
self.assertEqual(self.spiderman.find_by_request(Request('http://pyrake2.org/test')),
['spider2'])
self.assertEqual(set(self.spiderman.find_by_request(Request('http://pyrake3.org/test'))),
set(['spider1', 'spider2']))
self.assertEqual(self.spiderman.find_by_request(Request('http://pyrake999.org/test')),
[])
self.assertEqual(self.spiderman.find_by_request(Request('http://spider3.com')),
[])
self.assertEqual(self.spiderman.find_by_request(Request('http://spider3.com/onlythis')),
['spider3'])
def test_load_spider_module(self):
self.spiderman = SpiderManager(['tests.test_spidermanager.test_spiders.spider1'])
assert len(self.spiderman._spiders) == 1
def test_load_base_spider(self):
self.spiderman = SpiderManager(['tests.test_spidermanager.test_spiders.spider0'])
assert len(self.spiderman._spiders) == 0
def test_load_from_crawler(self):
spider = self.spiderman.create('spider4', a='OK')
self.assertEqual(spider.a, 'OK')
|
ok - here's a clip from last nights gig (29/08) first song of the night.... if you're wondering why the band looks all dolled up it was a fundraiser and the theme was 1920's.
We've got 2 weeks off now, so time for a cup of tea and a lie down!
thanks guys - it was a good night... band played really well and no-one seemed to have an 'off' night.
I ended up using drummers old Kick Drum mic to mic up my bass cab - usually use an external DI, but drummer wasn't happy with signal he was getting to the desk from it, so we switched to the mic.
Best thing was we finished at 12am and I was home about 1:30am!
Hey Bassheads - thought I'd post this little edit of practice footage from Wednesday before our first gig in Ak since 2010!
About time you put that Acoustic Guitar away!
Oh *BANNED* just saw the vid I posted wasn't the gig footage - here it is!
* Right,off to get my ACOUSTIC GUITAR out!
Whats up with the errors when posting Youtube clips in here?
Fixed it for you Def. If you use the youtube button to post, you don't include the "http://youtube.com/" part, just the random letters and numbers at the end. |
# encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from django.core.management.base import BaseCommand
from django.utils import six
from haystack import connections
class Command(BaseCommand):
help = "Clears out the search index completely."
def add_arguments(self, parser):
parser.add_argument(
'--noinput', action='store_false', dest='interactive', default=True,
help='If provided, no prompts will be issued to the user and the data will be wiped out.'
)
parser.add_argument(
"-u", "--using", action="append", default=[],
help='Update only the named backend (can be used multiple times). '
'By default all backends will be updated.'
)
parser.add_argument(
'--nocommit', action='store_false', dest='commit',
default=True, help='Will pass commit=False to the backend.'
)
def handle(self, **options):
"""Clears out the search index completely."""
self.verbosity = int(options.get('verbosity', 1))
self.commit = options.get('commit', True)
using = options.get('using')
if not using:
using = connections.connections_info.keys()
if options.get('interactive', True):
self.stdout.write("WARNING: This will irreparably remove EVERYTHING from your search index in connection '%s'." % "', '".join(using))
self.stdout.write("Your choices after this are to restore from backups or rebuild via the `rebuild_index` command.")
yes_or_no = six.moves.input("Are you sure you wish to continue? [y/N] ")
if not yes_or_no.lower().startswith('y'):
self.stdout.write("No action taken.")
return
if self.verbosity >= 1:
self.stdout.write("Removing all documents from your index because you said so.")
for backend_name in using:
backend = connections[backend_name].get_backend()
backend.clear(commit=self.commit)
if self.verbosity >= 1:
self.stdout.write("All documents removed.")
|
Tanzanite with graphite and laumontite: Crystals of blue tanzanite in a rock matrix with graphite and laumontite. Specimen is from Merelani Hills, Tanzania. Photograph by Parent Géry, used here under a Creative Commons license.
Silica Exposures in Artisanal Small-Scale Gold Mining in Tanzania. African Barrick Gold-North Mara Mine, Dar es Salaam, Tanzania. Gold miners are exposed to silica, a component of soil, sand, and rocks. [Figure 1 of the paper is not reproduced here.]
The Lupa Goldfield of south-western Tanzania produced over 23 tonnes of gold during colonial times, and an unrecorded amount since independence. The New Saza Mine was the second largest pre-Independence gold producer in the country after the Geita Mine.
Below is a gallery of Tanzanite crystals for sale from John Betts Fine Minerals in New York City, NY. Tanzanite is the blue gem variety of the mineral species zoisite, first discovered and only found in the Merelani Hills, Tanzania. Crystals of tanzanite are uncommon because they are in high demand for cutting into gemstones.
The following tables shows the latest gold price in Tanzania calculated in Tanzanian Shilling (TZS) and updated regularly. The prices are sometimes updated more frequently at times of strong price moves based on live spot gold price (Bid Price).
"Kibaran understands that the legislation is designed to address the mining and export of precious metals and metal concentrates from existing operations in Tanzania," it said in a statement. |
import os
from envparse import env
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
DEBUG = env.bool('PL_DEBUG', default=True)
SECRET_KEY = env('PL_SECRET_KEY', default="DefaultSecretKey")
ALLOWED_HOSTS = ['*'] # change to actual before production
CSRF_COOKIE_SECURE = not DEBUG # change it to False if you are not using HTTPS
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'djfp2',
'djfp2.calendar',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'debug': DEBUG,
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
ROOT_URLCONF = 'djfp2.urls'
WSGI_APPLICATION = 'djfp2.wsgi.application'
AUTH_PASSWORD_VALIDATORS = []
LANGUAGE_CODE = 'en-us'
TIME_ZONE = env('PL_TIMEZONE', default='UTC')
USE_I18N = False
USE_L10N = False
USE_TZ = True
STATIC_URL = '/static/'
MEDIA_URL = '/static/media/'
# for collectstatic
STATIC_ROOT = env(
'PL_STATIC_ROOT',
default=os.path.join(BASE_DIR, "../../var/static_root")
)
DATABASES = {
'default': {
'ENGINE': env('PL_DB_ENGINE', default='django.db.backends.postgresql_psycopg2'),
'NAME': env('PL_DB_NAME', default='django_planner'),
'HOST': env('PL_DB_HOST', default='db'),
'PORT': env('PL_DB_PORT', default=5432),
'USER': env('PL_DB_USERNAME', default='django_planner'),
'PASSWORD': env('PL_DB_PASSWORD', default='replace it in django.env file'),
'ATOMIC_REQUESTS': True,
}
}
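# Illustrative (assumed) deployment note: every PL_* setting above can be
# overridden via environment variables, e.g.
#   PL_DEBUG=false PL_DB_HOST=localhost PL_SECRET_KEY=prod-key ./manage.py runserver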
RAVEN_DSN = env('PL_RAVEN_DSN', default=None)
if RAVEN_DSN:
    INSTALLED_APPS += (
        'raven.contrib.django.raven_compat',
    )
RAVEN_CONFIG = {
'dsn': RAVEN_DSN,
}
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
|
Article: "What Would We Do without Her?" (The Evening Standard, London, England)
THIS is a special week for temporary office administrator Jean Seddon.
On Friday she finds out whether she has won a nationwide award celebrating the contribution of temps to UK businesses. "I never thought I would stand a chance," says Seddon, one of three finalists for the One in a Million title.
The annual award was launched three years ago by the Recruitment and Employment Confederation (REC), together with Temp Week, which is being held this week. Both initiatives were set up to put the spotlight on the hidden army of one million temporary workers beavering away in companies every day.
"We wanted to show the huge value temp workers bring," explains Marcia Roberts, director of external relations for the REC, "and to celebrate those who make an outstanding effort."
Seddon herself is 75, and in the 16 years she has worked for Stanley Staff in Ellesmere Port, she has completed 161 administrative, secretarial and receptionist assignments for 32 companies. Agency staff pay tribute to her unfailing willingness to help out, even cancelling her holiday on one occasion to rescue a regular client.
Although offered permanent positions, Seddon resolutely sticks to temping.
"I have time to enjoy with my family, doing exercise, and charity work," she says.
This is clearly a lifestyle choice, and, according to Marcia Roberts, it is increasingly common. "No longer just filling a gap while jobhunting, many of today's workers choose to temp."
So why are these workers tempted by temping? "They enjoy the flexibility to determine when they work," says Roberts.
Workplace culture has shifted, too, and temps' benefits packages are more attractive, offering sick and holiday pay. |
# -*- coding: utf-8 -*-
# Copyright (c) 2019, CRS4
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
# the Software, and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# Generated by Django 1.11.5 on 2018-10-09 12:40
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rois_manager', '0014_auto_20171109_1015'),
]
operations = [
migrations.AddField(
model_name='focusregion',
name='tissue_status',
field=models.CharField(blank=True, choices=[(b'NORMAL', b'Normal'), (b'STRESSED', b'Stressed'), (b'TUMOR', b'Tumor')], max_length=8),
),
]
|
J & H DESIGNS, INC.
S & M TRADING, LTD.
TRI-STATE REAL ESTATE INVESTMENT AND FINANCE INC.
AMERICAN GYMNASTICS & CHEERLEADING, INC.
ELITE MARKETING & TELEMARKETING, INC.
214 EAST GWINNETT CONDOMINIUM ASSOCIATION, INC.
PROUD ACRES MOBILE HOME PARK, INC.
UNFORGETTABLE EVENTS AND CATERING, INC.
Terraces At Depot Park Townhome Association, Inc.
HURT & HURT PROPERTIES, L.L.C.
A LIGHT OF HOPE, INC.
FRESH START FARMS AND RETREAT CENTER INC.
WOODSTOCK PUBLIC SAFETY FOUNDATION, INC.
M & S RENTAL, INC. |