text
stringlengths 29
850k
|
---|
#!/usr/bin/python
import os,sys
def get_hosts(hostfile):
hosts=[]
f=open(hostfile,"r")
lines=f.readlines()
for line in lines:
dictionary={}
ip=line.strip().split("\t")[0].lstrip().rstrip()
hostname=line.strip().split("\t")[1].lstrip().rstrip()
dictionary['IP']=ip
dictionary['HOSTNAME']=hostname
hosts.append(dictionary)
return hosts
if __name__=="__main__":
srcpath=sys.argv[1]
hosts=get_hosts(sys.argv[2])
destpath=sys.argv[3]
username="pi"
outputs=[]
for host in hosts:
dictionary={}
print "scp -r %s %s@%s:%s" %(srcpath,username,host['HOSTNAME'],destpath)
try:
output=os.popen("scp -r %s %s@%s:%s" %(srcpath,username,host['HOSTNAME'],destpath)).read().strip()
except:
output="Failed"
dictionary['HOST']=host['HOSTNAME']
dictionary['OUTPUT']=output
outputs.append(dictionary)
for output in outputs:
print "*****************************************************************"
print "Host: " + output['HOST']
print "------------------------"
print "Command:"
print "------------------------"
print output['OUTPUT']
print "*****************************************************************"
|
2014 Chevrolet Camaro 2LT,HEADS UP DISPLAY,HEATED LEATHER,BACKUP CAMERA,SHARP!
Package 2LT,HEADS UP DISPLAY,HEATED LEATHER,BACKUP CAMERA,SHARP!
**LOCAL TRADE**, **CLEAN DOUBLE NICE**, **NEW PRICE**, Camaro 2LT 2LT, 2D Coupe, 3.6L V6 DGI DOHC VVT, Black, Inferno Orange w/Front Leather Seating Surfaces, 4-Wheel Antilock 4-Wheel Disc Brakes, Boston Acoustics Premium 9-Speaker Audio System, Exterior Parking Camera Rear, Front Leather Seating Surfaces, Front Sport Bucket Seats, Head-Up Display, Heated Driver Front Passenger Seats, Inferno Orange Interior Accent Trim Package, Orange Rally Stripe Package, Preferred Equipment Group 2LT, Rear Parking Sensors, Remote Keyless Entry, SiriusXM Satellite Radio, StabiliTrak, Wheels: 20` 5-Spoke Black Paint Alum (LPO). Odometer is 15580 miles below market average! 19/30 City/Highway MPG Awards: * JD Power Dependability Study * ALG Best Residual Value * 2014 KBB.com 10 Coolest New Cars Under $25,000 * 2014 KBB.com Best Resale Value Awards * 2014 KBB.com Brand Image Awards Since 1979, Jerrys Auto Sales has proudly served Sioux Falls and the surrounding communities including Sioux City, Mitchell, Vermillion, Yankton and Brookings. With the best selection of late model, low mileage units, Jerrys can provide you with a high quality vehicle at a great value. We encourage you to visit us at Jerrys and check out our great selection of vehicles or call one of our sales professionals at 605-647-2980.
Hello, I would like more information about the 2014 Chevrolet Camaro, stock# 119547.
Hello, I would like to see more pictures of the 2014 Chevrolet Camaro, stock# 119547. |
import matplotlib.pyplot as plt
import math
import numpy
from mpl_toolkits.mplot3d import Axes3D
def vert(x):
return (0.000734*(x**2))-(0.1042253*x)+4.9
def horr(x):
if x>60:
return (0.00037*(x**2))-(0.04462*x)+3.438
else:
return (0.00069*(x**2))-(0.08333*x)+4.6
def radialDistortion(x,y):
camYaw=0.0/180.0*math.pi
camPitch=0.0/180.0*math.pi
camRoll=0.0/180.0*math.pi
camTrans=numpy.array([[0],[0],[0]])
camScaling = 1
camYM = numpy.matrix([[math.cos(camYaw),0,math.sin(camYaw)],[0,1,0],[-math.sin(camYaw),0,math.cos(camYaw)]])
camPM = numpy.matrix([[1,0,0],[0,math.cos(camPitch),-math.sin(camPitch)],[0,math.sin(camPitch),math.cos(camPitch)]])
camRM = numpy.matrix([[math.cos(camRoll),-math.sin(camRoll),0],[math.sin(camRoll),math.cos(camRoll),0],[0,0,1]])
# undo the camera rotation
# convert x,y into rotations
x = (x-75)/180*math.pi
y = (y-70)/180*math.pi
ommYM = numpy.matrix([[math.cos(x),0,math.sin(x)],[0,1,0],[-math.sin(x),0,math.cos(x)]])
ommPM = numpy.matrix([[1,0,0],[0,math.cos(y),-math.sin(y)],[0,math.sin(y),math.cos(y)]])
forwardVect = numpy.array([[0],[0],[1]])
vect2 = ommYM*ommPM*forwardVect
#return vect2
vect2 = vect2 + camTrans
vect2 = camYM*camPM*camRM*vect2
if (vect2[2] > 0.01):
vect2 = vect2*camScaling/vect2[2]
else:
return numpy.array([[100000],[100000],[1]])
# normalise
# now translate x-y into pixels to account for distortion
r_c = math.sqrt((vect2[0])**2+(vect2[1])**2)
k_1 = -0.61233
k_2 = 0.92386
k_3 = 0
vect2[0] = vect2[0]*(1+k_1*r_c**2+k_2*r_c**4+k_3*r_c**6)
vect2[1] = vect2[1]*(1+k_1*r_c**2+k_2*r_c**4+k_3*r_c**6)
#vect2[0] = (vect2[0]+1.0)*(576.0/2.0)
#vect2[1] = (vect2[1]+1.0)*(480.0/2.0)
# return
# camera matrix:
f_x = 574.40666#*2.0
f_y = 571.55377#*2.0
s = 0
c_x = 315.79322
c_y = 193.62054#*2.0
camMat = numpy.matrix([[f_x,s,c_x],[0,f_y,c_y],[0,0,1]])
# apply
vect2 = camMat*vect2
#vect2[0] += c_x
#vect2[1] += c_y
return vect2
startX=60
startY=70
startPixX=30
startPixY=54
currX=startX
currY=startY
currPixX = startPixX
currPixY = startPixY
itr = 0
xPoints = []
yPoints = []
xPix = []
yPix = []
scale = 1.0
scaleV = 1.0
while currY<140:
if (itr%2)==0:
currX+=(0.5*horr(currY)*scale)
while currX<140:
xPoints.append(currX)
yPoints.append(currY)
xPix.append(currPixX)
yPix.append(currPixY)
currX+=horr(currY)*scale
currPixX+=1
currX=startX
currPixX=startPixX
if (itr%2)==0:
currX+=(0.5*horr(currY)*scale)
while currX>-20:
currX-=horr(currY)*scale
currPixX-=1
xPoints.append(currX)
yPoints.append(currY)
xPix.append(currPixX)
yPix.append(currPixY)
currX=startX
currPixX=startPixX
currY+=vert(currX)*scale*scaleV
currPixY+=1
itr+=1
currY = startY
currPixY=startPixY
itr = 0
while currY>0:
if (itr%2)==0:
currX+=(0.5*horr(currY)*scale)
while currX<140:
xPoints.append(currX)
yPoints.append(currY)
xPix.append(currPixX)
yPix.append(currPixY)
currX+=horr(currY)*scale
currPixX+=1
currX=startX
currPixX=startPixX
if (itr%2)==0:
currX+=(0.5*horr(currY)*scale)
while currX>-20:
currX-=horr(currY)*scale
currPixX-=1
xPoints.append(currX)
yPoints.append(currY)
xPix.append(currPixX)
yPix.append(currPixY)
currX=startX
currPixX=startPixX
currY-=vert(currX)*scale*scaleV
currPixY-=1
itr+=1
#plt.plot(xPix,yPix, 'r.')
#plt.show()
print min(xPix)
print min(yPix)
print max(xPix)
print max(yPix)
#
f = open('gigerdatacam_ardrone.h', 'w')
f.write("#ifndef GIGERDATA_H\n#define GIGERDATA_H\n\nfloat gdata[][4] = {")
orderedCoords = sorted(zip(xPoints,yPoints,xPix,yPix))
count = 0
mooX = []
mooX2 = []
mooY = []
mooY2 = []
mooXN2 = []
mooZ= []
mooelem2= []
mooelem3= []
for elem in orderedCoords:
#if elem[1]>=0 and elem[1]<=140 and elem[0]>=0 and elem[0]<=140:
# convert angles
v = radialDistortion(elem[0],elem[1])
#f.write("{"+str(v[0])+","+str(v[1])+","+str(elem[2])+","+str(elem[3])+"}, \ \n")
if (v[0].min() > -0.0 and v[0].min() < 720.0 and v[1].min() > 0.0 and v[1].min() < 360.0):
mooX.append(v[0].min())
mooX2.append(elem[0])
mooY.append(v[1].min())
mooY2.append(elem[1])
mooXN2.append(-elem[0])
mooZ.append(v[2].min())
mooelem2.append(elem[2])
mooelem3.append(elem[3])
f.write("{"+str(719-round(v[0].min()))+","+str(359-round(v[1].min()))+","+str(elem[2])+","+str(elem[3])+"},")
count += 1
print "Yay:::"
print min(mooelem2)
print max(mooelem2)
print min(mooelem3)
print max(mooelem3)
#fig = plt.figure()
#ax = fig.add_subplot(111, projection='3d')
#ax.scatter(mooX,mooY,mooZ)# bx
fig1 = plt.figure(figsize=(8, 6))
plt.plot(mooX,mooY,'r.')
plt.axis([0,720,0,360])
plt.xlabel("X pixel location",fontsize="20");
plt.ylabel("Y pixel location",fontsize="20");
plt.show()
fig1.savefig("ommmodelpixfig.pdf",format='pdf')
fig2 = plt.figure(figsize=(12, 6))
plt.plot(xPoints,yPoints, 'g.')
xPoints2 = [ -x for x in xPoints]
plt.plot(xPoints2,yPoints, 'g.')
plt.hold
plt.plot(mooX2,mooY2, 'r.')
plt.plot(mooXN2,mooY2, 'b.')
plt.xlabel("Azimuth (degrees)",fontsize="20");
plt.ylabel("Elevation (degrees)",fontsize="20");
plt.show()
fig2.savefig("ommmodelfig.pdf",format='pdf')
f.write("{0};int gdataLength = {1};\n#endif\n".format("}",count))
f.close(); |
East West Homestay & Holiday Apartments Very comfortable bed & breakfast accommodation, self-catering apartments & cottages in a quiet street, 5 mins walk to Rotorua city centre. Heated swimming pool and spa.
Jack and Di's Luxury, lake view bed and breakfast style accommodation. Great location close to Lake Rotorua and city centre.
Hamurana Lodge Boutique luxury hotel, set on a country estate 15 mins from Rotorua city centre.
Tresco B&B Comfortable bed & breakfast accommodation in a character villa close to Rotorua city centre. Hot mineral pool.
Solitaire Lodge 5 star luxury lodge located on private peninsula at Lake Tarawera, with stunning views.
Treetops Estate 5 star luxury lodge located in its own private wilderness estate, with walking, fishing and hunting on your doorstep.
Emerald Spa New motel with luxury studios and suites boasting private spas, near city centre.
Fenton Court Motel Four star motel near Rotorua city centre, with private spas.
Sport of Kings Motel Comfortable motel on a quiet road near Rotorua city centre, with shared spa.
Regal Palms Motel 5 star motor lodge about 15 mins walk from Rotorua city centre, with private spas and swimming pool.
Treks Backpackers New Rotorua backpacker accommodation, in city centre.
Funky Green Voyager Cosy backpacker accommodation in quiet street near centre.
Kiwipaka YHA Rotorua backpacker accommodation.
Alfresco B&B Very comfortable bed & breakfast accommodation with friendly hosts, 15 mins walk along river to town centre. Outdoor spa.
Crestwood Homestay Self contained guest area with 2 bedrooms, sunny deck and great views.
White Island Rendezvous Motel & Villas Motel, self-contained villa and bed & breakfast accommodation in a great location near the wharf and town centre.
Waikawa B&B B&B accommodation with spectacular sea views on the North Island's East Cape, Te Kaha.
Auckland No. 1 House Beautiful villa on waters edge (own beach), yet only 8 mins from centre. Meet you at airport, full cooked breakfast, happy hosts.
Old Courthouse B&B Historic courthouse beautifully refurbished into a lovely B&B in Raetihi, near Tongariro National Park.
Tongariro Crossing Lodge Boutique lodge in National Park, with fantastic hosts.
Air New Zealand International and national flight service. Trialling the use of bio-fuel to reduce carbon emissions! Now offering Sydney to Rotorua direct flights.
Qantas International and national flight service.
Intercity Coachline National coach service.
Naked Bus National coach service offering cheap, no-frill fares.
Tranzscenic Rail National railway service. Auckland to Tongariro.
Interisland Ferry Ferry service connecting the North and South Islands.
Bluebridge Ferry Ferry service connecting the North and South Islands.
Go Rentals Quality, well presented and maintained rental cars at very competitive prices in New Zealand. Free delivery to Auckland hotels.
Ace Rental Cars New Zealand car hire company with competitive rates which include unlimited kilometers insurance and roadside assistance.
Tui Campers NZ Campervan rental company specialising in catering for international tourists.
Moa Trek See a decidedly different NZ with Moa Trek's wine tours, luxury customized private tours, and tailormade itineraries.
New Zealand Tours Offering small group guided tours, coach tours, self drive tours, private tours and sightseeing day tours throughout New Zealand.
Rotorua Sustainable Tourism Charter Members committed to improving their environmental sustainability.
Organic Explorer Green travel guide to sustainable NZ: eco-tourism, cafes and accommodation.
NZ Tourism Online Comprehensive directory of NZ tourism operators, activities and accommodation.
Airfare to NZ Find discount airfare to NZ with Canuckabroad.
Auckland Airport Car Hire A global leader providing affordable car rentals all across New Zealand with high presence in Rotorua.
Enlightened Traveller designs original self-guided walking tours and trekking holidays in Provence and Southern France.
Rome Hotels Book a hotel room in Rome, Italy on this website. Quality hotels on discount rates with online reservations.
Cheap Airline Tickets & Vacation Packages One stop for cheap tickets, car rentals, vacation packages and Las Vegas hotels.
Car hire in Spain Cheap car hire Spain! Compare car hire prices for Spain from all of the top rental car companies with one search, including Alamo, Budget, Avis, Sixt, Economy, Hertz, Thrifty and Easycar.
Waimangu Volcanic Valley An exceptional walk through native forest and an active volcanic valley created by Mt Tarawera's eruption in 1886.
Orakei Korako Cave & Thermal Park A really special place featuring interesting volcanic formations including silica terraces and an unforgettable geothermal cave, situated on the shores of Lake Ohakuri.
River Rats Rafting Great rafting adventures on the famous Kaituna rapids and other rivers in the region.
River Jet The only place in NZ where you can jet boat to a thermal reserve and experience 2 of NZ's best icons - jet boating and geothermal activity.
Te Urewera Rainforest Route A collection of tourism operators offering a range of guided wilderness experiences in the largest National Park in the North Island.
Ahurei Adventures Hunting and fishing trips, marae stays and bush cabin accommodation, deep in Tuhoe country, hosted by local Maori.
Te Urewera Treks Unique 1- to 3-day treks with local Maori guides in the Whirinaki Forest Park, Te Urewera National Park, and on private Maori land.
Kiwi River Safaris 2 hour guided rafting adventures on the Rangitaiki River (grade 3-4).
White Island Tours Award winning eco tours on New Zealand's most active volcano - White Island.
Whale & Dolphin Watch Unique marine adventures in the coastal waters off Whakatane, including dolphin swimming and Whale Island eco-cultural guided walks.
Kanuka Wilderness Hunting A hunting and fishing paradise, Palmerston North.
Paparangi Ventures A real NZ outback adventure on 1200Ha native bush near the wild Motu River. Accommodation and guided walks.
Active Earth NZ Adventure hiking safaris in the North Island.
Walk Gisborne Three day unguided private farm, bush and coastal walk, with spectacular views, character accommodation, gourmet meals, bag transport, massages and NZ wine.
Fly Fishing Guide Tongariro Local fishing legend Ken Drummond offers full and half-day guided fishing trips in Tongariro back-country, for all ages and experience levels.
Southern Wilderness guided hikes on the Heaphy Track and other walks in the spectacular Nelson Lakes region of the South Island.
Kaikoura Wilderness Walks operates 2 and 3 day guided walks through the privately owned Puhi Peaks Nature Reserve high in the Seaward Kaikoura Range.
Sea Kayak Abel Tasman The Sea Kayak Company is locally owned and operated and specializes in guided paddlling adventures in the Abel Tasman National Park.
Unlimited New Zealand offer a one day guided trip to Arthur's Pass National Park from Christchurch, which includes a half day guided walk and return journey on the TranzAlpine scenic train.
Pure Trails NZ Active walking holidays exploring some of our favourite places in the South Island.
NZ Walking Company Offer a range of walking holidays throughout New Zealand.
Backpack New Zealand Comprehensive directory of activities and services available in New Zealand.
Backpacker Board New Zealand The independent travel guide for New Zealand. Fully interactive online guide for backpacking and budget travel in NZ.
Trekking NZ A range of DVDs and videos about other New Zealand walking tracks.
Backcountry NZ I enjoy hiking the back country trails of NZ. Come and see spectacular views of the Milford, Routeburn, Kepler and Stewart island tracks.
Tourism Online Comprehensive directory of tourism operators and activities in New Zealand.
Adventure New Zealand Information, tours and itinerary planning for adventure travel to New Zealand.
Experience New Zealand Travel New Zealand vacation services and recommended accommodation guide.
Trout Fishing New Zealand Information and tips on trout fishing in New Zealand.
Greenpeace New Zealand and world-wide environmental campaigners. Check the site for their latest news and events, and to find out how you can help to save the world!
Kiwi Recovery Programme A site loaded with interesting information about our threatened national bird, with a little info on the programme running at Waikaremoana.
100% Pure New Zealand The official New Zealand tourism site. Contains a wealth of information on travelling and tourism in New Zealand.
Department of Conservation For more information on the Waikaremoana area and New Zealand's other national parks and conservation areas.
NZS.com The online directory of all things New Zealand.
Craig Potton Publishing Independent New Zealand publisher and distributor of fabulous NZ books and photos, with particular focus on walks and the outdoors.
Gisborne Information Centre Information on Gisborne and the East Cape area.
Rotorua Information Centre Information on the Rotorua area.
Organic Explorer Complete guide to New Zealand's eco-tours, organic food and eco-accommodation.
New Zealand Focus New Zealand tourism information guides.
Gym in a Suitcase Christchurch based website providing information on health and fitness facilities for travellers.
Tramper NZ New Zealand's most extensive site devoted solely to our favourite activity, "tramping", known as hiking or trekking in other parts of the world.
Peak Bagging Forum and route descriptions of New Zealand's peaks.
NZ Wine Cellar Site providing information on our next favourite activity! Wine sales, festival events, regions, books and general NZ wine advice.
NZ Bartender Site packed with drinks recipes, competitions and articles.
One Weather Check out the weather in New Zealand today!
Qualmark New Zealand tourism's official quality assessors.
NZ Adventure Jobs Situations vacant in NZ's Great Outdoors.
Nick Jacobs Website Design Talented young website designer, with original and eye-catching designs. Check out his site!
Travel Library Travelogues describing trips and experiences world-wide.
Peak to Peak A BIG database of hiking ("tramping") related web sites.
Tracker Outdoors A North-American based, comprehensive resource for camping, fishing, hunting and organic gardening information.
Europe Hotel Guide Online hotel reservations for most European cities.
Infohub Specialty Travel Guide A travel guide containing unique specialty interest tour packages and anything associated with specialty travel.
Perfect Places Vacation Rentals An international directory of 1000's of vacation rental homes, villas, condos, and B&B's. Enter your vacation rental criteria and you will receive a list of vacation rentals to choose from.
Turkey Travel Guide Turkey hotels guide. Turkey Tours, cheap rental cars and flight tickets.
Walking Plus Guided walking group holidays to Greece, Naxos, Tinos & more Greek Islands.
Wonder Walkers New Zealand Women Walking Site.
Fly Fishing Forum Fly fishing forum and other resources. |
"""INSTEON Standard Receive Message Type 0x50."""
from insteonplm.constants import (
MESSAGE_STANDARD_MESSAGE_RECEIVED_0X50,
MESSAGE_STANDARD_MESSAGE_RECIEVED_SIZE,
)
from insteonplm.address import Address
from insteonplm.messages.message import Message
from insteonplm.messages.messageFlags import MessageFlags
class StandardReceive(Message):
"""Insteon Standard Length Message Received.
Message type 0x50
"""
_code = MESSAGE_STANDARD_MESSAGE_RECEIVED_0X50
_sendSize = MESSAGE_STANDARD_MESSAGE_RECIEVED_SIZE
_receivedSize = MESSAGE_STANDARD_MESSAGE_RECIEVED_SIZE
_description = "INSTEON Standard Message Received"
def __init__(self, address, target, commandtuple, cmd2=None, flags=0x00):
"""Init the StandardReceive message class."""
if commandtuple.get("cmd1") is not None:
cmd1 = commandtuple["cmd1"]
cmd2out = commandtuple["cmd2"]
else:
raise ValueError
if cmd2 is not None:
cmd2out = cmd2
if cmd2out is None:
raise ValueError
self._address = Address(address)
self._target = Address(target)
self._messageFlags = MessageFlags(flags)
# self._messageFlags.extended = 0
self._cmd1 = cmd1
self._cmd2 = cmd2out
@classmethod
def from_raw_message(cls, rawmessage):
"""Create message from a raw byte stream."""
return StandardReceive(
rawmessage[2:5],
rawmessage[5:8],
{"cmd1": rawmessage[9], "cmd2": rawmessage[10]},
flags=rawmessage[8],
)
# pylint: disable=protected-access
@classmethod
def template(
cls, address=None, target=None, commandtuple=None, cmd2=-1, flags=None
):
"""Create a message template used for callbacks."""
msgraw = bytearray([0x02, cls._code])
msgraw.extend(bytes(cls._receivedSize))
msg = StandardReceive.from_raw_message(msgraw)
if commandtuple:
cmd1 = commandtuple.get("cmd1")
cmd2out = commandtuple.get("cmd2")
else:
cmd1 = None
cmd2out = None
if cmd2 is not -1:
cmd2out = cmd2
msg._address = Address(address)
msg._target = Address(target)
msg._messageFlags = MessageFlags(flags)
msg._cmd1 = cmd1
msg._cmd2 = cmd2out
return msg
@property
def address(self):
"""Return the address of the device."""
return self._address
@property
def target(self):
"""Return the address of the target device."""
return self._target
@property
def cmd1(self):
"""Return the cmd1 property of the message."""
return self._cmd1
@property
def cmd2(self):
"""Return the cmd2 property of the message."""
return self._cmd2
@property
def flags(self):
"""Return the message flags."""
return self._messageFlags
@property
def targetLow(self):
"""Return the low byte of the target message property.
Used in All-Link Cleanup message types.
"""
low_byte = None
if self.target.addr is not None and self._messageFlags.isBroadcast:
low_byte = self.target.bytes[0]
return low_byte
@property
def targetMed(self):
"""Return the middle byte of the target message property.
Used in All-Link Cleanup message types.
"""
med_byte = None
if self.target.addr is not None and self._messageFlags.isBroadcast:
med_byte = self.target.bytes[1]
return med_byte
@property
def targetHi(self):
"""Return the high byte of the target message property.
Used in All-Link Cleanup message types.
"""
hi_byte = None
if self.target.addr is not None and self._messageFlags.isBroadcast:
hi_byte = self.target.bytes[2]
return hi_byte
def _message_properties(self):
return [
{"address": self._address},
{"target": self._target},
{"flags": self._messageFlags},
{"cmd1": self._cmd1},
{"cmd2": self._cmd2},
]
|
12x12x48 tank of 33 gl. For LPG. Nice tank, used for about 6 years, installed new ( I am first owner) inside vapour box, inside my Pinzgauer 710K truck. It has a fuel gauge with a viewport and a sender to install an electric meter. This tank, I believe, was designed for a GM Suburban, but you may want to contact Sleegers engineering for specifics on installation, unless you just want to put in on your pickup bed, which is ideal. Let's talk about shipping if you are interested. I am willing to help out if you are not in a hurry to receive it.
Tank is posted on EBAY, a couple of pics there. Asking $360. Shipping is expensive, but again, I am willing to do my best to find this tank a new home. |
# Codelet Tuning Infrastructure
# Copyright (C) 2010-2015 Intel Corporation, CEA, GENCI, and UVSQ
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#************************************************************************
# Authors: Franck Talbart, Mathieu Bordet, Nicolas Petit
""" Entry module provides facilities to work with the CTI entries.
"""
import cti, ctr
import util, types, util_uid, alias, database, repository, database_manager
import os, copy, json, time, datetime, shutil, distutils.dir_util, UserDict
DataEntryMetaFiles = [cti.cti_plugin_config_get_value(v) for
v in [cti.PLUGIN_OUTPUT_FILENAME,
cti.PLUGIN_INPUT_FILENAME,
cti.DATA_INFO_FILENAME]]
class odict(UserDict.DictMixin):
def __init__(self):
self._keys = []
self._data = {}
#------------------------------------------------------------------------
def __setitem__(self, key, value):
if key not in self._data:
self._keys.append(key)
self._data[key] = value
#------------------------------------------------------------------------
def __getitem__(self, key):
return self._data[key]
#------------------------------------------------------------------------
def __delitem__(self, key):
del self._data[key]
self._keys.remove(key)
#------------------------------------------------------------------------
def keys(self):
return list(self._keys)
#------------------------------------------------------------------------
def copy(self):
copyDict = odict()
copyDict._data = self._data.copy()
copyDict._keys = self._keys[:]
return copyDict
#------------------------------------------------------------------------
class CmdNode(object):
""" Command node class represent a simple node.
The typicall CTI command has some attributes, characterize the command itself,
and params which is an input for the command.
For ordered iteration use ordered_params list.
"""
def __init__(self, attributes, params):
if cti.META_ATTRIBUTE_REP in attributes:
if attributes[cti.META_ATTRIBUTE_REP] == cti.LOCAL_REPOSITORY:
attributes[cti.META_ATTRIBUTE_REP] = cti.CTR_REP_LOCAL
elif attributes[cti.META_ATTRIBUTE_REP] == cti.COMMON_REPOSITORY:
attributes[cti.META_ATTRIBUTE_REP] = cti.CTR_REP_COMMON
elif attributes[cti.META_ATTRIBUTE_REP] == cti.TEMP_REPOSITORY:
attributes[cti.META_ATTRIBUTE_REP] = cti.CTR_REP_TEMP
self.attributes = attributes
self.params = params
#------------------------------------------------------------------------
class Commands(odict):
"""CTI commands dictionary.
This class represents an extension for the standard dictionary class.
With Commands we are able to use CTI files as a structured dictionaries.
Here is a structure of such a dictionary.
Commands CmdNode
+-------+ +------------+
| cmd1 |----->| attributes |
| cmd2 | | params |
| ... | +------------+
+-------+
"""
def __init__(self, datatype, uid, basename, default_data = None, no_none_value = False, remove_outdated_params=False):
odict.__init__(self)
self.datatype = datatype
self.uid = None
self.basename = basename
if basename is None:
return None
filename = None
if uid is None:
filename = basename
else:
filedir = ctr.ctr_plugin_get_path_by_uid(datatype, uid)
if filedir is not None:
filename = os.path.join(filedir, basename)
else:
filename = basename
self.uid = uid
try:
self.load(filename, default_data, no_none_value, remove_outdated_params)
except Exception as e:
print("Can't load the commands of \"%s\" (wrong input or output files)" % uid)
if uid is None:
print("Plugin probably not found.")
raise e
#------------------------------------------------------------------------
def __str__(self):
""" The Commands class pretty printer. """
result = ""
for cmd in self.keys():
result += "****************************************************\n"
result += "Command: %s\n" % (cmd)
result += "Attributes: %s \n" % (self[cmd].attributes)
for p in self[cmd].params:
result += "Param: %s\n" % p
val = "NONE"
if cti.META_ATTRIBUTE_VALUE in self[cmd].params[p]:
val = self[cmd].params[p][cti.META_ATTRIBUTE_VALUE]
result += "Value = %s \n" % (val)
result += "\n"
return result
#------------------------------------------------------------------------
def record_output(self, command, path):
""" Records data to the output file
Args:
command: the set of parameters
path: the path to the output file
"""
try:
output_name = cti.cti_plugin_config_get_value(cti.PLUGIN_OUTPUT_FILENAME)
if output_name:
filename = os.path.join(path, output_name)
else:
util.hapi_fail("Can't get value from config file")
except OSError, e:
util.hapi_fail("Failed to concat path: %s" % e)
return self.record(command, filename, only_values=True)
#------------------------------------------------------------------------
def record_input(self, command, path):
""" Records data to the input file
Args:
command: the set of parameters
path: the path to the output file
"""
try:
output_name = cti.cti_plugin_config_get_value(cti.PLUGIN_INPUT_FILENAME)
if output_name:
filename = os.path.join(path, output_name)
else:
util.hapi_fail("Can't get value from config file")
except OSError, e:
util.hapi_fail("Failed to concat path: %s" % e)
return self.record(command, filename, only_values=True)
#------------------------------------------------------------------------
def record(self, command, filename, only_values=False):
""" Record all values for a given command in a given file.
Args:
command: for which command
filename: an optional filename to which the data should be recorded
"""
# JSON begin
d = self[command]
jd = {}
jd[command] = {}
if only_values:
jd[command]["attributes"] = {cti.META_ATTRIBUTE_NAME:command}
else:
jd[command]["attributes"] = d.attributes
params_list = []
# marshal dict
for k in d.params:
if d.params[k][cti.META_ATTRIBUTE_NAME] != cti.META_ATTRIBUTE_REP_PRODUCE:
params_list.append(d.params[k])
new_params_list = []
for l in params_list:
# Copy the dict to avoid modification it
p = dict(l)
#Setting the correct empty value for lists
if cti.META_ATTRIBUTE_LIST in p and p[cti.META_ATTRIBUTE_LIST] and (cti.META_ATTRIBUTE_VALUE not in p or p[cti.META_ATTRIBUTE_VALUE] is None):
p[cti.META_ATTRIBUTE_VALUE]=[]
# For data entries remove everything but name and values
if only_values:
allowed = [cti.META_ATTRIBUTE_NAME, cti.META_ATTRIBUTE_VALUE]
to_remove = []
for k in p:
if k not in allowed: to_remove.append(k)
for k in to_remove: del(p[k])
new_params_list.append(p)
jd[command]["params"] = new_params_list
f = open(filename, 'w')
json.dump(types.marshall(jd), f, indent=4, allow_nan=False)
f.close()
#------------------------------------------------------------------------
def load(self, filename, default_data=None, no_none_value = False, remove_outdated_params=False):
filename = os.path.abspath(filename)
try:
f = open(filename, 'r')
try:
jd = json.load(f, encoding="utf_8")
except ValueError, e:
print filename
util.fatal("JSON file is incorrect. {0}".
format(e),
cti.CTI_ERROR_UNEXPECTED)
# When plugin_uid is defined, load the
# type, list and other meta attributes
# from the plugin default input file.
if default_data:
# if the entry does not contain all the parameters (could happen if CTI has been updated), we add them
if not no_none_value:
for cname, command in default_data.iteritems():
if cname in jd:
for param in command.params:
corresponding_params = [d[cti.META_ATTRIBUTE_NAME] for d in jd[cname]["params"]]
if param not in corresponding_params:
none_value = None
#Matrix none value for new parameters
if command.params[param][cti.META_ATTRIBUTE_TYPE] == cti.META_CONTENT_ATTRIBUTE_TYPE_MATRIX:
none_value = dict([(c,None) for c in command.params[param][cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES]])
#List none value for new parameters
elif cti.META_ATTRIBUTE_LIST in (command.params[param]) and\
command.params[param][cti.META_ATTRIBUTE_LIST]:
none_value = []
#Adding the new parameter
jd[cname]["params"].append({cti.META_ATTRIBUTE_NAME: param,
cti.META_ATTRIBUTE_VALUE: none_value,
cti.META_ATTRIBUTE_TYPE: command.params[param][cti.META_ATTRIBUTE_TYPE]})
else:
corresponding_param_index = [d[cti.META_ATTRIBUTE_NAME] for d in jd[cname]["params"]].index(param)
#Processing matrices updates
if command.params[param][cti.META_ATTRIBUTE_TYPE] == cti.META_CONTENT_ATTRIBUTE_TYPE_MATRIX:
if cti.META_ATTRIBUTE_VALUE in jd[cname]["params"][corresponding_param_index]:
old_values = jd[cname]["params"][corresponding_param_index][cti.META_ATTRIBUTE_VALUE]
else:
jd[cname]["params"][corresponding_param_index][cti.META_ATTRIBUTE_VALUE] = {}
old_values = {}
old_columns = old_values.keys()
#Warning on old params
for column_name in old_columns:
if column_name not in command.params[param][cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES]:
util.cti_plugin_print_warning("Matrix parameter '%s' doesn't have a column named '%s'. UID: %s"%(param, column_name, self.uid))
#Creating a default void value to fill the eventual new columns of the matrix
default_column_value = []
if old_columns:
default_column_value = len(old_values[old_columns[0]]) * [None]
#Generating missing columns with default values
for column_name in command.params[param][cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES]:
if column_name not in old_columns:
jd[cname]["params"][corresponding_param_index][cti.META_ATTRIBUTE_VALUE][column_name] = default_column_value
for cname, command in jd.iteritems():
#Validating command existance.
if not cname in default_data:
util.hapi_fail("Command %s doesn't exist in ctr_default file."%cname)
outdated_params = []
for v in command["params"]:
if not v[cti.META_ATTRIBUTE_NAME] in default_data[cname].params:
util.cti_plugin_print_warning("Command %s doesn't accept parameter %s. UID: %s"%(cname, v[cti.META_ATTRIBUTE_NAME], self.uid))
if remove_outdated_params:
outdated_params.append(command["params"].index(v))
if default_data[cname].params.has_key(v[cti.META_ATTRIBUTE_NAME]):
if cti.META_ATTRIBUTE_LIST in (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]):
v[cti.META_ATTRIBUTE_LIST] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_LIST]
if cti.META_ATTRIBUTE_TYPE in (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]):
v[cti.META_ATTRIBUTE_TYPE] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_TYPE]
else:
util.hapi_fail("Filename %s: can't get value type for parameter %s."%(filename, v[cti.META_ATTRIBUTE_NAME]))
if cti.META_ATTRIBUTE_DESC in (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]):
v[cti.META_ATTRIBUTE_DESC] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_DESC]
if cti.META_ATTRIBUTE_LONG_DESC in (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]):
v[cti.META_ATTRIBUTE_LONG_DESC] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_LONG_DESC]
if cti.META_ATTRIBUTE_PASSWORD in (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]):
v[cti.META_ATTRIBUTE_PASSWORD] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_PASSWORD]
if v.has_key(cti.META_ATTRIBUTE_TYPE) and v[cti.META_ATTRIBUTE_TYPE] == cti.META_CONTENT_ATTRIBUTE_TYPE_MATRIX:
if cti.META_ATTRIBUTE_LIST in v and v[cti.META_ATTRIBUTE_LIST]:
util.hapi_fail("Filename %s: illegal list attribute for MATRIX parameter %s."%filename, v[cti.META_ATTRIBUTE_NAME])
if cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES in default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]:
v[cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES]
else:
util.hapi_fail("Filename %s: can't get column names for MATRIX parameter %s."%(filename, v[cti.META_ATTRIBUTE_NAME]))
if cti.META_ATTRIBUTE_MATRIX_COLUMN_TYPES in default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]:
v[cti.META_ATTRIBUTE_MATRIX_COLUMN_TYPES] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_MATRIX_COLUMN_TYPES]
else:
util.hapi_fail("Filename %s: can't get column types for MATRIX parameter %s."%(filename, v[cti.META_ATTRIBUTE_NAME]))
if cti.META_ATTRIBUTE_MATRIX_COLUMN_DESCS in default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]:
v[cti.META_ATTRIBUTE_MATRIX_COLUMN_DESCS] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_MATRIX_COLUMN_DESCS]
else:
util.hapi_fail("Filename %s: can't get column desc for MATRIX parameter %s."%(filename, v[cti.META_ATTRIBUTE_NAME]))
if cti.META_ATTRIBUTE_MATRIX_COLUMN_LONG_DESCS in default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]:
v[cti.META_ATTRIBUTE_MATRIX_COLUMN_LONG_DESCS] = default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_MATRIX_COLUMN_LONG_DESCS]
else:
util.hapi_fail("Filename %s: can't get column long_desc for MATRIX parameter %s."%(filename, v[cti.META_ATTRIBUTE_NAME]))
if default_data[cname].params.has_key(v[cti.META_ATTRIBUTE_NAME]):
if cti.META_ATTRIBUTE_PRODUCED_BY in (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]):
v[cti.META_ATTRIBUTE_PRODUCED_BY] = (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_PRODUCED_BY])
if cti.META_ATTRIBUTE_TARGET in (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]]):
v[cti.META_ATTRIBUTE_TARGET] = (default_data[cname].params[v[cti.META_ATTRIBUTE_NAME]][cti.META_ATTRIBUTE_TARGET])
if remove_outdated_params:
outdated_params.reverse()
for op in outdated_params:
del command["params"][op]
#Formatting values
for command in jd.values():
for v in command["params"]:
islist = (cti.META_ATTRIBUTE_LIST in v and v[cti.META_ATTRIBUTE_LIST])
#Trigger a critical failure on missing value
if not cti.META_ATTRIBUTE_VALUE in v:
#If it's a plugin with no default value, we skip
if self.datatype == cti.CTR_ENTRY_PLUGIN:
continue
if islist:
v[cti.META_ATTRIBUTE_VALUE] = []
else:
v[cti.META_ATTRIBUTE_VALUE] = None
if not cti.META_ATTRIBUTE_TYPE in v:
ptype='TEXT'
else:
ptype = v[cti.META_ATTRIBUTE_TYPE]
if islist:
#Trigger a critical failure on wrongly formated list
if not isinstance(v[cti.META_ATTRIBUTE_VALUE], (list, type(None))):
util.hapi_fail("CORRUPTED file '{0}: parameter '{1}' should be a list, but contains '{2}' of type '{3}' instead "\
.format(filename, v[cti.META_ATTRIBUTE_NAME], v[cti.META_ATTRIBUTE_VALUE], type(v[cti.META_ATTRIBUTE_VALUE])))
matrix_types = None
if ptype == cti.META_CONTENT_ATTRIBUTE_TYPE_MATRIX:
matrix_types = dict([(v[cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES][i], v[cti.META_ATTRIBUTE_MATRIX_COLUMN_TYPES][i]) for i in range(len(v[cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES]))])
v[cti.META_ATTRIBUTE_VALUE] = types.from_json(v[cti.META_ATTRIBUTE_VALUE], ptype,
islist, matrix_types)
for name in jd.keys():
params = odict()
for p in jd[name]["params"]:
params[p[cti.META_ATTRIBUTE_NAME]] = p
self[name] = CmdNode(jd[name]["attributes"], params)
except IOError as e:
util.cti_plugin_print_error("Could not load %s"%filename)
raise e
except Exception as e:
util.cti_plugin_print_error("Error decoding %s"%filename)
util.cti_plugin_print_error(str(e))
raise e
#------------------------------------------------------------------------
def update_references(self, command, data_uid, old_value=None):
""" Update references
Args:
command: the set of parameters
data_uid: the entry UID
old_value: if provided, contains a dict of the old params of the entry
"""
plugin_uid = load_data_info(data_uid)[cti.DATA_INFO_PLUGIN_UID]
plugin_name = util_uid.uid_visualization(util_uid.CTI_UID(plugin_uid, cti.CTR_ENTRY_PLUGIN), cti.CTR_ENTRY_PLUGIN)
data_uid = str(data_uid)
params_list = self[command].params
for p in params_list:
if cti.META_ATTRIBUTE_TYPE in params_list[p] and \
params_list[p][cti.META_ATTRIBUTE_TYPE] == cti.META_CONTENT_ATTRIBUTE_TYPE_DATA_UID and \
cti.META_ATTRIBUTE_VALUE in params_list[p]:
#Coming from an update
if old_value is not None:
#If the field was not updated
if p not in old_value:
continue
#Coming from init with empty value/list
elif params_list[p][cti.META_ATTRIBUTE_VALUE] is None or \
(isinstance(params_list[p][cti.META_ATTRIBUTE_VALUE],list) and params_list[p][cti.META_ATTRIBUTE_VALUE] == []):
continue
if cti.META_ATTRIBUTE_TARGET in params_list[p]:
old_values_list = []
if old_value and \
old_value.has_key(params_list[p][cti.META_ATTRIBUTE_NAME]) and \
old_value[params_list[p][cti.META_ATTRIBUTE_NAME]]:
# Get old value
old_values_list = old_value[params_list[p][cti.META_ATTRIBUTE_NAME]]
params_list_str = map(str, params_list[p][cti.META_ATTRIBUTE_VALUE])
old_values_list = map(str, old_values_list)
# If old list and new list are the same, do nothing
if params_list_str != old_values_list:
add_list = []
# Search the values to delete (old_values_list) and
# the values to add (add_list)
for v in params_list_str:
if old_values_list.count(v) != 0:
try:
old_values_list.remove(v)
except:
util.hapi_error("Error with list in entry <%s>" % data_uid)
else:
add_list.append(v)
old_values_list = map(util_uid.CTI_UID, old_values_list)
# Update values on the "delete" list
for d in old_values_list:
if d:
# Check the delete is not already done
(_, out_entry) = load_data(d)
old_value_del = out_entry[command].params[params_list[p][cti.META_ATTRIBUTE_TARGET]][cti.META_ATTRIBUTE_VALUE]
if str(old_value_del) == data_uid:
update_entry_parameter(d, {params_list[p][cti.META_ATTRIBUTE_TARGET] :{"value": ""}})
add_list = map(util_uid.CTI_UID, add_list)
# Update values on the "add" list
for a in add_list:
if a:
# Check the add is not already done
(_, out_entry) = load_data(a)
old_value_add = out_entry[command].params[params_list[p][cti.META_ATTRIBUTE_TARGET]][cti.META_ATTRIBUTE_VALUE]
if str(old_value_add) != data_uid:
update_entry_parameter(a, {params_list[p][cti.META_ATTRIBUTE_TARGET] :{"value": data_uid}})
elif cti.META_ATTRIBUTE_PRODUCED_BY in params_list[p]:
table_target = util_uid.uid_visualization(
util_uid.CTI_UID(str(params_list[p][cti.META_ATTRIBUTE_PRODUCED_BY]), cti.CTR_ENTRY_PLUGIN),
cti.CTR_ENTRY_PLUGIN)
# Search the name_source on the link_table
res = database_manager.search(
{
'L':{'NAME':["source"], 'TYPE':"=", 'VAL': table_target},
'LOGIC':'AND',
'R':{
'L':{'NAME':["target"], 'TYPE':"=", 'VAL': plugin_name},
'LOGIC':'AND',
'R':{'NAME':["name_target"], 'TYPE':"=", 'VAL': params_list[p][cti.META_ATTRIBUTE_NAME]}
}
},
database.Database(),
"link_table",
["name_source"]
)
target_name = ""
for r in res:
target_name = r[0]
if target_name:
if old_value and \
old_value.has_key(params_list[p][cti.META_ATTRIBUTE_NAME]) and \
old_value[params_list[p][cti.META_ATTRIBUTE_NAME]]:
if str(old_value[params_list[p][cti.META_ATTRIBUTE_NAME]]) != str(params_list[p][cti.META_ATTRIBUTE_VALUE]):
# Load old value and check the update is not already done
(_, out_entry) = load_data(old_value[params_list[p][cti.META_ATTRIBUTE_NAME]])
old_values_list = out_entry[command].params[target_name][cti.META_ATTRIBUTE_VALUE]
old_values_list = map(str, old_values_list)
# Check the update is not already done
if old_values_list.count(data_uid) != 0:
# Update the list
old_values_list.remove(data_uid)
# Update the old value
update_entry_parameter(old_value[params_list[p][cti.META_ATTRIBUTE_NAME]],
{target_name: {"value": old_values_list}})
if params_list[p][cti.META_ATTRIBUTE_VALUE]:
# Load new value and check the update is not already done
(_, out_entry) = load_data(util_uid.CTI_UID(str(params_list[p][cti.META_ATTRIBUTE_VALUE])))
new_value = out_entry[command].params[target_name][cti.META_ATTRIBUTE_VALUE]
new_value = map(str, new_value)
if data_uid not in new_value:
# Update the new value
update_entry_parameter(params_list[p][cti.META_ATTRIBUTE_VALUE],
{target_name : {"value": [data_uid], "append": True}})
#------------------------------------------------------------------------
def load_defaults(plugin_uid):
"""Load default values for a given plugin.
Args:
plugin_uid: a plugin UID for which to load defaults.
Returns:
A tuple with input/output dictionaries.
"""
datatype = cti.CTR_ENTRY_PLUGIN
input_file = cti.cti_plugin_config_get_value(cti.PLUGIN_DEFAULT_INPUT_FILENAME)
output_file = cti.cti_plugin_config_get_value(cti.PLUGIN_DEFAULT_OUTPUT_FILENAME)
try:
return (Commands(datatype, plugin_uid, input_file), Commands(datatype, plugin_uid, output_file))
except Exception as e:
raise e
#------------------------------------------------------------------------
def load_data_info(uid):
""" Parses ctr_info.txt file and returns a dictionary for it.
Args:
uid: an uid or alias for data
Returns:
A dictionary on success. This dictionary represents the contents of
the ctr_info.txt file. None value is returend on failure.
"""
info_file = ctr.ctr_plugin_info_file_load_by_uid(uid)
if info_file is None:
util.hapi_error("Can't load the entry %s\n" % uid)
result = {
cti.DATA_INFO_PLUGIN_UID : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_PLUGIN_UID),
cti.DATA_INFO_ADDITIONAL_FILES : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_ADDITIONAL_FILES),
cti.DATA_INFO_ALIAS : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_ALIAS),
cti.DATA_INFO_DATE_TIME_END : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_DATE_TIME_END),
cti.DATA_INFO_DATE_TIME_START : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_DATE_TIME_START),
cti.DATA_INFO_PLUGIN_EXIT_CODE : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_PLUGIN_EXIT_CODE),
cti.DATA_INFO_NOTE : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_NOTE),
cti.DATA_INFO_TAG : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_TAG),
cti.DATA_INFO_USER_UID : ctr.ctr_plugin_info_get_value(info_file, cti.DATA_INFO_USER_UID)
}
return result
#------------------------------------------------------------------------
def load_data(data_uid, remove_outdated_params=False):
""" Loads data for a given uid.
Args:
data_uid: UID
Returns:
A tuple with Commands dictionaries (Commands(input), Commands(output)).
"""
entry_path = ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA, data_uid)
if not entry_path:
util.fatal("Cannot find entry <{0}>".
format(data_uid),
cti.CTI_ERROR_UNEXPECTED)
if not util_uid.is_valid_uid(data_uid):
util.cti_plugin_print_error("Wrong UID or alias: %s" % data_uid)
exit(cti.CTI_PLUGIN_ERROR_INVALID_ARGUMENTS)
# When working with data entry, we need the creator plugin_uid
# so we can get "list", "optional" and "type" meta attributes
# we can get this info from the ctr_info file.
info = load_data_info(data_uid)
if not info:
util.cti_plugin_print_error("Can't find a data entry for given data uid %s " % data_uid)
exit(cti.CTI_PLUGIN_ERROR_INVALID_ARGUMENTS)
plugin_uid = util_uid.CTI_UID(info[cti.DATA_INFO_PLUGIN_UID], cti.CTR_ENTRY_PLUGIN)
inp, out = load_defaults(plugin_uid)
# process input
in_basename = cti.cti_plugin_config_get_value(cti.PLUGIN_INPUT_FILENAME)
input_data = Commands(cti.CTR_ENTRY_DATA, data_uid, in_basename, inp, remove_outdated_params=remove_outdated_params)
# process output
out_basename = cti.cti_plugin_config_get_value(cti.PLUGIN_OUTPUT_FILENAME)
output_data = Commands(cti.CTR_ENTRY_DATA, data_uid, out_basename, out, remove_outdated_params=remove_outdated_params)
return (input_data, output_data)
#------------------------------------------------------------------------
class Entry(object):
""" Represents an entry. """
def __init__(self, entry, path, uid):
self.path = path
self.uid = uid
self.entry = entry
#------------------------------------------------------------------------
def get_file_from_entry(uid, file_path, safe=True, destname=None):
""" Get <file_path> inside entry <uid>
Args:
uid: CTI_UID of the entry (data entry by default)
file_path: the file to get
safe: if safe is True, we check that a file with
the same name does not already exists.
destname: destination filename
plugin: if set then uid considered as plugin entry
Effect:
copies <file_path> to the current directory.
"""
if not util_uid.is_valid_uid(uid):
util.fatal("The uid <{0}> is not a valid UID".
format(uid),
cti.CTI_ERROR_UNEXPECTED)
if file_path in DataEntryMetaFiles:
util.hapi_error("metafile get is forbidden")
return
path_type = ctr.ctr_plugin_get_path_and_type_by_uid(uid)
fdir = cti.cti_plugin_config_get_value(cti.CTR_ENTRY_FILES_DIR)
if path_type:
path = path_type.key
src = os.path.join(path, fdir, file_path)
dest = os.path.join(os.getcwd(), file_path)
if safe and os.path.exists(dest):
util.hapi_error("File <%s> already exists" % dest)
return
if not destname:
shutil.copy(src, dest)
else:
shutil.copy(src, destname)
else:
util.hapi_error("Could not find entry <%s>" % uid)
#------------------------------------------------------------------------
def get_dir_from_entry(uid, dst):
""" Get files inside entry <uid>
Args:
uid: CTI_UID of the entry
Effect:
copies everything from entry's files subdir to the dst
"""
if not util_uid.is_valid_uid(uid):
util.fatal("The uid <{0}> is not a valid UID".
format(uid),
cti.CTI_ERROR_UNEXPECTED)
entry_path = ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA, uid)
if not entry_path:
util.fatal("Cannot find entry <{0}>".
format(uid),
cti.CTI_ERROR_UNEXPECTED)
fdir = cti.cti_plugin_config_get_value(cti.CTR_ENTRY_FILES_DIR)
path = os.path.join(entry_path, fdir)
try:
distutils.dir_util.copy_tree(path, dst)
except distutils.dir_util.DistutilsFileError, why:
util.hapi_fail("%s " % str(why))
#------------------------------------------------------------------------
def update_entry_parameter(entry_uid, values, command="init"):
""" Update an entry
Args:
entry_uid: the entry UID
values: a dictionary of keys and values to update. for example: {"a": {"value":"toto"}, "b":{"value": ["titi"], "append": True}}
append is not mandatory, and should be used with list only. If append is false, the previous list is fully replaced.
command: the command of the parameter to update
Returns: 0 if it fails, 1 if it succeeds
"""
def update_info(key, value):
data_info = ctr.ctr_plugin_info_file_load_by_uid(entry_uid)
ctr.ctr_plugin_info_put_value(data_info, key, value)
ctr_info_name = os.path.join(ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA,
entry_uid),
cti.cti_plugin_config_get_value(cti.DATA_INFO_FILENAME))
ctr.ctr_plugin_info_record_in_file(data_info, ctr_info_name)
def update_db(key, value, entry_uid, plugin_uid, append, db, type_v):
if key in ["repository", "path_repository", cti.DATA_INFO_DATE_TIME_START]:
if database_manager.update("entry_info", {key: value}, {'NAME':["entry_uid"], 'TYPE':"=", 'VAL':str(entry_uid)}, db) is False:
return False
else:
plugin_alias = alias.get_plugin_alias(plugin_uid)
id_entry = database_manager.uid2id(entry_uid, db)
_,output = load_defaults(plugin_uid)
key_defaults = output["init"].params[key]
if cti.META_ATTRIBUTE_LIST in key_defaults and key_defaults[cti.META_ATTRIBUTE_LIST]:
if type_v != cti.META_CONTENT_ATTRIBUTE_TYPE_DATA_UID:
table_temp = "%s_%s" % (plugin_alias, key)
id_temp = "id_{0}".format(plugin_alias)
if not append:
database_manager.delete(table_temp, {'NAME':[id_temp], 'TYPE':"=", 'VAL':id_entry}, db)
if len(value) > 0 and not util_uid.is_valid_uid(value[0]):
rows = []
for v in value:
rows.append({key:v, id_temp:id_entry})
if database_manager.insert_rows(table_temp, rows, db) is False:
return False
elif type_v == cti.META_CONTENT_ATTRIBUTE_TYPE_MATRIX:
table_temp = "%s_%s" % (plugin_alias, key)
id_table_temp = "id_{0}".format(plugin_alias)
matrix_columns = key_defaults[cti.META_ATTRIBUTE_MATRIX_COLUMN_NAMES]
if not append:
database_manager.delete(table_temp, {'NAME':[id_table_temp], 'TYPE':"=", 'VAL':id_entry}, db)
if value and value[matrix_columns[0]]:
for line in range(len(value[matrix_columns[0]])):
line_dict = dict([(column, value[column][line]) for column in matrix_columns])
line_dict[id_table_temp] = id_entry
if database_manager.insert(table_temp, line_dict, db) is False:
return False
else:
if util_uid.is_valid_uid(value):
value = database_manager.uid2id(value, db)
if database_manager.update(plugin_alias,
{key: value},
{'NAME':["id_%s" % plugin_alias], 'TYPE':"=", 'VAL':str(id_entry)}, db) is False:
return False
return True
db = database.Database()
if entry_uid is None:
return 0
res = load_data(entry_uid)
if res is None:
return 0
(_, out) = res
old_value = {}
for k in values.keys():
#Setting default "undefined" old value
old_value[k] = None
if isinstance(values[k]["value"], list):
old_value[k] = []
append = False
if values[k].has_key("append"):
append = values[k]["append"]
if k in [cti.DATA_INFO_DATE_TIME_START]:
update_info(k, values[k]["value"])
else:
if out[command].params.has_key(k):
old_value[k] = copy.copy(out[command].params[k][cti.META_ATTRIBUTE_VALUE])
if isinstance(values[k]["value"], list) and append:
if not out[command].params[k].has_key(cti.META_ATTRIBUTE_VALUE):
out[command].params[k][cti.META_ATTRIBUTE_VALUE] = []
out[command].params[k][cti.META_ATTRIBUTE_VALUE] += values[k]["value"]
elif k not in ["repository", "path_repository"]:
out[command].params[k][cti.META_ATTRIBUTE_VALUE] = values[k]["value"]
info = load_data_info(entry_uid)
type_v = ""
if out[command].params.has_key(k) and out[command].params[k].has_key(cti.META_ATTRIBUTE_TYPE):
type_v = out[command].params[k][cti.META_ATTRIBUTE_TYPE]
if not update_db(k, values[k]["value"], entry_uid, cti.CTI_UID(info[cti.DATA_INFO_PLUGIN_UID]), append, db, type_v):
return 0
path = ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA, entry_uid)
out.record_output(command, path)
out.update_references("init", entry_uid, old_value)
# Update the end date
date_end = str(time.strftime('%Y/%m/%d %H:%M:%S',time.localtime()))
if database_manager.update("entry_info", {"date_time_end": date_end}, {'NAME':["entry_uid"], 'TYPE':"=", 'VAL':str(entry_uid)}, db) is False:
return 0
update_info(cti.DATA_INFO_DATE_TIME_END, date_end)
return 1
#------------------------------------------------------------------------
def create_data_entry(data_id, rep, username, alias_data=None):
return create_entry(data_id, rep, cti.CTR_ENTRY_DATA, username, alias_data)
#------------------------------------------------------------------------
def create_plugin_entry(plugin_id, rep, username, alias_plugin=None):
return create_entry(plugin_id, rep, cti.CTR_ENTRY_PLUGIN, username, alias_plugin)
#------------------------------------------------------------------------
def create_entry(uid, rep_type, datatype, username, alias_entry=None):
"""General wrapper for create_entry.
Args:
uid: CTI_UID
rep_type: the repostitory type
datatype: the CTR entry data type (CTR_ENTRY_PLUGIN or CTR_ENTRY_DATA)
username: the username
alias_entry: the alias
Returns:
An Entry object or None if failed
"""
rep_type = str(rep_type).strip()
local_repository = None
# recognize repository type and convert it
if rep_type == cti.COMMON_REPOSITORY or rep_type == "1":
rep = cti.CTR_REP_COMMON
elif cti.cti_plugin_is_UID(rep_type):
rep = cti.CTR_REP_LOCAL
local_repository = util_uid.CTI_UID(rep_type, cti.CTR_ENTRY_REPOSITORY)
# We update the last_use date of the repository
now = datetime.datetime.now()
date = now.strftime('%Y/%m/%d %H:%M:%S')
try:
update_entry_parameter(local_repository, {"last_use": {"value": date}})
except:
print "Repository entry not found"
elif rep_type == cti.LOCAL_REPOSITORY or rep_type == "0":
rep = cti.CTR_REP_LOCAL
if not repository.local_exist():
print "Can't create data entry."
print "%s \n %s \n %s\n" % (cti.CTI_ERROR_MSG_REP_DOESNT_EXISTS,
cti.CTI_ERROR_MSG_CREATE_REP,
cti.CTI_ERROR_MSG_IMPORT_REP)
exit(cti.CTI_PLUGIN_ERROR_LOCAL_REP_DOESNT_EXISTS)
# We update the last_use date of the repository
repository_path = repository.get_local_rep()
uid_rep = ctr.ctr_plugin_global_index_file_get_uid_by_ctr(repository_path)
now = datetime.datetime.now()
date = now.strftime('%Y/%m/%d %H:%M:%S')
if uid_rep is not None:
update_entry_parameter(uid_rep, {"last_use": {"value": date}})
elif rep_type == cti.TEMP_REPOSITORY or rep_type == "2":
rep = cti.CTR_REP_TEMP
else:
#trying to see if it is a repository alias
local_repository = alias.get_repository_uid(rep_type)
if local_repository is None:
#<NEED-FIX The following redundant line has been added because hapi_error() doesn't print anything on the console, leaving the user confused.
util.cti_plugin_print_error("Unkown repository type {0}".format(rep_type))
#NEED-FIX>
util.hapi_error("Unkown repository type")
return None
else:
rep = cti.CTR_REP_LOCAL
now = datetime.datetime.now()
date = now.strftime('%Y/%m/%d %H:%M:%S')
update_entry_parameter(local_repository, {"last_use": {"value": date}})
db = database.Database()
result = list(database_manager.search_uids(
{'NAME':["username"], 'TYPE':"=", 'VAL':username},
db,
"user"
))
if len(result) == 1:
user_uid = result[0]
else:
util.hapi_error("Error while converting username to user_uid.")
return None
if user_uid is None:
util.hapi_error("Error with username_to_user_uid.")
return None
# create entry
x = ctr.ctr_plugin_create_entry(uid, rep, cti.CTR_ENTRY_DATA, local_repository, user_uid)
if x is None:
print(cti.CTI_ERROR_MSG_CANT_CREATE_ENTRY)
exit(cti.CTI_ERROR_UNEXPECTED)
output_uid = cti.CTI_UID(x.key)
output_dir = str(x)
# check alias
if alias_entry is not None:
if datatype == cti.CTR_ENTRY_PLUGIN:
if alias.set_plugin_alias(output_uid, alias_entry) == 0:
util.cti_plugin_print_error("Cannot set the alias %s (already used?)" % (alias_entry))
elif datatype == cti.CTR_ENTRY_DATA:
if alias.set_data_alias(output_uid, alias_entry) == 0:
util.cti_plugin_print_error("Cannot set the alias %s (already used?)"%(alias_entry))
else:
util.hapi_error("Can't set alias %s. Unkown data type. " % alias_entry)
return None
if datatype == cti.CTR_ENTRY_DATA:
update_entry_parameter(user_uid, {"last_entry_created": {"value": output_uid}})
return Entry(x, output_dir, output_uid)
#------------------------------------------------------------------------
def put_file_in_entry(uid, file_path, safe=True, filename=None):
""" Put <file_path> inside entry <uid>.
Args:
uid: CTI_UID of the entry
file_path: the file to put, path is relative to the
current working directory. It could be a list of files
        safe: if safe is True, we check that a file with
        the same name does not already exist.
        filename: the filename of the file to put. For the moment, this works only if file_path is NOT a list @todo
"""
if not(isinstance(file_path, list)):
file_path=[file_path]
dir_append = cti.cti_plugin_config_get_value(cti.CTR_ENTRY_FILES_DIR)
path = ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA, uid)
if path:
path_dest = os.path.join(path, dir_append)
if (not os.path.exists(path_dest)):
os.makedirs(path_dest)
filename_tmp = None
for f in file_path:
if f in DataEntryMetaFiles:
util.cti_plugin_print_error("metafile put is forbidden")
return
if not filename:
filename_tmp = os.path.basename(f)
else:
filename_tmp = filename
dest = os.path.join(path_dest, filename_tmp)
if safe and os.path.exists(dest):
util.cti_plugin_print_error("File <%s> already exists" % dest)
return
shutil.copy(f, dest)
return filename_tmp
else:
util.cti_plugin_print_error("Could not find entry <%s>" % uid)
#------------------------------------------------------------------------
def put_dir_in_entry(uid, path=None, dir_dest=""):
""" Put <path> inside entry <uid>.
Args:
uid: CTI_UID of the entry
path: the directory to put
dir_dest: the destination directory
"""
pdine_log_names = {"log":[]}
if not path:
path = os.getcwd()
entry_path = ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA, uid)
if not entry_path:
util.hapi_fail("Can't find \"%s\" UID. \n"
"Hint: try to import your current repository with \"cti repository import\""
% uid)
return []
path = os.path.abspath(path)
fdir = cti.cti_plugin_config_get_value(cti.CTR_ENTRY_FILES_DIR)
# log filenames
# TODO: refactor, add custom exclude patterns as a parameter
#------------------------------------------------------------------------
def save_list(src, names):
toignore = []
l = pdine_log_names["log"]
for n in names:
if n.find('.ctr') != -1:
toignore.append(n)
else:
src = os.path.abspath(src)
src_d = src[len(path)+1:]
if n != "." and n != "./":
if not os.path.isdir(os.path.join(src, n)):
if n.startswith("./"):
l.append(str(os.path.join(src_d, n[2:])))
else:
l.append(str(os.path.join(src_d, n)))
if src.find('.ctr') != -1:
toignore.append(src)
return toignore
#------------------------------------------------------------------------
#------------------------------------------------------------------------
def copytree(src, dst, symlinks=False, ignore=None, prefix=""):
for item in os.listdir(src):
s = os.path.join(src, item)
d = os.path.join(dst, item)
if os.path.isdir(s):
# if directory already exists...
if os.path.isdir(d):
copytree(s,d,symlinks,ignore, prefix=item)
else:
shutil.copytree(s, d, symlinks, ignore)
else:
shutil.copy2(s, d)
pdine_log_names["log"].append(os.path.join(prefix, item))
#------------------------------------------------------------------------
try:
dest = os.path.join(entry_path, fdir, dir_dest)
if not os.path.isdir(dest):
os.makedirs(dest)
copytree(path, dest, ignore=save_list)
except (IOError, os.error), why:
util.hapi_fail("%s " % str(why))
return pdine_log_names["log"]
#------------------------------------------------------------------------
def rm_all_files_from_entry(uid):
""" Remove all the files from an entry
Args:
uid: CTI_UID of the entry
    Returns:
        0 on success, -1 on error.
"""
if not util_uid.is_valid_uid(uid):
util.fatal("The uid <{0}> is not a valid UID".
format(uid),
cti.CTI_ERROR_UNEXPECTED)
path = ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA, uid)
if path:
src = os.path.join(path, "files")
try:
shutil.rmtree(src)
except:
print "Error when removing the files"
return -1
else:
util.hapi_error("Could not find entry <%s>" % uid)
return -1
return 0
#------------------------------------------------------------------------
def rm_file_from_entry(uid, filename):
""" Remove a file from an entry
Args:
uid: CTI_UID of the entry
filename: filename of the file to remove
    Returns:
        0 on success, -1 on error.
"""
if not util_uid.is_valid_uid(uid):
util.fatal("The uid <{0}> is not a valid UID".
format(uid),
cti.CTI_ERROR_UNEXPECTED)
path = ctr.ctr_plugin_get_path_by_uid(cti.CTR_ENTRY_DATA, uid)
if path:
src = os.path.join(path, "files", filename)
try:
if os.path.isfile(src):
os.remove(src)
else:
shutil.rmtree(src)
except:
print "Error when removing the file or the directory"
return -1
else:
util.hapi_error("Could not find entry <%s>" % uid)
return -1
return 0
#------------------------------------------------------------------------
|
#!/usr/bin/python
"""
Copyright (C) 2010 Simon Dawson, Meryl Baquiran, Chris Ellis
and Daniel Kenji Toyama
Copyright (C) 2011 Simon Dawson, Daniel Kenji Toyama
This file is part of Gleba
Gleba is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Gleba is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Gleba. If not, see <http://www.gnu.org/licenses/>.
Path:
frontend.scale
Purpose:
Scale simulator.
It writes a chosen weight to a serial port, just like a normal scale.
"""
import multiprocessing
import Queue # for Queue.Empty Exception
import subprocess, shlex # for "forking socat"
from gi.repository import Gtk
import serial
SOCAT_EXECUTABLE = '/usr/bin/socat'
SOCAT_ARGS = '-d -d -u pty,raw,echo=0 pty,raw,echo=0'
CYCLE_TIME = 0.2
class ScaleProcess(multiprocessing.Process):
def __init__(self, output_format = None, *args, **kwargs):
        # multiprocessing.Process expects (group, target, name, args, kwargs);
        # forwarding (None, args, kwargs) positionally set name to a dict,
        # so don't forward them at all (run() is overridden anyway)
        super(ScaleProcess, self).__init__()
if 'port' in kwargs:
self.serial_port = serial.Serial(kwargs['port'])
else:
self.serial_port = serial.Serial()
if 'queue' not in kwargs:
raise Exception('A multiprocessing.Queue is necessary')
self.queue = kwargs['queue']
        if output_format is None:
self.output_format = 'ST,GS, {:f}KG,'
else:
self.output_format = output_format
def run(self):
weight = '0.000'
while self.is_alive():
try:
weight = self.queue.get(True, CYCLE_TIME)
except Queue.Empty:
pass
self.serial_port.write(self.line(weight))
def line(self, weight):
"""
Returns the 'line' as given by a scale.
"""
return (self.output_format + '\n').format(float(weight))
def extract_device_from_line(line):
"""
Given a line with format '..... some_device' returns
the string 'some_device'.
"""
return line[line.rfind(' ') + 1 : -1]
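# Illustrative example: with "-d -d", socat logs lines such as
#   "2011/01/01 12:00:00 socat[1234] N PTY is /dev/pts/3\n"
# from which extract_device_from_line() returns '/dev/pts/3'
# (the trailing newline is dropped by the [-1] slice).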
class Scale(Gtk.Window):
def __init__(self, *args, **kwargs):
command = SOCAT_EXECUTABLE + ' ' + SOCAT_ARGS
self.socat_process = subprocess.Popen(shlex.split(command),
shell = False,
stderr = subprocess.PIPE)
# first line of socat output (writing end of the connection)
socat_error = self.socat_process.stderr.readline()
device_for_writes = extract_device_from_line(socat_error)
# second line of socat output (reading end of the connection)
socat_error = self.socat_process.stderr.readline()
device_for_reads = extract_device_from_line(socat_error)
# consume last line (some extra info)
socat_error = self.socat_process.stderr.readline()
print ('Writing to {0} port. You can read from {1}'.format(
device_for_writes,
device_for_reads
))
self.queue = multiprocessing.Queue(1)
self.scale_process = ScaleProcess(port = device_for_writes,
queue = self.queue)
self.scale_process.start()
# GTK related stuff
super(Scale, self).__init__()
self.set_title("Scale simulator")
self.connect("delete_event", self.delete_event)
self.connect("destroy", self.destroy)
self.main_container = Gtk.HBox()
self.main_container.set_size_request(800, 40)
adj = Gtk.Adjustment(0.0, # initial value
0.0, # lower bound
10.0, # upper bound
0.001, # step increment
0, # page increment
0) # page size
adj.connect('value_changed', self.slider_change)
self.slider = Gtk.HScale.new(adj)
self.slider.set_size_request(700, 20)
self.slider.set_digits(3)
self.slider.set_value_pos(Gtk.PositionType.TOP)
self.slider.set_draw_value(True)
self.main_container.add(self.slider)
self.add(self.main_container)
self.show_all()
def delete_event(self, widget, event, data = None):
return False
def destroy(self, widget, data = None):
self.scale_process.terminate()
self.scale_process.serial_port.close() # close serial port
self.socat_process.terminate()
Gtk.main_quit()
def slider_change(self, slider):
"""
Puts the current value of self.slider into self.queue.
"""
weight = str(slider.get_value())
try:
self.queue.put(weight, True, CYCLE_TIME)
print('') # bug in Python? See commit fc96c938 notes
except Queue.Full:
pass
if __name__ == '__main__':
scale = Scale()
Gtk.main()
|
The defendant has denied the allegation.
A 28-year-old woman appeared at Burnley Magistrates' Court accused of inflicting grievous bodily harm on a baby.
Kelly Scrimshaw, of Aylesbury Walk in the town, denied the allegation and her case will be heard at Burnley Crown Court. The alleged offence took place last year in Burnley.
Prosecutor Miss Charlotte Crane told the hearing the infant was said to have suffered a shoulder fracture. Scrimshaw, who was represented by Mr Nick Cassidy, was unconditionally bailed until August 13 by District Judge John Maxwell. |
# -*- coding:utf-8 -*-
from random import randint
import pygame
from pygame.locals import *
MAZE_MAX = 50
map1 = {}
for x in xrange(0, MAZE_MAX + 2):
map1[x] = {}
for y in xrange(0, MAZE_MAX + 2):
map1[x][y] = 0
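# search() below is a recursive-backtracker maze carver: logical cell
# (xx, yy) lives at grid position (2*xx, 2*yy), the odd offsets in between
# hold the walls, and a random turn direction (clockwise or
# counter-clockwise) varies the carving order.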
def search(xx, yy):
d = {0: {0: 0, 1: 1}, 1: {0: 1, 1: 0}, 2: {0: 0, 1: -1}, 3: {0: -1, 1: 0}}
zx = xx * 2
zy = yy * 2
map1[zx][zy] = 1
if randint(0, 1) == 1:
turn = 1
else:
turn = 3
next_value = randint(0, 3)
for i in xrange(0, 4):
if map1[zx + 2 * d[next_value][0]][zy + 2 * d[next_value][1]] == 0:
map1[zx + d[next_value][0]][zy + d[next_value][1]] = 1
search(xx + d[next_value][0], yy + d[next_value][1])
next_value = (next_value + turn) % 4
return 0
def make_maze(xi, yi):
    # Border walls: rows 0 and 2*yi+2, columns 0 and 2*xi+2. The original
    # used a single bound for both loops, which only worked for square mazes.
    x_border = 2 * xi + 2
    y_border = 2 * yi + 2
    for z1 in xrange(0, x_border + 1):
        map1[z1][0] = 1
        map1[z1][y_border] = 1
    for z1 in xrange(0, y_border + 1):
        map1[0][z1] = 1
        map1[x_border][z1] = 1
map1[1][2] = 1
map1[2 * xi + 1][2 * yi] = 1
search((randint(1, xi)), (randint(1, yi)))
return
def run():
x = 22
y = 22
make_maze(x, y)
# for z2 in xrange(1, y * 2 + 1 + 1):
# str1 = ""
# for z1 in xrange(1, x * 2 + 1 + 1):
# if map1[z1][z2] == 0:
# str1 += "-" # print "█"
# else:
# str1 += " " # print " "
# if z2 <= y * 2:
# print str1 + "\n"
screen_size = (640, 480)
diamonds_size = (10, 10)
pygame.init()
screen = pygame.display.set_mode(screen_size, 0, 32)
background = pygame.surface.Surface(screen_size).convert()
diamonds1 = pygame.surface.Surface(diamonds_size).convert()
diamonds2 = pygame.surface.Surface(diamonds_size).convert()
background.fill((255, 255, 255))
diamonds1.fill((128, 128, 128))
diamonds2.fill((0, 0, 0))
while True:
for event in pygame.event.get():
if event.type == QUIT:
return
screen.blit(background, (0, 0))
for z2 in xrange(1, y * 2 + 1 + 1):
for z1 in xrange(1, x * 2 + 1 + 1):
if map1[z1][z2] == 0:
screen.blit(diamonds1, (z1*10, z2*10))
else:
screen.blit(diamonds2, (z1*10, z2*10))
pygame.display.update()
return 0
if __name__ == "__main__":
run()
|
"Justin's Jabber" – Back Issue Comics! 4/5/19 | EC!
Amazing Spider-Man #9, first appearance of Electro, Very Good Minus (3.5), $400.
Amazing Spider-Man #14, first appearance of The Green Goblin, Very Good Minus (3.5), $1,000.
Amazing Spider-Man #31, first appearance of Gwen Stacy and Harry Osborn, Very Good Minus (3.5), $100.
At least Peter Parker spruced himself up a bit over the years.
That original look was rough though.
Bespectacled, sweater vest, drab slacks.
Just because you are a nerd doesn’t mean you have to look like a nerd. |
#!/usr/bin/env python
# Copyright 2018 Criteo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""bgutil API."""
from __future__ import absolute_import
import argparse
import logging
import flask_restplus as rp
from biggraphite.cli.web import context
from biggraphite.cli.web.capture import Capture
from biggraphite import settings as bg_settings
api = rp.Namespace("bgutil", description="bgutil as a service")
command = api.model(
"Command",
{"arguments": rp.fields.List(rp.fields.String(), description="command arguments")},
)
class UnknownCommandException(Exception):
"""Unknown command exception."""
def __init__(self, command_name):
"""Init UnknownCommandException."""
super(UnknownCommandException, self).__init__(
"Unknown command: %s" % command_name
)
def parse_command(command_name, payload):
"""Parse and build a BgUtil command."""
    # Import here only because we are inside a command, and `commands`
    # needs to be able to import files from all commands.
from biggraphite.cli import commands
    cmd = None
    for candidate in commands.COMMANDS:
        if candidate.NAME == command_name:
            cmd = candidate
            break
    if cmd is None:
        raise UnknownCommandException(command_name)
parser = NonExitingArgumentParser(add_help=False)
parser.add_argument(
"--help",
action=_HelpAction,
default=argparse.SUPPRESS,
help="Show this help message and exit.",
)
bg_settings.add_argparse_arguments(parser)
cmd.add_arguments(parser)
if not payload:
arguments = []
else:
arguments = payload.get("arguments", [])
    args = list(arguments)
opts = parser.parse_args(args)
return cmd, opts
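# The expected payload mirrors the "Command" model above, e.g.
#   cmd, opts = parse_command("some-command", {"arguments": []})
# where "some-command" is any name listed in commands.COMMANDS.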
class _HelpAction(argparse.Action):
"""Help Action that sends an exception."""
def __init__(
self,
option_strings,
dest=argparse.SUPPRESS,
default=argparse.SUPPRESS,
help=None,
):
"""Constructor."""
super(_HelpAction, self).__init__(
option_strings=option_strings,
dest=dest,
default=default,
nargs=0,
help=help,
)
def __call__(self, parser, namespace, values, option_string=None):
"""Help action."""
raise Exception(parser.format_help())
class NonExitingArgumentParser(argparse.ArgumentParser):
"""An ArgumentParser that doesn't exit."""
def exit(self, status=0, message=None):
"""Override the normal exit behavior."""
if message:
raise Exception(message)
@api.route("/<string:command_name>")
@api.param("command_name", "bgutil sub-command to run.")
class BgUtilResource(rp.Resource):
"""BgUtil Resource.
This could be implemented with one resource per command if we
dynamically looked at commands, but it's simpler this way.
"""
@api.doc("Run a bgutil command.")
@api.expect(command)
def post(self, command_name):
"""Starts a bgutil command in this thread."""
result = None
try:
cmd, opts = parse_command(command_name, api.payload)
with Capture() as capture:
cmd.run(context.accessor, opts)
result = capture.get_content()
except UnknownCommandException as e:
rp.abort(message=str(e))
except Exception as e:
logging.exception("bgutil failed")
rp.abort(message=str(e))
context.accessor.flush()
# TODO:
# - Allow asynchronous execution of commands.
# To do that we might want to run new bgutil process and to add
# a --bgutil_binary option to bgutil web (by default argv[0]). It would be
# much easier to capture output and input this way.
return result
@api.route("/async/<string:command_name>")
@api.param("command_name", "bgutil sub-command to run.")
class BgUtilAsyncResource(rp.Resource):
"""BgUtil asynchronous resource."""
@api.doc("Run a bgutil command.")
@api.expect(command)
@api.response(201, "Created")
def post(self, command_name):
"""Run asynchronously a BgUtil command."""
# TODO: monitor background tasks and feed /workers with it
try:
cmd, opts = parse_command(command_name, api.payload)
label = self._make_label(command_name)
context.task_runner.submit(label, cmd, opts)
except UnknownCommandException as e:
rp.abort(message=str(e))
except Exception as e:
logging.exception("bgutil failed")
rp.abort(message=str(e))
context.accessor.flush()
return "Running in background.", 201
@staticmethod
def _make_label(command_name):
return "%s %s" % (command_name, " ".join(api.payload["arguments"]))
@api.route("/tasks/")
class BgUtilTasksResource(rp.Resource):
"""BgUtil list asynchronous resource."""
@api.doc("List asynchronous bgutil tasks.")
def get(self):
"""List asynchronous bgutil tasks."""
return [self._format(task) for task in context.task_runner.tasks]
@staticmethod
def _format(task):
return {
"label": task.label,
"submitted_on": BgUtilTasksResource._format_date(task.submitted_on),
"started_on": BgUtilTasksResource._format_date(task.started_on),
"completed_on": BgUtilTasksResource._format_date(task.completed_on),
"status": task.status.value,
"result": task.result,
}
@staticmethod
def _format_date(date):
return date.isoformat() if date else None
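# Example usage of the endpoints above (illustrative; <command> is any
# bgutil sub-command name):
#   POST /bgutil/<command> with JSON body {"arguments": [...]}
#   POST /bgutil/async/<command> returns 201 and runs in the background
#   GET /bgutil/tasks/ lists submitted background tasks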
|
What is the part number for the 30 series mech that has 6 terminals?
The 30 series switch mechanism that has 6 terminals is the 30MD2.
This is a 10A - 2 way, double pole, double throw switch. |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2016-02-02 16:47
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Miete',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('kaltmiete', models.PositiveSmallIntegerField(verbose_name='Kaltmiete')),
('groesse', models.DecimalField(decimal_places=2, max_digits=6, verbose_name='Größe')),
('plz', models.PositiveIntegerField(verbose_name='Postleitzahl')),
('stadtbezirk', models.CharField(max_length=30, verbose_name='stadtbezirk')),
('added', models.DateTimeField(auto_now_add=True, verbose_name='hinzugefügt')),
('bewohner', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Bewohner')),
('abschluss', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Jahr des Abschlusses des Mietvertrags')),
('erhoehung', models.PositiveSmallIntegerField(blank=True, null=True, verbose_name='Jahr der letzten Mieterhöhung')),
('vermieter', models.CharField(blank=True, choices=[('NP', 'gemeinnützig'), ('PR', 'privat'), ('CO', 'Unternehmen')], max_length=2, verbose_name='Vermieter')),
('email', models.EmailField(blank=True, max_length=254, verbose_name='E-Mail für Benachrichtigung über Ergebnis')),
('ipaddress', models.GenericIPAddressField(blank=True, null=True, verbose_name='IP Adresse')),
],
options={
'verbose_name_plural': 'Mieten',
},
),
]
|
Regardless of whether you’ve been married for one year or 50 years, there is one constant challenge facing every couple—to connect. By providing a little nudge here and there, couples remember just how important it is to have one-on-one time. Churches that help couples to date are game changers.
What’s included with these date nights?
This content originally appeared in the GAME CHANGERS strategy pack. |
import numpy as np
from Engine.Tracker.Filters import KalmanFilter
class BallFilter(KalmanFilter):
def __init__(self, id=None):
super().__init__(id)
self.transition_model = np.array([[1, 0.05, 0, 0], # Position x
[0, 1, 0, 0], # Speed x
[0, 0, 1, 0.05], # Position y
[0, 0, 0, 1]]) # Speed y
self.state_number = int(np.size(self.transition_model, 0))
self.observable_state = int(np.size(self.observation_model, 0))
self.x = np.zeros(self.state_number)
@property
def position(self):
if self.is_active:
return self.x[0::2]
@property
def velocity(self):
if self.is_active:
return self.x[1::2]
def update_transition_model(self, dt):
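        # write dt into the (position, speed) coupling terms, i.e.
        # entries (0, 1) and (2, 3) of the transition matrix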
self.transition_model[[0, 2], [1, 3]] = dt
def process_covariance(self, dt):
sigma_acc_x = 10
sigma_acc_y = sigma_acc_x
process_covariance = \
np.array([
np.array([0.25 * dt ** 4, 0.50 * dt ** 3, 0, 0]) * sigma_acc_x ** 2,
np.array([0.50 * dt ** 3, 1.00 * dt ** 2, 0, 0]) * sigma_acc_x ** 2,
np.array([0, 0, 0.25 * dt ** 4, 0.50 * dt ** 3]) * sigma_acc_y ** 2,
np.array([0, 0, 0.50 * dt ** 3, 1.00 * dt ** 2]) * sigma_acc_y ** 2
])
return process_covariance
def initial_state_covariance(self):
return np.diag([10 ** 3, 0, 10 ** 3, 0])
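# Minimal usage sketch (illustrative; predict/update and observation_model
# come from the KalmanFilter base class in Engine.Tracker.Filters):
#   ball = BallFilter(id=1)
#   ball.update_transition_model(dt=1/60)
#   Q = ball.process_covariance(dt=1/60)  # white-noise acceleration model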
|
My digital home on the interwebs!
Abhishek Khanna's Blog 2018 finance investing Let's re-learn!
A long time ago in a place far, far away, I used to have an interesting friend. One Friday evening, like any other Friday evening, we were chilling at our regular Theka, when suddenly he made an interesting proposition. Though in a very businesslike but suspiciously hushed tone. Only because I was his close friend, he wanted to introduce me to a business idea, where I just need to invest a small amount. The good part about his plan was that the returns would start flowing in in less than a month. The even better part about his idea was that soon after, I will have to do nothing and the so-called business will run on auto-pilot mode and I would be a sit-at-home millionaire!
But this post is neither about MLM schemes, nor about old friends. This post is about a simple question that I usually ask anyone giving me an investment idea or, as it is better known, a 'tip' – if your idea is so commercially viable, why do you want to share your riches with me? Why don’t you mortgage your house and invest in the business? If everything goes well, soon after, by just sitting in your mortgaged house, you will be the proud owner of 100 such (mortgaged?) houses!
This is probably just one example, and a very crude one at that. But the deeper you dive into the field of investing, the more examples you will find of people and even companies misselling and promising (mostly?) unachievable returns. And that’s where, my friends, this line of thinking is helpful. If the cost of capital (plus transaction charges) is less than the promised(!) returns, it is probably time to ask some hard questions. After all, if someone can borrow at 10% but promises you a 'guaranteed' 30%, why do they need your money at all?
In the not-so-distant past, until I decided to quit my full-time job and pursue my MBA, I was quite engaged in investing as well as trading. However, to fund my education, I liquidated almost all my investments, and even spent a couple of years under heavy debt! Yep, the infamous student loan!
As the loan was paid-off and closed recently, I am re-learning the process of investing, while getting acclimatized with the new realities of the trading world. As I learn, digest, think and process, I will try to jot down some of my thoughts here on this page.
May the bulls (& bears) be always with you... ! |
# program to validate nwb files using specification language definition
import sys
import nwb.nwb_file as nwb_file
# import cProfile # for profiling
def validate_file(name, core_spec="nwb_core.py", extensions=None, verbosity="all"):
"""
Parameters
----------
name: string
Name (including path) of file to be validated
core_spec: string (default: 'nwb_core.py')
Name of core specification file or '-' to load specification(s) from HDF5 file.
extensions: array
Array of extension files
verbosity: string (default: 'all')
Controls how much validation output is displayed. Options are:
'all', 'summary', and 'none'
Returns
-------
validation_result: dict
Result of validation. Has keys: 'errors', 'warnings', 'added' which
contain counts of errors, warnings and additions. Additions are groups,
datasets or attributes added which are not defined by the core_spec
specification.
"""
if extensions is None:
extensions = []
# to validate, open the file in read-only mode, then close it
f = nwb_file.open(name, mode="r", core_spec=core_spec, extensions=extensions, verbosity=verbosity)
validation_result = f.close()
return validation_result
if __name__ == "__main__":
if len(sys.argv) < 2 or len(sys.argv) > 4:
print("format is:")
print("python %s <file_name> [ <extensions> [<core_spec>] ]" % sys.argv[0])
print("where:")
print("<extensions> is a common separated list of extension files, or '-' for none")
print("<core_spec> is the core format specification file. Default is 'nwb_core.py'")
print("Use two dashes, e.g. '- -' to load saved specifications from <file_name>")
sys.exit(0)
core_spec = 'nwb_core.py' if len(sys.argv) < 4 else sys.argv[3]
extensions = [] if len(sys.argv) < 3 or sys.argv[2] == '-' else sys.argv[2].split(',')
file_name = sys.argv[1]
if extensions == [] and core_spec == "-":
print("Loading specifications from file '%s'" % file_name)
validate_file(file_name, core_spec=core_spec, extensions=extensions)
# replace above call with following to generate execution time profile
# cProfile.run('validate_file("%s", core_spec="%s")' % (file_name, core_spec))
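# Example invocations (illustrative file names):
#   python validate_nwb.py sample.nwb
#   python validate_nwb.py sample.nwb my_extension.py nwb_core.py
#   python validate_nwb.py sample.nwb - -   # use specs saved inside sample.nwb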
|
This week is your last chance this year to taste the wonderful suhoor at Seven Sands, JBR’s only Emirati restaurant.
We are greeted with a refreshing splash of rose water, and the staff are friendly and attentive from the outset. By the end of our meal, we are remarking that this is possibly the best service we’ve received in Dubai in ages.
Décor is a subtle homage to the UAE, beige like the desert, walls like fishing nets, and soft wall reliefs depicting old Dubai downstairs, and the 21st century city upstairs.
There’s a smoking lounge upstairs, so it’s no surprise that on this hot night there are more guests upstairs than down.
We are presented with suhoor specific set menus – although there’s nothing to stop you ordering a la carte. We opt for the elaborate multi-dish, three-course suhoor.
First up the drinks – named with a nod to Emirati geology and features. We took the ‘Jebel Hafeet’ and this cooling blend of cucumber, mint and basil would be perfect on the mountain top after which it’s named.
The manager introduced himself and carefully explained each dish, without being too intrusive. We already felt that nothing was too much trouble for him.
The first course was an enticing blend of all the classics, green (unripe) mango salad, lemon bazar salad with yact cheese, moutabel, hummous and even shark sambousek. While the bold yellow turmeric bread stole the bread basket show, the mango salad intrigued, and we loved the light creaminess of the yact cheese. We yearned for more, but knew there were more courses. The platter was more than enough for two.
A hearty bowl of lentil soup followed, served with lemon and Emirati “ragag croutons”. Delicious, and we are already feeling satisfied.
Main courses were four bowls of meat and fish, all traditional, and all apparently tasty, but we opted for a vegetarian main course, with no awkwardness on the part of the staff.
We were presented with a vegetable casserole, (thareed) served atop the ubiquitous ragag, which creates a wonderfully soggy, comforting base to the concoction. So traditional is this dish (often served with meat), it is even mentioned in a hadith of the Prophet Mohammed.
There’s potato, koosa, green pepper and carrot, with hints of onion, garlic, and of course, bezer spice mix which includes cardamom, cinnamon, chili, cumin and fennel.
Judging by this dish, we missed out on the meat-based offerings – chicken and fish fouga, harees with wheat from Abu Dhabi, and aishu laham, rice and lamb flavoured with garlic, ginger, saffron and dried lime.
Next came a deeply satisfying dessert platter of lugaimat (deep fried dough balls covered in date and honey), rich, decadent camel milk ice cream, Seven Sands’ signature date pudding, and aseeda bobar (pumpkin puree flavoured with cardamom, saffron, and fresh ghee). My favourite was the aseeda bobar, although the others were all divine, and it’s a real treat to try a generous mouthful of no less than four desserts!
We rounded off our late-night feast with ghawa and karak chai, a truly fitting end to a wonderful experience. If you want to try traditional local cuisine in a contemporary oceanside setting (with views of the Ain Dubai wheel), look no further!
Suhoor at Seven Sands JBR is priced at 147 dirhams each. |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from django.forms import ValidationError
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import validators
from openstack_dashboard import api
LOG = logging.getLogger(__name__)
class BaseUserForm(forms.SelfHandlingForm):
def __init__(self, request, *args, **kwargs):
super(BaseUserForm, self).__init__(request, *args, **kwargs)
domain_context = request.session.get('domain_context', None)
# Populate project choices
project_choices = [('', _("Select a project"))]
# If the user is already set (update action), list only projects which
# the user has access to.
user_id = kwargs['initial'].get('id', None)
projects, has_more = api.keystone.tenant_list(request, user=user_id)
if domain_context:
domain_projects = [project for project in projects
if project.domain_id == domain_context]
projects = domain_projects
for project in projects:
if project.enabled:
project_choices.append((project.id, project.name))
self.fields['project'].choices = project_choices
def clean(self):
'''Check to make sure password fields match.'''
data = super(forms.Form, self).clean()
if 'password' in data:
if data['password'] != data.get('confirm_password', None):
raise ValidationError(_('Passwords do not match.'))
return data
ADD_PROJECT_URL = "horizon:admin:projects:create"
class CreateUserForm(BaseUserForm):
name = forms.CharField(label=_("User Name"))
email = forms.EmailField(label=_("Email"))
password = forms.RegexField(
label=_("Password"),
widget=forms.PasswordInput(render_value=False),
regex=validators.password_validator(),
error_messages={'invalid': validators.password_validator_msg()})
confirm_password = forms.CharField(
label=_("Confirm Password"),
required=False,
widget=forms.PasswordInput(render_value=False))
project = forms.DynamicChoiceField(label=_("Primary Project"),
add_item_link=ADD_PROJECT_URL)
role_id = forms.ChoiceField(label=_("Role"))
def __init__(self, *args, **kwargs):
roles = kwargs.pop('roles')
super(CreateUserForm, self).__init__(*args, **kwargs)
role_choices = [(role.id, role.name) for role in roles]
self.fields['role_id'].choices = role_choices
# We have to protect the entire "data" dict because it contains the
# password and confirm_password strings.
@sensitive_variables('data')
def handle(self, request, data):
domain_context = request.session.get('domain_context', None)
try:
LOG.info('Creating user with name "%s"' % data['name'])
new_user = api.keystone.user_create(request,
name=data['name'],
email=data['email'],
password=data['password'],
project=data['project'],
enabled=True,
domain=domain_context)
messages.success(request,
_('User "%s" was successfully created.')
% data['name'])
if data['role_id']:
try:
api.keystone.add_tenant_user_role(request,
data['project'],
new_user.id,
data['role_id'])
except:
exceptions.handle(request,
                                      _('Unable to add user '
                                        'to primary project.'))
return new_user
except:
exceptions.handle(request, _('Unable to create user.'))
class UpdateUserForm(BaseUserForm):
id = forms.CharField(label=_("ID"), widget=forms.HiddenInput)
name = forms.CharField(label=_("User Name"))
email = forms.EmailField(label=_("Email"))
password = forms.RegexField(
label=_("Password"),
widget=forms.PasswordInput(render_value=False),
regex=validators.password_validator(),
required=False,
error_messages={'invalid': validators.password_validator_msg()})
confirm_password = forms.CharField(
label=_("Confirm Password"),
widget=forms.PasswordInput(render_value=False),
required=False)
project = forms.ChoiceField(label=_("Primary Project"))
def __init__(self, request, *args, **kwargs):
super(UpdateUserForm, self).__init__(request, *args, **kwargs)
if api.keystone.keystone_can_edit_user() is False:
for field in ('name', 'email', 'password', 'confirm_password'):
self.fields.pop(field)
# We have to protect the entire "data" dict because it contains the
# password and confirm_password strings.
@sensitive_variables('data', 'password')
def handle(self, request, data):
user = data.pop('id')
# Throw away the password confirmation, we're done with it.
data.pop('confirm_password', None)
try:
api.keystone.user_update(request, user, **data)
messages.success(request,
_('User has been updated successfully.'))
except:
exceptions.handle(request, ignore=True)
messages.error(request, _('Unable to update the user.'))
return True
|
With a stay at Batur Mountain View Hotel & Restaurant in Kintamani, you'll be within a 15-minute drive of Batur Volcano Museum and Museum Geopark Batur. Featured amenities include luggage storage and laundry facilities. A roundtrip airport shuttle is provided for a surcharge (available 24 hours), and free self parking is available onsite. |
# coding: utf-8
# In[1]:
import numpy as np
import pandas as pd
import json
from sklearn.preprocessing import MinMaxScaler
from sklearn.metrics import confusion_matrix
from sklearn.metrics import log_loss
from sklearn.metrics import precision_score
from sklearn.metrics import f1_score
from sklearn.metrics import recall_score
from sklearn.metrics import accuracy_score
from sklearn.metrics import classification_report
from multiprocessing import Pool, TimeoutError
from multiprocessing import cpu_count
from datetime import timedelta
from sklearn.ensemble import RandomForestClassifier
import sys
import csv
import itertools
import time
# In[13]:
def rf(X_train_cols,
X_train,
Y_train,
X_test,
Y_test,
n_estimators=10,
criterion="gini",
max_features="auto",
max_depth=-1,
n_jobs=1):
"""
Parameters
----------
X_train_cols : list of feature column names
from the training set
X_train : pandas data frame
data frame of features for the training set
Y_train : pandas data frame
data frame of labels for the training set
X_test : pandas data frame
data frame of features for the test set
Y_test : pandas data frame
data frame of labels for the test set
n_estimators : integer, optional (default=10)
The number of trees in the forest.
criterion : string, optional (default=”gini”)
The function to measure the quality of a split.
Supported criteria are “gini” for the Gini impurity and “entropy”
for the information gain.
max_features : int, float, string or None, optional (default=”auto”)
The number of features to consider when looking for the best split:
If int, then consider max_features features at each split.
If float, then max_features is a percentage and int(max_features * n_features)
features are considered at each split.
If “auto”, then max_features=sqrt(n_features).
If “sqrt”, then max_features=sqrt(n_features) (same as “auto”).
If “log2”, then max_features=log2(n_features).
If None, then max_features=n_features.
max_depth : integer or None, optional (default=None)
The maximum depth of the tree.
If None, then nodes are expanded until all leaves are pure or
until all leaves contain less than min_samples_split samples.
Ignored if max_leaf_nodes is not None.
n_jobs : integer, optional (default=1)
The number of jobs to run in parallel for both fit and predict.
If -1, then the number of jobs is set to the number of cores.
    Returns
    -------
    list
        Alternating labels and values, in order: confusion matrix,
        classification report, number of trees, max depth, logloss
        (averaged logarithmic loss), miss_err (misclassification error
        rate), and variable importances.
"""
if max_depth==-1:
max_depth = None
labels = np.unique(Y_train)
## # Run rf
# Define classifier
rf = RandomForestClassifier(n_estimators = n_estimators,
criterion = criterion,
max_features = max_features,
max_depth = max_depth,
n_jobs = n_jobs)
# Fit
rf.fit(X_train, Y_train)
# Predict
Y_hat = rf.predict(X_test)
Y_probs = rf.predict_proba(X_test)
## # Misclassification error rate
miss_err = 1-accuracy_score(Y_test, Y_hat)
    ## # Log Loss
    eps = 1e-15  # note: the original "10^(-15)" used ^, which is XOR in Python
    logloss = log_loss(Y_test, Y_probs, eps = eps)
##confusion_matrix
confusion_matrix1 = confusion_matrix(y_true=Y_test, y_pred=Y_hat
, labels=labels)
# classification_report
classification_report1 = classification_report(y_true=Y_test, y_pred=Y_hat)
# Variable importance
importances = rf.feature_importances_
std = np.std([tree.feature_importances_ for tree in rf.estimators_],
axis=0)
indices = np.argsort(importances)[::-1]
    # Return tuple of (rank, feature name, variable importance); index the
    # column names through `indices` so names line up with sorted importances
    var_importance = [(f+1, X_train_cols[indices[f]], importances[indices[f]]) for f in range(X_train.shape[1])]
# Output results in a list format
result = []
result.append("confusion_matrix")
result.append(confusion_matrix1)
result.append("classification_report")
result.append(classification_report1)
result.append("number of trees")
result.append(n_estimators)
result.append("max depth")
result.append(max_depth)
result.append("logloss")
result.append(logloss)
result.append("miss_err")
result.append(miss_err)
result.append("var_importance")
result.append(var_importance)
return result
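# Example call (illustrative; the toy data below is made up):
#   X = pd.DataFrame({"f1": [0, 1, 2, 3], "f2": [1, 0, 1, 0]})
#   Y = pd.Series([0, 1, 0, 1])
#   out = rf(list(X.columns), X, Y, X, Y, n_estimators=100, n_jobs=-1)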
|
Acrylic, charcoal, vintage collage material and ink on thick gallery wrapped canvas. Sides are painted a soft warm gray. Hanging wire included. Signed, dated and titled on the back. Ready to hang! |
import os
import shutil
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from .testapp.models import Test
from .utils import (
create_superuser,
create_img_file,
create_doc_file,
create_attachments_img,
create_attachments_doc)
from ..models import Image, Document
class AttachmentsAdminTest(TestCase):
def setUp(self):
if not os.path.exists(settings.MEDIA_ROOT):
os.mkdir(settings.MEDIA_ROOT)
create_img_file()
create_doc_file()
self.img = create_attachments_img('test.jpg')
self.doc = create_attachments_doc('test.txt')
create_superuser()
self.client = Client()
self.client.login(username='admin', password='secret')
def tearDown(self):
self.img.delete()
self.doc.delete()
shutil.rmtree(settings.MEDIA_ROOT, ignore_errors=True)
def test_image_list_view_thumbnail(self):
resp = self.client.get(reverse('admin:attachments_image_changelist'))
html = """<a href="/admin/attachments/image/1/"><img border="0" alt="" src="/media/attachments/images/test.jpg.80x80_q95_crop.jpg"></a>"""
self.assertContains(resp, html, html=True)
def test_document_list_view(self):
resp = self.client.get(reverse('admin:attachments_document_changelist'))
self.assertEqual(resp.status_code, 200)
class AttachmentsAdminUploadTest(TestCase):
def setUp(self):
if not os.path.exists(settings.MEDIA_ROOT):
os.mkdir(settings.MEDIA_ROOT)
create_img_file()
create_doc_file()
create_superuser()
self.client = Client()
self.client.login(username='admin', password='secret')
def tearDown(self):
shutil.rmtree(settings.MEDIA_ROOT, ignore_errors=True)
def test_image_upload_view(self):
# confirm there are no images
self.assertFalse(Image.objects.all())
# upload an image
img = os.path.join(settings.MEDIA_ROOT, 'test.jpg')
with open(img, 'rb') as img_file:
resp = self.client.post(
reverse('attachments:ajax_upload_image'),
{'file': img_file},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(resp.status_code, 200)
# check that our uploaded image exists
img_file = Image.objects.all()[0]
self.assertEqual(img_file.filename, 'test.jpg')
def test_document_upload_view(self):
# confirm there are no documents
self.assertFalse(Document.objects.all())
# upload a document
doc = os.path.join(settings.MEDIA_ROOT, 'test.txt')
with open(doc, 'rb') as doc_file:
resp = self.client.post(
reverse('attachments:ajax_upload_document'),
{'file': doc_file},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(resp.status_code, 200)
# check that our uploaded document exists
doc_file = Document.objects.all()[0]
self.assertEqual(doc_file.filename, 'test.txt')
class AttachmentsAdminBadUploadTest(TestCase):
def setUp(self):
if not os.path.exists(settings.MEDIA_ROOT):
os.mkdir(settings.MEDIA_ROOT)
create_img_file(filename='test.tiff')
create_doc_file(filename='test.woof')
create_superuser()
self.client = Client()
self.client.login(username='admin', password='secret')
def tearDown(self):
shutil.rmtree(settings.MEDIA_ROOT, ignore_errors=True)
def test_bad_image_upload_view(self):
# confirm there are no images
self.assertFalse(Image.objects.all())
# try to upload the bad image
img = os.path.join(settings.MEDIA_ROOT, 'test.tiff')
with open(img, 'rb') as img_file:
resp = self.client.post(
reverse('attachments:ajax_upload_image'),
{'file': img_file},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(resp.status_code, 404)
# make sure the bad image was not uploaded
self.assertFalse(Image.objects.all())
def test_bad_document_upload_view(self):
# confirm there are no documents
self.assertFalse(Document.objects.all())
# try to upload the bad document
doc = os.path.join(settings.MEDIA_ROOT, 'test.woof')
with open(doc, 'rb') as doc_file:
resp = self.client.post(
reverse('attachments:ajax_upload_document'),
{'file': doc_file},
HTTP_X_REQUESTED_WITH='XMLHttpRequest')
self.assertEqual(resp.status_code, 404)
# make sure the bad document was not uploaded
self.assertFalse(Document.objects.all()) |
3/8" Round Top EPDM Channel | U.S. Plastic Corp.
Overall dimensions are 1.00" Wide x 1.00" High. Channel is 0.38" Wide x 0.50" Deep. Commercial grade black EPDM channel has a 65 durometer hardness (Shore A). Tensile Strength is 1,650 psi and elongation is 310%. |
#encoding: utf-8
dta={'QS':{},'IS':{},'ISS':{}}
import sys
for f in sys.argv[1:]:
print f,'',
N=f.split('.')[1];
assert(N[-1]=='k'); N=1000*int(N[:-1])
if '.q.' in f: collider='QS'
elif '.i.' in f: collider='IS'
elif '.is.' in f: collider='ISS'
else: raise RuntimeError("Unknown collider type for file "+f)
for l in open(f):
if 'Collider' in l:
t=l.split()[2]; assert(t[-2:]=='us'); t=float(t[:-2])/1e6
if not dta[collider].has_key(N): dta[collider][N]=[t]
else: dta[collider][N]+=[t*0.01] # the second time is per 100 iterations
print
ISS_N=dta['ISS'].keys(); ISS_N.sort()
QS_N=dta['QS'].keys(); QS_N.sort()
IS_N=dta['IS'].keys(); IS_N.sort()
ISSinit=[dta['ISS'][N][0] for N in ISS_N]; ISSstep=[dta['ISS'][N][1] for N in ISS_N]
QSinit=[dta['QS'][N][0] for N in QS_N]; QSstep=[dta['QS'][N][1] for N in QS_N]
ISinit=[dta['IS'][N][0] for N in IS_N]; ISstep=[dta['IS'][N][1] for N in IS_N]
from pylab import *
plot(IS_N,ISinit,'y',ISS_N,ISSinit)
gca().set_yscale('log')
xlabel("Number of spheres")
ylabel(u"Log time for the 1st collider step [s]")
title("Colliders performance (QS=QuickSoft, IS=InsertionSort, IS/s=IS+stride)")
legend(('IS init','IS/s init',),'upper left')
ax2=twinx()
plot(IS_N,ISstep,'k-',ISS_N,ISSstep,'r-',QS_N,QSstep,'g-',QS_N,QSinit,'b-')
ylabel(u"Linear time per 1 step [s]")
legend(('IS step','IS/s step','QS step','QS init'),'right')
grid()
savefig('colliders.svg')
show()
|
Number retired in his honor.
This page is to honor those who have provided many years of service to the association. Although they have passed on, their memories will live with us always.
We hope that this page will honor their memory as they have honored us with their unselfish service.
Carter County Skywarn Association. Copyright, 2011-2019. |
'''
Created on Nov 8, 2013
@package: content
@copyright: 2013 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Mugur Rus
API specifications for content resource item.
'''
from ally.api.config import query, service
from ally.api.criteria import AsLikeOrdered
from ally.support.api.entity import IEntityPrototype
from content.base.api.domain_content import modelContent
from content.base.api.item import Item, QItem
from ally.api.type import Dict
# --------------------------------------------------------------------
TYPE_RESOURCE = 'resource'
# The resource type.(value of Item.Type for this item)
@modelContent(polymorph={Item.Type: TYPE_RESOURCE})
class ItemResource(Item):
'''
Provides the resource item model.
'''
ContentType = str
HeadLine = str
FileMeta = Dict(str, str)
# --------------------------------------------------------------------
@query(ItemResource)
class QItemResource(QItem):
'''
    Provides the query for the resource item model.
'''
contentType = AsLikeOrdered
headLine = AsLikeOrdered
# --------------------------------------------------------------------
@service(('Entity', ItemResource), ('QEntity', QItemResource))
class IItemResourceService(IEntityPrototype):
'''
Provides the service methods for resource items.
'''
|
While a sports collection is a great way to impress friends and family at home, the best way to impress fellow fans at the game is to buy a New York Knicks Kristaps Porzingis Jersey or stock up on Knicks apparel items. We've got just what fans of all ages need in terms of Authentic Kristaps Porzingis Jerseys, with home, away, and alternate color combinations being easily available. All of our Kristaps Porzingis Jerseys come in sizes for men and women, with youth sizes available in many styles. We even offer these sizes in our limited edition jerseys, which include holiday-themed options and vintage designs. |
"""Init Genesis APP."""
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from flask.ext.login import LoginManager
from flask.ext.migrate import Migrate
from flask_bootstrap import Bootstrap, WebCDN
from flask.ext.misaka import Misaka
from flask.ext.moment import Moment
from flask.ext.pagedown import PageDown
from flask.ext.restful import Api
from flask.ext.httpauth import HTTPBasicAuth
from flask_mail import Mail
import os
from config import config
APP = Flask(__name__)
# config handling
if os.getenv('FLASK_CONFIG'):
FLASK_CONFIG = os.getenv('FLASK_CONFIG')
else:
FLASK_CONFIG = 'default'
APP.config.from_object(config[FLASK_CONFIG])
# logging
if not APP.debug and not APP.testing:
import logging
from logging.handlers import RotatingFileHandler
FILE_HANDLER = RotatingFileHandler('/var/log/genesis/genesis.log')
FILE_HANDLER.setLevel(logging.DEBUG)
FILE_HANDLER.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'))
APP.logger.addHandler(FILE_HANDLER)
# Bootstrap
Bootstrap(APP)
APP.extensions['bootstrap']['cdns']['jquery'] = WebCDN(
'//cdnjs.cloudflare.com/ajax/libs/jquery/2.1.1/')
# Misaka Markdown
Misaka(APP)
# Moment.js
MOMENT = Moment(APP)
# SQL stuff
DB = SQLAlchemy(APP)
# Migrate
MIGRATE = Migrate(APP, DB)
# PageDown Editor
PAGEDOWN = PageDown(APP)
# Mail
MAIL = Mail(APP)
# REST API
API = Api(APP)
# HTTPAuth
BASIC_AUTH = HTTPBasicAuth()
# Login
LOGIN_MANAGER = LoginManager()
LOGIN_MANAGER.init_app(APP)
LOGIN_MANAGER.login_view = 'login'
@LOGIN_MANAGER.user_loader
def load_user(userid):
"""User loader for Genesis."""
import app.auth
return app.auth.CTUser(uid=userid)
# import
import app.views
import app.rest
import app.feeds
import app.admin
|
Gold sand stone ear plugs. Semi-precious stone plugs. Available in sizes from 6mm up to 30mm.
"""
Simpler queue management than the regular mailbox.Maildir stuff. You
do get a lot more features from the Python library, so if you need
to do some serious surgery go use that. This works as a good
API for the 90% case of "put mail in, get mail out" queues.
"""
import errno
import hashlib
import logging
import mailbox
import os
import socket
import time
from salmon import mail
# we calculate this once, since the hostname shouldn't change for every
# email we put in a queue
HASHED_HOSTNAME = hashlib.md5(socket.gethostname()).hexdigest()
class SafeMaildir(mailbox.Maildir):
def _create_tmp(self):
now = time.time()
uniq = "%s.M%sP%sQ%s.%s" % (int(now), int(now % 1 * 1e6), os.getpid(),
mailbox.Maildir._count, HASHED_HOSTNAME)
path = os.path.join(self._path, 'tmp', uniq)
try:
os.stat(path)
except OSError, e:
if e.errno == errno.ENOENT:
mailbox.Maildir._count += 1
try:
return mailbox._create_carefully(path)
except OSError, e:
if e.errno != errno.EEXIST:
raise
else:
raise
# Fall through to here if stat succeeded or open raised EEXIST.
raise mailbox.ExternalClashError('Name clash prevented file creation: %s' % path)
class QueueError(Exception):
def __init__(self, msg, data):
Exception.__init__(self, msg)
self._message = msg
self.data = data
class Queue(object):
"""
Provides a simplified API for dealing with 'queues' in Salmon.
It currently just supports maildir queues since those are the
most robust, but could implement others later.
"""
def __init__(self, queue_dir, safe=False, pop_limit=0, oversize_dir=None):
"""
This gives the Maildir queue directory to use, and whether you want
this Queue to use the SafeMaildir variant which hashes the hostname
so you can expose it publicly.
The pop_limit and oversize_queue both set a upper limit on the mail
you pop out of the queue. The size is checked before any Salmon
processing is done and is based on the size of the file on disk. The
purpose is to prevent people from sending 10MB attachments. If a
message is over the pop_limit then it is placed into the
oversize_dir (which should be a maildir).
The oversize protection only works on pop messages off, not
putting them in, get, or any other call. If you use get you can
use self.oversize to also check if it's oversize manually.
"""
self.dir = queue_dir
if safe:
self.mbox = SafeMaildir(queue_dir)
else:
self.mbox = mailbox.Maildir(queue_dir)
self.pop_limit = pop_limit
if oversize_dir:
if not os.path.exists(oversize_dir):
osmb = mailbox.Maildir(oversize_dir)
self.oversize_dir = os.path.join(oversize_dir, "new")
if not os.path.exists(self.oversize_dir):
os.mkdir(self.oversize_dir)
else:
self.oversize_dir = None
def push(self, message):
"""
Pushes the message onto the queue. Remember the order is probably
not maintained. It returns the key that gets created.
"""
return self.mbox.add(str(message))
def pop(self):
"""
Pops a message off the queue, order is not really maintained
like a stack.
It returns a (key, message) tuple for that item.
"""
for key in self.mbox.iterkeys():
over, over_name = self.oversize(key)
if over:
if self.oversize_dir:
logging.info("Message key %s over size limit %d, moving to %s.",
key, self.pop_limit, self.oversize_dir)
os.rename(over_name, os.path.join(self.oversize_dir, key))
else:
logging.info("Message key %s over size limit %d, DELETING (set oversize_dir).",
key, self.pop_limit)
os.unlink(over_name)
else:
try:
msg = self.get(key)
except QueueError, exc:
raise exc
finally:
self.remove(key)
return key, msg
return None, None
def get(self, key):
"""
Get the specific message referenced by the key. The message is NOT
removed from the queue.
"""
msg_file = self.mbox.get_file(key)
if not msg_file:
return None
msg_data = msg_file.read()
try:
return mail.IncomingMessage(self.dir, None, None, msg_data)
except Exception, exc:
logging.exception("Failed to decode message: %s; msg_data: %r", exc, msg_data)
return None
def remove(self, key):
"""Removes the queue, but not returned."""
self.mbox.remove(key)
def count(self):
"""Returns the number of messages in the queue."""
return len(self.mbox)
def clear(self):
"""
Clears out the contents of the entire queue.
Warning: This could be horribly inefficient since it
basically pops until the queue is empty.
"""
# man this is probably a really bad idea
while self.count() > 0:
self.pop()
def keys(self):
"""
Returns the keys in the queue.
"""
return self.mbox.keys()
def oversize(self, key):
if self.pop_limit:
file_name = os.path.join(self.dir, "new", key)
return os.path.getsize(file_name) > self.pop_limit, file_name
else:
return False, None
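# Minimal usage sketch (the queue directory and size limit are illustrative):
#   q = Queue("run/queue", safe=True, pop_limit=10 * 1024 * 1024,
#             oversize_dir="run/oversize")
#   key = q.push(str(some_mail_message))
#   key, msg = q.pop()   # (None, None) when the queue is empty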
|
Distribution of agricultural inputs is usually done in time for the Gu 2016 planting period or immediately before the Deyr season. Each household receives 25 kg of certified quality seeds prior to the Gu 2016 planting season, in line with Food Security cluster guidance. The seed packages to be distributed have been proven in previous projects to allow farmers to revive their production and, in the longer term, to improve household food consumption scores and boost small-scale production and sales. The seeds are usually procured from local suppliers in Baidoa after a transparent bidding process facilitated by READO. READO then develops an agreement with the seed suppliers to provide a certificate of quality assurance from the seed certification companies in Somalia.
from __future__ import unicode_literals
from django.conf.urls import url
from django.contrib import admin
from django.http import Http404
from django.utils.module_loading import import_string
from django.utils.translation import ugettext_lazy as _
from form_designer import settings
from form_designer.forms import FormDefinitionFieldInlineForm, FormDefinitionForm
from form_designer.models import FormDefinition, FormDefinitionField, FormLog
try:
from django.urls import reverse
except ImportError:
from django.core.urlresolvers import reverse
class FormDefinitionFieldInline(admin.StackedInline):
form = FormDefinitionFieldInlineForm
model = FormDefinitionField
extra = 1
fieldsets = [
(_('Basic'), {'fields': ['name', 'field_class', 'required', 'initial']}),
(_('Display'), {'fields': ['label', 'widget', 'help_text', 'position', 'include_result']}),
(_('Text'), {'fields': ['max_length', 'min_length']}),
(_('Numbers'), {'fields': ['max_value', 'min_value', 'max_digits', 'decimal_places']}),
(_('Regex'), {'fields': ['regex']}),
(_('Choices'), {'fields': ['choice_values', 'choice_labels']}),
(_('Model Choices'), {'fields': ['choice_model', 'choice_model_empty_label']}),
]
class FormDefinitionAdmin(admin.ModelAdmin):
save_as = True
fieldsets = [
(_('Basic'), {'fields': ['name', 'require_hash', 'method', 'action', 'title', 'body']}),
(_('Settings'), {'fields': ['allow_get_initial', 'log_data', 'success_redirect', 'success_clear', 'display_logged', 'save_uploaded_files'], 'classes': ['collapse']}),
(_('Mail form'), {'fields': ['mail_to', 'mail_from', 'mail_subject', 'mail_uploaded_files', 'mail_cover_text'], 'classes': ['collapse']}),
(_('Templates'), {'fields': ['message_template', 'form_template_name'], 'classes': ['collapse']}),
(_('Messages'), {'fields': ['success_message', 'error_message', 'submit_label'], 'classes': ['collapse']}),
]
list_display = ('name', 'title', 'method', 'count_fields')
form = FormDefinitionForm
inlines = [
FormDefinitionFieldInline,
]
search_fields = ('name', 'title')
class FormLogAdmin(admin.ModelAdmin):
list_display = ('form_definition', 'created', 'id', 'created_by', 'data_html')
list_filter = ('form_definition',)
list_display_links = None
date_hierarchy = 'created'
exporter_classes = {}
exporter_classes_ordered = []
for class_path in settings.EXPORTER_CLASSES:
cls = import_string(class_path)
if cls.is_enabled():
exporter_classes[cls.export_format()] = cls
exporter_classes_ordered.append(cls)
def get_exporter_classes(self):
return self.__class__.exporter_classes_ordered
def get_actions(self, request):
actions = super(FormLogAdmin, self).get_actions(request)
for cls in self.get_exporter_classes():
desc = _("Export selected %%(verbose_name_plural)s as %s") % cls.export_format()
actions[cls.export_format()] = (cls.export_view, cls.export_format(), desc)
return actions
def get_urls(self):
urls = [
url(
r'^export/(?P<format>[a-zA-Z0-9_-]+)/$',
self.admin_site.admin_view(self.export_view),
name='form_designer_export'
),
]
return urls + super(FormLogAdmin, self).get_urls()
def data_html(self, obj):
return obj.form_definition.compile_message(obj.data, 'html/formdefinition/data_message.html')
data_html.allow_tags = True
data_html.short_description = _('Data')
def get_change_list_query_set(self, request, extra_context=None):
"""
        Return the queryset used by the 'change list' admin view for this model.
"""
if hasattr(self, 'get_changelist_instance'): # Available on Django 2.0+
cl = self.get_changelist_instance(request)
else:
list_display = self.get_list_display(request)
list_display_links = self.get_list_display_links(request, list_display)
list_filter = self.get_list_filter(request)
ChangeList = self.get_changelist(request)
cl = ChangeList(request, self.model, list_display,
list_display_links, list_filter, self.date_hierarchy,
self.search_fields, self.list_select_related,
self.list_per_page, self.list_max_show_all, self.list_editable,
self)
if hasattr(cl, "get_query_set"): # Old Django versions
return cl.get_query_set(request)
return cl.get_queryset(request)
def export_view(self, request, format):
queryset = self.get_change_list_query_set(request)
if format not in self.exporter_classes:
raise Http404()
return self.exporter_classes[format](self.model).export(request, queryset)
def changelist_view(self, request, extra_context=None):
extra_context = extra_context or {}
query_string = '?' + request.META.get('QUERY_STRING', '')
exporter_links = []
for cls in self.get_exporter_classes():
url = reverse('admin:form_designer_export', args=(cls.export_format(),)) + query_string
exporter_links.append({'url': url, 'label': _('Export view as %s') % cls.export_format()})
extra_context['exporters'] = exporter_links
return super(FormLogAdmin, self).changelist_view(request, extra_context)
admin.site.register(FormDefinition, FormDefinitionAdmin)
admin.site.register(FormLog, FormLogAdmin)
|
The temperatures have become so frigid the last couple of weeks that I dread any time I need to go outside, and I have been struggling to stay warm. Sweaters have been my friend lately and I love a good cable knit. Pair it with jeans and a warm coat and you have my uniform lately. Have you been dealing with snow, ice, and freezing temperatures too?
It has been so cold where I live (like -30!). It's been months since I last went without layers.
Hoodies, so many hoodies, and piles of blankets - that's my routine for this week. We're down to -24C this morning - my apartment can't keep warm with that outside!
love the chunky knit sweater... can't get enough of them right now!
It has definitely been freezing here in DC! I've been wearing a lot of sweater tights and boots.
The boot cut jeans are a winner for me. I am stuck in my skinnies cause I can tuck them into boots. I've always thought boot cuts should be called something else, since they go over the boot instead of tucking into them.
i love a great sweater and nice jacket. |
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Sebastian Wiesner <lunaryorn@gmail.com>
# This library is free software; you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation; either version 2.1 of the License, or (at your
# option) any later version.
# This library is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License
# for more details.
# You should have received a copy of the GNU Lesser General Public License
# along with this library; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
plugins.fake_monitor
====================
Provide a fake :class:`~pyudev.Monitor`.
This fake monitor allows you to trigger arbitrary events. Use this
class to test classes built on top of a monitor without having to rely
on real events generated by privileged operations, as provided by the
:mod:`~plugins.privileged` plugin.
.. moduleauthor:: Sebastian Wiesner <lunaryorn@gmail.com>
"""
from __future__ import (print_function, division, unicode_literals,
absolute_import)
import os
from select import select
class FakeMonitor(object):
"""
A fake :class:`~pyudev.Monitor` which allows you to trigger arbitrary
events.
This fake monitor implements the complete :class:`~pyudev.Monitor`
interface and works on real file descriptors so that you can
:func:`~select.select()` the monitor.
"""
def __init__(self, device_to_emit):
self._event_source, self._event_sink = os.pipe()
self.device_to_emit = device_to_emit
self.started = False
def trigger_event(self):
"""
Trigger an event on clients of this monitor.
"""
os.write(self._event_sink, b'\x01')
def fileno(self):
return self._event_source
def filter_by(self, *args):
pass
def start(self):
self.started = True
def poll(self, timeout=None):
rlist, _, _ = select([self._event_source], [], [], timeout)
if self._event_source in rlist:
os.read(self._event_source, 1)
return self.device_to_emit
def close(self):
"""
Close sockets acquired by this monitor.
"""
try:
os.close(self._event_source)
finally:
os.close(self._event_sink)
def pytest_funcarg__fake_monitor(request):
"""
Return a FakeMonitor, which emits the platform device as returned by
the ``fake_monitor_device`` funcarg on all triggered actions.
.. warning::
To use this funcarg, you have to provide the ``fake_monitor_device``
funcarg!
"""
return FakeMonitor(request.getfuncargvalue('fake_monitor_device'))
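
# Illustrative usage sketch (assuming ``device`` is the device object to emit):
#
#     monitor = FakeMonitor(device)
#     monitor.start()
#     monitor.trigger_event()
#     assert monitor.poll(timeout=1) is device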
|
Ongoing quality control is an integral part of any maintenance programme, and our experience with innovative techniques allows you to review your maintenance programme and schedules based on the results and analysis of up-to-date technologies, potentially reducing downtime and costs.
Integral to our range of services, QOS offers highly specialised Non-Destructive Testing and inspection to complement our other traditional Quality Assurance and Quality Control services, covering Visual, Ultrasonic, Magnetic Particle, Dye Penetrant and Radiographic Testing methods. Our state-of-the-art techniques include Advanced UT, Corrosion Mapping, TOFD, Long Range UT, Camera Assisted Visual Inspection and Remote Field Eddy Current.
QOS provides Risk-Based Inspection (RBI) services to refineries, oil & gas processing, petrochemical and power plants. Services include the RBI methodology and consultancy.
QOS RBI is based on the American Petroleum Institute RP 580/581 inspection methodologies and utilises formulae-based calculations built on design standards such as ASME and ANSI.
Historical inspection data can also be recorded against each item of equipment.
Corrosion affects every industry and, if undetected, may lead to catastrophic failures. Whereas external corrosion can easily be detected by conventional inspection methods, internal corrosion or erosion can only be detected through specialized monitoring techniques.
Our Corrosion Monitoring teams have expertise in various techniques, including corrosion surveys by ultrasonic measurements on piping, manifolds and valves, as well as conventional fixed probes and key-point thickness measurements.
On difficult-to-access areas such as splash-zones of offshore platforms and on telecommunication towers, our services are provided by dual-trained Corrosion Technicians who are able to abseil.
Inspired by developments in recreational mountaineering and caving, rope access or abseiling is a work positioning technique used to reach non-readily accessible work locations by means of a rope system suspended from a nearby structure, via selected independent anchor points. Today, rope access is an internationally recognized alternative to more traditional means of access such as scaffolding, ladders, gondolas, bosun's chairs, mechanical lifts or staging.
Unlike other bulky and cumbersome access methods, rope access systems are dismantled and removed at the end of each working day, causing no disruption to building occupants, pedestrians and traffic flow. There is no risk associated with unattended ladders, scaffolds or other access equipment and materials.
Rope access systems require minimal installation and dismantling time. A full set of abseiling gear can be set up or dismantled within an hour, while conventional methods of accessing, such as scaffolding, often require lengthy and expensive installation periods.
Rope access activities do not cause any mechanical damage to the coatings of inspected structures. In comparison, the erection and dismantling of scaffolding usually results in damaged paintwork at contact points.
Studies have shown that the cost of performing visual inspection and Non-Destructive Testing by rope access can be drastically lowered (by up to 90%), when compared to the cost of access by traditional scaffolding.
Worldwide inspection coverage of client purchased equipment at nominated vendors.
Vendor Inspection, sometimes referred to as Source Inspection or Third Party Inspection, covers inspection activities performed at the manufacturer's works on all types of mechanical, electrical, electronic and instrumentation equipment purchased by clients within the Petrochemical, Oil & Gas, Pharmaceutical and Mining industries.
With today's technology and communications the need for companies to respond and react quickly and efficiently to quality control requirements for procured products is increasingly important.
The level of Vendor Inspection services provided is intrinsically linked to the equipment involved, the Purchase Order requirements and the technical specifications / standards (national and international) applicable to the equipment. Normally the level of inspection is reviewed with the client to ensure that both parties fully understand what is expected of the other.
Should the client require only one or two visits, for example to conduct final inspection or to witness a test on a specific piece of kit, the scope of inspection is easily and clearly defined. When the scope of inspection is more detailed the manufacturer normally draws up a Quality Control Plan or Inspection and Test Plan on which the scope of inspection for all parties is agreed and the client then approves this document.
For a sizeable skid package or critical piece of equipment a Pre Production or Pre Inspection Meeting is held during which all parties discuss the scope of inspection, locations for inspection, communication channels and agree the proposed mark up of the Quality Control Plan.
QOS are able to accommodate anything from single orders to major projects or contracts. Should you have a Vendor Inspection requirement, it is very likely that QOS has previous experience of the vendor in question and has field engineers with the qualifications and capabilities to fulfil all client expectations.
QOS Vendor Inspection philosophy is not only to provide suitably skilled and experienced, cost effective inspectors for specific assignments or projects, but to manage the client's procurement inspection quality assurance in the field.
QOS provide a comprehensive budgetary estimation and/or control system for all work performed on behalf of clients, whereby regular status reports and budget projections can be submitted to the client.
QOS provides you with the opportunity to achieve your quality needs in a speedy, professional and efficient manner.
We can provide technical personnel from an extensive international network of experienced, qualified and internally assessed inspectors available throughout the world, providing a comprehensive, cost-effective service to our clients.
QOS is able to review your equipment specifications and advise you on the inspection requirements. If no local legislation applies in the country of operations, QOS is able to assist with assessing the equipment inspection requirements, including periodicity, by using best practice and international codes and standards.
The regulations normally require employers to ensure that, before equipment is put into use for the first time, it is thoroughly examined for defects, unless the equipment has not been used before and has a CE mark.
Once in use, the user has a responsibility to ensure that the equipment is thoroughly examined at regular intervals, as defined in the regulation.
The examinations must be carried out by a Competent Person who is able to be independent and impartial. The level of examination depends on several factors and is based on the risks that could arise. The Competent Person must also decide on any appropriate tests that need to be carried out to ensure compliance.
QOS is able to carry out full equipment assessments and determine the requirements of each type of equipment. Once this is done, it is possible to implement a complete management control system to track and monitor each item so that its inspection status is known. This system can be tailored to the client’s specific requirements and can be augmented by either a computerised database or a manual system. Also, if required, QOS is able to implement a tagging system which can be colour coded to suit the client’s requirements so that equipment whose inspection is out of date is easily identifiable and can, therefore, be put out of use until the inspection has been completed. |
#! /usr/bin/env python
from __future__ import print_function
from openturns import *
from math import *
TESTPREAMBLE()
try:
RandomGenerator.SetSeed(0)
# Problem parameters
inputDimension = 2
outputDimension = 1
rho = 0.3
a = 4.
b = 5.
# Reference analytical values
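    # For Y = a*X1 + b*X2 with unit-variance inputs correlated by rho:
    # Var(Y) = a^2 + b^2 + 2*a*b*rho, the ANCOVA indices are
    # (a^2 + a*b*rho)/Var(Y) and (b^2 + a*b*rho)/Var(Y), and the
    # uncorrelated parts are a^2/Var(Y) and b^2/Var(Y).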
covTh = a * a + b * b + 2 * a * b * rho
Si = [[(a * a + a * b * rho) / covTh, a * a / covTh],
[(b * b + a * b * rho) / covTh, b * b / covTh]]
# Model
inputName = Description(inputDimension)
inputName[0] = "X1"
inputName[1] = "X2"
outputName = Description(outputDimension)
outputName[0] = "Y"
formula = Description(outputDimension)
formula[0] = str(a) + "* X1 +" + str(b) + "* X2"
model = NumericalMathFunction(inputName, outputName, formula)
# Input distribution
distribution = ComposedDistribution([Normal()] * inputDimension)
# Correlated input distribution
S = CorrelationMatrix(inputDimension)
S[1, 0] = 0.3
R = NormalCopula().GetCorrelationFromSpearmanCorrelation(S)
myCopula = NormalCopula(R)
myCorrelatedInputDistribution = ComposedDistribution(
[Normal()] * inputDimension, myCopula)
sample = myCorrelatedInputDistribution.getSample(2000)
# Orthogonal basis
enumerateFunction = EnumerateFunction(inputDimension)
productBasis = OrthogonalProductPolynomialFactory(
[HermiteFactory()] * inputDimension, enumerateFunction)
# Adaptive strategy
adaptiveStrategy = FixedStrategy(
productBasis, enumerateFunction.getStrataCumulatedCardinal(4))
# Projection strategy
samplingSize = 250
projectionStrategy = LeastSquaresStrategy(
MonteCarloExperiment(samplingSize))
# Polynomial chaos algorithm
algo = FunctionalChaosAlgorithm(
model, distribution, adaptiveStrategy, projectionStrategy)
algo.run()
# Post-process the results
result = FunctionalChaosResult(algo.getResult())
ancova = ANCOVA(result, sample)
indices = ancova.getIndices()
uncorrelatedIndices = ancova.getUncorrelatedIndices()
for i in range(inputDimension):
value = indices[i]
print("ANCOVA index", i, "= %.8f" %
value, "absolute error=%.10f" % fabs(value - Si[i][0]))
value = uncorrelatedIndices[i]
print("ANCOVA uncorrelated index", i, "= %.8f" %
value, "absolute error=%.10f" % fabs(value - Si[i][1]))
except:
import sys
print("t_ANCOVA_std.py", sys.exc_info()[0], sys.exc_info()[1])
|
Hello friends and happy Friday eve! The weekend is so close I can taste it, especially with this fun and fruity Floating Flamingo drink I have created for you! Thirsty Thursdays are so great with a group of your gal pals to celebrate all of your hard work throughout the week and kick off the weekend fun. Girls just want to have fun! That's all they really want!!!
So we all know that flamingos are classy, a little sassy and pinkspirational, right? So are these yummy drinks that will get your party started right! Grab your goodies, girlies and don't just stand there on one leg!
In a drink shaker, squeeze in the juice from 4 lime wedges and drop the wedges into the shaker. Add in the minced mint leaves and use a muddler to incorporate the flavors of the lime juice and mint together.
Add some ice to your shaker and pour the watermelon vodka and lime soda water over the ice. Add the top of your shaker and shake well to mix.
Split the watermelon vodka mixture between 4 cups of your choosing, using caution not to drop in any ice. Add a fresh lime wedge into each glass.
Add two scoops of raspberry sorbet into each glass.
Waste away in Floating Flamingo-ville!
Enjoy floating away into flamingo paradise! Don't let anyone ruffle your feathers or stop your weekend fun! Have any fun drink ideas for summer? I could use some new ones, leave me your favorites in the comments below so I can try them out!
Tomorrow I will be showcasing a magical cake I made for Funfetti Friday, don't miss it! |
#!/usr/bin/python3.5
# -*- coding: utf-8 -*-
# @package concatFastasFile.py
# @author Sebastien Ravel
"""
The concatFastasFile script
===========================
:author: Sebastien Ravel
:contact: sebastien.ravel@cirad.fr
:date: 11/07/2016
:version: 0.1
Script description
------------------
This program concatenates multiple fasta files sharing the same sequence names into a single file
Example
-------
>>> concatFastasFile.py -d NT_ALIGN/ -o 2241Ortho-82souches.fasta
Help Programm
-------------
optional arguments:
- \-h, --help
show this help message and exit
- \-v, --version
display concatFastasFile.py version number and exit
Input mandatory infos for running:
- \-d <path/to/directory>, --directory <path/to/directory>
path to directory of fasta files ("fasta", "fas", "fa", "fna")
- \-o <filename>, --out <filename>
Name of output file
"""
##################################################
## Modules
##################################################
#Import MODULES_SEB
import sys, os
current_dir = os.path.dirname(os.path.abspath(__file__))+"/"
sys.path.insert(1,current_dir+'../modules/')
from MODULES_SEB import relativeToAbsolutePath, directory, concatFastasFiles
# Python modules
import argparse
from time import localtime, strftime
## BIO Python modules
from Bio import SeqIO
from Bio.SeqRecord import SeqRecord
##################################################
## Variables Globales
version="0.1"
VERSION_DATE='11/07/2016'
debug="False"
#debug="True"
##################################################
## Main code
##################################################
if __name__ == "__main__":
# Initializations
start_time = strftime("%d-%m-%Y_%H:%M:%S", localtime())
# Parameters recovery
	parser = argparse.ArgumentParser(prog='concatFastasFile.py', description='''This program concatenates multiple fasta files sharing the same sequence names into a single file''')
parser.add_argument('-v', '--version', action='version', version='You are using %(prog)s version: ' + version, help=\
'display concatFastasFile.py version number and exit')
#parser.add_argument('-dd', '--debug',choices=("False","True"), dest='debug', help='enter verbose/debug mode', default = "False")
filesReq = parser.add_argument_group('Input mandatory infos for running')
	filesReq.add_argument('-d', '--directory', metavar="<path/to/directory>",type=directory, required=True, dest = 'fastaFileDir', help = 'path to directory of fasta files ("fasta", "fas", "fa", "fna")')
filesReq.add_argument('-o', '--out', metavar="<filename>", required=True, dest = 'paramoutfile', help = 'Name of output file')
# Check parameters
args = parser.parse_args()
#Welcome message
print("#################################################################")
print("# Welcome in concatFastasFile (Version " + version + ") #")
print("#################################################################")
print('Start time: ', start_time,'\n')
	# Retrieve the arguments
pathFastaFile = args.fastaFileDir
outputfilename = relativeToAbsolutePath(args.paramoutfile)
# resume value to user
print(" - Intput Info:")
print("\t - Fasta were in directory: %s" % pathFastaFile.pathDirectory)
print(" - Output Info:")
print("\t - Output file fasta is: %s" % outputfilename)
nbfile = len(pathFastaFile.lsExtInDirToList(["fasta","fas","fa","fna"]))
dico_concate = concatFastasFiles(pathFastaFile.pathDirectory)
	output_handle = open(outputfilename, "w")
	for ID, sequence in dico_concate.items():
		record = SeqRecord(sequence, id=ID, name=ID, description="")
		SeqIO.write(record, output_handle, "fasta")
	output_handle.close()
print("\n\nExecution summary:")
print(" - Outputting \n\
Il y a au final %i fichiers concaténer\n\
les sequences sont ajouter dans le fichier %s\n" %(nbfile,outputfilename))
print("\nStop time: ", strftime("%d-%m-%Y_%H:%M:%S", localtime()))
print("#################################################################")
print("# End of execution #")
print("#################################################################")
|
Key Stage 2 (ages seven to eleven).
Primary teachers are expected to develop schemes of work and lesson plans in line with curriculum objectives and the school policy. They facilitate learning by establishing a relationship with students and by their organisation of learning resources and the classroom learning environment.
Why do schools use Creazione Primary Teachers?
Why do teachers & educational personnel use Creazione?
Why are we great to work for?
Creazione employ full-time staff and take employee happiness and wellbeing very seriously in order to create a fun, vibrant, effective and streamlined company.
We provide our staff with ongoing professional development.
We offer excellent rates of pay recognising individual ability and experience. |
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant l'action pratiquer_talent."""
from primaires.format.fonctions import supprimer_accents
from primaires.scripting.action import Action
from primaires.scripting.instruction import ErreurExecution
class ClasseAction(Action):
"""Fait pratiquer un talent à un personnage.
Contrairement à l'action 'enseigner_talent', cette action se
base sur la difficulté d'apprentissage d'un talent pour
"l'apprendre naturellement". Si l'apprentissage réussit, le
personnage verra le message "Vous progressez dans
l'apprentissage du talent...".
"""
@classmethod
def init_types(cls):
cls.ajouter_types(cls.pratiquer_talent, "Personnage", "str")
cls.ajouter_types(cls.pratiquer_talent, "Personnage", "str", "Fraction")
@staticmethod
def pratiquer_talent(personnage, nom_talent, probabilite=1):
"""Fait pratiquer le talent au personnage spécifié.
Paramètres à entrer :
* personnage : le personnage à qui l'on veut enseigner le talent
* nom_talent : le nom du talent, sous la forme d'une chaîne
* probabilite (optionnelle) : un nombre influençant l'apprentissage.
La probabilité est un nombre entre 0 et 1 qui affecte
l'apprentissage du talent. La probabilité par défaut
est de 1. Si la probabilité est plus faible, apprendre
le talent devient plus difficile. Par exemple, une
probabilité de 1/2 (0.5) rend l'apprentissage deux fois
plus difficile. Il est parfois utile de faire varier la
difficulté de l'apprentissage d'un talent (par exemple,
en fonction de la qualité des actions réussies par le
personnage).
Exemple d'utilisation :
pratiquer_talent personnage "apprivoisement"
# Le personnage va peut-être apprendre le talent
pratiquer_talent personnage "apprivoisement" 1/3
# C'est trois fois moins probable
"""
nom_talent = supprimer_accents(nom_talent).lower()
cle = None
talent = None
for t_talent in importeur.perso.talents.values():
if supprimer_accents(t_talent.nom) == nom_talent:
talent = t_talent
cle = talent.cle
break
if talent is None:
raise ErreurExecution("talent inconnu : {}".format(repr(
nom_talent)))
personnage.pratiquer_talent(cle, 1 / float(probabilite))
|
Local road crossings and utility adjustments.
The Samalaju-Bintulu-Mukah Railway project is part of the Sarawak Corridor of Renewable Energy (SCORE) project.
This is the first modern railway to be commissioned in Sarawak and follows on from a previous feasibility study undertaken by SMEC in 2010.
Samalaju is one of the key growth areas of SCORE and is earmarked as a centre for heavy industries, while Mukah is designated as the administrative, research and development hub for the vast central region of Sarawak.
The Samalaju-Bintulu-Mukah rail link will enhance transportation facilities within the SCORE area, and support development of heavy industries. The line will carry mixed freight, with the potential to carry passengers in the future.
The project comprises approximately 200 km of standard gauge freight rail and associated infrastructure for Phase 1, located between Mukah and Samalaju Industrial Park.
The project involves finalising the alignment of the new 200 km route through tough terrain with high cuts and deep embankments, with extensive soil improvement required where the route traverses soft coastal ground south of Bintulu.
__authors__ = ['Joel Wright']
import logging
import pygame
import pygame.time
import random
from DDRPi import DDRPiPlugin
from pygame.locals import *
class PongPlugin(DDRPiPlugin):
# Static map from joypad to player name
__player__ = {
0: 'player1',
1: 'player2'
}
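	# Maps a digit to the lit pixels of a 3x5 glyph whose top-left
	# corner is at (x,y); used by _draw_score to render the score.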
__numbers__ = {
0: lambda (x,y): [(x,y),(x,y+1),(x,y+2),(x,y+3),(x,y+4),(x+1,y),(x+1,y+4),(x+2,y),(x+2,y+1),(x+2,y+2),(x+2,y+3),(x+2,y+4)],
1: lambda (x,y): [(x,y+1),(x,y+4),(x+1,y),(x+1,y+1),(x+1,y+2),(x+1,y+3),(x+1,y+4),(x+2,y+4)],
2: lambda (x,y): [(x,y),(x,y+2),(x,y+3),(x,y+4),(x+1,y),(x+1,y+2),(x+1,y+4),(x+2,y),(x+2,y+1),(x+2,y+2),(x+2,y+4)],
3: lambda (x,y): [(x,y),(x,y+4),(x+1,y),(x+1,y+2),(x+1,y+4),(x+2,y),(x+2,y+1),(x+2,y+2),(x+2,y+3),(x+2,y+4)],
4: lambda (x,y): [(x,y),(x,y+1),(x,y+2),(x+1,y+2),(x+2,y),(x+2,y+1),(x+2,y+2),(x+2,y+3),(x+2,y+4)],
5: lambda (x,y): [(x,y),(x,y+1),(x,y+2),(x,y+4),(x+1,y),(x+1,y+2),(x+1,y+4),(x+2,y),(x+2,y+2),(x+2,y+3),(x+2,y+4)],
6: lambda (x,y): [(x,y),(x,y+1),(x,y+2),(x,y+3),(x,y+4),(x+1,y),(x+1,y+2),(x+1,y+4),(x+2,y),(x+2,y+2),(x+2,y+3),(x+2,y+4)],
7: lambda (x,y): [(x,y),(x+1,y),(x+2,y),(x+2,y+1),(x+2,y+2),(x+2,y+3),(x+2,y+4)],
8: lambda (x,y): [(x,y),(x,y+1),(x,y+2),(x,y+3),(x,y+4),(x+1,y),(x+1,y+2),(x+1,y+4),(x+2,y),(x+2,y+1),(x+2,y+2),(x+2,y+3),(x+2,y+4)],
9: lambda (x,y): [(x,y),(x,y+1),(x,y+2),(x+1,y),(x+1,y+2),(x+2,y),(x+2,y+1),(x+2,y+2),(x+2,y+3),(x+2,y+4)]
}
def configure(self, config, image_surface):
"""
Called to configure the plugin before we start it.
"""
self.ddrpi_config = config
self.ddrpi_surface = image_surface
self._reset()
def start(self):
"""
Start the plugin.
"""
self.game_state['state'] = "RUNNING"
x_speed = self.game_state['ball_x_speed']
pygame.time.set_timer(USEREVENT+2,x_speed)
y_speed = self.game_state['ball_y_speed']
pygame.time.set_timer(USEREVENT+3,y_speed)
def stop(self):
"""
Stop the plugin if necessary - e.g. stop writing to the dance surface.
"""
self.game_state['state'] = "STOPPED"
self._disable_move_events()
def pause(self):
"""
Pauses the plugin - e.g. saves a game state when we enter menu mode.
"""
self.game_state['state'] = "PAUSED"
self._disable_move_events()
def _disable_move_events(self):
"""
Disable recurring movement events
"""
pygame.time.set_timer(USEREVENT+0,0)
pygame.time.set_timer(USEREVENT+1,0)
pygame.time.set_timer(USEREVENT+2,0)
pygame.time.set_timer(USEREVENT+3,0)
def resume(self):
"""
Resumes the plugin from a paused state.
"""
if self.game_state['state'] == "STOPPED":
self._draw_state()
else: # restart repeating events
self.game_state['state'] = "RUNNING"
x_speed = self.game_state['ball_x_speed']
pygame.time.set_timer(USEREVENT+2,x_speed)
y_speed = self.game_state['ball_y_speed']
pygame.time.set_timer(USEREVENT+3,y_speed)
def display_preview(self):
"""
Construct a splash screen suitable to display for a plugin selection menu
"""
black = (0,0,0)
self.ddrpi_surface.clear_tuple(black)
w = self.ddrpi_surface.width
h = self.ddrpi_surface.height
white = (255,255,255)
for x in range(0,w):
self.ddrpi_surface.draw_tuple_pixel(x,0,white)
self.ddrpi_surface.draw_tuple_pixel(x,h-1,white)
grey = (63,63,63)
for y in range(1,h-1):
self.ddrpi_surface.draw_tuple_pixel(0,y,grey)
self.ddrpi_surface.draw_tuple_pixel(w-1,y,grey)
if not y%2 == 0:
if not w%2 == 0:
self.ddrpi_surface.draw_tuple_pixel(w/2,y,grey)
else:
self.ddrpi_surface.draw_tuple_pixel(w/2,y,grey)
self.ddrpi_surface.draw_tuple_pixel(w/2-1,y,grey)
rx = random.randint(2, w-3)
ry = random.randint(2, h-3)
self.ddrpi_surface.draw_tuple_pixel(rx, ry, white)
p1y = random.randint(2, h-5)
p2y = random.randint(2, h-5)
self.ddrpi_surface.draw_tuple_box((1,p1y),(1,p1y+2),white)
self.ddrpi_surface.draw_tuple_box((w-2,p2y),(w-2,p2y+2),white)
self.ddrpi_surface.blit()
def handle(self, event):
"""
Handle the pygame event sent to the plugin from the main loop
"""
if self.game_state['state'] == "RUNNING":
repeats = {
"player1": 0,
"player2": 1
}
# Update the boards according to the event
if pygame.event.event_name(event.type) == "JoyAxisMotion":
# Handle the move
joypad = event.joy
player = PongPlugin.__player__[joypad]
direction = int(event.value)
if event.axis in [0,1]: # Ignore extra axes from complicated controllers
if direction == 0:
pygame.time.set_timer(USEREVENT+joypad,0)
else:
repeat_speed = self.game_state['initial_repeat_delay']
pygame.time.set_timer(USEREVENT+joypad,repeat_speed)
if player == 'player2' and event.axis == 0:
# Invert left/right for player 2 for face2face gaming :)
self.game_state[player]['direction'] = -direction
else:
self.game_state[player]['direction'] = direction
self._move_bat(player,self.game_state[player]['direction'])
elif pygame.event.event_name(event.type) == "UserEvent":
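				# pygame.USEREVENT is 24, so subtracting 24 recovers N from USEREVENT+N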
event_number = event.type - 24
if event_number < 2: # Events 0 and 1 are the repeat moves for players
player = PongPlugin.__player__[event_number]
speed = self.game_state['button_repeat_speed']
pygame.time.set_timer(USEREVENT+event_number,speed)
self._move_bat(player,self.game_state[player]['direction'])
elif event_number == 2: # USEREVENT+2 = x-axis ball motion
speed = self.game_state['ball_x_speed']
pygame.time.set_timer(USEREVENT+event_number,speed)
logging.debug("PongPlugin: Handling x-axis ball motion")
delta = self.game_state["ball_x_direction"]
in_play = self._move_ball((delta, 0))
if not in_play:
self._player_missed()
elif event_number == 3: # USEREVENT+3 = y-axis ball motion
logging.debug("PongPlugin: Handling y-axis ball motion")
# The current y-direction speed is set when the ball hits a bat
# so we update the y-axis event every time it occurs in case the
# speed has changed
speed = self.game_state['ball_y_speed']
pygame.time.set_timer(USEREVENT+event_number,speed)
delta = self.game_state['ball_y_direction']
in_play = self._move_ball((0, delta)) # A move in the y-axis cannot put the ball out of play
else:
logging.debug("PongPlugin: Tried to handle an unknown USEREVENT")
elif pygame.event.event_name(event.type) == "JoyButtonDown":
# Handle the button
joypad = event.joy
button = event.button
if button == 9:
logging.debug("PongPlugin: Game was paused by %s" % self.__player__[joypad])
self.pause()
else:
logging.debug("PongPlugin: Button %s does nothing" % button)
else:
logging.debug("PongPlugin: Tried to handle an unknown event type")
elif self.game_state['state'] == "STOPPED":
if pygame.event.event_name(event.type) == "JoyButtonDown":
# Handle the start button
joypad = event.joy
button = event.button
if button == 9:
self._reset()
self.start()
elif self.game_state['state'] == "PAUSED":
if pygame.event.event_name(event.type) == "JoyButtonDown":
# Handle the start button
joypad = event.joy
button = event.button
if button == 9:
self.resume()
if button == 0:
self._reset()
self.start()
elif self.game_state['state'] == "BETWEEN_POINTS":
if pygame.event.event_name(event.type) == "UserEvent":
event_number = event.type - 24
if event_number == 4: # Event 4 is the restart event after a point
pygame.time.set_timer(USEREVENT+4,0)
self.resume()
else:
logging.debug("PongPlugin: Unknown user event")
else:
logging.debug("PongPlugin: Need to handle state: " % self.__state__)
def update_surface(self):
"""
Write the updated plugin state to the dance surface and blit
"""
# Draw the background
black = (0,0,0)
self.ddrpi_surface.clear_tuple(black)
w = self.ddrpi_surface.width
h = self.ddrpi_surface.height
white = (255,255,255)
for x in range(0,w):
self.ddrpi_surface.draw_tuple_pixel(x,0,white)
self.ddrpi_surface.draw_tuple_pixel(x,h-1,white)
grey = (63,63,63)
for y in range(1,h-1):
self.ddrpi_surface.draw_tuple_pixel(0,y,grey)
self.ddrpi_surface.draw_tuple_pixel(w-1,y,grey)
if not y%2 == 0:
if not w%2 == 0:
self.ddrpi_surface.draw_tuple_pixel(w/2,y,grey)
else:
self.ddrpi_surface.draw_tuple_pixel(w/2,y,grey)
self.ddrpi_surface.draw_tuple_pixel(w/2-1,y,grey)
# Draw the current player bats and position of the ball
(bx,by) = self.game_state['ball_position']
self.ddrpi_surface.draw_tuple_pixel(bx,by,white)
p1by = self.game_state['player1']['position']
self.ddrpi_surface.draw_tuple_box((1,p1by),(1,p1by+2),white)
p2by = self.game_state['player2']['position']
self.ddrpi_surface.draw_tuple_box((w-2,p2by),(w-2,p2by+2),white)
st = self.game_state['state']
if not st == "RUNNING":
self._draw_score()
# blit to the floor
self.ddrpi_surface.blit()
def _reset(self):
w = self.ddrpi_surface.width
h = self.ddrpi_surface.height
self.game_state = {
'player1': {
'position': h/2-2,
'score': 0,
'direction': 0
},
'player2': {
'position': h/2-2,
'score': 0,
'direction': 0
},
'button_repeat_speed': 100,
'initial_repeat_delay': 200,
'ball_x_direction': 1,
'ball_x_speed': 150, # I expect this to remain constant
'ball_y_direction': [1,-1][random.randint(0,1)],
'ball_y_speed': 150, # Updated when the ball hits the bat, refreshed every y-move userevent
'ball_position': (2,h/2-1),
'state': "RUNNING",
'bat_size': 3
}
def _move_bat(self, player, y_delta):
"""
Moves a player's bat up or down depending on the y-delta given
"""
h = self.ddrpi_surface.height
current_pos = self.game_state[player]['position']
new_pos = current_pos + y_delta
bat_size = self.game_state['bat_size']
if not (new_pos < 1 or new_pos > h-bat_size-1):
self.game_state[player]['position'] = new_pos
def _move_ball(self,delta):
"""
Moves the ball according to the delta given
Returns a boolean to indicate if the ball is still in play
"""
(dx,dy) = delta
(cpx,cpy) = self.game_state['ball_position']
new_pos = (npx,npy) = (cpx+dx,cpy+dy)
w = self.ddrpi_surface.width
if self._hits_bat(new_pos) and cpx > 1 and cpx < w - 2:
self._update_y_speed(npy)
self._update_x_speed()
current_direction = self.game_state['ball_x_direction']
self.game_state['ball_x_direction'] = -current_direction
new_pos_x = (cpx - current_direction,cpy)
# move the ball
self.game_state['ball_position'] = new_pos_x
return True
elif self._hits_side(new_pos):
current_direction = self.game_state['ball_y_direction']
self.game_state['ball_y_direction'] = -current_direction
new_pos_y = (cpx,cpy - current_direction)
# move the ball
self.game_state['ball_position'] = new_pos_y
return True
else:
self.game_state['ball_position'] = new_pos
# Move the ball
w = self.ddrpi_surface.width
if (npx == 0 or npx == w-1): # The ball has passed the bat
return False
else:
return True
def _update_x_speed(self):
"""
Smoothly update the speed for the ball motion in the x-axis
"""
speed = self.game_state['ball_x_speed']
speed -= 5
if not speed < 75:
self.game_state['ball_x_speed'] = speed
def _update_y_speed(self, y):
"""
Calculate the new update speed for the ball motion in the y-axis
"""
w = self.ddrpi_surface.width
(bx,by) = self.game_state['ball_position']
speed = self.game_state['ball_y_speed']
if bx <= 2: # we need to compare the y axis position to p1's bat
bat_y = self.game_state['player1']['position']
if not by == bat_y + 1: # if we hit the middle then leave as is
direction = self.game_state['ball_y_direction']
if by == bat_y + 1 + direction: # Increase speed
speed -= random.randint(25,50)
else:
speed += random.randint(25,50)
elif bx >= w-3: # we need to compare the y axis position to p2's bat
bat_y = self.game_state['player2']['position']
if not by == bat_y + 1: # if we hit the middle then leave as is
direction = self.game_state['ball_y_direction']
if by == bat_y + 1 + direction: # Increase speed
speed -= random.randint(25,50)
else:
speed += random.randint(25,50)
else:
logging.debug("PongPlugin: Shouldn't be updating the y speed in the middle of the court")
if speed < 30:
self.game_state['ball_y_speed'] = speed
elif speed > 400:
direction = [1,-1][random.randint(0,1)]
self.game_state['ball_y_speed'] = speed
self.game_state['ball_y_direction'] = direction
else:
self.game_state['ball_y_speed'] = speed
def _hits_bat(self, pos):
"""
Tests whether the positon given is along a player's bat
"""
(px,py) = pos
w = self.ddrpi_surface.width
if px == 1: # Player1 bat x-coord
bat_pos = self.game_state['player1']['position']
if py > bat_pos+2 or py < bat_pos:
return False
else:
return True
elif px == w-2: # Player 2 bat x-coord
bat_pos = self.game_state['player2']['position']
if py > bat_pos+2 or py < bat_pos:
return False
else:
return True
else:
return False
def _hits_side(self, pos):
"""
Tests whether the positon given is along the side of the playing area
"""
(px,py) = pos
h = self.ddrpi_surface.height
if py == 0 or py == h-1:
return True
else:
return False
def _player_missed(self):
"""
Handles the event of a player missing the ball
"""
self.game_state['state'] = "BETWEEN_POINTS"
# Disable move events
self._disable_move_events()
# Update score
(bx,by) = self.game_state['ball_position']
w = self.ddrpi_surface.width
h = self.ddrpi_surface.height
if bx == 0:
self.game_state['player2']['score'] += 1
self.game_state['ball_position'] = (w-3,h/2-1)
self.game_state['ball_x_direction'] = -1
elif bx == w-1:
self.game_state['player1']['score'] += 1
self.game_state['ball_position'] = (2,h/2-1)
self.game_state['ball_x_direction'] = 1
self.game_state['player1']['position'] = h/2-2
self.game_state['player2']['position'] = h/2-2
self.game_state['ball_x_speed'] = 150
self.game_state['ball_y_speed'] = 150
self.game_state['ball_y_direction'] = [1,-1][random.randint(0,1)]
winner = None
p1_score = self.game_state['player1']['score']
p2_score = self.game_state['player2']['score']
if p1_score == 9:
winner = 'player1'
self.game_state['state'] = "STOPPED"
elif p2_score == 9:
winner = 'player2'
self.game_state['state'] = "STOPPED"
else:
pygame.time.set_timer(USEREVENT+4,2000)
logging.debug("PongPlugin Score: Player 1 (%s) - Player 2 (%s)" % (p1_score, p2_score))
def _draw_score(self):
"""
Output the current score onto the game area
"""
w = self.ddrpi_surface.width
h = self.ddrpi_surface.height
p1sx = (w/2-3)/2 + 1
p2sx = (w/2-3)/2 + w/2
psy = h/2 - 3
p1_score = self.game_state['player1']['score']
p1_score_pixels = PongPlugin.__numbers__[p1_score]((p1sx,psy))
p2_score = self.game_state['player2']['score']
p2_score_pixels = PongPlugin.__numbers__[p2_score]((p2sx,psy))
white = (255,255,255)
red = (255,0,0)
for (x,y) in p1_score_pixels:
if p2_score == 9:
self.ddrpi_surface.draw_tuple_pixel(x,y,red)
else:
self.ddrpi_surface.draw_tuple_pixel(x,y,white)
for (x,y) in p2_score_pixels:
if p1_score == 9:
self.ddrpi_surface.draw_tuple_pixel(x,y,red)
else:
self.ddrpi_surface.draw_tuple_pixel(x,y,white)
|
ARE YOU A REGISTERED USER FROM THE PREVIOUS MYWELCOME WEBSITE?
If you had an account active on the old MyWelcome website, we are asking that you verify your email address and reset your password for this new site.
*Calls to 0800 numbers are free from a UK landline; mobile and international call costs may vary.
You may wish to check this with your mobile service provider. |
# -*- coding: utf-8 -*-
##############################################################################
# For copyright and license notices, see __openerp__.py file in root directory
##############################################################################
from openerp import models, fields, api
class MrpProduction(models.Model):
_inherit = 'mrp.production'
@api.one
@api.depends('product_id')
def _calc_production_attachments(self):
self.product_attachments = None
if self.product_id:
cond = [('res_model', '=', 'product.product'),
('res_id', '=', self.product_id.id)]
attachments = self.env['ir.attachment'].search(cond)
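            # The (6, 0, ids) ORM command replaces the whole m2m set at once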
self.product_attachments = [(6, 0, attachments.mapped('id'))]
product_attachments = fields.Many2many(
comodel_name='ir.attachment',
relation='rel_mrp_production_product_attachment',
column1='production_id', column2='attachment_id', readonly=True,
string='Product attachments', compute='_calc_production_attachments')
class MrpProductionWorkcenterLine(models.Model):
_inherit = 'mrp.production.workcenter.line'
@api.one
@api.depends('workcenter_id')
def _calc_workcenter_line_attachments(self):
self.workcenter_attachments = None
if self.workcenter_id:
cond = [('res_model', '=', 'mrp.workcenter'),
('res_id', '=', self.workcenter_id.id)]
attachments = self.env['ir.attachment'].search(cond)
self.workcenter_attachments = [(6, 0, attachments.mapped('id'))]
workcenter_attachments = fields.Many2many(
comodel_name='ir.attachment',
relation='rel_workcenterline_workcenter_attachment',
column1='workcenter_line_id', column2='attachment_id', readonly=True,
string='Workcenter attachments',
compute='_calc_workcenter_line_attachments')
|
Our Ascend Max 1800 pricing includes our standard 1 year advanced replacement warranty.
All Ascend Max 1800 products are fully tested before shipping and most products are available for overnight shipping. We stock a large variety of products, some new or refurbished and some used.
If you need to buy an Ascend Max 1800, or want to sell, rent or trade this item, let us know today. Our prices are negotiable on every product, and we will also price match other advertised prices if the items are in stock with a similar warranty period.
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import time
from odoo.addons.account.tests.account_test_classes import AccountingTestCase
from odoo.exceptions import ValidationError
from odoo.tests import tagged
CH_IBAN = 'CH15 3881 5158 3845 3843 7'
QR_IBAN = 'CH21 3080 8001 2345 6782 7'
@tagged('post_install', '-at_install')
class TestSwissQR(AccountingTestCase):
def setUp(self):
super(TestSwissQR, self).setUp()
# Activate SwissQR in Swiss invoices
self.env['ir.config_parameter'].create(
{'key': 'l10n_ch.print_qrcode', 'value': '1'}
)
self.customer = self.env['res.partner'].create(
{
"name": "Partner",
"street": "Route de Berne 41",
"street2": "",
"zip": "1000",
"city": "Lausanne",
"country_id": self.env.ref("base.ch").id,
}
)
self.env.user.company_id.partner_id.write(
{
"street": "Route de Berne 88",
"street2": "",
"zip": "2000",
"city": "Neuchâtel",
"country_id": self.env.ref('base.ch').id,
}
)
self.invoice1 = self.create_invoice('base.CHF')
sale_journal = self.env['account.journal'].search([("type", "=", "sale")])
sale_journal.invoice_reference_model = "ch"
def create_invoice(self, currency_to_use='base.CHF'):
""" Generates a test invoice """
product = self.env.ref("product.product_product_4")
acc_type = self.env.ref('account.data_account_type_current_assets')
account = self.env['account.account'].search(
[('user_type_id', '=', acc_type.id)], limit=1
)
invoice = (
self.env['account.move']
.with_context(default_type='out_invoice')
.create(
{
'type': 'out_invoice',
'partner_id': self.customer.id,
'currency_id': self.env.ref(currency_to_use).id,
'date': time.strftime('%Y') + '-12-22',
'invoice_line_ids': [
(
0,
0,
{
'name': product.name,
'product_id': product.id,
'account_id': account.id,
'quantity': 1,
'price_unit': 42.0,
},
)
],
}
)
)
return invoice
def create_account(self, number):
""" Generates a test res.partner.bank. """
return self.env['res.partner.bank'].create(
{
'acc_number': number,
'partner_id': self.env.user.company_id.partner_id.id,
}
)
def swissqr_not_generated(self, invoice):
""" Prints the given invoice and tests that no Swiss QR generation is triggered. """
self.assertFalse(
invoice.can_generate_qr_bill(),
'No Swiss QR should be generated for this invoice',
)
def swissqr_generated(self, invoice, ref_type='NON'):
""" Prints the given invoice and tests that a Swiss QR generation is triggered. """
self.assertTrue(
invoice.can_generate_qr_bill(), 'A Swiss QR can be generated'
)
if ref_type == 'QRR':
self.assertTrue(invoice.invoice_payment_ref)
struct_ref = invoice.invoice_payment_ref
unstr_msg = invoice.ref or invoice.name or ''
else:
struct_ref = ''
unstr_msg = invoice.invoice_payment_ref or invoice.ref or invoice.name or ''
unstr_msg = (unstr_msg or invoice.number).replace('/', '%2F')
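        # Expected Swiss QR-bill payload: one URL-encoded field per line
        # (SPC header, version, coding type, creditor IBAN and address,
        # amount and currency, debtor address, reference type and
        # reference, unstructured message, EPD trailer).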
payload = (
"SPC%0A"
"0200%0A"
"1%0A"
"{iban}%0A"
"K%0A"
"YourCompany%0A"
"Route+de+Berne+88%0A"
"2000+Neuch%C3%A2tel%0A"
"%0A%0A"
"CH%0A"
"%0A%0A%0A%0A%0A%0A%0A"
"42.00%0A"
"CHF%0A"
"K%0A"
"Partner%0A"
"Route+de+Berne+41%0A"
"1000+Lausanne%0A"
"%0A%0A"
"CH%0A"
"{ref_type}%0A"
"{struct_ref}%0A"
"{unstr_msg}%0A"
"EPD"
).format(
iban=invoice.invoice_partner_bank_id.sanitized_acc_number,
ref_type=ref_type,
struct_ref=struct_ref or '',
unstr_msg=unstr_msg,
)
expected_url = ("/report/barcode/?type=QR&value={}"
"&width=256&height=256&quiet=1").format(payload)
url = invoice.invoice_partner_bank_id.build_swiss_code_url(
invoice.amount_residual,
invoice.currency_id.name,
None,
invoice.partner_id,
None,
invoice.invoice_payment_ref,
invoice.ref or invoice.name,
)
self.assertEqual(url, expected_url)
def test_swissQR_missing_bank(self):
        # Let us test the generation of a SwissQR for an invoice, first by showing that
        # a QR code is only generated when Odoo has all the data it needs.
self.invoice1.post()
self.swissqr_not_generated(self.invoice1)
def test_swissQR_iban(self):
# Now we add an account for payment to our invoice
# Here we don't use a structured reference
iban_account = self.create_account(CH_IBAN)
self.invoice1.invoice_partner_bank_id = iban_account
self.invoice1.post()
self.swissqr_generated(self.invoice1, ref_type="NON")
def test_swissQR_qriban(self):
# Now use a proper QR-IBAN, we are good to print a QR Bill
qriban_account = self.create_account(QR_IBAN)
        self.assertEqual(qriban_account.acc_type, 'qr-iban')
self.invoice1.invoice_partner_bank_id = qriban_account
self.invoice1.post()
self.swissqr_generated(self.invoice1, ref_type="QRR")
|
Emilia-Romagna Travel Logs of "ドライブ"
This is a summary of all the Travel Logs related to ドライブ (driving) that we have available for the Emilia-Romagna (Italy) area. Please go ahead and click on them if you want to read their contents. Does reading about ドライブ in Emilia-Romagna (Italy) make you want to go there too? In that case let's start planning your next trip!
import random
import csv
import urllib2
class RandomStockTickers():
def __init__(self, number_of_stocks):
self.number_of_stocks = number_of_stocks
#Fetches CSV from a specified URL and converts its contents to a list
def get_list_csv_from_url(self, url):
response = urllib2.urlopen(url)
document = csv.reader(response)
rows = list(document)
return rows
#Creates URLs used for NASDAQ's REST API, fetches CSVs, then combines them into one list
def get_combined_stock_symbols_from_nasdaq_nyse_amex(self):
tickers_table = []
url_list = []
exchange_names_strings_list = ["nasdaq","nyse","amex"]
for name in exchange_names_strings_list:
exchange_tickers_url = "http://www.nasdaq.com/screening/companies-by-name.aspx?letter=0&exchange=" + \
name + "&render=download"
url_list.append(exchange_tickers_url)
for url in url_list:
tickers = self.get_list_csv_from_url(url)
for i in range(1, len(tickers)):
tickers_table.append(tickers[i])
return tickers_table
#Returns a specific number of stocks that are randomly picked from the combined list of stock tickers
def get_random_stock_tickers(self):
random_stock_tickers = []
number_of_stocks = self.number_of_stocks
combined_stock_symbols = self.get_combined_stock_symbols_from_nasdaq_nyse_amex()
row_count = len(combined_stock_symbols)
for i in range(0, number_of_stocks):
            random_stock_row = random.randrange(0, row_count)  # stop is exclusive, so this covers every row
random_stock_tickers.append(combined_stock_symbols[random_stock_row][0])
return random_stock_tickers
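
# Illustrative usage sketch (assumes live network access to the NASDAQ
# screener CSV endpoint used above):
#
#     picker = RandomStockTickers(5)
#     print picker.get_random_stock_tickers()  # e.g. five random symbols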
|
The main reason you are still troubled by oral problems is that your tooth cleaning is not thorough. This electric toothbrush is designed for people who have yellow teeth, bleeding gums or bad breath, and who find brushing their teeth troublesome. Be confident and always show your brightest teeth.
High-frequency vibration is transmitted directly to the brush head. With no friction inside the motor, it is powerful and stable.
The head rotates 8,000 times per minute, and the soft brush brings you a new, all-dimensional tooth brushing experience.
Anti-slip, waterproof materials, an attractive appearance and a scientific design.
One-piece design, waterproof and safe, can be used in the bathroom.
3 additional brush heads meet your needs all year round.
# -*- coding: utf-8 -*-
# (C) 2019 Smile (<http://www.smile.fr>)
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
from odoo import api, fields, models
from odoo.exceptions import UserError
class AccountAssetHistory(models.Model):
_name = 'account.asset.history'
_description = 'Asset history'
_inherit = 'abstract.asset'
_rec_name = 'asset_id'
_order = 'date_to desc'
date_to = fields.Datetime(
'Until', readonly=True, default=fields.Datetime.now)
user_id = fields.Many2one(
'res.users', 'User', readonly=True, ondelete='restrict',
default=lambda self: self._uid)
asset_id = fields.Many2one(
'account.asset.asset', 'Asset',
required=True, ondelete='cascade', index=True, auto_join=True)
category_id = fields.Many2one(
'account.asset.category', 'Asset Category',
required=True, ondelete='restrict')
display_validation_warning = fields.Boolean(
compute='_compute_display_validation_warning')
company_id = fields.Many2one(
related='asset_id.company_id', readonly=True)
currency_id = fields.Many2one(
related='asset_id.currency_id', readonly=True)
purchase_value = fields.Monetary('Gross Value', required=True)
salvage_value = fields.Monetary('Salvage Value')
purchase_value_sign = fields.Monetary(
'Gross Value', compute='_get_book_value', store=True)
salvage_value_sign = fields.Monetary(
'Salvage Value', compute='_get_book_value', store=True)
purchase_tax_amount = fields.Monetary('Tax Amount', readonly=True)
purchase_date = fields.Date(required=True, readonly=True)
in_service_date = fields.Date('In-service Date')
benefit_accelerated_depreciation = fields.Boolean(readonly=True)
note = fields.Text('Reason')
dummy = fields.Boolean(store=False)
@api.one
@api.depends('purchase_value', 'salvage_value', 'asset_id.asset_type')
def _get_book_value(self):
        sign = -1 if self.asset_id.asset_type == 'purchase_refund' else 1
self.purchase_value_sign = self.purchase_value * sign
self.salvage_value_sign = self.salvage_value * sign
@api.one
@api.depends('category_id.asset_in_progress')
def _compute_display_validation_warning(self):
self.display_validation_warning = self._context.get(
'asset_validation') and self.category_id.asset_in_progress
@api.model
def _get_fields_to_read(self):
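        # Snapshot the fields defined on both the history and the asset
        # models, excluding Odoo's magic columns and technical fields.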
return list(set(self._fields.keys()) - set(models.MAGIC_COLUMNS)
& set(self.env['account.asset.asset']._fields.keys())
- {'old_id', '__last_update'})
@api.onchange('asset_id')
def _onchange_asset(self):
for field in self._get_fields_to_read():
self[field] = self.asset_id[field]
@api.onchange('category_id')
def _onchange_category(self):
if self.dummy:
for field in self.asset_id._category_fields:
self[field] = self.category_id[field]
else:
self.dummy = True
@api.model
def create(self, vals):
if self._context.get('data_integration'):
return super(AccountAssetHistory, self).create(vals)
# Update asset with vals and save old vals by creating a history record
asset = self.env['account.asset.asset'].browse(vals['asset_id'])
fields_to_read = self._get_fields_to_read()
old_vals = asset.read(fields_to_read, load='_classic_write')[0]
del old_vals['id']
for field in dict(vals):
if field not in fields_to_read:
old_vals[field] = vals[field]
del vals[field]
asset.with_context(from_history=True).write(vals)
asset.compute_depreciation_board()
return super(AccountAssetHistory, self).create(old_vals)
@api.multi
def button_validate(self):
if self._context.get('asset_validation'):
asset = self.mapped('asset_id')
try:
asset.validate()
except UserError:
self.unlink()
return asset.button_put_into_service()
return {'type': 'ir.actions.act_window_close'}
|
This is a short TED Talk by Rob Reid (The $8 billion iPad) that tries to infuse a little “reasonableness test” into our blind belief in the numbers provided by those with self-interest … in this case, the music/entertainment industry.
There are several examples that you could turn into signed number addition or subtraction problems. In my favorite example (about 2:57 in the video), Reid uses what he calls “Copyright Math” to “prove” that by their own calculations, the job losses in the movie industry that came with the Internet must have resulted in a negative number of people employed.
In 1998, prior to the rapid adoption of the Internet, the U.S. Motion Picture and Video Industry employed 270,000 people (according to the U.S. Bureau of Labor Statistics). Today, the movie industry claims that 373,000 jobs have been lost due to the Internet. Taken at face value, that accounting leaves 270,000 minus 373,000, or negative 103,000 people employed.
[Intermediate Algebra] If the job market for the motion picture and video industry grew by 2% every year (without the Internet “loss” figures), how many people would be employed in 2012 in the combined movie/music industries? How many jobs would have been created between 1998 and 2012 at the 2% growth rate? If the job market grew by 5% every year (without the Internet “loss” figures), how many people would be employed in 2012 in the combined movie/music industries? How many jobs would be created between 1998 and 2012 at the 5% growth rate? |
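For the growth questions, a quick compound-interest calculation does the job. Here is a minimal Python sketch, assuming the 1998 baseline of 270,000 jobs, simple annual compounding, and the 14-year window from 1998 to 2012:

baseline = 270000          # 1998 employment (U.S. Bureau of Labor Statistics)
years = 2012 - 1998        # 14 years of growth

for rate in (0.02, 0.05):
    employed = baseline * (1 + rate) ** years
    created = employed - baseline
    print("At {:.0%} annual growth: {:,.0f} employed in 2012, "
          "{:,.0f} jobs created since 1998".format(rate, employed, created))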
#!/usr/bin/env python3
# Copyright (c) 2015-2018 The Dash Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from collections import namedtuple
from test_framework.mininode import *
from test_framework.test_framework import EvoZnodeTestFramework
from test_framework.util import *
from time import *
'''
dip4-coinbasemerkleroots.py
Checks DIP4 merkle roots in coinbases
'''
class TestNode(SingleNodeConnCB):
def __init__(self):
SingleNodeConnCB.__init__(self)
self.last_mnlistdiff = None
def on_mnlistdiff(self, conn, message):
self.last_mnlistdiff = message
def wait_for_mnlistdiff(self, timeout=30):
self.last_mnlistdiff = None
def received_mnlistdiff():
return self.last_mnlistdiff is not None
return wait_until(received_mnlistdiff, timeout=timeout)
def getmnlistdiff(self, baseBlockHash, blockHash):
msg = msg_getmnlistd(baseBlockHash, blockHash)
self.send_message(msg)
self.wait_for_mnlistdiff()
return self.last_mnlistdiff
class LLMQCoinbaseCommitmentsTest(EvoZnodeTestFramework):
def __init__(self):
super().__init__(6, 5, None)
def run_test(self):
self.test_node = TestNode()
self.test_node.add_connection(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], self.test_node))
NetworkThread().start() # Start up network handling in another thread
self.test_node.wait_for_verack()
self.confirm_mns()
null_hash = format(0, "064x")
# Check if a diff with the genesis block as base returns all MNs
expectedUpdated = [mn.proTxHash for mn in self.mninfo]
mnList = self.test_getmnlistdiff(null_hash, self.nodes[0].getbestblockhash(), {}, [], expectedUpdated)
        expectedUpdated2 = list(expectedUpdated)  # keep a copy for the genesis diff check below
# Register one more MN, but don't start it (that would fail as DashTestFramework doesn't support this atm)
baseBlockHash = self.nodes[0].getbestblockhash()
self.prepare_masternode(self.mn_count)
new_mn = self.mninfo[self.mn_count]
# Now test if that MN appears in a diff when the base block is the one just before MN registration
expectedDeleted = []
expectedUpdated = [new_mn.proTxHash]
mnList = self.test_getmnlistdiff(baseBlockHash, self.nodes[0].getbestblockhash(), mnList, expectedDeleted, expectedUpdated)
assert(mnList[new_mn.proTxHash].confirmedHash == 0)
# Now let the MN get enough confirmations and verify that the MNLISTDIFF now has confirmedHash != 0
self.confirm_mns()
mnList = self.test_getmnlistdiff(baseBlockHash, self.nodes[0].getbestblockhash(), mnList, expectedDeleted, expectedUpdated)
assert(mnList[new_mn.proTxHash].confirmedHash != 0)
# Spend the collateral of the previously added MN and test if it appears in "deletedMNs"
expectedDeleted = [new_mn.proTxHash]
expectedUpdated = []
baseBlockHash2 = self.nodes[0].getbestblockhash()
self.remove_masternode(self.mn_count)
mnList = self.test_getmnlistdiff(baseBlockHash2, self.nodes[0].getbestblockhash(), mnList, expectedDeleted, expectedUpdated)
# When comparing genesis and best block, we shouldn't see the previously added and then deleted MN
mnList = self.test_getmnlistdiff(null_hash, self.nodes[0].getbestblockhash(), {}, [], expectedUpdated2)
#############################
# Now start testing quorum commitment merkle roots
self.nodes[0].generate(1)
oldhash = self.nodes[0].getbestblockhash()
# Test DIP8 activation once with a pre-existing quorum and once without (we don't know in which order it will activate on mainnet)
self.test_dip8_quorum_merkle_root_activation(True)
for n in self.nodes:
n.invalidateblock(oldhash)
self.sync_all()
first_quorum = self.test_dip8_quorum_merkle_root_activation(False)
# Verify that the first quorum appears in MNLISTDIFF
expectedDeleted = []
expectedNew = [QuorumId(100, int(first_quorum, 16))]
quorumList = self.test_getmnlistdiff_quorums(null_hash, self.nodes[0].getbestblockhash(), {}, expectedDeleted, expectedNew)
baseBlockHash = self.nodes[0].getbestblockhash()
second_quorum = self.mine_quorum()
# Verify that the second quorum appears in MNLISTDIFF
expectedDeleted = []
expectedNew = [QuorumId(100, int(second_quorum, 16))]
quorums_before_third = self.test_getmnlistdiff_quorums(baseBlockHash, self.nodes[0].getbestblockhash(), quorumList, expectedDeleted, expectedNew)
block_before_third = self.nodes[0].getbestblockhash()
third_quorum = self.mine_quorum()
# Verify that the first quorum is deleted and the third quorum is added in MNLISTDIFF (the first got inactive)
expectedDeleted = [QuorumId(100, int(first_quorum, 16))]
expectedNew = [QuorumId(100, int(third_quorum, 16))]
self.test_getmnlistdiff_quorums(block_before_third, self.nodes[0].getbestblockhash(), quorums_before_third, expectedDeleted, expectedNew)
# Verify that the diff between genesis and best block is the current active set (second and third quorum)
expectedDeleted = []
expectedNew = [QuorumId(100, int(second_quorum, 16)), QuorumId(100, int(third_quorum, 16))]
self.test_getmnlistdiff_quorums(null_hash, self.nodes[0].getbestblockhash(), {}, expectedDeleted, expectedNew)
# Now verify that diffs are correct around the block that mined the third quorum.
# This tests the logic in CalcCbTxMerkleRootQuorums, which has to manually add the commitment from the current
# block
mined_in_block = self.nodes[0].quorum("info", 100, third_quorum)["minedBlock"]
prev_block = self.nodes[0].getblock(mined_in_block)["previousblockhash"]
prev_block2 = self.nodes[0].getblock(prev_block)["previousblockhash"]
next_block = self.nodes[0].getblock(mined_in_block)["nextblockhash"]
next_block2 = self.nodes[0].getblock(next_block)["nextblockhash"]
# The 2 blocks before the one in which the quorum was mined should both give an empty diff
expectedDeleted = []
expectedNew = []
self.test_getmnlistdiff_quorums(block_before_third, prev_block2, quorums_before_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(block_before_third, prev_block, quorums_before_third, expectedDeleted, expectedNew)
# The block in which the quorum was mined and the 2 after that should all give the same diff
expectedDeleted = [QuorumId(100, int(first_quorum, 16))]
expectedNew = [QuorumId(100, int(third_quorum, 16))]
quorums_with_third = self.test_getmnlistdiff_quorums(block_before_third, mined_in_block, quorums_before_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(block_before_third, next_block, quorums_before_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(block_before_third, next_block2, quorums_before_third, expectedDeleted, expectedNew)
# A diff between the two blocks that happened after the quorum was mined should give an empty diff
expectedDeleted = []
expectedNew = []
self.test_getmnlistdiff_quorums(mined_in_block, next_block, quorums_with_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(mined_in_block, next_block2, quorums_with_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(next_block, next_block2, quorums_with_third, expectedDeleted, expectedNew)
# Using the same block for baseBlockHash and blockHash should give empty diffs
self.test_getmnlistdiff_quorums(prev_block, prev_block, quorums_before_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(prev_block2, prev_block2, quorums_before_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(mined_in_block, mined_in_block, quorums_with_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(next_block, next_block, quorums_with_third, expectedDeleted, expectedNew)
self.test_getmnlistdiff_quorums(next_block2, next_block2, quorums_with_third, expectedDeleted, expectedNew)
def test_getmnlistdiff(self, baseBlockHash, blockHash, baseMNList, expectedDeleted, expectedUpdated):
d = self.test_getmnlistdiff_base(baseBlockHash, blockHash)
# Assert that the deletedMNs and mnList fields are what we expected
assert_equal(set(d.deletedMNs), set([int(e, 16) for e in expectedDeleted]))
assert_equal(set([e.proRegTxHash for e in d.mnList]), set(int(e, 16) for e in expectedUpdated))
# Build a new list based on the old list and the info from the diff
newMNList = baseMNList.copy()
for e in d.deletedMNs:
newMNList.pop(format(e, '064x'))
for e in d.mnList:
newMNList[format(e.proRegTxHash, '064x')] = e
cbtx = CCbTx()
cbtx.deserialize(BytesIO(d.cbTx.vExtraPayload))
# Verify that the merkle root matches what we locally calculate
hashes = []
for mn in sorted(newMNList.values(), key=lambda mn: ser_uint256(mn.proRegTxHash)):
hashes.append(hash256(mn.serialize()))
merkleRoot = CBlock.get_merkle_root(hashes)
assert_equal(merkleRoot, cbtx.merkleRootMNList)
return newMNList
def test_getmnlistdiff_quorums(self, baseBlockHash, blockHash, baseQuorumList, expectedDeleted, expectedNew):
d = self.test_getmnlistdiff_base(baseBlockHash, blockHash)
assert_equal(set(d.deletedQuorums), set(expectedDeleted))
assert_equal(set([QuorumId(e.llmqType, e.quorumHash) for e in d.newQuorums]), set(expectedNew))
newQuorumList = baseQuorumList.copy()
for e in d.deletedQuorums:
newQuorumList.pop(e)
for e in d.newQuorums:
newQuorumList[QuorumId(e.llmqType, e.quorumHash)] = e
cbtx = CCbTx()
cbtx.deserialize(BytesIO(d.cbTx.vExtraPayload))
if cbtx.version >= 2:
hashes = []
for qc in newQuorumList.values():
hashes.append(hash256(qc.serialize()))
hashes.sort()
merkleRoot = CBlock.get_merkle_root(hashes)
assert_equal(merkleRoot, cbtx.merkleRootQuorums)
return newQuorumList
def test_getmnlistdiff_base(self, baseBlockHash, blockHash):
hexstr = self.nodes[0].getblockheader(blockHash, False)
header = FromHex(CBlockHeader(), hexstr)
d = self.test_node.getmnlistdiff(int(baseBlockHash, 16), int(blockHash, 16))
assert_equal(d.baseBlockHash, int(baseBlockHash, 16))
assert_equal(d.blockHash, int(blockHash, 16))
# Check that the merkle proof is valid
proof = CMerkleBlock(header, d.merkleProof)
proof = proof.serialize().hex()
assert_equal(self.nodes[0].verifytxoutproof(proof), [d.cbTx.hash])
# Check if P2P messages match with RPCs
d2 = self.nodes[0].protx("diff", baseBlockHash, blockHash)
assert_equal(d2["baseBlockHash"], baseBlockHash)
assert_equal(d2["blockHash"], blockHash)
assert_equal(d2["cbTxMerkleTree"], d.merkleProof.serialize().hex())
assert_equal(d2["cbTx"], d.cbTx.serialize().hex())
assert_equal(set([int(e, 16) for e in d2["deletedMNs"]]), set(d.deletedMNs))
assert_equal(set([int(e["proRegTxHash"], 16) for e in d2["mnList"]]), set([e.proRegTxHash for e in d.mnList]))
assert_equal(set([QuorumId(e["llmqType"], int(e["quorumHash"], 16)) for e in d2["deletedQuorums"]]), set(d.deletedQuorums))
assert_equal(set([QuorumId(e["llmqType"], int(e["quorumHash"], 16)) for e in d2["newQuorums"]]), set([QuorumId(e.llmqType, e.quorumHash) for e in d.newQuorums]))
return d
def test_dip8_quorum_merkle_root_activation(self, with_initial_quorum):
if with_initial_quorum:
# Mine one quorum before dip8 is activated
self.mine_quorum()
cbtx = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2)["tx"][0]
assert(cbtx["cbTx"]["version"] == 1)
assert(self.nodes[0].getblockchaininfo()["bip9_softforks"]["dip0008"]["status"] != "active")
while self.nodes[0].getblockchaininfo()["bip9_softforks"]["dip0008"]["status"] != "active":
self.nodes[0].generate(4)
self.sync_all()
self.nodes[0].generate(1)
sync_blocks(self.nodes)
# Assert that merkleRootQuorums is present and 0 (we have no quorums yet)
cbtx = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2)["tx"][0]
assert_equal(cbtx["cbTx"]["version"], 2)
assert("merkleRootQuorums" in cbtx["cbTx"])
merkleRootQuorums = int(cbtx["cbTx"]["merkleRootQuorums"], 16)
if with_initial_quorum:
assert(merkleRootQuorums != 0)
else:
assert_equal(merkleRootQuorums, 0)
set_mocktime(get_mocktime() + 1)
set_node_times(self.nodes, get_mocktime())
self.nodes[0].spork("SPORK_17_QUORUM_DKG_ENABLED", 0)
self.wait_for_sporks_same()
# Mine quorum and verify that merkleRootQuorums has changed
quorum = self.mine_quorum()
cbtx = self.nodes[0].getblock(self.nodes[0].getbestblockhash(), 2)["tx"][0]
assert(int(cbtx["cbTx"]["merkleRootQuorums"], 16) != merkleRootQuorums)
return quorum
def confirm_mns(self):
tm = 0
while tm < 30:
diff = self.nodes[0].protx("diff", 1, self.nodes[0].getblockcount())
found_unconfirmed = False
for mn in diff["mnList"]:
if int(mn["confirmedHash"], 16) == 0:
found_unconfirmed = True
break
if not found_unconfirmed:
break
self.nodes[0].generate(1)
tm += 1
sync_blocks(self.nodes)
if __name__ == '__main__':
LLMQCoinbaseCommitmentsTest().main()
Published at Friday, April 26th, 2019 - 4:20 PM. 4 Wiring Diagram. By Steven G. Rios.
This image collection about the 4 way switch schematic is available to download. We collected these pictures online and selected the best for you. The 4 way switch schematic images and pictures shown here were carefully chosen and uploaded by the author.
So, ultimately, we present this list of wonderful pictures for your inspiration and information regarding the 4 way switch schematic, as part of [blog] exclusive updates collection. Take your time to browse the 4 way switch schematic photos posted here, pick the ones that suit your needs, and use them for your own collection and personal use.
Regarding the image description: the picture was submitted by Steven G. Rios and has been tagged in the 4 Wiring Diagram category. You are welcome to give your opinion as feedback to our website.
"""
Django settings for flavify project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
from django.core.exceptions import ImproperlyConfigured
from django.contrib.messages import constants as messages
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
def get_env_variable(var_name):
try:
return os.environ[var_name]
except KeyError:
error_msg = "Set the %s environment variable" % var_name
raise ImproperlyConfigured(error_msg)
SECRET_KEY = get_env_variable('SECRET_KEY')
GOOGLE_RECAPTCHA_SECRET_KEY = get_env_variable('GOOGLE_RECAPTCHA_SECRET_KEY')
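# Aside: get_env_variable fails hard by design. For genuinely optional settings,
# a hypothetical variant (not part of this project) could fall back to a default:
def get_env_variable_or(var_name, default):
    # Return a default instead of raising ImproperlyConfigured.
    return os.environ.get(var_name, default)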
ALLOWED_HOSTS = []
# For bootstrap use.
MESSAGE_TAGS = {
messages.DEBUG: 'alert-info',
messages.INFO: 'alert-info',
messages.SUCCESS: 'alert-success',
messages.WARNING: 'alert-warning',
messages.ERROR: 'alert-danger',
}
# Application definition
INSTALLED_APPS = [
'flavors.apps.FlavorsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'django_select2',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'allauth.socialaccount.providers.twitter',
'bootstrap3',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'flavify.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, "templates")],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
AUTHENTICATION_BACKENDS = (
# Needed to login by username in Django admin, regardless of `allauth`
'django.contrib.auth.backends.ModelBackend',
# `allauth` specific authentication methods, such as login by e-mail
'allauth.account.auth_backends.AuthenticationBackend',
)
# BEGIN allauth settings
LOGIN_REDIRECT_URL = '/'
ACCOUNT_LOGOUT_ON_GET = True
SOCIALACCOUNT_QUERY_EMAIL = True
ACCOUNT_EMAIL_REQUIRED = True
# END allauth settings
EMAIL_HOST = 'smtp.mailgun.org'
EMAIL_USE_TLS = True
EMAIL_PORT = 587
EMAIL_HOST_USER = 'postmaster@mg.flavoration.com'
EMAIL_HOST_PASSWORD = get_env_variable('EMAIL_HOST_PASSWORD')
WSGI_APPLICATION = 'flavify.wsgi.application'
# For sites framework
SITE_ID = 1
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'America/Los_Angeles'
USE_I18N = False
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = [
os.path.join(BASE_DIR, "static"),
]
Okay - so you have eaten them fresh with spoonfuls of sugar, you have mushed them up with brown sugar and poured them over cornflakes for breakfast, you have had them on a pav, in a sponge and … there are still a couple of buckets of tamarillo staring at you from the kitchen floor. What to do?
Once, every tamarillo season, I invite special friends to an exquisite meal of Wild Pork and Tamarillo - it’s an annual tradition and, in the absence of a very cross pig this year, I used venison instead. No problems. It was just as delicious with the fruit flavourings in the casserole teasing the palate between mouthfuls of a superbly rich and aged cab/shiraz. One year I had to use beef to maintain the traditional celebration of the tamarillo harvest. With a little less fruit it was fine, and the addition of port during the cooking masked any disappointment in the choice of meat.
Several years ago I attempted to make tamarillo wine and, eighteen months later, ended up with a flavourless aviation fuel! As an addition (in small amounts) to summer punch it was fine, but I have never bothered again.
But, two very successful ways of emptying the buckets are Tamarillo Chutney and Tamarillo Sauce. The Chutney is excellent anytime of the year - with crackers and cheese, in a meat sandwich, as a side dish with a curry or spooned over Perohee (a North Canadian recipe for small pastry parcels of cheese and mashed potato). Tamarillo Sauce is, likewise, a versatile addition to the home maker’s pantry. Use it instead of tomato sauce, add it into mince dishes or spread some on a hunk of bread under a slice of cheese and grill it for a quick snack.
And, if all else fails, take the last of the offending fruit, mix it with brown sugar to your own taste requirements and freeze it uncooked in small, one serve, containers. For a pick-me-up during the winter, sensor-reheat the container in the microwave and pour the fruit over your porridge and cream!
Wild Pork and Tamarillo (with apologies to the younger folk - you will have to do some conversions).
Fry an onion in 1 ounce of butter, add 2 pound of diced wild pork and brown.
Put the pork and onion into a crock pot and add: 1 chopped apple, 1 sliced banana, 1 cup of stock (beef), 2 teaspoons of brown sugar, 2 handfuls of sultanas, 1 dessertspoon of coconut, 1 teaspoon of salt, and 1 dessertspoon of Curry Powder. Cook on low for at least 6 hours. For the last 3 hours of cooking scoop out 4 to 6 tamarillo and add gently to the casserole. Serve to some good friends with a crisp, fresh salad, heaps of specialty bread and a Coleraine Cabernet/Merlot. If you so wish, you may substitute half a cup of port for half a cup of the stock. If the sauce produced in the crock pot is too thin, leave the lid off in the last stages of cooking to allow it to reduce and thicken to your liking.
Tamarillo Sauce
Take about 8 pounds of tamarillo (scooped out), 2 large onions chopped up, 2 pound of apples chopped up (I do this and add these after the vinegar has been put in to stop them from going brown), 2 pound of brown sugar, one quarter of a pound of salt, 2 ounces of black pepper, 1 ounce of allspice, half an ounce of cayenne pepper, 2 quarts of vinegar, and 1 ounce of whole cloves. Boil this mixture in a large preserving pan for about 4 hours.
Strain the mush through a sieve (I like to push everything I can through the sieve using a big wooden spoon), pour into pretty bottles, label and give some away.
Tamarillo Chutney
Take about 3 pound of tamarillo (scooped out), 1 pound of chopped onions, and about one quarter of a pound of chopped apples, a pint of vinegar, 2 and a half pound of brown sugar, half a packet of mixed spice, a tablespoon of salt, and a scant half teaspoon of cayenne pepper. This shouldn’t need more than an hour’s boiling and will make 5 to 6 pound of chutney.
Microwave Tamarillo Chutney (for when time is short and you need to cheat!) 3 to 4 tamarillo, 1 chopped apple, 1 finely chopped onion, three quarters of a cup of brown sugar, one quarter of a teaspoon of mixed spice, one quarter of a teaspoon of salt, one quarter of a cup of vinegar. Scoop out the tamarillo, put into a microwave-proof dish and zap for about 4 minutes on high. Mash the fruit up and add all of the other ingredients. Cover and cook for 10 minutes. Uncover and cook for a further 6 minutes or until it has gone thick (I give it 3 minutes and then 1 minute bursts until I am happy with it). Pour into a medium-sized jar or let it cool and serve to those unexpected guests on the front veranda, under the wisteria!
For a more substantial snack put a spoonful on top of Perohee and garnish the plate with a few sprigs of what-ever-herb-looks-nice-at-the-moment.
When I make Perohee I MAKE PEROHEE! Huge amounts. I put them onto trays and freeze them and, once frozen, they can be bagged up for instant use anytime. You can add them to any meal from breakfast to supper.
Make a huge pot of mashed potato and add heaps of cheese. Tasty is best but any cheese your budget will allow is fine. Add in some Maggie Powdered Chicken Stock (or Bacon or Herb etc) to taste. Don’t throw the spud water away as you will need it for the pastry.
In a big bowl put 9 cups of flour and 1 teaspoon of salt. Measure the cooled spud water - you will need 4 cups but you can make the 4 cups up with tap water if there is not enough water from the spuds. Beat in 2 eggs and 1 tablespoon of oil. Use this mixture to make a pastry-type dough with the flour.
Roll it out as usual, cut into suitably sized squares (not too big) and make a sealed parcel with about 2 teaspoons of the mashed potato in the centre of each one. Mine end up being about the size of a golf ball.
Freeze or use immediately. They can be deep fried or baked in the oven.
If I am in a hurry and have to use them directly from the freezer I give them about 10 seconds (per golf ball) in the microwave before I cook them.
"""Send a reminder about working days with missing performance."""
import logging
from django.core.management.base import BaseCommand
from django.contrib.auth import models as auth_models
from django.utils.translation import ugettext as _
import datetime
import requests
from ninetofiver import settings
from ninetofiver.calculation import get_range_info
log = logging.getLogger(__name__)
class Command(BaseCommand):
"""Send a reminder about working days with missing performance."""
args = ''
help = 'Send a reminder about working days with missing performance'
def handle(self, *args, **options):
"""Send a reminder about working days with missing performance."""
# Fetch all active users
users = (auth_models.User.objects
.filter(is_active=True))
# Get range info for all users for yesterday
yesterday = datetime.date.today() - datetime.timedelta(days=1)
range_info = get_range_info(users, yesterday, yesterday)
for user in users:
user_range_info = range_info[user.id]
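# Skip users with no scheduled work hours yesterday, and users whose
# remaining hours differ from their scheduled hours (i.e. they already
# logged at least some performance).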
if (not user_range_info['work_hours']) or (user_range_info['remaining_hours'] != user_range_info['work_hours']):
log.info('User %s skipped: no work hours scheduled yesterday, or performance already logged' % user)
continue
log.info('Sending reminder to %s' % user.email)
if settings.MATTERMOST_INCOMING_WEBHOOK_URL and settings.MATTERMOST_PERFORMANCE_REMINDER_NOTIFICATION_ENABLED:
try:
requests.post(settings.MATTERMOST_INCOMING_WEBHOOK_URL, json={
'channel': '@%s' % user.username,
'text': _('Hi there! It looks like you didn\'t log any performance yesterday. Did you forget to add something? Maybe you should take a look!'),
})
except Exception:
log.error('Could not send mattermost notification!', exc_info=True)
if settings.ROCKETCHAT_INCOMING_WEBHOOK_URL and settings.ROCKETCHAT_PERFORMANCE_REMINDER_NOTIFICATION_ENABLED:
try:
requests.post(settings.ROCKETCHAT_INCOMING_WEBHOOK_URL, json={
'channel': '@%s' % user.username,
'text': _('Hi there! It looks like you didn\'t log any performance yesterday. Did you forget to add something? Maybe you should take a look!'),
})
except Exception:
log.error('Could not send rocketchat notification!', exc_info=True)
First of all, we want to assure you that we will never sell, barter, or rent your email address and other personal information to a third party. Period.
When you visit our Web site, our Web server automatically recognizes and stores the name of the domain from which you access the Internet (for example, aol.com, if you are connecting from an America Online account), the date and time you access our site, and the Internet Protocol (IP) address of the Web site from where you came. This information enables us to run site usage tools and create statistics about our site. These statistics help us to better understand how our site is being used and what we can do to make it more useful to visitors. This information is not stored in a personally identifiable format.
If you choose to identify yourself by becoming a member of Parental Wisdom we will aggregate information related to what pages you access. Information submitted by you, such as comments or feedback, may be stored in a personally identifiable format and is used only for internal purposes and is not shared with people or organizations outside of Parental Wisdom unless you give us permission to do so.
Parental Wisdom recognizes that members may not wish to be contacted and allows you to opt out of any e-mail newsletter. When you receive an email, you will be given instructions on how to remove yourself from that list.
Parental Wisdom will release any information that is required to be released by law or court order.
We use secure socket layer (SSL) encryption to protect the transmission of information you submit to us when you use our secure online forms. All the information you provide us through these forms is stored securely offline. If you send us an email, you should know that email is not necessarily secure against interception unless you are using a security-enabled web browser.
import sys
import random
import bisect
import pysam
import gzip
import cPickle
import numpy
from time import time
import argparse
import math
inds={'A':0,'T':1,'G':2,'C':3,'N':4,'a':0,'t':1,'g':2,'c':3,'n':4}
def main(argv):
t0 = time()
parser = argparse.ArgumentParser(description='sub-wessim: a sub-program for Wessim1. (NOTE!) Do not run this program. Use "Wessim1.py" instead. ', prog='wessim1_sub', formatter_class=argparse.RawTextHelpFormatter)
group1 = parser.add_argument_group('Mandatory input files')
group1.add_argument('-R', metavar = 'FILE', dest='reference', required=True, help='(R)eference genome FASTA file')
group1.add_argument('-B', metavar = 'FILE', dest='region', required=True, help='Target region .(B)ED file')
group2 = parser.add_argument_group('Parameters for exome capture')
group2.add_argument('-f', metavar = 'INT', type=int, dest='fragsize', required=False, help='mean (f)ragment size. this corresponds to insert size when sequencing in paired-end mode. [200]', default=200)
group2.add_argument('-d', metavar = 'INT', type=int, dest='fragsd', required=False, help='standard (d)eviation of fragment size [50]', default=50)
group2.add_argument('-m', metavar = 'INT', type=int, dest='fragmin', required=False, help='(m)inimum fragment length [read_length + 20 for single-end, 2*read_length + 20 for paired-end]')
group2.add_argument('-y', metavar = 'PERCENT',type=int, dest='bind', required=False, help='minimum required fraction of probe match to be h(y)bridized [50]', default=50)
group3 = parser.add_argument_group('Parameters for sequencing')
group3.add_argument('-p', action='store_true', help='generate paired-end reads [single]')
group3.add_argument('-n', help='do not care')
group3.add_argument('-1', metavar = 'INT', type=int, dest='readstart', required=True, help='start number of read')
group3.add_argument('-2', metavar = 'INT', type=int, dest='readend', required=True, help='end number of read')
group3.add_argument('-l', metavar = 'INT', type=int, dest='readlength', required=True, help='read (l)ength (bp)')
group3.add_argument('-i', metavar = 'INT', type=int, dest='processid', required=True, help='subprocess (i)d')
group3.add_argument('-M', metavar = 'FILE', dest='model', required=True, help='GemSim (M)odel file (.gzip)')
group3.add_argument('-t', help='do not care')
group4 = parser.add_argument_group('Output options')
group4.add_argument('-o', metavar = 'FILE', dest='outfile', help='(o)utput file header. ".fastq.gz" or ".fastq" will be attached automatically. Output will be splitted into two files in paired-end mode', required=True)
group4.add_argument('-z', action='store_true', help='compress output with g(z)ip [false]')
group4.add_argument('-q', metavar = 'INT', type=int, dest='qualbase', required=False, help='(q)uality score offset [33]', default=33)
group4.add_argument('-v', action='store_true', help='(v)erbose; print out intermediate messages.')
args = parser.parse_args()
reffile = args.reference
regionfile = args.region
faoutfile = regionfile + ".fa"
abdoutfile = regionfile + ".abd"
isize = args.fragsize
isd = args.fragsd
imin = args.fragmin
bind = args.bind
subid = args.processid
paired = args.p
readlength = args.readlength
readstart = args.readstart
readend = args.readend
if imin==None:
if paired:
imin = 2*readlength + 20
else:
imin = readlength + 20
if isize < imin:
print "too small mean fragment size (" + str(isize) + ") compared to minimum length (" + str(imin) + "). Increase it and try again."
sys.exit(0)
model = args.model
f = open(faoutfile)
i = f.readline()
seqlist = []
abdlist = []
while i:
header = i.strip()[1:]
seq = f.readline().strip()
seqlist.append((header, seq))
i = f.readline()
f.close()
f = open(abdoutfile)
i = f.readline()
while i:
abd = int(i.strip())
abdlist.append(abd)
i = f.readline()
f.close()
last = abdlist[-1]
outfile = args.outfile + "-" + str(subid)
compress = args.z
qualbase = args.qualbase
verbose = args.v
wread = None
wread2 = None
if paired and compress:
wread = gzip.open(outfile + "_1.fastq.gz", 'wb')
wread2 = gzip.open(outfile + "_2.fastq.gz", 'wb')
elif paired and not compress:
wread = open(outfile + "_1.fastq", 'w')
wread2 = open(outfile + "_2.fastq", 'w')
elif not paired and compress:
wread = gzip.open(outfile + ".fastq.gz", 'wb')
else:
wread = open(outfile + ".fastq", 'w')
processed = 0
totalseq = 1
first = True
dirtag = ('','+','-')
if paired:
mx1,mx2,insD1,insD2,delD1,delD2,intervals,gQualL,bQualL,iQualL,mates,rds,rdLenD = parseModel(model, paired, readlength)
m0=float(mates[0])
m1=float(mates[1])
rd0=float(rds[0])
rd1=float(rds[1])
unAlign0=(m0*rd1-m1*m0)/(rd0*rd1-m1*m0)
unAlign1=1.0-(unAlign0/(m0/rd0))
keys=intervals.keys()
keys.sort()
if isize=='emp': # note: isize is an int here, so this empirical insert-size branch (inherited from GemSim) is never taken
inters=[]
for k in keys:
inters.append((k,intervals[k]))
interval=bisect_choiceTUP(inters)
#inserts1and2
insDict1=mkInserts(mx1,insD1)
insDict2=mkInserts(mx2,insD2)
#deletions1and2
delDict1=mkDels(mx1,delD1)
delDict2=mkDels(mx2,delD2)
else:
mx1,insD1,delD1,gQualL,bQualL,iQualL,readCount,rdLenD=parseModel(model, paired, readlength)
insDict=mkInserts(mx1,insD1)
#deletions
delDict=mkDels(mx1,delD1)
gens=genRef('')
gQList=[]
for i in (gQualL):
gL=[]
keys=i.keys()
keys.sort()
for k in keys:
gL.append((chr(k+qualbase),i[k]))
gQList.append(bisect_choiceTUP(gL))
#choose bad quality bases
bQList=[]
for i in (bQualL):
bL=[]
keys=i.keys()
keys.sort()
for k in keys:
bL.append((chr(k+qualbase),i[k]))
bQList.append(bisect_choiceTUP(bL))
#choose qualities for inserts
iQList=[]
for i in (iQualL):
iL=[]
keys=i.keys()
keys.sort()
for k in keys:
iL.append((chr(k+qualbase),i[k]))
iQList.append(bisect_choiceTUP(iL))
#choose read length
if readlength=='d':
print "Using empirical read length distribution"
lgth=[]
keys=rdLenD.keys()
keys.sort()
for k in keys:
lgth.append((k,rdLenD[k]))
RL=bisect_choiceTUP(lgth)
else:
RL=ln(readlength)
mvnTable = readmvnTable()
gcVector = getFragmentUniform(abdlist, seqlist, last, isize, 1000, bind)
# print gcVector
# u1, u2, newSD, m1, m2 = generateMatrices(isd, isize, gcVector)
gcSD = numpy.std(gcVector)
newSD = isd*2
### Generate!
count = 0
i = readstart
while i < readend+1:
pos = int(random.uniform(1, last))
ind = getIndex(abdlist, pos)
seq = seqlist[ind]
ref = seq[1]
refLen=len(ref)
header = seq[0]
headervalues = header.split("_")
fragment_chrom = headervalues[0]
fragment_start = int(headervalues[1])
fragment_end = int(headervalues[2])
if refLen<imin:
continue
gccount = getGCCount(seq)
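# GC-dependent rejection step: H2 decides from the fragment's length and GC
# count whether to keep it, shaping the GC distribution of the simulated reads.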
keep = H2(refLen, gccount, isize, newSD, isd, gcSD,mvnTable)
if not keep:
continue
if not paired:
readLen=RL()
read1,pos,dir,quals1=readGen1(ref,refLen,readLen,gens(),readLen,mx1,insDict,delDict,gQList,bQList,iQList,qualbase)
if read1==None or quals1==None:
continue
head1='@'+'r'+str(i)+'_from_' + fragment_chrom + "_" + str(fragment_start + pos + 1) + "_" + dirtag[dir]
else:
val=random.random()
ln1=RL()
ln2=RL()
inter = isize
read1,pos1,dir1,quals1,read2,pos2,dir2,quals2 = readGenp(ref,refLen,ln1,ln2,gens(),mx1,insDict1,delDict1,gQList,bQList,iQList,qualbase)
p1 = fragment_chrom + "_" + str(fragment_start + pos1 + 1) + "_" + dirtag[dir1]
p2 = fragment_chrom + "_" + str(fragment_start + pos2 + 1) + "_" + dirtag[dir2]
if val > unAlign0+unAlign1:
pass
elif val > unAlign1:
read2='N'*ln2
quals2=chr(0+qualbase)*ln2
p2 = '*'
else:
read1='N'*ln1
quals1=chr(0+qualbase)*ln1
p1='*'
head1='@'+'r'+str(i)+'_from_'+ p1 + ":" + p2 + "/1"
head2='@'+'r'+str(i)+'_from_'+ p1 + ":" + p2 + "/2"
wread.write(head1 + '\n')
wread.write(read1.upper()+'\n')
wread.write('+\n')
wread.write(quals1+'\n')
if paired:
wread2.write(head2 + "\n")
wread2.write(read2.upper() + "\n")
wread2.write("+\n")
wread2.write(quals2 + "\n")
count +=1
i+=1
if count % 1000000 == 0 and count!=0:
t1 = time()
print "[subprocess " + str(subid) + "]: " + str(count) + " reads have been generated... in %f secs" % (t1-t0)
wread.close()
if paired:
wread2.close()
def pickonekey(matchkeys):
r = int(random.uniform(0, len(matchkeys)-1))
key = matchkeys[r]
return key
def getSequence(ref, fragment):
chrom = fragment[0]
start = int(fragment[1])
end = int(fragment[2])
seq = ref.fetch(chrom, start, end)
return seq
def getFragment(matchdic, key, mu, sigma, lower, bind):
ins = getInsertLength(mu, sigma, lower)
match = matchdic[key]
pickedproberegion = pickproberegion(match)
pickedfragment = pickFragment(pickedproberegion, ins, bind)
return pickedfragment
def getFragmentUniform(abdlist, seqlist, last, mu, total, bind):
result = []
i = 0
while i < 1000:
pos = int(random.uniform(1, last))
ind = getIndex(abdlist, pos)
seq = seqlist[ind][1]
seqlen = len(seq)
if seqlen < mu:
continue
margin = seqlen - mu
start = random.randint(0, margin)
seq = seq[start: start+mu]
gcCount = getGCCount(seq)
result.append(gcCount)
i+=1
return result
def getInsertLength(mu, sigma, lower):
while True:
length = int(random.gauss(mu, sigma))
if length >= lower:
return length
def pickproberegion(match):
scores = []
for m in match:
scores.append(int(m[0]))
reprobs_cumul = scoretoprob(scores, 0.7)
ran = random.random()
ind = bisect.bisect_left(reprobs_cumul, ran)
pickedmatch = match[ind]
return pickedmatch
def pickFragment(pickedproberegion, ins, bind):
probechrom = pickedproberegion[1]
probestart = int(pickedproberegion[2])
probeend = int(pickedproberegion[3])
probelength = probeend - probestart
minimummatch = int(probelength*bind/100)
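# The probe/fragment overlap must cover at least the required bind fraction;
# it is drawn from a triangular distribution whose mode is full probe overlap.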
overlap = int(random.triangular(minimummatch, probelength, probelength))
margin = max(ins - overlap, 0)
rangestart = probestart - margin
rangeend = probeend + margin
seqstart = random.randint(rangestart, rangeend - ins)
return probechrom, seqstart, seqstart + ins
def scoretoprob(scores, r):
maxscore = max(scores)
rescores = []
reprobs = []
reprobs_cumul = []
totalscore = 0.0
for score in scores:
mismatch = maxscore - score
rescore = 1.0 * pow(r, mismatch)
rescores.append(rescore)
totalscore += rescore
totalprob = 0.0
for rescore in rescores:
reprob = rescore / totalscore
totalprob += reprob
reprobs.append(reprob)
reprobs_cumul.append(totalprob)
return reprobs_cumul
def getGCCount(seq):
gc = 0
for nuc in seq:
if nuc=="G" or nuc=="C" or nuc=="g" or nuc=="c":
gc += 1
return gc
def readSimpleSingle(ref, rlen, err):
reflen = len(ref)
x = random.uniform(0, 2)
startloc = int(random.uniform(0, reflen - rlen))
template = ref
rc = False
read = template[startloc:startloc + rlen]
if x > 1: # negative strand
read = comp(read)[::-1]
rc = True
qual = rlen * 'h'
rctag = "+"
if rc:
rctag = "-"
return startloc, rctag, read, qual
def comp(sequence):
""" complements a sequence, preserving case. Function imported from GemSim"""
d={'A':'T','T':'A','C':'G','G':'C','a':'t','t':'a','c':'g','g':'c','N':'N','n':'n'}
cSeq=''
for s in sequence:
if s in d.keys():
cSeq+=d[s]
else:
cSeq+='N'
return cSeq
def usage():
print ">python x3.probestatistics reference.fa probe.fa probealign.psl readoutput.fastq.gz"
sys.exit()
def test(filename):
mx1,mx2,insD1,insD2,delD1,delD2,intervals,gQualL,bQualL,iQualL,mates,rds,rdLenD = parseModel(filename, paired, 100)
sys.exit(1)
def parseModel(gzipFile,paired,readlen):
"""prepares error models for input to mkErrors."""
file=gzip.open(gzipFile,'rb')
if paired:
modReadLen=cPickle.load(file)
if readlen!='d' and readlen>modReadLen:
print "Inappropriate read length chosen for model. Maximum for this model: " + str(modReadLen)
file.close()
sys.exit()
mx1=cPickle.load(file)
mx2=cPickle.load(file)
insD1=cPickle.load(file)
insD2=cPickle.load(file)
delD1=cPickle.load(file)
delD2=cPickle.load(file)
intD=cPickle.load(file)
gQualL=cPickle.load(file)
bQualL=cPickle.load(file)
iQualL=cPickle.load(file)
mates=cPickle.load(file)
rds=cPickle.load(file)
rdLenD=cPickle.load(file)
file.close()
return mx1,mx2,insD1,insD2,delD1,delD2,intD,gQualL,bQualL,iQualL,mates,rds,rdLenD
else:
modReadLen=cPickle.load(file)
if readlen!='d' and readlen>modReadLen:
print "Inappropriate read length chosen for model. Maximum for this model: " + str(modReadLen)
file.close()
sys.exit()
mx=cPickle.load(file)
insD=cPickle.load(file)
delD=cPickle.load(file)
gQualL=cPickle.load(file)
bQualL=cPickle.load(file)
iQualL=cPickle.load(file)
readCount=cPickle.load(file)
rdLenD=cPickle.load(file)
file.close()
return mx,insD,delD,gQualL,bQualL,iQualL,readCount,rdLenD
def mkInserts(mx,insD):
"""Returns a dictionary consisting of compiled functions to make inserts."""
insertDict={}
posKeys=insD.keys()
posKeys.sort()
for p in posKeys:
indicies=p.split('.')
tot=mx[int(indicies[0])][int(indicies[1])][int(indicies[2])][int(indicies[3])][int(indicies[4])][int(indicies[5])][5]
insertKeys=insD[p].keys()
insertKeys.sort()
insertList=[]
iSum=0
for i in insertKeys:
insertList.append((i,insD[p][i]))
iSum+=insD[p][i]
insertList.append(('',tot-iSum))
insert=bisect_choiceTUP(insertList)
insertDict[p]=insert
return insertDict
def mkDels(mx,delD):
"""Returns a dictionary consisting of compiled functions to make deletiosn."""
deletionDict={}
posKeys=delD.keys()
posKeys.sort()
for p in posKeys:
indicies=p.split('.')
tot=mx[int(indicies[0])][int(indicies[1])][int(indicies[2])][int(indicies[3])][int(indicies[4])][int(indicies[5])][5]
items=delD[p]
items.reverse()
items.append(tot-sum(items))
items.reverse()
delete=bisect_choice(items)
deletionDict[p]=delete
return deletionDict
def bisect_choice(items):
"""Returns a function that makes a weighted random choice from items."""
added_weights = []
last_sum = 0
for weight in items:
last_sum += weight
added_weights.append(last_sum)
def choice(rnd=random.random, bis=bisect.bisect):
return bis(added_weights, rnd() * last_sum)
return choice
def bisect_choiceTUP(items):
"""Returns a function that makes a weighted random choice from a list of tuples."""
added_weights = []
last_sum = 0.0
for item,weight in items:
weight=float(weight)
last_sum += weight
added_weights.append(last_sum)
def choice(rnd=random.random, bis=bisect.bisect):
return items[bis(added_weights, rnd() * last_sum)][0]
return choice
def ln(length):
"""Returns static length as a funtion."""
def val():
return length
return val
def readGen1(ref,refLen,readLen,genos,inter,mx1,insD1,delD1,gQ,bQ,iQ,qual):
"""Generates a random read of desired length from a reference."""
extrabase = 10
margin = refLen - inter - 10
ind=random.randint(0,(margin-1))
dir=random.randint(1,2)
end=ind+inter + extrabase
read = ref[ind:end]
if dir==2:
cRef = comp(ref)[::-1]
read = cRef[refLen-end:refLen-ind]
if genos!='':
read=mutate(read,ind,genos,refLen,1,readPlus,hd)
read,quals=mkErrors(read,readLen,mx1,insD1,delD1,gQ,bQ,iQ,qual)
if dir==2:
ind=ind + extrabase
return read, ind, dir, quals
def readGenp(ref, refLen, readLen1, readLen2, genos, mx1, insD1, delD1, gQ, bQ, iQ, qual):
"""Generates a pair of reads from given DNA fragment."""
cRef = comp(ref)[::-1]
extrabase = 10
ind1 = 0
ind2 = refLen - readLen2
end1 = readLen1 + extrabase
end2 = ind2 + readLen2
dir1=1
dir2=2
read1 = ref[ind1:end1]
read2 = cRef[ind1:end1]
read1, quals1 = mkErrors(read1, readLen1, mx1, insD1, delD1, gQ, bQ, iQ, qual)
read2, quals2 = mkErrors(read2, readLen2, mx1, insD1, delD1, gQ, bQ, iQ, qual)
pairorder = random.randint(1,2)
if pairorder==1:
return read1, ind1, dir1, quals1, read2, ind2, dir2, quals2
else:
return read2, ind2, dir2, quals2, read1, ind1, dir1, quals1
def readGen2(reference,cRef,pos,dir,readLen,genos,inter,mx2,insD2,delD2,gQ,bQ,iQ,qual):
"""Generates the 2nd read of a random pair of reads."""
refLen=len(reference)
readPlus=int(readLen*1.5)
if dir==1:
end=pos+inter
start=end-readPlus
if start<0:
start=0
read=cRef[start:end]
if genos!='':
read=mutate(read,start,genos,refLen,2,readPlus,hd)
read=read[::-1]
read,quals=mkErrors(read,readLen,mx2,insD2,delD2,gQ,bQ,iQ,qual)
else:
start=pos-inter+1
end=start+readPlus
read=reference[start:end]
if genos!='':
read=mutate(read,start,genos,refLen,1,readPlus,hd)
read,quals=mkErrors(read,readLen,mx2,insD2,delD2,gQ,bQ,iQ,qual)
return read, quals
def mutate(read,ind,gens,refLen,dir,readLn,hd):
"""Adds predetermined mutations to reads."""
d={'A':'T','T':'A','C':'G','G':'C','a':'t','t':'a','c':'g','g':'c','N':'N','n':'n'}
if gens=={}:
return read
else:
chroms=gens.keys()
if hd not in chroms:
return read
else:
posi=gens[hd].keys()
if dir==1:
for p in posi:
if p >ind and p<=(ind+readLn):
read1=read[:p-(ind+1)]+gens[hd][p]
read1=read1+read[p-ind:]
read=read1
elif p<=ind+readLn-refLen:
read1=read[:refLen-ind+p-1]+gens[hd][p]
read1+=read[refLen-ind+p:]
read=read1
return read
elif dir==2:
for p in posi:
if p >ind and p<=(ind+readLn):
read1=read[:p-(ind+1)]+d[gens[hd][p]]
read1=read1+read[p-ind:]
read=read1
elif p<=ind+readLn-refLen:
read1=read[:refLen-ind+p-1]+d[gens[hd][p]]
read1+=read[refLen-ind+p:]
read=read1
return read
def genRef(ref):
"""Returns input as function"""
def r():
return ref
return r
def mkErrors(read,readLen,mx,insD,delD,gQ,bQ,iQ,qual):
"""Adds random errors to read."""
pos=0
quals=''
qualslist = []
index='0.4.4.4.4.'+str(inds[read[0]])
if index in insD:
insert=insD[index]()
read='NNNN'+insert+read
for i in insert:
# quals+=iQ[0]()
qualslist.append(iQ[0]())
pos+=1
else:
read='NNNN'+read
prev=read[pos:pos+4]
after = read[pos+4]
d0=pos
d1=inds[prev[3]]
d2=inds[prev[2]]
d3=inds[prev[1]]
d4=inds[prev[0]]
d5=inds[after]
pos+=1
while pos<=readLen and pos<len(read)-4:
d0 = pos
d4 = d3
d3 = d2
d2 = d1
d1 = d5
d5 = inds[read[pos+4]]
index = '.'.join([str(d0), str(d1), str(d2), str(d3), str(d4), str(d5)])
Mprobs=mx[d0][d1][d2][d3][d4][d5]
tot=float(Mprobs[5])
if not tot==0:
Mprobs = Mprobs/tot
val=random.random()
a=Mprobs[0]
t=Mprobs[1]+a
g=Mprobs[2]+t
c=Mprobs[3]+g
n=Mprobs[4]+c
success=False
if val>n or tot == 0:
gPos=pos-1
while gPos>=0:
try:
qualslist.append(gQ[gPos]())
success=True
break
except:
gPos-=1
if success==False:
qualslist.append(chr(30+qual))
elif val>c:
read=read[:pos+3]+'N'+read[pos+4:]
bPos=pos-1
while bPos>=0:
try:
qualslist.append(bQ[bPos]())
success=True
break
except:
bPos-=1
if success==False:
qualslist.append(chr(2+qual))
elif val>g:
read=read[:pos+3]+'C'+read[pos+4:]
bPos=pos-1
while bPos>=0:
try:
qualslist.append(bQ[bPos]())
success=True
break
except:
bPos-=1
if success==False:
qualslist.append(chr(2+qual))
elif val>t:
read=read[:pos+3]+'G'+read[pos+4:]
bPos=pos-1
while bPos>=0:
try:
qualslist.append(bQ[bPos]())
success=True
break
except:
bPos-=1
if success==False:
qualslist.append(chr(2+qual))
elif val>a:
read=read[:pos+3]+'T'+read[pos+4:]
bPos=pos-1
while bPos>=0:
try:
qualslist.append(bQ[bPos]())
success=True
break
except:
bPos-=1
if success==False:
qualslist.append(chr(2+qual))
else:
read=read[:pos+3]+'A'+read[pos+4:]
bPos=pos-1
while bPos>=0:
try:
qualslist.append(bQ[bPos]())
success=True
break
except:
bPos-=1
if success==False:
qualslist.append(chr(2+qual))
if index in delD:
delete=delD[index]()
read=read[:pos+4]+read[pos+delete+4:]
if index in insD:
insert=insD[index]()
read=read[:pos+4]+insert+read[pos+4:]
for i in insert:
iPos=pos-1
while iPos>=0:
try:
qualslist.append(iQ[iPos]())
success=True
break
except:
iPos-=1
if success==False:
qualslist.append(chr(2+qual))
pos+=len(insert)
pos+=1
qualslist.append(qualslist[-1])
readback = read
read=read[4:readLen+4]
quals=''.join(qualslist)[:readLen]
if len(quals)!=len(read):
print "unexpected stop"
return None, None
return read,quals
def generateM(sd, newSD, x,t, gcVector):
gcSD = numpy.std(gcVector)*(newSD/sd)
s00 = gcSD*gcSD + newSD*newSD*t*t
s11 = newSD*newSD
rho = newSD*t/math.sqrt(s00)
m = numpy.matrix([[s00, rho*math.sqrt(s00*s11)], [rho*math.sqrt(s00*s11), s11]])
w, v = numpy.linalg.eig(m)
d = numpy.matrix([[math.sqrt(w[0]),0],[0,math.sqrt(w[1])]])
M = v*d
return M, m
def generateMatrices(sd,x, gcVector):
M1, m1 = generateM(sd, sd, x,1/0.9, gcVector)
e1 = numpy.matrix([[1],[0]])
e2 = numpy.matrix([[0],[1]])
longAxis1 = M1*e1
longAxis2 = M1*e2
longAxis = longAxis1
if norm(longAxis1) < norm(longAxis2):
longAxis = longAxis2
M2 = []
m2 = []
newSD = sd
for i in range(100, 1000):
newSD = sd*i/100.0
M2, m2= generateM(sd, newSD,x,0.5, gcVector)
if norm(numpy.linalg.inv(M2)*longAxis)<1.0:
break
u1 = numpy.linalg.inv(M1)
u2 = numpy.linalg.inv(M2)
return u1, u2, newSD, m1, m2
def getProb(l,n,x,sd,gcSD,alpha, mvnpdf):
p1 = mvnpdf[0][int(cut((l-x)/sd)*100)]
p2 = mvnpdf[0][int(cut((n-(x/2+(l-x)*alpha))/(l*gcSD/x))*100)]
return float(p1)*float(p2)
def H2(l, n, x, sd1, sd2, gcSD, mvnpdf):
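# Accept/reject step: keep the fragment when the density ratio ap/bp exceeds
# a uniform random draw (both densities are looked up from the mvnpdf table).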
bp = getProb(l,n,x,sd1,gcSD,.5,mvnpdf)
ap = getProb(l,n,x,sd2,gcSD,9.0/7,mvnpdf) # 9.0/7: force float division under Python 2
v = ap/bp
r = random.random()
toKeep = v > r
return toKeep
def norm(x):
y=x[0]*x[0]+x[1]*x[1]
return math.sqrt(y)
def cut(x):
y = abs(x)
if y >5.00:
y = 5.00
return y
def H(l, n, x, u1, u2, mvnpdf):
u = numpy.matrix([[x/2], [x]])
nl1 = numpy.matrix([[n],[l]])
v1 = u1*(nl1-u)
v2 = u2*(nl1-u)
p1 = mvnpdf[int(cut(v1[0])*100)][int(cut(v1[1])*100)]
p2 = mvnpdf[int(cut(v2[0])*100)][int(cut(v2[1])*100)]
v = float(p1)/float(p2)
r = random.random()
toKeep = v > r
return toKeep
def readmvnTable():
f = open("lib/mvnTable.txt")
context = f.read()
lines = context.split("\n")
mvnTable = []
for line in lines:
values = line.split("\t")
if len(values)<500:
continue
mvnTable.append(values)
f.close()
return mvnTable
def getIndex(abdlist, pos):
i = bisect.bisect_right(abdlist, pos)
return i
if __name__=="__main__":
main(sys.argv[1:])
sys.exit(0)
It's Teaser Tuesday again and I'm sharing a scene from Dark Storms, the third Pelican Cay story. Hero's sister, Adriana, craves excitement but gets more than she bargains for.
Gabe was sprawled face down on his bed, the twisted sheet draped loosely over one leg and pinned beneath the other, when he was roused by a rap on his door. He shook himself awake as another knock sounded. The digital clock on the bedside table told him it was nearly two in the afternoon.
He swung his legs off the bed, grabbed the untucked sheet, and wrapped it around his waist as he shuffled to the door. Still a little drowsy, he tried and failed to manage the chain stop and gave up, pulling the door open without it.
She put a little extra wiggle in her walk as she wandered off. Grateful and grinning now himself, he closed the door. Whatever was on top in the bureau drawers was what he grabbed and took into the bathroom.
Adriana was relaxing in one of his two patio chairs when he joined her. She took in his lightweight track pants and Navy t-shirt and gave a sad little sigh. “Gone, but not forgotten.” Before he could respond, she picked up a thermal mug from the little table next to her and offered it to him.
“Thanks.” The mug was brand new and stamped on the outside with the resort’s logo.
She left him there and Gabe ran back into his quarters to brush his teeth, find his trunks, and grab his wraparound sunglasses. Working his feet into his leather sandals, he looked around, wondering how he was supposed to carry his damn trunks. He didn’t have a bag small enough. Ah, screw it. He wadded them in his hand, slid his wallet into his back pocket and plucked his keys off the bedside table. Out of habit, he tested the door once it closed to be sure it locked properly.
The grounds crew was busy. He could hear the buzz of trimmers being used around beds and borders. At least one riding mower passed nearby, making curving patterns in the grass on its way back between the buildings.
Skipping up the two steps and into the airy lobby, he met Adriana’s gaze over the counter and they both smiled. His heart beat just a little faster at seeing her again. It was ridiculous. She’d just left him five minutes ago. Yet, he couldn’t stop his blazing grin from flaring to life when she looked at him.
An attractive woman stepped out of the office. She was a little shorter, a bit thicker through the body, but clearly Adriana’s mother. Smiling, she approached the tall counter, giving him a friendly once over. The women’s smiles were identical. Where they differed was the eyes, or rather the expression in the eyes. Adriana looked at him with familiarity, intimacy, and open amusement.
Rosa turned her around and with hands on both shoulders, walked Adriana out from behind the counter.
Gabe was snickering when she came back out of the office swinging a canvas bag by the straps.
She cringed at how that came out, but he was grinning when he dunked his swim trunks in her bag.
“I’m not complaining.” In fact, he was enjoying it. He hoped she didn’t stop.
It was when they rounded the building and followed the walkway to the parking lot that he felt the same damn prickling sensation on the back of his neck again. Glancing around, he spotted the same maintenance man who’d glared at him the day before. He was working on a guestroom lock today.
“Lucky you,” she muttered quietly.
“Not exactly.” She picked up the pace, forcing him to pursue her if he was going to read her face.
“I could talk to him.” Regardless of what she said, he was going to keep an eye on Derrick. She wasn’t the only one getting a bad vibe from the guy.
Hopeful scribe and word-aholic. Loves reading, loves writing, loves my family and friends, and is tickled beyond measure that you've stopped by.
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 8 14:37:33 2015
@author: ebachelet
"""
from __future__ import division
import numpy as np
from scipy import integrate
import os
import VBBinaryLensing
import time
VBB = VBBinaryLensing.VBBinaryLensing()
VBB.Tol = 0.001
VBB.RelTol = 0.001
VBB.minannuli=2 # stabilizing for rho>>caustics
def impact_parameter(tau, uo):
"""
The impact parameter U(t).
"Gravitational microlensing by the galactic halo",Paczynski, B. 1986
http://adsabs.harvard.edu/abs/1986ApJ...304....1P
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:return: the impact parameter U(t)
:rtype: array_like
"""
impact_param = (tau ** 2 + uo ** 2) ** 0.5 # u(t)
return impact_param
def amplification_PSPL(tau, uo):
"""
The Paczynski Point Source Point Lens magnification and the impact parameter U(t).
"Gravitational microlensing by the galactic halo",Paczynski, B. 1986
http://adsabs.harvard.edu/abs/1986ApJ...304....1P
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:return: the PSPL magnification A_PSPL(t)
:rtype: array_like
"""
# For notations, check for example : http://adsabs.harvard.edu/abs/2015ApJ...804...20C
impact_param = impact_parameter(tau, uo) # u(t)
impact_param_square = impact_param ** 2 # u(t)^2
amplification_pspl = (impact_param_square + 2) / (impact_param * (impact_param_square + 4) ** 0.5)
# return only the magnification; see Jacobian_amplification_PSPL for the variant that also returns U
return amplification_pspl
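# Quick illustration (hypothetical values, not part of the module): for uo = 0.1
# the magnification peaks at (uo**2 + 2) / (uo * (uo**2 + 4)**0.5), about 10.04,
# at tau = 0, e.g. amplification_PSPL(np.linspace(-2.0, 2.0, 5), 0.1).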
def Jacobian_amplification_PSPL(tau, uo):
""" Same function as above, just also returns the impact parameter needed for the Jacobian PSPL model.
The Paczynski Point Source Point Lens magnification and the impact parameter U(t).
"Gravitational microlensing by the galactic halo",Paczynski, B. 1986
http://adsabs.harvard.edu/abs/1986ApJ...304....1P
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:return: the PSPL magnification A_PSPL(t) and the impact parameter U(t)
:rtype: tuple, tuple of two array_like
"""
# For notations, check for example : http://adsabs.harvard.edu/abs/2015ApJ...804...20C
impact_param = impact_parameter(tau, uo) # u(t)
impact_param_square = impact_param ** 2 # u(t)^2
amplification_pspl = (impact_param_square + 2) / (impact_param * (impact_param_square + 4) ** 0.5)
# return both magnification and U, required by some methods
return amplification_pspl, impact_param
def amplification_FSPLarge(tau, uo, rho, limb_darkening_coefficient):
"""
The VBB FSPL for large source. Faster than the numba implementations...
Much slower than Yoo et al. but valid for all rho, all u_o
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param float rho: the normalised angular source star radius
:param float limb_darkening_coefficient: the linear limb-darkening coefficient
:return: the FSPL magnification A_FSPL(t) for large sources
:rtype: array_like
"""
VBB.LoadESPLTable(os.path.dirname(VBBinaryLensing.__file__)+'/VBBinaryLensing/data/ESPL.tbl')
amplification_fspl = []
impact_param = (tau**2+uo**2)**0.5
for ind,u in enumerate(impact_param):
magnification_VBB = VBB.ESPLMagDark(u,rho,limb_darkening_coefficient)
amplification_fspl.append(magnification_VBB)
return np.array(amplification_fspl)
def amplification_FSPLee(tau, uo, rho, gamma):
"""
The Lee et al. Finite Source Point Lens magnification.
https://iopscience.iop.org/article/10.1088/0004-637X/695/1/200/pdf Lee et al. 2009
Much slower than Yoo et al. but valid for all rho, all u_o
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param float rho: the normalised angular source star radius
:param float gamma: the microlensing limb darkening coefficient.
:return: the FSPL magnification A_FSPL(t)
:rtype: array_like
"""
impact_param = impact_parameter(tau, uo) # u(t)
impact_param_square = impact_param ** 2 # u(t)^2
amplification_pspl = (impact_param_square + 2) / (impact_param * (impact_param_square + 4) ** 0.5)
z_yoo = impact_param / rho
amplification_fspl = np.zeros(len(amplification_pspl))
# Far from the lens (z_yoo>>1), then PSPL.
indexes_PSPL = np.where((z_yoo >= 10))[0]
amplification_fspl[indexes_PSPL] = amplification_pspl[indexes_PSPL]
# Close to the lens (3 < z_yoo < 10): uniform-source approximation (US)
indexes_US = np.where( (z_yoo >3) & (z_yoo <10))[0]
ampli_US = []
for idx,u in enumerate(impact_param[indexes_US]):
ampli_US.append(1/(np.pi*rho**2)*integrate.quad(Lee_US,0.0,np.pi,args=(u,rho,gamma),limit=100,
epsabs=0.001, epsrel=0.001)[0])
amplification_fspl[indexes_US] = ampli_US
# Very close to the lens (z_yoo <= 3): full finite-source integration (FS)
indexes_FS = np.where((z_yoo <=3))[0]
ampli_FS = []
for idx,u in enumerate(impact_param[indexes_FS]):
ampli_FS.append(2/(np.pi*rho**2)*integrate.nquad(Lee_FS,[Lee_limits,[0.0,np.pi]],args=(u,rho,gamma),
opts=[{'limit':100,'epsabs' :0.001,'epsrel':0.001},
{'limit':100,'epsabs' : 0.001,'epsrel':0.001}])[0])
amplification_fspl[indexes_FS] = ampli_FS
return amplification_fspl
def amplification_FSPL(tau, uo, rho, gamma, yoo_table):
"""
The Yoo et al. Finite Source Point Lens magnification.
"OGLE-2003-BLG-262: Finite-Source Effects from a Point-Mass Lens",Yoo, J. et al 2004
http://adsabs.harvard.edu/abs/2004ApJ...603..139Y
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param float rho: the normalised angular source star radius
:param float gamma: the microlensing limb darkening coefficient.
:param array_like yoo_table: the Yoo et al. 2004 table approximation. See microlmodels for more details.
:return: the FSPL magnification A_FSPL(t)
:rtype: array_like
"""
impact_param = impact_parameter(tau, uo) # u(t)
impact_param_square = impact_param ** 2 # u(t)^2
amplification_pspl = (impact_param_square + 2) / (impact_param * (impact_param_square + 4) ** 0.5)
z_yoo = impact_param / rho
amplification_fspl = np.zeros(len(amplification_pspl))
# Far from the lens (z_yoo>>1), then PSPL.
indexes_PSPL = np.where((z_yoo > yoo_table[0][-1]))[0]
amplification_fspl[indexes_PSPL] = amplification_pspl[indexes_PSPL]
# Very close to the lens (z_yoo<<1), then Witt&Mao limit.
indexes_WM = np.where((z_yoo < yoo_table[0][0]))[0]
amplification_fspl[indexes_WM] = amplification_pspl[indexes_WM] * \
(2 * z_yoo[indexes_WM] - gamma * (2 - 3 * np.pi / 4) * z_yoo[indexes_WM])
# FSPL regime (z_yoo~1), then Yoo et al derivatives
indexes_FSPL = np.where((z_yoo <= yoo_table[0][-1]) & (z_yoo >= yoo_table[0][0]))[0]
amplification_fspl[indexes_FSPL] = amplification_pspl[indexes_FSPL] * \
(yoo_table[1](z_yoo[indexes_FSPL]) - gamma * yoo_table[2](z_yoo[indexes_FSPL]))
return amplification_fspl
def Jacobian_amplification_FSPL(tau, uo, rho, gamma, yoo_table):
"""Same function as above, just also returns the impact parameter needed for the Jacobian FSPL model.
The Yoo et al. Finite Source Point Lens magnification and the impact parameter U(t).
"OGLE-2003-BLG-262: Finite-Source Effects from a Point-Mass Lens",Yoo, J. et al 2004
http://adsabs.harvard.edu/abs/2004ApJ...603..139Y
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param float rho: the normalised angular source star radius
:param float gamma: the microlensing limb darkening coefficient.
:param array_like yoo_table: the Yoo et al. 2004 table approximation. See microlmodels for more details.
:return: the FSPL magnification A_FSPL(t) and the impact parameter U(t)
:rtype: tuple, tuple of two array_like
"""
impact_param = impact_parameter(tau, uo) # u(t)
impact_param_square = impact_param ** 2 # u(t)^2
amplification_pspl = (impact_param_square + 2) / (impact_param * (impact_param_square + 4) ** 0.5)
z_yoo = impact_param / rho
amplification_fspl = np.zeros(len(amplification_pspl))
# Far from the lens (z_yoo>>1), then PSPL.
indexes_PSPL = np.where((z_yoo > yoo_table[0][-1]))[0]
amplification_fspl[indexes_PSPL] = amplification_pspl[indexes_PSPL]
# Very close to the lens (z_yoo<<1), then Witt&Mao limit.
indexes_WM = np.where((z_yoo < yoo_table[0][0]))[0]
amplification_fspl[indexes_WM] = amplification_pspl[indexes_WM] * \
(2 * z_yoo[indexes_WM] - gamma * (2 - 3 * np.pi / 4) * z_yoo[indexes_WM])
# FSPL regime (z_yoo~1), then Yoo et al derivatives
indexes_FSPL = np.where((z_yoo <= yoo_table[0][-1]) & (z_yoo >= yoo_table[0][0]))[0]
amplification_fspl[indexes_FSPL] = amplification_pspl[indexes_FSPL] * \
(yoo_table[1](z_yoo[indexes_FSPL]) - gamma * yoo_table[2](z_yoo[indexes_FSPL]))
return amplification_fspl, impact_param
def amplification_USBL(separation, mass_ratio, x_source, y_source, rho):
"""
The Uniform Source Binary Lens amplification, based on the work of Valerio Bozza, thanks :)
"Microlensing with an advanced contour integration algorithm: Green's theorem to third order, error control,
optimal sampling and limb darkening ",Bozza, Valerio 2010. Please cite the paper if you used this.
http://mnras.oxfordjournals.org/content/408/4/2188
:param array_like separation: the projected normalised angular distance between the two bodies
:param float mass_ratio: the mass ratio of the two bodies
:param array_like x_source: the horizontal positions of the source center in the source plane
:param array_like y_source: the vertical positions of the source center in the source plane
:param float rho: the normalised (to :math:`\\theta_E`) angular source star radius
:return: the USBL magnification A_USBL(t)
:rtype: array_like
"""
amplification_usbl = []
for xs, ys, s in zip(x_source, y_source, separation):
magnification_VBB = VBB.BinaryMag2(s, mass_ratio, xs, ys, rho)
amplification_usbl.append(magnification_VBB)
return np.array(amplification_usbl)
def amplification_FSBL(separation, mass_ratio, x_source, y_source, rho, limb_darkening_coefficient):
"""
The Finite Source Binary Lens amplification with limb darkening, based on the work of Valerio Bozza, thanks :)
"Microlensing with an advanced contour integration algorithm: Green's theorem to third order, error control,
optimal sampling and limb darkening ",Bozza, Valerio 2010. Please cite the paper if you used this.
http://mnras.oxfordjournals.org/content/408/4/2188
:param array_like separation: the projected normalised angular distance between the two bodies
:param float mass_ratio: the mass ratio of the two bodies
:param array_like x_source: the horizontal positions of the source center in the source plane
:param array_like y_source: the vertical positions of the source center in the source plane
:param float limb_darkening_coefficient: the linear limb-darkening coefficient
:param float rho: the normalised (to :math:`\\theta_E`) angular source star radius
:return: the FSBL magnification A_FSBL(t)
:rtype: array_like
"""
amplification_fsbl = []
for xs, ys, s in zip(x_source, y_source, separation):
# print index,len(Xs)
# print s,q,xs,ys,rho,tolerance
magnification_VBB = VBB.BinaryMagDark(s, mass_ratio, xs, ys, rho, limb_darkening_coefficient, VBB.Tol)
amplification_fsbl.append(magnification_VBB)
return np.array(amplification_fsbl)
def amplification_PSBL(separation, mass_ratio, x_source, y_source):
"""
The Point Source Binary Lens amplification, based on the work of Valerio Bozza, thanks :)
"Microlensing with an advanced contour integration algorithm: Green's theorem to third order, error control,
optimal sampling and limb darkening ",Bozza, Valerio 2010. Please cite the paper if you used this.
http://mnras.oxfordjournals.org/content/408/4/2188
:param array_like separation: the projected normalised angular distance between the two bodies
:param float mass_ratio: the mass ratio of the two bodies
:param array_like x_source: the horizontal positions of the source center in the source plane
:param array_like y_source: the vertical positions of the source center in the source plane
:return: the PSBL magnification A_PSBL(t)
:rtype: array_like
"""
amplification_psbl = []
for xs, ys, s in zip(x_source, y_source, separation):
magnification_VBB =VBB.BinaryMag0(s, mass_ratio, xs, ys)
amplification_psbl.append(magnification_VBB)
return np.array(amplification_psbl)
def amplification_FSPL_for_Lyrae(tau, uo, rho, gamma, yoo_table):
"""
The Yoo et al. Finite Source Point Lens magnification (variant accepting per-point limb-darkening coefficients).
"OGLE-2003-BLG-262: Finite-Source Effects from a Point-Mass Lens",Yoo, J. et al 2004
http://adsabs.harvard.edu/abs/2004ApJ...603..139Y
:param array_like tau: the tau defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param array_like uo: the uo defined for example in
http://adsabs.harvard.edu/abs/2015ApJ...804...20C
:param float rho: the normalised angular source star radius
:param array_like gamma: the microlensing limb darkening coefficients, one per data point.
:param array_like yoo_table: the Yoo et al. 2004 table approximation. See microlmodels for more details.
:return: the FSPL magnification A_FSPL(t)
:rtype: array_like
"""
impact_param = impact_parameter(tau, uo) # u(t)
impact_param_square = impact_param ** 2 # u(t)^2
amplification_pspl = (impact_param_square + 2) / (impact_param * (impact_param_square + 4) ** 0.5)
z_yoo = impact_param / rho
amplification_fspl = np.zeros(len(amplification_pspl))
# Far from the lens (z_yoo>>1), then PSPL.
indexes_PSPL = np.where((z_yoo > yoo_table[0][-1]))[0]
amplification_fspl[indexes_PSPL] = amplification_pspl[indexes_PSPL]
# Very close to the lens (z_yoo<<1), then Witt&Mao limit.
indexes_WM = np.where((z_yoo < yoo_table[0][0]))[0]
amplification_fspl[indexes_WM] = amplification_pspl[indexes_WM] * \
(2 * z_yoo[indexes_WM] - gamma[indexes_WM] * (2 - 3 * np.pi / 4) * z_yoo[
indexes_WM])
# FSPL regime (z_yoo~1), then Yoo et al derivatives
indexes_FSPL = np.where((z_yoo <= yoo_table[0][-1]) & (z_yoo >= yoo_table[0][0]))[0]
amplification_fspl[indexes_FSPL] = amplification_pspl[indexes_FSPL] * \
(yoo_table[1](z_yoo[indexes_FSPL]) - gamma[indexes_FSPL] * yoo_table[2](
z_yoo[indexes_FSPL]))
return amplification_fspl
# Using numba to speed up Lee et al. computation
import numba
from numba import cfunc,carray
from numba.types import intc, CPointer, float64
from scipy import LowLevelCallable
def jit_integrand_function(integrand_function):
jitted_function = numba.jit(integrand_function, nopython=True)
@cfunc(float64(intc, CPointer(float64)))
def wrapped(n, xx):
values = carray(xx,n)
return jitted_function(values)
return LowLevelCallable(wrapped.ctypes)
def Lee_limits(x,u,rho,gamma) :
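# gamma is unused here; it is kept so the signature matches the other Lee helpers.
# Note: when u < rho, np.arcsin(rho/u) is nan, so the comparison below falls
# through to the u <= rho branch, which is the intended limit in that regime.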
if x>np.arcsin(rho/u):
limit_1 = 0
limit_2 = 0
return [limit_1,limit_2]
else :
factor = (rho**2-u**2*np.sin(x)**2)**0.5
ucos = u*np.cos(x)
if u<=rho :
limit_1 = 0
limit_2 = ucos+factor
return [limit_1,limit_2]
else:
limit_1 = ucos-factor
limit_2 = ucos+factor
return [limit_1,limit_2]
def Lee_US( x,u,rho,gamma ):
limits = Lee_limits(x,u,rho,gamma)
amp = limits[1]*(limits[1]**2+4)**0.5-limits[0]*(limits[0]**2+4)**0.5
return amp
@jit_integrand_function
def Lee_FS(args) :
x,phi,u,rho,gamma = args
x2 = x**2
u2 = u**2
factor=(1-gamma*(1-1.5*(1-(x2-2*u*x*np.cos(phi)+u2)/rho**2)**0.5))
if np.isnan(factor):
factor = 0
amp = (x2+2)/((x2+4)**0.5)
amp *= factor
return amp
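# Example (hedged sketch): the decorated Lee_FS is a scipy.LowLevelCallable and
# can be passed straight to the integrators used in amplification_FSPLee above:
# >>> u, rho, gamma = 0.01, 0.05, 0.5
# >>> integrate.nquad(Lee_FS, [Lee_limits, [0.0, np.pi]], args=(u, rho, gamma))[0]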
|
First of all, we had a wonderful 5 weeks in our future town of residence. It was great to be able to “redneck house camp” on the tiny house and really start to feel (and see) it turn into our home. Granted, we only have “walking water” right now (meaning we have to walk to get water), but we have heat/air, a coffee pot, our wonderful SleepNumber bed, and a number of other amenities, including a mobile hotspot for Internet and Netflix for those quiet nights! But now perhaps the real work begins.
During the last month we were able to have a perc test administered on our land, with positive results. The land percs. What does this mean, though? For those who live in the city: it is common that folks who live in the county or in more rural surroundings do not have access to city sewage, so we have to get septic tanks. Quite simply put, septic tanks are part of a small-scale sewage treatment system. (Other components, typically mandated and/or restricted by local governments, optionally include pumps, alarms, sand filters, and clarified liquid effluent disposal means such as a septic drain field, ponds, natural stone fiber filter plants or peat moss beds.) To be more specific, though, the term “septic” refers to the anaerobic bacterial environment that develops in the tank, which decomposes or mineralizes the waste discharged into it. (READ: it is a poop holding tank.) In theory this is an awesome thing. It allowed us to then receive our address and be registered with the 911 emergency system. So what is the downside, you may ask? The downside is that the septic system required on our land is quite complex and therefore quite costly! We are required to have a septic system that includes a pump system in order to achieve gravity flow. Because our land grades downward, we do not have gravity on our side and therefore have to have a pump system that will pump the sewage uphill to the actual septic tank. This requires a 750 gallon septic tank, 60′ of 1.5″ PVC pipe, a septic pump, a pump tank, underground wiring harnesses, a simplex control panel, an alarm, and a system of 3 distribution boxes attached to nitrification lines. It is fair to say this falls well outside of the DIY arena. In fact, we have to have a licensed septic company and electrician complete the work, which then has to receive a passing mark from a North Carolina Electrical Inspector.
After thinking about the system and the cost (and how they relate to our overall goal of living off-grid, being debt free, etc.), we now feel that we want to have a composting system instead. The catch? There is no current code for composting toilets and, in fact, some counties require that you install the septic system approved for your land or suffer a FAIL on the Certificate of Occupancy. That thought alone is quite jarring. Compounded with Hari’s recent post on her worst tiny house fear, things got very real, if you will.
Since the very beginning of our adventure we have wanted to live legally; to be accepted as a legal, tax-paying family in our county. Simple enough, right? Wrong. There is no cut-and-dried way for a tiny house to achieve that yet. In fact, we are finding out that it takes numerous phone calls, repetitive explanations, uncomfortable conversations, scads of questions and uncertain answers, and more patience than I have ever prayed for! As a family we have also started the conversation of how much fight we have in us. Are we willing to fight to the bitter end to be accepted? Is it even a fight we are passionate enough about? Can we afford to fight if we have to? All of these questions and more have permeated our conversations. We have no answer though.
ME: Hi. My name is Andrew Odom. How are you today?
ME: I have several questions – unorthodox ones – that I would like to ask you about my family living in a non-standard home. Do you have a few minutes to talk or is there a better time to call?
ME: My wife and I just finished building our tiny house. You may or may not have heard of such. It is a 240 sq. ft. home built on a trailer. It measures 30′ long and 8′ wide. It weighs just about 6,100 pounds and is custom built.
As the 15-minute conversation progressed I told him a bit more about our home and even invited him to look at our website. He was more than happy to. His demeanor lightened and we were talking much more easily. I told him we had a VIN number and a registration for our custom trailer. I told him about some of the construction details and spoke emphatically about our desire to work with the zoning department to be legal and to live in our version of a “dream house.” After we hung up I followed with a personal email and a photo of our tiny house – as it is today – attached.
All in all I think things went well for the beginning of our conversation. I don’t expect it to be easy. I don’t expect everyone to understand. I don’t expect it to even be cheap, per se. But I do expect the county to be welcoming and amicable and so far I have no reason not to think such. So how do you start the legal process for your tiny house? I don’t know.
You just pick up the phone and start!
Good for you Andrew. I am going to go the “flying under the radar” route first and see how people’s reactions are. The last thing I’d want is to find a great place to park only to get turned down when I asked permission.
We are legally “camping” right now while we go through all of this. We just don’t feel right being under the radar. We want to raise our daughter knowing that sometimes you have to stand up for your dreams and show others that some molds are made to be broken.
The “mold” that you refer to here is MUCH GREATER than most of us actually have the capacity to grasp at this juncture. The “mold to be broken” is an entire system….an economic one, a political one….it encompasses an entire mythological belief system in this country in which it is IMPERATIVE that everyone OUTSIDE WashingtonWallstreet adheres. There’s a subtle shift occurring in We The People at this juncture…one in which the formerly accepted constraining paradigms are being questioned and frankly EXPOSED. Wallstreet OWNS Washington…and the States are merely a microcosm…a smaller mirror image of the split between government being in place TO SERVE AND DO THE WILL OF THE PEOPLE AND IMPLEMENT THE PRIORITIES OF THE PEOPLE and/or a massive self-serving bureaucracy in place for no other reason than to be its OWN ECHO-CHAMBER and perpetuate itself. It is no small thing to make attempts, even small ones like “trying to comply” and “be a good citizen” (of which I’m sure you both are), to work within THE SYSTEM. Perhaps you and your wife will make a dent in helping your state and county SYSTEM actually work for the people (which they are HIRED to do…because their paychecks are paid for BY THE CITIZENRY). With all that said…I trust that you WILL MAKE IT WORK regardless.
Great thoughts. You are exactly right Kath. We are not trying to hide in the system and accept it with complacency. We want to work within the system to change it. We want to hold a place of citizenry that will allow us to vote (with ballot and dollar), voice ourselves, and create a sustainable future for our daughter.
Good for you Drew. I went to our county, but as we are not grid-tied we have no inspections. I specifically asked about a composting toilet (which they liken to an outhouse); they had no issue so long as we did not have running water in the house. As of the completion of the house there is no indoor water, but there are also no restrictions on later upgrades. It’s a combo of under the radar and legal.
Nice. Very nice. We won’t be tied in to the city or county water. We are going to have a deep well (160ft with 1hp pump) as our water source. Perhaps we will have equal good fortune.
1.) Pumping the sewage uphill to the tank: is that because the tank has to be uphill from where the house is positioned by code, or could you have placed the house in a different location and dispensed with the need for a pump?
2.) Are you prepared for that moment when the reaction you receive from the individual on the other end of the phone is embedded in their opinion and not in the actual guidelines?
Keep us posted and keep up the pioneering work, we will all benefit from your stellar efforts.
2) Actually, yes, we are. And we are seeking the patience and wisdom of God to help us move through that and see what opportunities await us should we get a less than positive response.
You better believe I’ll keep everyone posted. One day at a time we’ll ALL make it work.
I have no idea. You should have a profile. You have posted several times before. SMHhhhmph.
to have found this tiny one.
They did question our sleeping loft… we made light of it and moved on.
I just did not have it in me to fight any more 🙂 Go for it, Andrew! You are young, you can change things.
And there you are Deb. Welcome! HAHAHAHA.
I don’t expect the county to give us such a hard time, actually. The county is quite filled with mobile homes, RV living situations, etc. I am not sure if they are grandfathered in or if they fall within an ordinance. My biggest concern is that we are a custom home (as opposed to manufactured) and we don’t have a bathroom in the tiny house trailer (that is in the ANNEX building plan). We thought it would be easier because the land is privately owned and in the county, not the city. I haven’t heard anything back today, so I am hoping that the inspector is just enjoying reading through our site and learning about Tiny r(E)volution and trying to figure out how to help us achieve our dreams.
I think you could build it yourself and save a lot of money. Excavation equipment can be rented fairly cheaply. Just have it inspected before you bury it.
Unfortunately you cannot build your own septic system legally. Functionally? Yes. Legally? No.
I don’t know if this will work…..but in another time, on a different planet, far, far from here, we found someone who actually put them in for a living. We asked him if WE could do it to his (the county’s also) specs, and have him sign off on it, as if he had done it. He was ok with it as he personally inspected it to make sure it would fly, and then we paid him a ‘consultant’s fee’. Everyone was happy, everyone got their fees, we saved money.
Andrew, thank you for this post. I am building in my backyard in Oakland, CA. I look to move back to the East Coast, but have not identified my land yet. You confirmed what has been shown to me thus far, such as when interacting with the DMV, and that is to be open, honest, and let the reason why come out if they so care to hear. I will certainly start contacting the county planning and inspection department for each county in which I may desire to live. The septic tank issue was one I did not think of. Thank you, and may you and yours be blessed.
Absolutely zachpetry. Thank you so much for reading and I am glad you found some useful information. Best of luck in your pursuits.
If I build a livable, extremely upgraded shed on my own property, with a composting toilet and water sourced from my own house, do I have to worry about all these legal rules?
from versioning import GLVersion
from command_filtering import GLType, GLBaseType, GLBaseTypes
import re
class GLExtension:
def __init__(self):
self.name = None # str
self.api = [] # [str]
def __str__(self):
return 'GLExtension(' + self.name + ')'
def __repr__(self):
return str(self)
class GLArgument:
def __init__(self, arg_type=None, arg_name='', arg_group=None):
self.atype = arg_type
self.name = arg_name
self.group = arg_group
def as_typed(self):
return '%s %s' % (str(self.atype), self.name)
def as_name(self):
return self.name
def as_grouped(self):
return '%s %s' % (self.group, self.name)
def arguments_to_string(arguments, typed=True):
out = ''
for arg in arguments:
if arg is None:
continue
out += (arg.as_name() if not typed else arg.as_typed()) + ', '
if arguments:
out = out[:-2]
return out
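# Example (hedged): GLArgument accepts plain strings for the type, so
# >>> arguments_to_string([GLArgument('GLint', 'count'), GLArgument('GLenum', 'mode')])
# 'GLint count, GLenum mode'
# >>> arguments_to_string([GLArgument('GLint', 'count')], typed=False)
# 'count'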
class GLCommand:
def __init__(self):
self.ret_type = None # str
self.name = None # str
self.original_name = None # str
self.args = [] # [GLArgument]
self.min_api = [GLVersion(), GLVersion()]
self.max_api = [GLVersion(), GLVersion()]
self.extensions = []
self.fallback = []
self.source_element = None
# Whether this is an extension-only function
def is_extension(self):
return self.min_api[0].isnone and \
self.min_api[1].isnone and \
self.extensions
def template_check(self):
if self.is_extension():
# No version, extension only
return '/* No template check available */'
if self.min_api[0].isnone and not self.min_api[1].isnone:
# ES-exclusive function
return 'GL_VERSION_REQ_ES(%s)' % self.min_api[1].template_str()
elif not self.min_api[0].isnone and self.min_api[1].isnone:
# Desktop-exclusive function
return 'GL_VERSION_REQ_DESKTOP(%s)' % self.min_api[0].template_str()
else:
return 'GL_VERSION_REQ_COMBO(%s, %s)' % \
(self.min_api[0].template_str(), self.min_api[1].template_str())
def compile_check(self):
if self.is_extension():
return '0'
else:
return 'GL_VERSION_VERIFY(%s, %s)' % \
(self.min_api[0].compile_str(),
self.min_api[1].compile_str())
def extension_check(self):
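# NOTE: this branch is intentionally short-circuited by the 'and False' below;
# drop the 'and False' to emit per-extension preprocessor guards again.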
if len(self.extensions) and False:
out = ''
for ext in self.extensions:
out += ' || (defined(%s) && %s)' % (ext.name, ext.name)
return out
return ''
def ptr_check(self):
base = 'if(!%s)\n {' % self.original_name
for ext in self.fallback:
base += '\n#if defined(%s) && %s' % (ext[1].name, ext[1].name)
base += '\n if(%s) Throw(undefined_behavior("extension %s is available"));' %\
(ext[0].original_name, ext[1].name)
base += '\n#endif'
base += '\n Throw(undefined_behavior("function not loaded!"));'
base += '\n }'
return base
def returns_value(self):
return str(self.ret_type) != 'void'
def param_string(self, function_prefix='', function_arguments=None, command_arguments=None):
return \
'''
#if %s%s
%s
%s%s %s(%s)
{
#ifndef NDEBUG
%s
#endif
%s%s(%s);
}
#endif
''' % (self.compile_check(), self.extension_check(), self.template_check(),
function_prefix,
self.ret_type, self.name,
arguments_to_string(self.args) if function_arguments is None else function_arguments,
self.ptr_check(),
'return ' if self.returns_value() else '',
self.original_name,
arguments_to_string(self.args, False) if command_arguments is None else command_arguments
)
def __str__(self):
return self.param_string()
def __repr__(self):
return self.name
def set_cmd_version(cmd_obj, version_desc):
if version_desc.api == 'ES' and \
version_desc.islowerthan(cmd_obj.min_api[1]):
cmd_obj.min_api[1] = version_desc
elif version_desc.islowerthan(cmd_obj.min_api[0]):
cmd_obj.min_api[0] = version_desc
def extract_commands(registry, commands, cmd_names, version_desc):
for cmd in registry.find('commands').findall('command'):
cmd_name = cmd.find('proto').findtext('name')
if cmd_name not in cmd_names:
continue
if cmd_name in commands:
cmd_obj = commands[cmd_name]
set_cmd_version(cmd_obj, version_desc)
else:
ret_type = ''.join(''.join(cmd.find('proto').itertext()).rsplit(cmd_name, 1))
cmd_obj = GLCommand()
cmd_obj.ret_type = GLType.from_string(ret_type.strip())
cmd_obj.original_name = cmd_name
cmd_obj.name = cmd_name
cmd_obj.source_element = cmd
for arg in cmd.findall('param'):
arg_obj = GLArgument()
arg_obj.name = arg.findtext('name')
arg_obj.atype = GLType.from_string(''.join(''.join(arg.itertext()).rsplit(arg_obj.name, 1)))
arg_obj.group = arg.get('group')
cmd_obj.args += [arg_obj]
set_cmd_version(cmd_obj, version_desc)
commands[cmd_name] = cmd_obj
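# Example (hedged sketch; 'gl.xml' is the Khronos registry file, path assumed):
# >>> import xml.etree.ElementTree as ET
# >>> registry = ET.parse('gl.xml').getroot()
# >>> commands = {}
# >>> extract_commands(registry, commands, {'glDrawArrays'}, GLVersion())
# >>> commands['glDrawArrays'].name
# 'glDrawArrays'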
#
|
This is an easy spell to help bring more traveling/journey into your life. Get your passport ready!
The fan should be a hand-held one, the kind that you wave: in other words, an old-school fan, like they used to use in churches, or one of those cute Chinese fans. As for the feather, it should be small and light, kind of like the ones from a goose-down pillow.
When you are ready to conjure, breathe in the smell of mint. Breathe in slowly and take your time. Begin to visualize yourself in different places, seeing different landscapes, places you have always wanted to go. Toss the little feather in the air, and use the fan to blow it around the room. Try to keep it airborne as you see yourself visiting or moving to new places in the near future.
Eventually, let the feather land in the dish of mint, then take a few more sniffs of the mint to slowly close out the ritual and bring your energy back in. Repeat this for the whole week, every morning (or night). Don't forget to put an intention into this spell; it is the most important ingredient.
"""Ensure all tokens are associated with a resource owner.
"""
from __future__ import absolute_import, unicode_literals
import json
import mock
from .test_utils import get_query_credentials, get_fragment_credentials
from ....unittest import TestCase
from oauthlib.oauth2 import RequestValidator
from oauthlib.oauth2 import WebApplicationServer, MobileApplicationServer
from oauthlib.oauth2 import LegacyApplicationServer, BackendApplicationServer
class ResourceOwnerAssociationTest(TestCase):
auth_uri = 'http://example.com/path?client_id=abc'
token_uri = 'http://example.com/path'
def set_client(self, request):
request.client = mock.MagicMock()
request.client.client_id = 'mocked'
return True
def set_user(self, client_id, code, client, request):
request.user = 'test'
return True
def set_user_from_username(self, username, password, client, request):
request.user = 'test'
return True
def set_user_from_credentials(self, request):
request.user = 'test'
request.client = mock.MagicMock()
request.client.client_id = 'mocked'
return True
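# Used as the servers' token_generator below: it raises unless the request has
# a resource owner attached, so each test implicitly asserts the association.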
def inspect_client(self, request, refresh_token=False):
if not request.user:
raise ValueError()
return 'abc'
def setUp(self):
self.validator = mock.MagicMock(spec=RequestValidator)
self.validator.get_default_redirect_uri.return_value = 'http://i.b./path'
self.validator.authenticate_client.side_effect = self.set_client
self.web = WebApplicationServer(self.validator,
token_generator=self.inspect_client)
self.mobile = MobileApplicationServer(self.validator,
token_generator=self.inspect_client)
self.legacy = LegacyApplicationServer(self.validator,
token_generator=self.inspect_client)
self.backend = BackendApplicationServer(self.validator,
token_generator=self.inspect_client)
def test_web_application(self):
# TODO: code generator + intercept test
h, _, s = self.web.create_authorization_response(
self.auth_uri + '&response_type=code',
credentials={'user': 'test'}, scopes=['random'])
self.assertEqual(s, 302)
self.assertIn('Location', h)
code = get_query_credentials(h['Location'])['code'][0]
self.assertRaises(ValueError,
self.web.create_token_response, self.token_uri,
body='grant_type=authorization_code&code=%s' % code)
self.validator.validate_code.side_effect = self.set_user
_, body, _ = self.web.create_token_response(self.token_uri,
body='grant_type=authorization_code&code=%s' % code)
self.assertEqual(json.loads(body)['access_token'], 'abc')
def test_mobile_application(self):
self.assertRaises(ValueError,
self.mobile.create_authorization_response,
self.auth_uri + '&response_type=token')
h, _, s = self.mobile.create_authorization_response(
self.auth_uri + '&response_type=token',
credentials={'user': 'test'}, scopes=['random'])
self.assertEqual(s, 302)
self.assertIn('Location', h)
self.assertEqual(get_fragment_credentials(h['Location'])['access_token'][0], 'abc')
def test_legacy_application(self):
body = 'grant_type=password&username=abc&password=secret'
self.assertRaises(ValueError,
self.legacy.create_token_response,
self.token_uri, body=body)
self.validator.validate_user.side_effect = self.set_user_from_username
_, body, _ = self.legacy.create_token_response(
self.token_uri, body=body)
self.assertEqual(json.loads(body)['access_token'], 'abc')
def test_backend_application(self):
body = 'grant_type=client_credentials'
self.assertRaises(ValueError,
self.backend.create_token_response,
self.token_uri, body=body)
self.validator.authenticate_client.side_effect = self.set_user_from_credentials
_, body, _ = self.backend.create_token_response(
self.token_uri, body=body)
self.assertEqual(json.loads(body)['access_token'], 'abc')
|
Illustro Obscurum Volume III is the third installment in Michael Bukowski’s Illustro Obscurum series. This volume contains 29 illustrations based on the works of H.P. Lovecraft.
#!/usr/bin/python
#
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import httplib2
import sys
import os
import time
import datetime
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import AccessTokenRefreshError
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run
# For this example, the client id and client secret are command-line arguments.
client_id = sys.argv[1]
client_secret = sys.argv[2]
client_string = sys.argv[3]
client_longString = sys.argv[4]
client_startString = sys.argv[5]
if len(sys.argv) > 6:
client_endString = sys.argv[6]
ev_type = 1
else:
ev_type = 2
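# Expected invocation (hedged sketch; the script name is illustrative):
# python create_event.py CLIENT_ID CLIENT_SECRET SUMMARY DESCRIPTION \
# 2012-06-01T10:00:00 [2012-06-01T11:00:00]
# Six arguments create a timed event (ev_type == 1); five create an
# all-day event on the given start date (ev_type == 2).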
# The scope URL for read/write access to a user's calendar data
scope = 'https://www.googleapis.com/auth/calendar'
# Create a flow object. This object holds the client_id, client_secret, and
# scope. It assists with OAuth 2.0 steps to get user authorization and
# credentials.
flow = OAuth2WebServerFlow(client_id, client_secret, scope)
def main():
# Create a Storage object. This object holds the credentials that your
# application needs to authorize access to the user's data. The name of the
# credentials file is provided. If the file does not exist, it is
# created. This object can only hold credentials for a single user, so
# as-written, this script can only handle a single user.
storage = Storage('credentials.dat')
# The get() function returns the credentials for the Storage object. If no
# credentials were found, None is returned.
credentials = storage.get()
# If no credentials are found or the credentials are invalid due to
# expiration, new credentials need to be obtained from the authorization
# server. The oauth2client.tools.run() function attempts to open an
# authorization server page in your default web browser. The server
# asks the user to grant your application access to the user's data.
# If the user grants access, the run() function returns new credentials.
# The new credentials are also stored in the supplied Storage object,
# which updates the credentials.dat file.
if credentials is None or credentials.invalid:
credentials = run(flow, storage)
# Create an httplib2.Http object to handle our HTTP requests, and authorize it
# using the credentials.authorize() function.
http = httplib2.Http()
http = credentials.authorize(http)
# The apiclient.discovery.build() function returns an instance of an API service
# object can be used to make API calls. The object is constructed with
# methods specific to the calendar API. The arguments provided are:
# name of the API ('calendar')
# version of the API you are using ('v3')
# authorized httplib2.Http() object that can be used for API calls
service = build('calendar', 'v3', http=http)
try:
if ev_type == 1:
event = {
'summary': client_string,
'description': client_longString,
'start': {
'dateTime': client_startString,
'timeZone': 'America/New_York'
},
'end': {
'dateTime':client_endString,
'timeZone': 'America/New_York'
},
}
else:
year = int(client_startString.split("-")[0])
month = int(client_startString.split("-")[1])
day = int(client_startString.split("-")[2])
start_time = datetime.date(year, month, day)
one_day = datetime.timedelta(days=1)
end_time = start_time + one_day
end_time_str = end_time.strftime("%Y-%m-%d")
event = {
'summary': client_string,
'description': client_longString,
'start': {
'date': client_startString,
},
'end': {
'date': end_time_str,
},
}
# request = service.events().quickAdd(calendarId='1ocenl402qcp8eg74ddv44uar4@group.calendar.google.com', text=client_string)
request = service.events().insert(calendarId='1ocenl402qcp8eg74ddv44uar4@group.calendar.google.com', body=event)
response = request.execute()
except AccessTokenRefreshError:
# The AccessTokenRefreshError exception is raised if the credentials
# have been revoked by the user or they have expired.
print ('The credentials have been revoked or expired, please re-run '
'the application to re-authorize')
# Remove credentials.dat because it can only handle credentials from one account
# and fails miserably if you try to use a different account.
os.remove('credentials.dat')
if __name__ == '__main__':
main() |
Learning the art of calligraphy takes time and effort, but the investment is well worth it. Many teens enjoy learning new and interesting techniques for art and writing projects. Whether you learn from a friend, a teacher, online, or a book borrowed from the library, calligraphy is a great art form for free self-expression.
The most commonly used venue for calligraphy is invitations. Brides-to-be are always looking for unique invitation styles. As a teen entrepreneur you could easily start your own business in handwritten wedding invitations for older siblings, family and friends. Calligraphy not only looks unique, it is. Creating one-of-a-kind invitations is a great way to set yourself apart from all the other companies that offer invitations.
Handwritten wedding invitations may be more time-consuming, but for the bride-to-be who is willing to go the extra mile, this will be a memory of a lifetime. You can provide the bride with shower invitations, wedding invitations, RSVP cards, thank-you cards and even place cards for the reception. Another great idea: you could write the vows in calligraphy for the bride and groom. This could be framed and will look great on their wall for many years to come.
One young teen took this a step further, using colored pencils to color in parts of the words as her own unique twist on calligraphy. The results are frameable works of art hanging in many homes in her local community.
Venture out further and offer birthday invitations, Christmas cards and even personalized notes for your friends and family. Once your friends and family see what you can do with a calligraphy pen, you will be in business in short order. Your friends and family may well come up with even more ideas for your calligraphy.
Try silk screening calligraphy on a tee shirt or a hat. Create frameable artwork and take the time to frame it.
The word calligraphy is derived from the ancient Greek word kallos, meaning beauty. Calligraphy is a very artistic form of writing, often taking on a shape all its own due to each individual’s technique or style.
There are many great ways to turn it into your own business. Set up a booth at a local fair and, with some nice parchment paper, charge a fee for a frameable art rendering of a person’s name. Charge either by the letter or per block of letters, add in the cost of the fancier paper, and you will be in business. Be sure to have some samples on hand. You might even want to have some fairly common names ready for sale if you have enough advance time.
To advertise your work create a beautiful poster done in calligraphy. Make some copies and post the poster around your local town or at craft and local fairs.
You could set up lessons as well. Teaching your friends and family how to write in calligraphy won’t necessarily give you more competition, since everyone has their own unique style. It might even come in handy when you get that 250-piece wedding invitation order. Charging a nominal hourly rate to teach friends and family can make this a doubly lucrative teen business.
Calligraphy can become a very profitable business for a teen with the right determination. |
"""Builds the Adience network.
Summary of available functions:
# Compute input images and labels for training. If you would like to run
# evaluations, use input() instead.
inputs, labels = distorted_inputs()
# Compute inference on the model inputs to make a prediction.
predictions = inference(inputs)
# Compute the total loss of the prediction with respect to the labels.
loss = loss(predictions, labels)
# Create a graph to run one step of training with respect to the loss.
train_op = train(loss, global_step)
"""
# pylint: disable=missing-docstring
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gzip
import os
import re
import sys
import tarfile
import tensorflow.python.platform
from six.moves import urllib
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
import adience_input
from tensorflow.python.platform import gfile
ad_input = adience_input.DataInput()
ad_input.read_from_txt()
FLAGS = tf.app.flags.FLAGS
# Basic model parameters.
tf.app.flags.DEFINE_integer('batch_size', 32,
"""Number of images to process in a batch.""")
tf.app.flags.DEFINE_string('data_dir', 'data/aligned',
"""Path to the CIFAR-10 data directory.""")
# Process images of this size. Note that this differs from the original CIFAR
# image size of 32 x 32. If one alters this number, then the entire model
# architecture will change and any model would need to be retrained.
IMAGE_SIZE = 64
# Global constants describing the Adience data set.
NUM_CLASSES = 2
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 0 # set when reading input data (in distorted_inputs)
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = 0
# Constants describing the training process.
MOVING_AVERAGE_DECAY = 0.9999 # The decay to use for the moving average.
NUM_EPOCHS_PER_DECAY = 350.0 # Epochs after which learning rate decays.
LEARNING_RATE_DECAY_FACTOR = 0.1 # Learning rate decay factor.
INITIAL_LEARNING_RATE = 0.1 # Initial learning rate.
# If a model is trained with multiple GPUs, prefix all Op names with tower_name
# to differentiate the operations. Note that this prefix is removed from the
# names of the summaries when visualizing a model.
TOWER_NAME = 'tower'
def _activation_summary(x):
"""Helper to create summaries for activations.
Creates a summary that provides a histogram of activations.
Creates a summary that measure the sparsity of activations.
Args:
x: Tensor
Returns:
nothing
"""
# Remove 'tower_[0-9]/' from the name in case this is a multi-GPU training
# session. This helps the clarity of presentation on tensorboard.
tensor_name = re.sub('%s_[0-9]*/' % TOWER_NAME, '', x.op.name)
tf.histogram_summary(tensor_name + '/activations', x)
tf.scalar_summary(tensor_name + '/sparsity', tf.nn.zero_fraction(x))
def _variable_on_cpu(name, shape, initializer):
"""Helper to create a Variable stored on CPU memory.
Args:
name: name of the variable
shape: list of ints
initializer: initializer for Variable
Returns:
Variable Tensor
"""
with tf.device('/cpu:0'):
var = tf.get_variable(name, shape, initializer=initializer)
return var
def _variable_with_weight_decay(name, shape, stddev, wd):
"""Helper to create an initialized Variable with weight decay.
Note that the Variable is initialized with a truncated normal distribution.
A weight decay is added only if one is specified.
Args:
name: name of the variable
shape: list of ints
stddev: standard deviation of a truncated Gaussian
wd: add L2Loss weight decay multiplied by this float. If None, weight
decay is not added for this Variable.
Returns:
Variable Tensor
"""
var = _variable_on_cpu(name, shape,
tf.truncated_normal_initializer(stddev=stddev))
if wd:
weight_decay = tf.mul(tf.nn.l2_loss(var), wd, name='weight_loss')
tf.add_to_collection('losses', weight_decay)
return var
def _generate_image_and_label_batch(image, label, min_queue_examples):
"""Construct a queued batch of images and labels.
Args:
image: 3-D Tensor of [IMAGE_SIZE, IMAGE_SIZE, 3] of type.float32.
label: 1-D Tensor of type.int32
min_queue_examples: int32, minimum number of samples to retain
in the queue that provides of batches of examples.
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
"""
# Create a queue that shuffles the examples, and then
# read 'FLAGS.batch_size' images + labels from the example queue.
num_preprocess_threads = 16
images, label_batch = tf.train.shuffle_batch(
[image, label],
batch_size=FLAGS.batch_size,
num_threads=num_preprocess_threads,
capacity=min_queue_examples + 3 * FLAGS.batch_size,
min_after_dequeue=min_queue_examples)
# Display the training images in the visualizer.
tf.image_summary('images', images)
return images, tf.reshape(label_batch, [FLAGS.batch_size])
def distorted_inputs():
"""Construct distorted input for CIFAR training using the Reader ops.
Raises:
ValueError: if no data_dir
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
"""
# filenames = [os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin',
# 'data_batch_%d.bin' % i)
# for i in xrange(1, 5)]
# for f in filenames:
# if not gfile.Exists(f):
# raise ValueError('Failed to find file: ' + f)
#ad_input.read_adience()
#change if you want to go to cross-fold
#
global NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = len(ad_input.train_string_que)
# Create a queue that produces the filenames to read.
#filename_queue = tf.train.string_input_producer(filenames)
# Read examples from files in the filename queue.
read_input = ad_input.read_adience()
reshaped_image = tf.cast(read_input.dec_image, tf.float32)
height = IMAGE_SIZE
width = IMAGE_SIZE
# Image processing for training the network. Note the many random
# distortions applied to the image.
# Randomly crop a [height, width] section of the image.
distorted_image = tf.image.random_crop(reshaped_image, [height, width])
# Randomly flip the image horizontally.
distorted_image = tf.image.random_flip_left_right(distorted_image)
# Because these operations are not commutative, consider randomizing
# the order in which they are applied.
distorted_image = tf.image.random_brightness(distorted_image,
max_delta=63)
distorted_image = tf.image.random_contrast(distorted_image,
lower=0.2, upper=1.8)
# Subtract off the mean and divide by the variance of the pixels.
float_image = tf.image.per_image_whitening(distorted_image)
# Ensure that the random shuffling has good mixing properties.
min_fraction_of_examples_in_queue = 0.4
min_queue_examples = int(NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN *
min_fraction_of_examples_in_queue)
print ('Filling queue with %d Adience images before starting to train. '
'This will take a few minutes.' % min_queue_examples)
# Generate a batch of images and labels by building up a queue of examples.
return _generate_image_and_label_batch(float_image, read_input.label,
min_queue_examples)
def inputs(eval_data):
print("\neval inputs adience called")
"""Construct input for Adience evaluation using the Reader ops.
Args:
eval_data: bool, indicating if one should use the train or eval data set.
Raises:
ValueError: if no data_dir
Returns:
images: Images. 4D tensor of [batch_size, IMAGE_SIZE, IMAGE_SIZE, 3] size.
labels: Labels. 1D tensor of [batch_size] size.
"""
global NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = len(ad_input.eval_string_que)
#TODO:
# if not eval_data:
# filenames = [os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin',
# 'data_batch_%d.bin' % i)
# for i in xrange(1, 5)]
# num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN
# else:
# filenames = [os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin',
# 'test_batch.bin')]
# num_examples_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_EVAL
#
# for f in filenames:
# if not gfile.Exists(f):
# raise ValueError('Failed to find file: ' + f)
# Create a queue that produces the filenames to read.
#filename_queue = tf.train.string_input_producer(filenames)
# Read examples from files in the filename queue.
read_input = ad_input.read_adience_eval()
reshaped_image = tf.cast(read_input.dec_image, tf.float32)
print("reshaped image eval")
height = IMAGE_SIZE
width = IMAGE_SIZE
# Image processing for evaluation.
# Crop the central [height, width] of the image.
resized_image = tf.image.resize_image_with_crop_or_pad(reshaped_image, width, height)
print("image resized")
# Subtract off the mean and divide by the variance of the pixels.
float_image = tf.image.per_image_whitening(resized_image)
# Ensure that the random shuffling has good mixing properties.
min_fraction_of_examples_in_queue = 0.4
min_queue_examples = int(NUM_EXAMPLES_PER_EPOCH_FOR_EVAL *
min_fraction_of_examples_in_queue)
print("eval inputs adience done")
# Generate a batch of images and labels by building up a queue of examples.
return _generate_image_and_label_batch(float_image, read_input.label,
min_queue_examples)
def inference(images):
"""Build the CIFAR-10 model.
Args:
images: Images returned from distorted_inputs() or inputs().
Returns:
Logits.
"""
# We instantiate all variables using tf.get_variable() instead of
# tf.Variable() in order to share variables across multiple GPU training runs.
# If we only ran this model on a single GPU, we could simplify this function
# by replacing all instances of tf.get_variable() with tf.Variable().
#
# conv1
with tf.variable_scope('conv1') as scope:
kernel = _variable_with_weight_decay('weights', shape=[5, 5, 3, 64],
stddev=1e-4, wd=0.0)
conv = tf.nn.conv2d(images, kernel, [1, 1, 1, 1], padding='SAME')
biases = _variable_on_cpu('biases', [64], tf.constant_initializer(0.0))
bias = tf.nn.bias_add(conv, biases)
conv1 = tf.nn.relu(bias, name=scope.name)
_activation_summary(conv1)
# pool1
pool1 = tf.nn.max_pool(conv1, ksize=[1, 3, 3, 1], strides=[1, 2, 2, 1],
padding='SAME', name='pool1')
# norm1
norm1 = tf.nn.lrn(pool1, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm1')
# conv2
with tf.variable_scope('conv2') as scope:
kernel = _variable_with_weight_decay('weights', shape=[5, 5, 64, 64],
stddev=1e-4, wd=0.0)
conv = tf.nn.conv2d(norm1, kernel, [1, 1, 1, 1], padding='SAME')
biases = _variable_on_cpu('biases', [64], tf.constant_initializer(0.1))
bias = tf.nn.bias_add(conv, biases)
conv2 = tf.nn.relu(bias, name=scope.name)
_activation_summary(conv2)
# norm2
norm2 = tf.nn.lrn(conv2, 4, bias=1.0, alpha=0.001 / 9.0, beta=0.75,
name='norm2')
# pool2
pool2 = tf.nn.max_pool(norm2, ksize=[1, 3, 3, 1],
strides=[1, 2, 2, 1], padding='SAME', name='pool2')
# local3
with tf.variable_scope('local3') as scope:
# Move everything into depth so we can perform a single matrix multiply.
dim = 1
for d in pool2.get_shape()[1:].as_list():
dim *= d
reshape = tf.reshape(pool2, [FLAGS.batch_size, dim])
weights = _variable_with_weight_decay('weights', shape=[dim, 384],
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [384], tf.constant_initializer(0.1))
local3 = tf.nn.relu_layer(reshape, weights, biases, name=scope.name)
_activation_summary(local3)
# local4
with tf.variable_scope('local4') as scope:
weights = _variable_with_weight_decay('weights', shape=[384, 192],
stddev=0.04, wd=0.004)
biases = _variable_on_cpu('biases', [192], tf.constant_initializer(0.1))
local4 = tf.nn.relu_layer(local3, weights, biases, name=scope.name)
_activation_summary(local4)
# softmax, i.e. softmax(WX + b)
with tf.variable_scope('softmax_linear') as scope:
weights = _variable_with_weight_decay('weights', [192, NUM_CLASSES],
stddev=1/192.0, wd=0.0)
biases = _variable_on_cpu('biases', [NUM_CLASSES],
tf.constant_initializer(0.0))
softmax_linear = tf.nn.xw_plus_b(local4, weights, biases, name=scope.name)
_activation_summary(softmax_linear)
return softmax_linear
def loss(logits, labels):
"""Add L2Loss to all the trainable variables.
Add summary for "Loss" and "Loss/avg".
Args:
logits: Logits from inference().
labels: Labels from distorted_inputs or inputs(). 1-D tensor
of shape [batch_size]
Returns:
Loss tensor of type float.
"""
# Reshape the labels into a dense Tensor of
# shape [batch_size, NUM_CLASSES].
sparse_labels = tf.reshape(labels, [FLAGS.batch_size, 1])
# use (FLAGS.batch_size, 1) on older TensorFlow versions
indices = tf.reshape(tf.range(0,FLAGS.batch_size,1), [FLAGS.batch_size, 1])
concated = tf.concat(1, [indices, sparse_labels])
dense_labels = tf.sparse_to_dense(concated,
[FLAGS.batch_size, NUM_CLASSES],
1.0, 0.0)
# Calculate the average cross entropy loss across the batch.
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(
logits, dense_labels, name='cross_entropy_per_example')
cross_entropy_mean = tf.reduce_mean(cross_entropy, name='cross_entropy')
tf.add_to_collection('losses', cross_entropy_mean)
# The total loss is defined as the cross entropy loss plus all of the weight
# decay terms (L2 loss).
return tf.add_n(tf.get_collection('losses'), name='total_loss')
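# Example (hedged): with batch_size = 4 and labels [1, 0, 1, 1], the
# concat/sparse_to_dense pair in loss() above yields the one-hot matrix
# [[0, 1], [1, 0], [0, 1], [0, 1]] fed to softmax_cross_entropy_with_logits.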
def _add_loss_summaries(total_loss):
"""Add summaries for losses in CIFAR-10 model.
Generates moving average for all losses and associated summaries for
visualizing the performance of the network.
Args:
total_loss: Total loss from loss().
Returns:
loss_averages_op: op for generating moving averages of losses.
"""
# Compute the moving average of all individual losses and the total loss.
loss_averages = tf.train.ExponentialMovingAverage(0.9, name='avg')
losses = tf.get_collection('losses')
loss_averages_op = loss_averages.apply(losses + [total_loss])
# Attach a scalar summary to all individual losses and the total loss; do the
# same for the averaged version of the losses.
for l in losses + [total_loss]:
# Name each loss as '(raw)' and name the moving average version of the loss
# as the original loss name.
tf.scalar_summary(l.op.name +' (raw)', l)
tf.scalar_summary(l.op.name, loss_averages.average(l))
return loss_averages_op
def train(total_loss, global_step):
"""Train CIFAR-10 model.
Create an optimizer and apply to all trainable variables. Add moving
average for all trainable variables.
Args:
total_loss: Total loss from loss().
global_step: Integer Variable counting the number of training steps
processed.
Returns:
train_op: op for training.
"""
# Variables that affect learning rate.
num_batches_per_epoch = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN / FLAGS.batch_size
decay_steps = int(num_batches_per_epoch * NUM_EPOCHS_PER_DECAY)
# Decay the learning rate exponentially based on the number of steps.
lr = tf.train.exponential_decay(INITIAL_LEARNING_RATE,
global_step,
decay_steps,
LEARNING_RATE_DECAY_FACTOR,
staircase=True)
tf.scalar_summary('learning_rate', lr)
# Generate moving averages of all losses and associated summaries.
loss_averages_op = _add_loss_summaries(total_loss)
# Compute gradients.
with tf.control_dependencies([loss_averages_op]):
opt = tf.train.GradientDescentOptimizer(lr)
grads = opt.compute_gradients(total_loss)
# Apply gradients.
apply_gradient_op = opt.apply_gradients(grads, global_step=global_step)
# Add histograms for trainable variables.
for var in tf.trainable_variables():
tf.histogram_summary(var.op.name, var)
# Add histograms for gradients.
for grad, var in grads:
if grad is not None:
tf.histogram_summary(var.op.name + '/gradients', grad)
# Track the moving averages of all trainable variables.
variable_averages = tf.train.ExponentialMovingAverage(
MOVING_AVERAGE_DECAY, global_step)
variables_averages_op = variable_averages.apply(tf.trainable_variables())
with tf.control_dependencies([apply_gradient_op, variables_averages_op]):
train_op = tf.no_op(name='train')
return train_op
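# Typical wiring (hedged sketch, mirroring the module docstring):
# images, labels = distorted_inputs()
# logits = inference(images)
# total_loss = loss(logits, labels)
# global_step = tf.Variable(0, trainable=False)
# train_op = train(total_loss, global_step)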
|
My mom and my mother-in-law have both asked me what I want this year for Christmas, and I really didn’t have much of anything to tell them because I don’t have anything reasonable on my list this year. Of course I’d like a new couch, dining set, tons of rugs, etc., but I don’t think those are exactly the types of things they have in mind to gift 30-year-old me. 😉 So creating this post is requiring me to buckle down and really think about the things that I’d love to open for Christmas (or my January birthday – hehe) this year.
A medium, navy Longchamp bag, because I’ve been eyeing a bag like this ever since I met my bestie Jessica, who has been a long-time fan of the brand. I have never bought myself a pricey purse before, though, because I just. can’t. do it… but dang, I love this one.
My pillows, more specifically a “My Pillow”, because my friends and family rave about these pillows, and my whole family is hurting for some new ones!
An 8-person Raclette, because we cooked with a Raclette in Switzerland and fell in love with this entertaining form of cooking! We love to entertain and have people over, so I visualize using this a ton!
A Nordstrom Rack gift card, because they have great deals on Zella athletic wear, which is my favorite brand of clothes to wear when working out.
A cheese board with knives, because I always wish I had one of them when I’m getting ready for a party, but haven’t gotten around to getting one.
Moscow mule mugs, because Moscow Mules are one of my favorite grown up drinks, so it only makes sense that I have a proper set of mugs to serve them in.
A West Point long-sleeve shirt, because my 10-year-old West Point shirts are showing some serious age and need replacing.
What are YOU asking for this Christmas? Why is it so hard to come up with gifts for yourself?
# -*- coding: utf-8 -*-
"""
Bottle is a fast and simple micro-framework for small web applications. It
offers request dispatching (Routes) with url parameter support, templates,
key/value databases, a built-in HTTP Server and adapters for many third party
WSGI/HTTP-server and template engines - all in a single file and with no
dependencies other than the Python Standard Library.
Homepage and documentation: http://wiki.github.com/defnull/bottle
Special thanks to Stefan Matthias Aust [http://github.com/sma]
for his contribution to SimpleTemplate
Licence (MIT)
-------------
Copyright (c) 2009, Marcel Hellkamp.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
Example
-------
from bottle import route, run, request, response, send_file, abort
@route('/')
def hello_world():
return 'Hello World!'
@route('/hello/:name')
def hello_name(name):
return 'Hello %s!' % name
@route('/hello', method='POST')
def hello_post():
name = request.POST['name']
return 'Hello %s!' % name
@route('/static/:filename#.*#')
def static_file(filename):
send_file(filename, root='/path/to/static/files/')
run(host='localhost', port=8080)
"""
__author__ = 'Marcel Hellkamp'
__version__ = '0.6.4'
__license__ = 'MIT'
import types
import sys
import cgi
import mimetypes
import os
import os.path
import traceback
import re
import random
import threading
import time
import warnings
import email.utils
from wsgiref.headers import Headers as HeaderWrapper
from Cookie import SimpleCookie
import anydbm as dbm
import subprocess
import thread
try:
from urlparse import parse_qs
except ImportError: # pragma: no cover
from cgi import parse_qs
try:
import cPickle as pickle
except ImportError: # pragma: no cover
import pickle as pickle
try:
try:
from json import dumps as json_dumps
except ImportError: # pragma: no cover
from simplejson import dumps as json_dumps
except ImportError: # pragma: no cover
json_dumps = None
# Exceptions and Events
class BottleException(Exception):
""" A base class for exceptions used by bottle. """
pass
class HTTPError(BottleException):
"""
A way to break the execution and instantly jump to an error handler.
"""
def __init__(self, status, text):
self.output = text
self.http_status = int(status)
BottleException.__init__(self, status, text)
def __repr__(self):
return 'HTTPError(%d,%s)' % (self.http_status, repr(self.output))
def __str__(self):
return HTTP_ERROR_TEMPLATE % {
'status' : self.http_status,
'url' : request.path,
'error_name' : HTTP_CODES.get(self.http_status, 'Unknown').title(),
'error_message' : ''.join(self.output)
}
class BreakTheBottle(BottleException):
"""
Not an exception, but a straight jump out of the controller code.
Causes the Bottle to instantly call start_response() and return the
content of output
"""
def __init__(self, output):
self.output = output
# WSGI abstraction: Request and response management
_default_app = None
def default_app(newapp = None):
"""
Returns the current default app or sets a new one.
Defaults to an instance of Bottle
"""
global _default_app
if newapp:
_default_app = newapp
if not _default_app:
_default_app = Bottle()
return _default_app
class Bottle(object):
def __init__(self, catchall=True, optimize=False, autojson=True):
self.simple_routes = {}
self.regexp_routes = {}
self.default_route = None
self.error_handler = {}
self.optimize = optimize
self.autojson = autojson
self.catchall = catchall
self.serve = True
def match_url(self, url, method='GET'):
"""
Returns the first matching handler and a parameter dict or (None, None)
"""
url = url.strip().lstrip("/ ")
# Search for static routes first
route = self.simple_routes.get(method,{}).get(url,None)
if route:
return (route, {})
routes = self.regexp_routes.get(method,[])
for i in range(len(routes)):
match = routes[i][0].match(url)
if match:
handler = routes[i][1]
if i > 0 and self.optimize and random.random() <= 0.001:
routes[i-1], routes[i] = routes[i], routes[i-1]
return (handler, match.groupdict())
if self.default_route:
return (self.default_route, {})
if method == 'HEAD': # Fall back to GET
return self.match_url(url)
else:
return (None, None)
def add_controller(self, route, controller, **kargs):
""" Adds a controller class or object """
if '{action}' not in route and 'action' not in kargs:
            raise BottleException("Routes to controller classes or objects MUST"
" contain an {action} placeholder or use the action-parameter")
for action in (m for m in dir(controller) if not m.startswith('_')):
handler = getattr(controller, action)
if callable(handler) and action == kargs.get('action', action):
self.add_route(route.replace('{action}', action), handler, **kargs)
def add_route(self, route, handler, method='GET', simple=False, **kargs):
""" Adds a new route to the route mappings. """
if isinstance(handler, type) and issubclass(handler, BaseController):
handler = handler()
if isinstance(handler, BaseController):
self.add_controller(route, handler, method=method, simple=simple, **kargs)
return
method = method.strip().upper()
route = route.strip().lstrip('$^/ ').rstrip('$^ ')
if re.match(r'^(\w+/)*\w*$', route) or simple:
self.simple_routes.setdefault(method, {})[route] = handler
else:
route = re.sub(r':([a-zA-Z_]+)(?P<uniq>[^\w/])(?P<re>.+?)(?P=uniq)',
r'(?P<\1>\g<re>)',route)
route = re.sub(r':([a-zA-Z_]+)', r'(?P<\1>[^/]+)', route)
route = re.compile('^%s$' % route)
self.regexp_routes.setdefault(method, []).append([route, handler])
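    # Example (a sketch, not from the original source): after the rewriting
    # above, these dynamic routes compile to the following regular expressions:
    #   add_route('hello/:name', handler)      -> r'^hello/(?P<name>[^/]+)$'
    #   add_route('static/:path#.+#', handler) -> r'^static/(?P<path>.+)$'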
def route(self, url, **kargs):
"""
Decorator for request handler.
Same as add_route(url, handler, **kargs).
"""
def wrapper(handler):
self.add_route(url, handler, **kargs)
return handler
return wrapper
def set_default(self, handler):
self.default_route = handler
def default(self):
""" Decorator for request handler. Same as add_defroute( handler )."""
def wrapper(handler):
self.set_default(handler)
return handler
return wrapper
def set_error_handler(self, code, handler):
""" Adds a new error handler. """
self.error_handler[int(code)] = handler
def error(self, code=500):
"""
Decorator for error handler.
Same as set_error_handler(code, handler).
"""
def wrapper(handler):
self.set_error_handler(code, handler)
return handler
return wrapper
def cast(self, out):
"""
Cast the output to an iterable of strings or something WSGI can handle.
Set Content-Type and Content-Length when possible. Then clear output
on HEAD requests.
Supports: False, str, unicode, list(unicode), dict(), open()
"""
if not out:
out = []
response.header['Content-Length'] = '0'
elif isinstance(out, types.StringType):
out = [out]
elif isinstance(out, unicode):
out = [out.encode(response.charset)]
elif isinstance(out, list) and isinstance(out[0], unicode):
out = map(lambda x: x.encode(response.charset), out)
elif self.autojson and json_dumps and isinstance(out, dict):
out = [json_dumps(out)]
response.content_type = 'application/json'
elif hasattr(out, 'read'):
out = request.environ.get('wsgi.file_wrapper',
lambda x: iter(lambda: x.read(8192), ''))(out)
if isinstance(out, list) and len(out) == 1:
response.header['Content-Length'] = str(len(out[0]))
if not hasattr(out, '__iter__'):
raise TypeError('Request handler for route "%s" returned [%s] '
'which is not iterable.' % (request.path, type(out).__name__))
return out
def __call__(self, environ, start_response):
""" The bottle WSGI-interface. """
request.bind(environ)
response.bind()
try: # Unhandled Exceptions
try: # Bottle Error Handling
if not self.serve:
abort(503, "Server stopped")
handler, args = self.match_url(request.path, request.method)
if not handler:
raise HTTPError(404, "Not found")
output = handler(**args)
db.close()
except BreakTheBottle, e:
output = e.output
except HTTPError, e:
response.status = e.http_status
output = self.error_handler.get(response.status, str)(e)
output = self.cast(output)
if response.status in (100, 101, 204, 304) or request.method == 'HEAD':
output = [] # rfc2616 section 4.3
except (KeyboardInterrupt, SystemExit, MemoryError):
raise
except Exception, e:
response.status = 500
if self.catchall:
err = "Unhandled Exception: %s\n" % (repr(e))
if DEBUG:
err += TRACEBACK_TEMPLATE % traceback.format_exc(10)
output = [str(HTTPError(500, err))]
request._environ['wsgi.errors'].write(err)
else:
raise
status = '%d %s' % (response.status, HTTP_CODES[response.status])
start_response(status, response.wsgiheaders())
return output
class Request(threading.local):
""" Represents a single request using thread-local namespace. """
def bind(self, environ):
"""
        Binds the environment of the current request to this request handler
"""
self._environ = environ
self.environ = self._environ
self._GET = None
self._POST = None
self._GETPOST = None
self._COOKIES = None
self.path = self._environ.get('PATH_INFO', '/').strip()
if not self.path.startswith('/'):
self.path = '/' + self.path
@property
def method(self):
""" Get the request method (GET,POST,PUT,DELETE,...) """
return self._environ.get('REQUEST_METHOD', 'GET').upper()
@property
def query_string(self):
""" Get content of QUERY_STRING """
return self._environ.get('QUERY_STRING', '')
@property
def input_length(self):
""" Get content of CONTENT_LENGTH """
try:
return max(0,int(self._environ.get('CONTENT_LENGTH', '0')))
except ValueError:
return 0
@property
def GET(self):
""" Get a dict with GET parameters. """
if self._GET is None:
data = parse_qs(self.query_string, keep_blank_values=True)
self._GET = {}
for key, value in data.iteritems():
if len(value) == 1:
self._GET[key] = value[0]
else:
self._GET[key] = value
return self._GET
@property
def POST(self):
""" Get a dict with parsed POST or PUT data. """
if self._POST is None:
data = cgi.FieldStorage(fp=self._environ['wsgi.input'],
environ=self._environ, keep_blank_values=True)
self._POST = {}
for item in data.list:
name = item.name
if not item.filename:
item = item.value
self._POST.setdefault(name, []).append(item)
for key in self._POST:
if len(self._POST[key]) == 1:
self._POST[key] = self._POST[key][0]
return self._POST
@property
def params(self):
""" Returns a mix of GET and POST data. POST overwrites GET """
if self._GETPOST is None:
self._GETPOST = dict(self.GET)
self._GETPOST.update(dict(self.POST))
return self._GETPOST
@property
def COOKIES(self):
""" Returns a dict with COOKIES. """
if self._COOKIES is None:
raw_dict = SimpleCookie(self._environ.get('HTTP_COOKIE',''))
self._COOKIES = {}
for cookie in raw_dict.itervalues():
self._COOKIES[cookie.key] = cookie.value
return self._COOKIES
class Response(threading.local):
""" Represents a single response using thread-local namespace. """
def bind(self):
""" Clears old data and creates a brand new Response object """
self._COOKIES = None
self.status = 200
self.header_list = []
self.header = HeaderWrapper(self.header_list)
self.content_type = 'text/html'
self.error = None
self.charset = 'utf8'
def wsgiheaders(self):
''' Returns a wsgi conform list of header/value pairs '''
for c in self.COOKIES.itervalues():
self.header.add_header('Set-Cookie', c.OutputString())
return [(h.title(), str(v)) for h, v in self.header.items()]
@property
def COOKIES(self):
if not self._COOKIES:
self._COOKIES = SimpleCookie()
return self._COOKIES
def set_cookie(self, key, value, **kargs):
"""
Sets a Cookie. Optional settings:
expires, path, comment, domain, max-age, secure, version, httponly
"""
self.COOKIES[key] = value
for k, v in kargs.iteritems():
self.COOKIES[key][k] = v
def get_content_type(self):
""" Get the current 'Content-Type' header. """
return self.header['Content-Type']
def set_content_type(self, value):
if 'charset=' in value:
self.charset = value.split('charset=')[-1].split(';')[0].strip()
self.header['Content-Type'] = value
content_type = property(get_content_type, set_content_type, None,
get_content_type.__doc__)
class BaseController(object):
_singleton = None
def __new__(cls, *a, **k):
if not cls._singleton:
cls._singleton = object.__new__(cls, *a, **k)
return cls._singleton
def abort(code=500, text='Unknown Error: Application stopped.'):
""" Aborts execution and causes a HTTP error. """
raise HTTPError(code, text)
def redirect(url, code=307):
""" Aborts execution and causes a 307 redirect """
response.status = code
response.header['Location'] = url
raise BreakTheBottle("")
def send_file(filename, root, guessmime = True, mimetype = None):
""" Aborts execution and sends a static files as response. """
root = os.path.abspath(root) + os.sep
filename = os.path.abspath(os.path.join(root, filename.strip('/\\')))
if not filename.startswith(root):
abort(401, "Access denied.")
if not os.path.exists(filename) or not os.path.isfile(filename):
abort(404, "File does not exist.")
if not os.access(filename, os.R_OK):
abort(401, "You do not have permission to access this file.")
if guessmime and not mimetype:
mimetype = mimetypes.guess_type(filename)[0]
if not mimetype: mimetype = 'text/plain'
response.content_type = mimetype
stats = os.stat(filename)
if 'Last-Modified' not in response.header:
lm = time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(stats.st_mtime))
response.header['Last-Modified'] = lm
if 'HTTP_IF_MODIFIED_SINCE' in request.environ:
ims = request.environ['HTTP_IF_MODIFIED_SINCE']
# IE sends "<date>; length=146"
ims = ims.split(";")[0].strip()
ims = parse_date(ims)
if ims is not None and ims >= stats.st_mtime:
abort(304, "Not modified")
if 'Content-Length' not in response.header:
response.header['Content-Length'] = str(stats.st_size)
raise BreakTheBottle(open(filename, 'rb'))
def parse_date(ims):
"""
Parses date strings usually found in HTTP header and returns UTC epoch.
Understands rfc1123, rfc850 and asctime.
"""
try:
ts = email.utils.parsedate_tz(ims)
if ts is not None:
if ts[9] is None:
return time.mktime(ts[:8] + (0,)) - time.timezone
else:
return time.mktime(ts[:8] + (0,)) - ts[9] - time.timezone
except (ValueError, IndexError):
return None
# Decorators
def validate(**vkargs):
"""
Validates and manipulates keyword arguments by user defined callables.
Handles ValueError and missing arguments by raising HTTPError(403).
"""
def decorator(func):
def wrapper(**kargs):
for key, value in vkargs.iteritems():
if key not in kargs:
abort(403, 'Missing parameter: %s' % key)
try:
kargs[key] = value(kargs[key])
except ValueError, e:
abort(403, 'Wrong parameter format for: %s' % key)
return func(**kargs)
return wrapper
return decorator
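# Usage sketch (an assumption, not from the original docs): coerce and check
# URL parameters before the handler runs.
#
#   @route('/add/:a/:b')
#   @validate(a=int, b=int)
#   def add(a, b):
#       return 'Sum: %d' % (a + b)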
def route(url, **kargs):
"""
Decorator for request handler. Same as add_route(url, handler, **kargs).
"""
return default_app().route(url, **kargs)
def default():
"""
Decorator for request handler. Same as set_default(handler).
"""
return default_app().default()
def error(code=500):
"""
Decorator for error handler. Same as set_error_handler(code, handler).
"""
return default_app().error(code)
# Server adapter
class WSGIAdapter(object):
def run(self, handler): # pragma: no cover
pass
def __repr__(self):
return "%s()" % (self.__class__.__name__)
class CGIServer(WSGIAdapter):
def run(self, handler):
from wsgiref.handlers import CGIHandler
CGIHandler().run(handler)
class ServerAdapter(WSGIAdapter):
def __init__(self, host='127.0.0.1', port=8080, **kargs):
WSGIAdapter.__init__(self)
self.host = host
self.port = int(port)
self.options = kargs
def __repr__(self):
return "%s (%s:%d)" % (self.__class__.__name__, self.host, self.port)
class WSGIRefServer(ServerAdapter):
def run(self, handler):
from wsgiref.simple_server import make_server
srv = make_server(self.host, self.port, handler)
srv.serve_forever()
class CherryPyServer(ServerAdapter):
def run(self, handler):
from cherrypy import wsgiserver
server = wsgiserver.CherryPyWSGIServer((self.host, self.port), handler)
server.start()
class FlupServer(ServerAdapter):
def run(self, handler):
from flup.server.fcgi import WSGIServer
WSGIServer(handler, bindAddress=(self.host, self.port)).run()
class PasteServer(ServerAdapter):
def run(self, handler):
from paste import httpserver
from paste.translogger import TransLogger
app = TransLogger(handler)
httpserver.serve(app, host=self.host, port=str(self.port))
class FapwsServer(ServerAdapter):
"""
    Extremely fast webserver using libev.
See http://william-os4y.livejournal.com/
Experimental ...
"""
def run(self, handler):
import fapws._evwsgi as evwsgi
from fapws import base
evwsgi.start(self.host, self.port)
evwsgi.set_base_module(base)
def app(environ, start_response):
environ['wsgi.multiprocess'] = False
return handler(environ, start_response)
evwsgi.wsgi_cb(('',app))
evwsgi.run()
def run(app=None, server=WSGIRefServer, host='127.0.0.1', port=8080,
interval=1, reloader=False, **kargs):
""" Runs bottle as a web server. """
if not app:
app = default_app()
quiet = bool(kargs.get('quiet', False))
# Instantiate server, if it is a class instead of an instance
if isinstance(server, type):
if issubclass(server, CGIServer):
server = server()
elif issubclass(server, ServerAdapter):
server = server(host=host, port=port, **kargs)
if not isinstance(server, WSGIAdapter):
raise RuntimeError("Server must be a subclass of WSGIAdapter")
if not quiet and isinstance(server, ServerAdapter): # pragma: no cover
if not reloader or os.environ.get('BOTTLE_CHILD') == 'true':
print "Bottle server starting up (using %s)..." % repr(server)
print "Listening on http://%s:%d/" % (server.host, server.port)
print "Use Ctrl-C to quit."
print
else:
print "Bottle auto reloader starting up..."
try:
if reloader and interval:
reloader_run(server, app, interval)
else:
server.run(app)
except KeyboardInterrupt:
if not quiet: # pragma: no cover
print "Shutting Down..."
#TODO: If the parent process is killed (with SIGTERM) the children survive...
def reloader_run(server, app, interval):
if os.environ.get('BOTTLE_CHILD') == 'true':
# We are a child process
files = dict()
for module in sys.modules.values():
file_path = getattr(module, '__file__', None)
if file_path and os.path.isfile(file_path):
file_split = os.path.splitext(file_path)
if file_split[1] in ('.py', '.pyc', '.pyo'):
file_path = file_split[0] + '.py'
files[file_path] = os.stat(file_path).st_mtime
thread.start_new_thread(server.run, (app,))
while True:
time.sleep(interval)
for file_path, file_mtime in files.iteritems():
if not os.path.exists(file_path):
print "File changed: %s (deleted)" % file_path
elif os.stat(file_path).st_mtime > file_mtime:
print "File changed: %s (modified)" % file_path
else: continue
print "Restarting..."
app.serve = False
time.sleep(interval) # be nice and wait for running requests
sys.exit(3)
while True:
args = [sys.executable] + sys.argv
environ = os.environ.copy()
environ['BOTTLE_CHILD'] = 'true'
exit_status = subprocess.call(args, env=environ)
if exit_status != 3:
sys.exit(exit_status)
# Templates
class TemplateError(HTTPError):
def __init__(self, message):
HTTPError.__init__(self, 500, message)
class BaseTemplate(object):
def __init__(self, template='', name=None, filename=None, lookup=[]):
"""
Create a new template.
If a name is provided, but no filename and no template string, the
filename is guessed using the lookup path list.
Subclasses can assume that either self.template or self.filename is set.
If both are present, self.template should be used.
"""
self.name = name
self.filename = filename
self.template = template
self.lookup = lookup
if self.name and not self.filename:
for path in self.lookup:
fpath = os.path.join(path, self.name+'.tpl')
if os.path.isfile(fpath):
self.filename = fpath
if not self.template and not self.filename:
raise TemplateError('Template (%s) not found.' % self.name)
self.prepare()
def prepare(self):
"""
        Run preparations (parsing, caching, ...).
        It should be possible to call this multiple times to refresh a template.
"""
raise NotImplementedError
def render(self, **args):
"""
Render the template with the specified local variables and return an
        iterator of strings (bytes). This must be thread safe!
"""
raise NotImplementedError
class MakoTemplate(BaseTemplate):
output_encoding=None
input_encoding=None
default_filters=None
global_variables={}
def prepare(self):
from mako.template import Template
from mako.lookup import TemplateLookup
#TODO: This is a hack... http://github.com/defnull/bottle/issues#issue/8
mylookup = TemplateLookup(directories=map(os.path.abspath, self.lookup)+['./'])
if self.template:
self.tpl = Template(self.template,
lookup=mylookup,
output_encoding=MakoTemplate.output_encoding,
input_encoding=MakoTemplate.input_encoding,
default_filters=MakoTemplate.default_filters
)
else:
self.tpl = Template(filename=self.filename,
lookup=mylookup,
output_encoding=MakoTemplate.output_encoding,
input_encoding=MakoTemplate.input_encoding,
default_filters=MakoTemplate.default_filters
)
def render(self, **args):
_defaults = MakoTemplate.global_variables.copy()
_defaults.update(args)
return self.tpl.render(**_defaults)
class CheetahTemplate(BaseTemplate):
def prepare(self):
from Cheetah.Template import Template
self.context = threading.local()
self.context.vars = {}
if self.template:
self.tpl = Template(source=self.template, searchList=[self.context.vars])
else:
self.tpl = Template(file=self.filename, searchList=[self.context.vars])
def render(self, **args):
self.context.vars.update(args)
out = str(self.tpl)
self.context.vars.clear()
return [out]
class Jinja2Template(BaseTemplate):
env = None # hopefully, a Jinja environment is actually thread-safe
def prepare(self):
if not self.env:
from jinja2 import Environment, FunctionLoader
self.env = Environment(line_statement_prefix="#", loader=FunctionLoader(self.loader))
if self.template:
self.tpl = self.env.from_string(self.template)
else:
self.tpl = self.env.get_template(self.filename)
def render(self, **args):
return self.tpl.render(**args).encode("utf-8")
def loader(self, name):
if not name.endswith(".tpl"):
for path in self.lookup:
fpath = os.path.join(path, name+'.tpl')
if os.path.isfile(fpath):
name = fpath
break
f = open(name)
try: return f.read()
finally: f.close()
class SimpleTemplate(BaseTemplate):
re_python = re.compile(r'^\s*%\s*(?:(if|elif|else|try|except|finally|for|'
'while|with|def|class)|(include|rebase)|(end)|(.*))')
re_inline = re.compile(r'\{\{(.*?)\}\}')
dedent_keywords = ('elif', 'else', 'except', 'finally')
def prepare(self):
if self.template:
code = self.translate(self.template)
self.co = compile(code, '<string>', 'exec')
else:
code = self.translate(open(self.filename).read())
self.co = compile(code, self.filename, 'exec')
def translate(self, template):
indent = 0
strbuffer = []
code = []
self.includes = dict()
class PyStmt(str):
def __repr__(self): return 'str(' + self + ')'
def flush(allow_nobreak=False):
if len(strbuffer):
if allow_nobreak and strbuffer[-1].endswith("\\\\\n"):
strbuffer[-1]=strbuffer[-1][:-3]
code.append(' ' * indent + "_stdout.append(%s)" % repr(''.join(strbuffer)))
code.append((' ' * indent + '\n') * len(strbuffer)) # to preserve line numbers
del strbuffer[:]
for line in template.splitlines(True):
m = self.re_python.match(line)
if m:
flush(allow_nobreak=True)
keyword, subtpl, end, statement = m.groups()
if keyword:
if keyword in self.dedent_keywords:
indent -= 1
code.append(" " * indent + line[m.start(1):])
indent += 1
elif subtpl:
tmp = line[m.end(2):].strip().split(None, 1)
if not tmp:
code.append(' ' * indent + "_stdout.extend(_base)\n")
else:
name = tmp[0]
args = tmp[1:] and tmp[1] or ''
if name not in self.includes:
self.includes[name] = SimpleTemplate(name=name, lookup=self.lookup)
if subtpl == 'include':
code.append(' ' * indent +
"_ = _includes[%s].execute(_stdout, %s)\n"
% (repr(name), args))
else:
code.append(' ' * indent +
"_tpl['_rebase'] = (_includes[%s], dict(%s))\n"
% (repr(name), args))
elif end:
indent -= 1
code.append(' ' * indent + '#' + line[m.start(3):])
elif statement:
code.append(' ' * indent + line[m.start(4):])
else:
splits = self.re_inline.split(line) # text, (expr, text)*
if len(splits) == 1:
strbuffer.append(line)
else:
flush()
for i in range(1, len(splits), 2):
splits[i] = PyStmt(splits[i])
splits = [x for x in splits if bool(x)]
code.append(' ' * indent + "_stdout.extend(%s)\n" % repr(splits))
flush()
return ''.join(code)
def execute(self, stdout, **args):
args['_stdout'] = stdout
args['_includes'] = self.includes
args['_tpl'] = args
eval(self.co, args)
if '_rebase' in args:
subtpl, args = args['_rebase']
args['_base'] = stdout[:] #copy stdout
del stdout[:] # clear stdout
return subtpl.execute(stdout, **args)
return args
def render(self, **args):
""" Render the template using keyword arguments as local variables. """
stdout = []
self.execute(stdout, **args)
return stdout
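# Usage sketch (an assumption, not from the original docs): rendering an inline
# SimpleTemplate string with {{...}} substitution and %-prefixed python lines.
#
#   tpl = SimpleTemplate('%for i in items:\n<li>{{i}}</li>\n%end\n')
#   html = ''.join(tpl.render(items=[1, 2]))  # '<li>1</li>\n<li>2</li>\n'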
def template(tpl, template_adapter=SimpleTemplate, **args):
'''
Get a rendered template as a string iterator.
You can use a name, a filename or a template string as first parameter.
'''
lookup = args.get('template_lookup', TEMPLATE_PATH)
if tpl not in TEMPLATES or DEBUG:
if "\n" in tpl or "{" in tpl or "%" in tpl or '$' in tpl:
TEMPLATES[tpl] = template_adapter(template=tpl, lookup=lookup)
elif '.' in tpl:
TEMPLATES[tpl] = template_adapter(filename=tpl, lookup=lookup)
else:
TEMPLATES[tpl] = template_adapter(name=tpl, lookup=lookup)
if not TEMPLATES[tpl]:
abort(500, 'Template (%s) not found' % tpl)
args['abort'] = abort
args['request'] = request
args['response'] = response
return TEMPLATES[tpl].render(**args)
def mako_template(tpl_name, **kargs):
kargs['template_adapter'] = MakoTemplate
return template(tpl_name, **kargs)
def cheetah_template(tpl_name, **kargs):
kargs['template_adapter'] = CheetahTemplate
return template(tpl_name, **kargs)
def jinja2_template(tpl_name, **kargs):
kargs['template_adapter'] = Jinja2Template
return template(tpl_name, **kargs)
def view(tpl_name, **defaults):
    ''' Decorator: Renders a template for a handler.
Return a dict of template vars to fill out the template.
'''
def decorator(func):
def wrapper(**kargs):
out = func(**kargs)
defaults.update(out)
return template(tpl_name, **defaults)
return wrapper
return decorator
def mako_view(tpl_name, **kargs):
kargs['template_adapter'] = MakoTemplate
return view(tpl_name, **kargs)
def cheetah_view(tpl_name, **kargs):
kargs['template_adapter'] = CheetahTemplate
return view(tpl_name, **kargs)
def jinja2_view(tpl_name, **kargs):
kargs['template_adapter'] = Jinja2Template
return view(tpl_name, **kargs)
# Database
class BottleBucket(object): # pragma: no cover
""" Memory-caching wrapper around anydbm """
def __init__(self, name):
self.__dict__['name'] = name
self.__dict__['db'] = dbm.open(DB_PATH + '/%s.db' % name, 'c')
self.__dict__['mmap'] = {}
def __getitem__(self, key):
if key not in self.mmap:
self.mmap[key] = pickle.loads(self.db[key])
return self.mmap[key]
def __setitem__(self, key, value):
if not isinstance(key, str): raise TypeError("Bottle keys must be strings")
self.mmap[key] = value
def __delitem__(self, key):
if key in self.mmap:
del self.mmap[key]
del self.db[key]
def __getattr__(self, key):
try: return self[key]
except KeyError: raise AttributeError(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try: del self[key]
except KeyError: raise AttributeError(key)
def __iter__(self):
return iter(self.ukeys())
def __contains__(self, key):
return key in self.ukeys()
def __len__(self):
return len(self.ukeys())
def keys(self):
return list(self.ukeys())
def ukeys(self):
return set(self.db.keys()) | set(self.mmap.keys())
def save(self):
self.close()
self.__init__(self.name)
def close(self):
for key in self.mmap:
pvalue = pickle.dumps(self.mmap[key], pickle.HIGHEST_PROTOCOL)
if key not in self.db or pvalue != self.db[key]:
self.db[key] = pvalue
self.mmap.clear()
if hasattr(self.db, 'sync'):
self.db.sync()
if hasattr(self.db, 'close'):
self.db.close()
def clear(self):
for key in self.db:
del self.db[key]
self.mmap.clear()
def update(self, other):
self.mmap.update(other)
def get(self, key, default=None):
try:
return self[key]
except KeyError:
if default:
return default
raise
class BottleDB(threading.local): # pragma: no cover
""" Holds multible BottleBucket instances in a thread-local way. """
def __init__(self):
self.__dict__['open'] = {}
def __getitem__(self, key):
warnings.warn("Please do not use bottle.db anymore. This feature is deprecated. You may use anydb directly.", DeprecationWarning)
if key not in self.open and not key.startswith('_'):
self.open[key] = BottleBucket(key)
return self.open[key]
def __setitem__(self, key, value):
if isinstance(value, BottleBucket):
self.open[key] = value
elif hasattr(value, 'items'):
if key not in self.open:
self.open[key] = BottleBucket(key)
self.open[key].clear()
for k, v in value.iteritems():
self.open[key][k] = v
else:
raise ValueError("Only dicts and BottleBuckets are allowed.")
def __delitem__(self, key):
        if key in self.open:
self.open[key].clear()
self.open[key].save()
del self.open[key]
def __getattr__(self, key):
try: return self[key]
except KeyError: raise AttributeError(key)
def __setattr__(self, key, value):
self[key] = value
def __delattr__(self, key):
try: del self[key]
except KeyError: raise AttributeError(key)
def save(self):
self.close()
self.__init__()
def close(self):
for db in self.open:
self.open[db].close()
self.open.clear()
# Modul initialization and configuration
DB_PATH = './'
TEMPLATE_PATH = ['./', './views/']
TEMPLATES = {}
DEBUG = False
HTTP_CODES = {
100: 'CONTINUE',
101: 'SWITCHING PROTOCOLS',
200: 'OK',
201: 'CREATED',
202: 'ACCEPTED',
203: 'NON-AUTHORITATIVE INFORMATION',
204: 'NO CONTENT',
205: 'RESET CONTENT',
206: 'PARTIAL CONTENT',
300: 'MULTIPLE CHOICES',
301: 'MOVED PERMANENTLY',
302: 'FOUND',
303: 'SEE OTHER',
304: 'NOT MODIFIED',
305: 'USE PROXY',
306: 'RESERVED',
307: 'TEMPORARY REDIRECT',
400: 'BAD REQUEST',
401: 'UNAUTHORIZED',
402: 'PAYMENT REQUIRED',
403: 'FORBIDDEN',
404: 'NOT FOUND',
405: 'METHOD NOT ALLOWED',
406: 'NOT ACCEPTABLE',
407: 'PROXY AUTHENTICATION REQUIRED',
408: 'REQUEST TIMEOUT',
409: 'CONFLICT',
410: 'GONE',
411: 'LENGTH REQUIRED',
412: 'PRECONDITION FAILED',
413: 'REQUEST ENTITY TOO LARGE',
414: 'REQUEST-URI TOO LONG',
415: 'UNSUPPORTED MEDIA TYPE',
416: 'REQUESTED RANGE NOT SATISFIABLE',
417: 'EXPECTATION FAILED',
500: 'INTERNAL SERVER ERROR',
501: 'NOT IMPLEMENTED',
502: 'BAD GATEWAY',
503: 'SERVICE UNAVAILABLE',
504: 'GATEWAY TIMEOUT',
505: 'HTTP VERSION NOT SUPPORTED',
}
HTTP_ERROR_TEMPLATE = """
<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML 2.0//EN">
<html>
<head>
<title>Error %(status)d: %(error_name)s</title>
</head>
<body>
<h1>Error %(status)d: %(error_name)s</h1>
<p>Sorry, the requested URL <tt>%(url)s</tt> caused an error:</p>
<pre>
%(error_message)s
</pre>
</body>
</html>
"""
TRACEBACK_TEMPLATE = """
<h2>Traceback:</h2>
<pre>
%s
</pre>
"""
request = Request()
response = Response()
db = BottleDB()
local = threading.local()
#TODO: Global and app local configuration (debug, defaults, ...) is a mess
def debug(mode=True):
global DEBUG
DEBUG = bool(mode)
def optimize(mode=True):
default_app().optimize = bool(mode)
multi-word fill quiz 9Fwf2 on "Patterns of reactivity"
Word bank: 14, 7, alkali, calcium, chemical, colourless, dangerous, exothermic, glasses, green, heat, hydrogen, hydroxide, ignites, metal, new, potassium, purple, screen, sodium, test, tube
1. Some metals like ______ react with cold water to form an ______ and a ______ gas.
2. If the gas is collected in an inverted (upside down!) ______ ______ of water (tricky!) and a lit splint applied it pops! showing the gas to be ______.
3. If universal indicator is added to the water, during the reaction it changes from ______ (pH ______) to ______ (pH ______), showing an alkaline solution is formed.
4. This reaction is ______, it should be done behind a safety ______ and safety ______ should be worn.
5. You can tell a ______ change is taking place because ______ is given out and bubbles are seen as ______ substances are formed.
6. The reaction of potassium with water is so ______, that the heat given out ______ the hydrogen gas and a lilac flame is seen.
# -*- coding: utf-8 -*-
"""
Definition of CohesiveLaw class
"""
import numpy as np
class CohesiveLaw:
"""
A class for cohesive law implementation
"""
def __init__(self, cohesive_law_points: np.array):
"""
Build a cohesive zone model object
:param cohesive_law_points: array describing the stress - opening curve of the
cohesive model
        # TODO: update the data container to build the cohesive models
"""
assert len(cohesive_law_points.shape) == 2, "array should be 2D"
assert cohesive_law_points.shape[1] == 2, "array should be size (x, 2)"
assert cohesive_law_points[0, 0] == 0., "first value of separation should be 0."
assert cohesive_law_points[-1, 1] == 0., "last value of stress should be 0."
self.cohesive_law_points = cohesive_law_points
self.separation_points = self.cohesive_law_points[:, 0]
assert np.all(np.diff(self.separation_points) >= 0), "separation is not sorted"
def compute_cohesive_force(self, opening):
"""
Returns the cohesive force associated with the given opening
:param opening: discontinuity opening
:return: float
"""
        # Theoretically, this case should not happen, but this check ensures
        # that no index error will occur in the future
if opening > self.separation_points[-1]:
return 0.
# Find the relevant points to interpolate cohesive law
index = np.searchsorted(self.separation_points, opening)
# Interpolate the cohesive law
return CohesiveLaw.interpolate_cohesive_law(opening,
self.cohesive_law_points[index - 1, 0],
self.cohesive_law_points[index, 0],
self.cohesive_law_points[index - 1, 1],
self.cohesive_law_points[index, 1])
@classmethod
def interpolate_cohesive_law(cls, opening, separation_1, separation_2, stress_1, stress_2):
"""
Interpolate the value of cohesive stress between points 1 and 2
:param opening: discontinuity opening
:param separation_1: separation at point 1
:param separation_2: separation at point 2
:param stress_1: stress at point 1
:param stress_2: stress at point 2
:return: cohesive stress
"""
slope = (stress_2 - stress_1) / (separation_2 - separation_1)
return stress_1 + slope * (opening - separation_1)
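# Usage sketch (not part of the original module): a bilinear law with a stress
# plateau at 10 followed by linear softening down to zero.
#
#   law = CohesiveLaw(np.array([[0., 10.], [0.5, 10.], [1., 0.]]))
#   law.compute_cohesive_force(0.75)  # -> 5.0 (linear interpolation)
#   law.compute_cohesive_force(2.)    # -> 0.0 (beyond the last point)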
When Marc Andreessen penned his now seminal essay “Why software is eating the world” way back in 2011, he noted the many business and industries that already were being run on software and delivered as online services. From Netflix and Amazon to Uber and Airbnb, he cited entrepreneurial technology companies that were invading and overturning established industry structures from the outside in.
That trend, of course, has continued and intensified in many arenas of human endeavor. But “Industry with a capital I” as I sometimes call it—manufacturing, processing, energy and their other heavy industry brethren—has so far proved resistant to disruption from the outside in.
Sure, a number of pure-tech plays have emerged to offer specialized software tools for, say, IIoT device management or time-series analytics, and Industry leverages the same general-purpose cloud infrastructure that supports other digital plays, but it’s largely companies that grew up in the industrial space that are driving the equipment-specific or ecosystem solutions needed to drive transformation. Andreessen predicted this, conceding that for markets with a “heavy real-world component such as oil and gas,” the software revolution would be primarily an opportunity for incumbents.
Software and connectivity are indeed revolutionizing Industry, but they’re doing so from the inside out. The latest generation of industrial machines and equipment may include mechanical and electrical advances, but more and more often it’s the intelligence packaged as software and visibility delivered through connectivity and integration with complementary sources of intel that truly differentiate these machines from previous generations of systems—and from those of competitors. And because so much of Industry relies on legacy capital equipment, it also falls to frontline engineers, technicians, IT systems analysts and other Industrial decision makers to strategically retrofit and upgrade their own assets in order to take advantage of digital technology’s new capabilities.
In this issue’s cover story (p19), we report the results of our fifth annual Digital Transformation: State of the Initiative study. The results of this year’s survey of more than 650 industry professionals indicate that the overwhelming majority believe digitalization will have a profound impact on their organizations’ operations. Further, respondents indicate that digital technologies only recently considered bleeding edge—from augmented reality to blockchain—are already in use or are being actively piloted within a majority of respondents’ organizations.
Industry may, for the most part, be safe from entrepreneurial disruption from without. But for any given organization, a competitor down the road that embraces digitalization first is a different story altogether.
from __future__ import print_function
try:
import httplib2
import urllib3
from apiclient import errors
except ImportError:
print("run pip3 install httplib2")
import os
try:
from apiclient import discovery
except ImportError:
print("run `pip3 install google-api-python-client`\n "
"or manually on https://developers.google.com/api-client-library/python/start/installation")
import oauth2client
from oauth2client import client
from oauth2client import tools
import translations
try:
import argparse
parser = argparse.ArgumentParser(parents=[tools.argparser], description='Create localizable files')
parser.add_argument('--id', help='provide file id to avoid prompt')
parser.add_argument('--path', help='Path destination for *.lproj folders', default='./')
parser.add_argument('--platform', choices=['ios', 'android'], help='Should be either ios or android', default='ios')
parser.add_argument('--gid', help='Use the Google sheet ID from the end of the url link')
    parser.add_argument('--keep_csv', action='store_true', help='Should keep the CSV file on the disk')
args = parser.parse_args()
flags = args
except ImportError:
flags = None
print("Cannot parse")
SCOPES = 'https://www.googleapis.com/auth/drive'
CLIENT_SECRET_FILE = 'client_secret.json'
APPLICATION_NAME = 'Drive API Python Quickstart'
def get_credentials():
"""Gets valid user credentials from storage.
If nothing has been stored, or if the stored credentials are invalid,
the OAuth2 flow is completed to obtain the new credentials.
Returns:
Credentials, the obtained credential.
"""
home_dir = os.path.expanduser('~')
credential_dir = os.path.join(home_dir, '.credentials')
if not os.path.exists(credential_dir):
os.makedirs(credential_dir)
credential_path = os.path.join(credential_dir, 'drive-python-quickstart.json')
store = oauth2client.file.Storage(credential_path)
credentials = store.get()
if not credentials or credentials.invalid:
flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
flow.user_agent = APPLICATION_NAME
if flags:
credentials = tools.run_flow(flow, store, flags)
else: # Needed only for compatibility with Python 2.6
credentials = tools.run(flow, store)
print('Storing credentials to ' + credential_path)
return credentials
def getFiles(service):
"""
Retrieve a list of File resources.
Args:
service: Drive API service instance.
Returns:
List of File resources.
"""
result = []
page_token = None
while True:
try:
param = {}
if page_token:
param['pageToken'] = page_token
param['maxResults'] = '1000'
files = service.files().list(**param).execute()
result.extend(files['items'])
page_token = files.get('nextPageToken')
if not page_token:
break
except errors.HttpError as error:
print('An error occurred: %s' % error)
break
return service, result
def download__file_metadata(file_id, token, gid=0):
url = "https://docs.google.com/spreadsheets/d/"+file_id+"/export?gid="+str(gid)+"&format=csv"
headers = {"Authorization": "Bearer "+str(token)}
r = urllib3.PoolManager().request('GET', url=url, headers=headers)
return r.data
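# Example (derived from the code above; the file id is a placeholder): for file
# id 'abc123' and gid 0 the request targets
#   https://docs.google.com/spreadsheets/d/abc123/export?gid=0&format=csv
# with an 'Authorization: Bearer <token>' header.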
def main():
"""Shows basic usage of the Google Drive API.
Creates a Google Drive API service object and outputs the names and IDs
for up to 10 files.
"""
credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    token = str(credentials.access_token)
    if args.id:
        file = download__file_metadata(args.id, token, args.gid if args.gid else 0)
    else:
        i = 0
        # Build the Drive API client before listing the available files.
        service = discovery.build('drive', 'v2', http=http)
        service, files = getFiles(service)
for item in files:
print(str(item['title']) + " - " + str(item['id']))
i += 1
exit(1)
content = file
filename = "tmp" + '.csv'
csvf = open(filename, 'w')
csvf.write(content.decode("utf-8"))
csvf.close()
if args.platform == 'ios':
translations.translate(filename, args.path)
elif args.platform == 'android':
translations.translate_android(filename, args.path)
else:
print("Invalid platform. type --help for help")
if not args.keep_csv:
os.remove(filename)
print("Your files have been generated under '"+args.path+"'")
def download_file(service, drive_file):
download_url = drive_file['exportLinks']['text/csv']
if args.gid:
download_url += "&gid=" + args.gid
if download_url:
resp, content = service._http.request(download_url)
if resp.status == 200:
return content
else:
print('An error occurred: %s' % resp)
return None
else:
# The file doesn't have any content stored on Drive.
return None
if __name__ == '__main__':
    main()
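# Example invocation (a sketch; the script name and spreadsheet id are
# placeholders, the flags are defined at the top of this file):
#   python3 export_localizations.py --id 1AbCdEf --platform ios --path ./locales/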
Hi there! Coming at you a little late on this lovely summer morning. This past week with my parents was extremely fun and last night I was 100% wiped out! I haven’t been sleeping too well and my allergies + sinuses [not typically a problem for me] have been really bad. I woke up with one of those “must take advil + go back to sleep” headaches this morning. Eeek!
There is so much to catch you all up on, so let’s start with Friday.
My parents + I headed to La Creperie, which is a local French bakery + restaurant.
I checked their menu the day before, and noticed they had a gluten free [buckwheat] batter option. You just have to let them know a day in advance so they can prepare the batter. Perfect!
We started off by ordering 1 croissant to share. Don’t be fooled, this croissant is huge! And yes, there is gluten in it, but about once a month I will bend the rules for something special like this. Also, their dough goes through a 3-day fermentation process, which actually kills a lot of the gluten. It’s similar to sourdough bread + beer! I’ve been able to drink beer with no problems, so I wanted to give this a try.
I’ve never been to France, but I would have to assume this is an extremely authentic croissant.
My dad ordered a crazy sandwich on a croissant. It looked pretty amazing.
My mom and I split, per usual! We ordered a sweet + savory crepe with the gluten free batter. The crepe was nice + crispy and had a light buckwheat flavor. For the sweet version, we ordered banana/blueberry/maple syrup. We thought it was a bit lacking in the filling and for me, it was way too sweet from the syrup. It was good, but I liked the savory crepe much more!
This was fabulous! Full of spinach/egg filling with tomato + brie on top.
Friday night, we headed out to show our parents a few of our favorite local spots.
My mom is not a beer fan, so Chris, my dad + I shared 3 sampler trays. It looks like a lot of beer, but each tray is just over 1 normal beer. I love getting the samplers!
Chris had a super long work week, so it was nice to go out + relax together.
Eric + Kelsey joined us as well!
I love taking my parents around, showing them all of our favorite spots!
Always doing something goofy with this lady!
Then we headed to our very favorite brewery, Equinox. Everyone had a beer and then we headed to dinner.
One of our favorite restaurants in Ft. Collins is Crown Pub. They only have 1 veggie + GF menu item, but it is one of my favorite meals ever! It’s actually a curry dish, which is strange, because the rest of their menu is very meat + potatoes. The quality of their food is excellent and the service is always great.
After dinner, we headed home to use our new + improved fire pit!! We are loving it!
The boys went out for an early round of golf + my mom and I relaxed, walked, and went to the farmers market.
I picked up a loaf of fresh sourdough bread. The woman that bakes this bread makes only sourdough + rye breads. I have been looking for legit sourdough bread that goes through a long fermentation process, to see how my stomach reacts. She actually had a sign on her stand, talking about gluten intolerances and that frequently people are able to eat sourdough because it’s much lower in gluten.
Definitely the most authentic sourdough I’ve ever eaten. Extremely hearty + rustic. I enjoyed every single bite of these 2 slices of toast!
I’m going to give it one more test run, to figure out if I can tolerate it or not. This would be such a nice once in awhile treat!
Cucumber pickles have finally arrived!!!! Can’t wait to pickle these!
Early evening Saturday, we headed up to Nederland, a small mountain town. It’s about 1.5hrs away, which includes a gorgeous drive up Boulder canyon.
We checked out the cute little local shops, including the food co-op.
The shops stop at a restaurant + brew pub, called Wild Mountain Smokehouse. We sat outside and enjoyed the mountain weather. While it was 100* in Boulder, after driving up the canyon, Nederland was at 75*. Perfect if you ask me!
Wild Mountain has an extremely large menu, even including smoked + grilled tofu! They have 6 different house-made BBQ sauces that you can choose from. They range from Texas style to Carolina to spicy Atomic.
Everyone loved their meals, including my non-meat eating self!
Was that enough catch up for you?? Don’t worry, there is a little more coming soon!!
Before I go, there are 2 things to remind you about.
1. Summer in a Box giveaway ends tonight at midnight!!
2. And much more importantly….I’m sure you’ve seen this on many other blogs, but The Great Fundraising Act blogger auction is currently underway! This fundraiser is to help out the lovely Susan, from The Great Balancing Act. Susan was recently diagnosed with Hodgkin’s Lymphoma. You can read more about her story here + on her blog. She is staying positive + upbeat and has been extremely inspiring.
The lovely Janeetha called out to the blogging community, asking for support + help in raising money for Susan’s ever-increasing medical bills. This online blogger auction is the biggest I’ve seen! For this auction, I donated a batch of my Peanut Butter Brown Rice Crisp Bars! Extremely kid friendly, vegan + gluten free, these are a major crowd pleaser. They also travel well! The closing times are staggered, so make sure to check each item carefully!
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# forms.py
#
# Copyright 2012 Thomas Grainger <tagrain@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation; version 3.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
from django import forms
from django.forms.widgets import CheckboxSelectMultiple
from django.core.urlresolvers import reverse
from crispy_forms.helper import FormHelper
from crispy_forms.layout import Submit, Reset, Layout, Div, Fieldset
from crispy_forms.bootstrap import FormActions
from maluroam.eduroam_snort.models import Blacklist, Rule
time_formats = ("%Y-%m-%dT%H:%M:%S.%fZ", "%Y-%m-%d %H:%M")
class RangeForm(forms.Form):
earliest = forms.DateTimeField(required=False, input_formats=time_formats)
latest = forms.DateTimeField(required=False, input_formats=time_formats)
def clean(self):
cleaned_data = super(RangeForm, self).clean()
        for key, value in list(cleaned_data.items()):
            if not value:
                del cleaned_data[key]
return cleaned_data
class ActivityRangeForm(RangeForm):
username = forms.CharField(required=False)
class FilterForm(forms.Form):
earliest = forms.DateTimeField(required=False)
latest = forms.DateTimeField(required=False)
rule = forms.ModelMultipleChoiceField(
required=False,
queryset=Rule.objects.all(),
        widget=forms.CheckboxSelectMultiple,
)
blacklist = forms.ModelMultipleChoiceField(
required=False,
queryset=Blacklist.objects.all(),
        widget=forms.CheckboxSelectMultiple,
)
def __init__(self, *args, **kwargs):
helper = FormHelper()
helper.form_class = "form-inline"
helper.form_method = "get"
helper.form_action = reverse("users")
helper.layout = Layout(
Div(
Div(
Fieldset("Date",
"earliest",
"latest"
),
css_class = "well span4",
),
Div(
Fieldset("Rules",
"rule",
),
css_class = "well span4",
),
Div(
Fieldset("Blacklists",
"blacklist",
),
css_class = "well span4",
),
css_class = "row-fluid"
),
FormActions(
Submit('filter', 'Filter', css_class="btn btn-primary"),
Reset('reset', 'Reset', css_class="btn btn-danger")
)
)
self.helper = helper
super(FilterForm, self).__init__(*args, **kwargs)
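# Usage sketch (an assumption, not part of the original module): bind the GET
# querystring in the view that reverse("users") points at.
#
#   def users(request):
#       form = FilterForm(request.GET or None)
#       if form.is_valid():
#           filters = form.cleaned_data  # earliest/latest/rule/blacklist
#           ...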
Khloé Kardashian has made her first direct comments about the Tristan Thompson-Jordyn Woods cheating scandal and all the drama that's been playing out in the tabloids about it. Kardashian tweeted a message to her fans, thanking them for their support and letting them know there will be much more to come about all this.
"Hi loves, wanted you to know that I appreciate you!" she wrote. " I’ve been reading your kind words and they really are a blessing to me. I love you! Thank you Thank you! I’ll be back when I’m in the mood to chat with you all. Until then remember to be kind to one another."
Kardashian hasn't been completely silent since TMZ reported the news that Thompson allegedly cheated on her with Jordyn Woods during Valentine's Day weekend. She initially commented on Hollywood Unlocked's Instagram revealing that Thompson and Woods had a fling—not with words but with speaking-head emojis. She's also been regularly posting emotional quotes about heartbreak and betrayal on her Instagram Story.
The Kardashians were reported by TMZ this morning to have completely cut off Jordyn Woods. They refuse to do business with her ever again. While Kylie Jenner will also not work with her best friend again, Jenner is said to still be conflicted about whether or not to end her relationship with Woods for good.
# This file is part of Libreosteo.
#
# Libreosteo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Libreosteo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Libreosteo. If not, see <http://www.gnu.org/licenses/>.
from django.apps import AppConfig
from sqlite3 import OperationalError
import logging
# Get an instance of a logger
logger = logging.getLogger(__name__)
class LibreosteoConfig(AppConfig):
name = 'libreosteoweb'
verbose_name = "Libreosteo WebApp"
def ready(self):
import libreosteoweb.api.receivers
import libreosteoweb.models as models
file_import_list = models.FileImport.objects.all()
try:
for f in file_import_list:
f.delete()
except Exception:
logger.debug("Exception when purging files at starting application")
try:
office_settings_list = models.OfficeSettings.objects.all()
            if len(office_settings_list) == 0:
default = models.OfficeSettings()
default.save()
except Exception:
logger.warn("No database ready to initialize office settings")
The trails at camp are so ✌🏻ful. Join us for our 5k trail 🏃🏼♂️🏃🏼♀️on May 19th. Click on the link for details and registration.
Elvis is coming to the State Theater on May 17th!! Enjoy a night on the town while supporting Camp Fish Tales! Click on the link and get your tickets today!
Thank you, Tom Thelen for all your years of dedication to our amazing non-profit. We couldn’t do it without you!!
Bubba's Tri-City Cycle's annual charity bike show and poker run is coming up!!!!
If you want to join in the fun and give back to our amazing nonprofit, call 989-697-5525.
We are about 6 weeks away from the start of camp! The first day of camp is June 2nd. Register today at www.campfishtales.org, then click on the button that says 'Register'. If you already registered last year, please do not make another profile. You can use last year's profile and just update the new information and the week you wish to register. If you have any questions, feel free to contact the office at 989-879-5199 or my cell at 989-615-7840. Our theme this year is AROUND THE WORLD IN 5 DAYS! The campers are going to be issued passports, so have them be prepared to travel the world!! We are so excited for the season to begin!
import types
import crypt
from django.contrib.auth.backends import ModelBackend
from django.conf import settings
from django.core.exceptions import PermissionDenied
from django.contrib.auth import get_user_model
from erudit.utils.mandragore import (
get_user_from_mandragore,
update_user_password
)
def set_password_mandragore(self, raw_password):
""" Set the password in Mandragore
This method is meant to replace the default set_password
method of :py:class:`django.contrib.auth.models.User`
Uses :py:func:`crypt.crypt` to generate a ``SHA512`` hash of
raw_password. raw_password is salted with a random salt
generated by :py:func:`crypt.mksalt`.
"""
    # Keep the salt to 8 characters (the slice below keeps the '$6$' prefix
    # plus 8 salt characters). Otherwise the generated hash will be too long
    # for the mandragore MotDePasse field.
the_hash = crypt.crypt(
raw_password,
salt=crypt.mksalt(
method=crypt.METHOD_SHA512
)[:11]
)
update_user_password(self.username, the_hash)
self.save()
class MandragoreBackend(ModelBackend):
""" Authenticate users against the Mandragore database
Monkeypatches django.contrib.auth.models.User to replace `set_password` with
:py:func:`set_password_mandragore`
"""
def authenticate(self, username=None, password=None):
User = get_user_model()
try:
user = User.objects.get(username=username)
except User.DoesNotExist:
raise PermissionDenied()
# Being connected to the "Mandragore" database is not
# mandatory. Thus we do not raise `PermissionDenied` but
# let Django try to authenticate the user with the ModelBackend.
        if (not hasattr(settings, 'EXTERNAL_DATABASES') or
                not isinstance(settings.EXTERNAL_DATABASES, dict) or
                'mandragore' not in settings.EXTERNAL_DATABASES):
return None
mand_user, mand_pw = get_user_from_mandragore(username)
_, algo, salt, hashed_pass = mand_pw.split('$')
user_pass = crypt.crypt(
password, '${}${}'.format(
algo,
salt,
)
)
if user_pass == mand_pw:
user.set_password = types.MethodType(set_password_mandragore, user)
return user
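As a sanity check, the hashing scheme can be exercised with the standard library alone. Below is a minimal sketch mirroring set_password_mandragore and the verification step in authenticate; the password value is illustrative:

import crypt

# Hash as set_password_mandragore does: '$6$' selects SHA-512 and the
# mksalt() output is trimmed to '$6$' plus an 8-character salt.
salt = crypt.mksalt(method=crypt.METHOD_SHA512)[:11]
stored = crypt.crypt('correct horse', salt)

# Verify as MandragoreBackend.authenticate does: re-hash the candidate
# password with the algorithm and salt embedded in the stored value.
_, algo, salt_part, _ = stored.split('$')
assert crypt.crypt('correct horse', '${}${}'.format(algo, salt_part)) == stored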
|
Scam artists aren’t always easy to spot at first glance, but once you know what to look for, they are much easier to identify.
Most scam artists are actually very easy to like. This is one way they build trust with their “marks.” Scam artists are often overly confident in their own abilities — and potential customers are drawn to this confidence because they’re looking for answers.
Most scam artists charge five-figure fees for coaching programs. The fees can be $20,000 or more. And the coaching programs are usually sold by promising future wealth because it’s the only way to justify such high prices.
Scam artists are good at getting paid, but nearly always fail to deliver on their promises. Do some research and you will probably find dozens of unhappy clients who’ve posted complaints online.
Most scam artists sell one solution while using another. For example: Using a product launch to sell an Adwords course. The disconnect is obvious once you know to look for it.
In case after case, the scam artist’s basic operation looks like this. Step 1: He or she takes your money. Step 2: There is no step two. Some scam artists actually do deliver something for their fee, but it’s always far short of the outlandish promises.
Many scam artists pitch their programs via seminars and other live events. They may speak dozens of times a year because of their ability to sell from the stage. This builds their credibility and helps them perpetuate their scams without being detected.
Anytime you’re considering making a hefty investment with any kind of “guru,” make sure you do your due diligence — especially if it’s a high-priced coaching program. You may even want to use this blog post as a checklist to make sure you don’t get ripped off.
Please also remember this: Scam artists do not think of themselves as scam artists. They are often delusional and think they are doing the world a favor. This is another reason so many people get taken in. I hope this list of common traits will help you see through the deception.
P.S. If you enjoyed this article, you may also enjoy this one: How to Spot a Con Man. |
from collections import OrderedDict, defaultdict
try:
import itertools.izip as zip
except ImportError:
pass
import numpy as np
from .interface import Interface, DataError
from ..dimension import dimension_name
from ..element import Element
from ..dimension import OrderedDict as cyODict
from ..ndmapping import NdMapping, item_check, sorted_context
from ..util import isscalar
from .. import util
class DictInterface(Interface):
"""
Interface for simple dictionary-based dataset format. The dictionary
    keys correspond to the column (i.e. dimension) names and the values
are collections representing the values in that column.
"""
types = (dict, OrderedDict, cyODict)
datatype = 'dictionary'
@classmethod
def dimension_type(cls, dataset, dim):
name = dataset.get_dimension(dim, strict=True).name
values = dataset.data[name]
return type(values) if isscalar(values) else values.dtype.type
@classmethod
def init(cls, eltype, data, kdims, vdims):
odict_types = (OrderedDict, cyODict)
if kdims is None:
kdims = eltype.kdims
if vdims is None:
vdims = eltype.vdims
dimensions = [dimension_name(d) for d in kdims + vdims]
if (isinstance(data, list) and all(isinstance(d, dict) for d in data) and
not all(c in d for d in data for c in dimensions)):
raise ValueError('DictInterface could not find specified dimensions in the data.')
elif isinstance(data, tuple):
data = {d: v for d, v in zip(dimensions, data)}
elif util.is_dataframe(data) and all(d in data for d in dimensions):
data = {d: data[d] for d in dimensions}
elif isinstance(data, np.ndarray):
if data.ndim == 1:
if eltype._auto_indexable_1d and len(kdims)+len(vdims)>1:
data = np.column_stack([np.arange(len(data)), data])
else:
data = np.atleast_2d(data).T
data = {k: data[:,i] for i,k in enumerate(dimensions)}
elif isinstance(data, list) and data == []:
data = OrderedDict([(d, []) for d in dimensions])
elif isinstance(data, list) and isscalar(data[0]):
if eltype._auto_indexable_1d:
data = {dimensions[0]: np.arange(len(data)), dimensions[1]: data}
else:
data = {dimensions[0]: data}
elif (isinstance(data, list) and isinstance(data[0], tuple) and len(data[0]) == 2
and any(isinstance(v, tuple) for v in data[0])):
dict_data = zip(*((util.wrap_tuple(k)+util.wrap_tuple(v))
for k, v in data))
data = {k: np.array(v) for k, v in zip(dimensions, dict_data)}
# Ensure that interface does not consume data of other types
# with an iterator interface
elif not any(isinstance(data, tuple(t for t in interface.types if t is not None))
for interface in cls.interfaces.values()):
data = {k: v for k, v in zip(dimensions, zip(*data))}
elif (isinstance(data, dict) and not any(isinstance(v, np.ndarray) for v in data.values()) and not
any(d in data or any(d in k for k in data if isinstance(k, tuple)) for d in dimensions)):
# For data where both keys and values are dimension values
# e.g. {('A', 'B'): (1, 2)} (should consider deprecating)
dict_data = sorted(data.items())
k, v = dict_data[0]
if len(util.wrap_tuple(k)) != len(kdims) or len(util.wrap_tuple(v)) != len(vdims):
raise ValueError("Dictionary data not understood, should contain a column "
"per dimension or a mapping between key and value dimension "
"values.")
dict_data = zip(*((util.wrap_tuple(k)+util.wrap_tuple(v))
for k, v in dict_data))
data = {k: np.array(v) for k, v in zip(dimensions, dict_data)}
if not isinstance(data, cls.types):
raise ValueError("DictInterface interface couldn't convert data.""")
unpacked = []
for d, vals in data.items():
if isinstance(d, tuple):
vals = np.asarray(vals)
if vals.shape == (0,):
for sd in d:
unpacked.append((sd, np.array([], dtype=vals.dtype)))
                elif not (vals.ndim == 2 and vals.shape[1] == len(d)):
                    raise ValueError("Values for %s dimensions did not have "
                                     "the expected shape." % (d,))
else:
for i, sd in enumerate(d):
unpacked.append((sd, vals[:, i]))
elif d not in dimensions:
unpacked.append((d, vals))
else:
if not isscalar(vals):
vals = np.asarray(vals)
if not vals.ndim == 1 and d in dimensions:
raise ValueError('DictInterface expects data for each column to be flat.')
unpacked.append((d, vals))
if not cls.expanded([vs for d, vs in unpacked if d in dimensions and not isscalar(vs)]):
raise ValueError('DictInterface expects data to be of uniform shape.')
if isinstance(data, odict_types):
data.update(unpacked)
else:
data = OrderedDict(unpacked)
return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def validate(cls, dataset, vdims=True):
dim_types = 'all' if vdims else 'key'
dimensions = dataset.dimensions(dim_types, label='name')
not_found = [d for d in dimensions if d not in dataset.data]
if not_found:
raise DataError('Following columns specified as dimensions '
'but not found in data: %s' % not_found, cls)
lengths = [(dim, 1 if isscalar(dataset.data[dim]) else len(dataset.data[dim]))
for dim in dimensions]
if len({l for d, l in lengths if l > 1}) > 1:
lengths = ', '.join(['%s: %d' % l for l in sorted(lengths)])
raise DataError('Length of columns must be equal or scalar, '
'columns have lengths: %s' % lengths, cls)
@classmethod
def unpack_scalar(cls, dataset, data):
"""
Given a dataset object and data in the appropriate format for
the interface, return a simple scalar.
"""
if len(data) != 1:
return data
key = list(data.keys())[0]
if len(data[key]) == 1 and key in dataset.vdims:
scalar = data[key][0]
return scalar.compute() if hasattr(scalar, 'compute') else scalar
return data
@classmethod
def isscalar(cls, dataset, dim):
name = dataset.get_dimension(dim, strict=True).name
values = dataset.data[name]
if isscalar(values):
return True
if values.dtype.kind == 'O':
unique = set(values)
else:
unique = np.unique(values)
if (~util.isfinite(unique)).all():
return True
return len(unique) == 1
@classmethod
def shape(cls, dataset):
        return cls.length(dataset), len(dataset.data)
@classmethod
def length(cls, dataset):
lengths = [len(vals) for d, vals in dataset.data.items()
if d in dataset.dimensions() and not isscalar(vals)]
return max(lengths) if lengths else 1
@classmethod
def array(cls, dataset, dimensions):
if not dimensions:
dimensions = dataset.dimensions(label='name')
else:
            dimensions = [dataset.get_dimension(d).name for d in dimensions]
        arrays = [dataset.data[dim] for dim in dimensions]
return np.column_stack([np.full(len(dataset), arr) if isscalar(arr) else arr
for arr in arrays])
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
dim = dimension_name(dimension)
data = list(dataset.data.items())
data.insert(dim_pos, (dim, values))
return OrderedDict(data)
@classmethod
def redim(cls, dataset, dimensions):
all_dims = dataset.dimensions()
renamed = []
for k, v in dataset.data.items():
if k in dimensions:
k = dimensions[k].name
elif k in all_dims:
k = dataset.get_dimension(k).name
renamed.append((k, v))
return OrderedDict(renamed)
@classmethod
def concat(cls, datasets, dimensions, vdims):
columns = defaultdict(list)
for key, ds in datasets:
for k, vals in ds.data.items():
columns[k].append(vals)
for d, k in zip(dimensions, key):
columns[d.name].append(np.full(len(ds), k))
template = datasets[0][1]
dims = dimensions+template.dimensions()
return OrderedDict([(d.name, np.concatenate(columns[d.name])) for d in dims])
@classmethod
def sort(cls, dataset, by=[], reverse=False):
by = [dataset.get_dimension(d).name for d in by]
if len(by) == 1:
sorting = cls.values(dataset, by[0]).argsort()
else:
arrays = [dataset.dimension_values(d) for d in by]
sorting = util.arglexsort(arrays)
return OrderedDict([(d, v if isscalar(v) else (v[sorting][::-1] if reverse else v[sorting]))
for d, v in dataset.data.items()])
@classmethod
def range(cls, dataset, dimension):
dim = dataset.get_dimension(dimension)
column = dataset.data[dim.name]
if isscalar(column):
return column, column
return Interface.range(dataset, dimension)
@classmethod
def values(cls, dataset, dim, expanded=True, flat=True):
dim = dataset.get_dimension(dim).name
values = dataset.data.get(dim)
if isscalar(values):
if not expanded:
return np.array([values])
values = np.full(len(dataset), values, dtype=np.array(values).dtype)
else:
if not expanded:
return util.unique_array(values)
values = np.asarray(values)
return values
@classmethod
def reindex(cls, dataset, kdims, vdims):
dimensions = [dataset.get_dimension(d).name for d in kdims+vdims]
return OrderedDict([(d, dataset.dimension_values(d))
for d in dimensions])
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
# Get dimensions information
dimensions = [dataset.get_dimension(d) for d in dimensions]
kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]
vdims = dataset.vdims
# Update the kwargs appropriately for Element group types
group_kwargs = {}
group_type = dict if group_type == 'raw' else group_type
if issubclass(group_type, Element):
group_kwargs.update(util.get_param_values(dataset))
group_kwargs['kdims'] = kdims
group_kwargs.update(kwargs)
# Find all the keys along supplied dimensions
keys = (tuple(dataset.data[d.name] if isscalar(dataset.data[d.name])
else dataset.data[d.name][i] for d in dimensions)
for i in range(len(dataset)))
# Iterate over the unique entries applying selection masks
grouped_data = []
for unique_key in util.unique_iterator(keys):
mask = cls.select_mask(dataset, dict(zip(dimensions, unique_key)))
group_data = OrderedDict(((d.name, dataset.data[d.name] if isscalar(dataset.data[d.name])
else dataset.data[d.name][mask])
for d in kdims+vdims))
group_data = group_type(group_data, **group_kwargs)
grouped_data.append((unique_key, group_data))
if issubclass(container_type, NdMapping):
with item_check(False), sorted_context(False):
return container_type(grouped_data, kdims=dimensions)
else:
return container_type(grouped_data)
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
if selection_mask is None:
selection_mask = cls.select_mask(dataset, selection)
indexed = cls.indexed(dataset, selection)
data = OrderedDict((k, v if isscalar(v) else v[selection_mask])
for k, v in dataset.data.items())
if indexed and len(list(data.values())[0]) == 1 and len(dataset.vdims) == 1:
value = data[dataset.vdims[0].name]
return value if isscalar(value) else value[0]
return data
@classmethod
def sample(cls, dataset, samples=[]):
mask = False
for sample in samples:
sample_mask = True
if isscalar(sample): sample = [sample]
for i, v in enumerate(sample):
name = dataset.get_dimension(i).name
sample_mask &= (dataset.data[name]==v)
mask |= sample_mask
return {k: col if isscalar(col) else np.array(col)[mask]
for k, col in dataset.data.items()}
@classmethod
def aggregate(cls, dataset, kdims, function, **kwargs):
kdims = [dataset.get_dimension(d, strict=True).name for d in kdims]
vdims = dataset.dimensions('value', label='name')
groups = cls.groupby(dataset, kdims, list, OrderedDict)
aggregated = OrderedDict([(k, []) for k in kdims+vdims])
dropped = []
for key, group in groups:
key = key if isinstance(key, tuple) else (key,)
for kdim, val in zip(kdims, key):
aggregated[kdim].append(val)
for vdim, arr in group.items():
if vdim in dataset.vdims:
if isscalar(arr):
aggregated[vdim].append(arr)
continue
try:
if isinstance(function, np.ufunc):
reduced = function.reduce(arr, **kwargs)
else:
reduced = function(arr, **kwargs)
aggregated[vdim].append(reduced)
except TypeError:
dropped.append(vdim)
return aggregated, list(util.unique_iterator(dropped))
@classmethod
def iloc(cls, dataset, index):
rows, cols = index
scalar = False
if isscalar(cols):
scalar = isscalar(rows)
cols = [dataset.get_dimension(cols, strict=True)]
elif isinstance(cols, slice):
cols = dataset.dimensions()[cols]
else:
cols = [dataset.get_dimension(d, strict=True) for d in cols]
if isscalar(rows):
rows = [rows]
new_data = OrderedDict()
for d, values in dataset.data.items():
if d in cols:
if isscalar(values):
new_data[d] = values
else:
new_data[d] = values[rows]
if scalar:
arr = new_data[cols[0].name]
return arr if isscalar(arr) else arr[0]
return new_data
@classmethod
def has_holes(cls, dataset):
from holoviews.element import Polygons
key = Polygons._hole_key
return key in dataset.data and isinstance(dataset.data[key], list)
@classmethod
def holes(cls, dataset):
from holoviews.element import Polygons
key = Polygons._hole_key
if key in dataset.data:
return [[[np.asarray(h) for h in hs] for hs in dataset.data[key]]]
else:
return super(DictInterface, cls).holes(dataset)
Interface.register(DictInterface)
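For orientation, here is a small usage sketch of the dictionary format through the public HoloViews API. This is a sketch assuming a standard HoloViews install; the data values are illustrative:

import numpy as np
import holoviews as hv

xs = np.arange(5)
# Keys name the dimensions, values hold the columns.
ds = hv.Dataset({'x': xs, 'y': xs ** 2}, kdims=['x'], vdims=['y'],
                datatype=['dictionary'])
print(ds.interface.datatype)     # 'dictionary'
print(ds.dimension_values('y'))  # [ 0  1  4  9 16]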
|
There comes a time when people exchange beautiful vows. For some, it happens under pressure from outside sources: they never truly wanted to get married, but they go through with it because it is expected of them and it feels safe. The result can be a colorful wedding ceremony followed by an unhappy marriage. There is no need to panic, because this is okay. This guide talks about what kids need to know about divorce.
It is not every day that you meet someone who has potential, and most people never even reach their own potential, whether through inability or through their choices. Potential is also limiting: you judge only what is presented at the time, and that is all you see. This is a mistake most people make with an ex at least once. At the time, you perceived that they had all the qualities you were looking for; the ex talked well and walked well, and after some time it was just another story.
The majority of people embrace the idea of getting married.
However, marriage can turn out to be tough even when it is healthy and still functional. Both of you might be doing everything right and still run into rough patches. It is important to let your kids know that divorce doesn’t necessarily mean that either of you did something wrong. It could stem from abuse, from miscommunication, or from irreconcilable differences; divorce has to be based on legitimate grounds. At some point, parents have to do what is right and stop beating themselves up. Regret is not necessary, because at one point this person was your source of joy.
It is important to acknowledge that divorce can be devastating. However, being trapped in a marriage that is not working can feel even worse. Sometimes marriage counseling helps a couple; at other times, divorce is the only option left.
At times people change their minds and decide divorce is the best choice they have. Every person is free to make their own choices, however judgemental the world may be. Kids need to know that they are responsible for their own happiness. Divorce is a painful process, but it can hurt more to stay in a marriage where you are not happy at all. You will keep thinking about your freedom and how happy you could be if you had moved on, and that lingering thought is what hurts the most and can become toxic. |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2013 - Thierry Godin. All Rights Reserved
# @author Thierry Godin <thierry@lapinmoutardepommedauphine.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import time
from datetime import datetime, timedelta
from openerp import netsvc, tools, pooler
from openerp.osv import fields, osv
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class pos_message(osv.Model):
_name = 'pos.message'
_columns = {
'pos_ids' : fields.many2many('pos.config',
'pos_message_config_rel',
'message_id',
'config_id',
'Point of Sale'),
'title' : fields.char('Title', size=128, required=True),
'active': fields.boolean('Active'),
'message_type': fields.selection([
(1, 'Information'),
(2, 'Question'),
(3, 'Alert'),
(4, 'Warning'),
(5, 'Other')
],
'Type',
help="Select the type of the message to be displayed on POS"),
'message' : fields.text('Message', required=True),
'start_at' : fields.date('Starting Date', required=True),
'stop_at' : fields.date('Ending Date', required=True),
'frequency': fields.selection([
(1, 'Once'),
(2, 'Every X hours'),
],
'Frequency',
help="Set the frequency of occurrence of the message"),
'interval' : fields.selection([
(1, '1'),
(2, '2'),
(3, '3'),
(4, '4'),
],
'Interval',
help="Display message each x hours"),
}
_defaults = {
'message_type' : 1,
'frequency' : 1,
'interval': 1,
'active': True,
'start_at': fields.date.context_today,
'stop_at': fields.date.context_today,
}
    # get available messages for the POS
def get_available_message(self, cr, uid, posid, context=None):
if context is None:
context = {}
date_now = time.strftime("%Y-%m-%d")
date_time_now = time.strftime("%Y-%m-%d %H:%M:%S")
res = {}
default_res = {
'm_id': None,
'm_type': 0,
'm_title': None,
'm_content': None
}
messages_ids = self.search(cr, uid, [
('active', '=', True),
('start_at', '<=', date_now),
('stop_at', '>=', date_now),
('pos_ids', '=', posid)
])
_logger.info('messages_ids : %r', messages_ids)
if messages_ids:
for m_id in messages_ids:
message = self.browse(cr, uid, m_id, context=context)
m_title = _(message.title)
m_type = message.message_type
m_frequency = int(message.frequency)
m_interval = int(message.interval)
m_message = message.message
res = {
'm_id': m_id,
'm_type': m_type,
'm_title': m_title,
'm_content': m_message
}
if m_frequency == 1:
nb_read_max = 1
else:
nb_read_max = 24
date_read_start = time.strftime("%Y-%m-%d 00:00:00")
date_read_stop = time.strftime("%Y-%m-%d 23:59:00")
obj_read = self.pool.get('pos.message.read')
read_ids = obj_read.search(cr, uid, [
('pos_id', '=', posid),
('message_id', '=', m_id),
('date_read', '>', date_read_start),
('date_read', '<', date_read_stop)
])
if read_ids:
# once
if nb_read_max == 1:
res = default_res
continue
message_read = obj_read.browse(cr, uid, read_ids[0], context=context)
mr_date_plus = datetime.strptime(message_read.date_read, "%Y-%m-%d %H:%M:%S") + timedelta(hours=m_interval)
mr_date_now = datetime.strptime(date_time_now, "%Y-%m-%d %H:%M:%S")
if mr_date_now >= mr_date_plus :
break
else:
res = default_res
continue
else:
break
else:
res = default_res
return res
class pos_message_read(osv.Model):
_name = 'pos.message.read'
_order = 'pos_id, date_read desc'
_columns = {
'message_id' : fields.integer('Message id'),
'pos_id' : fields.integer('POS id'),
'date_read' : fields.datetime('Date read'),
}
def write_pos_message_read(self, cr, uid, mid, posid, context=None):
if context is None:
context = {}
date_now = time.strftime("%Y-%m-%d %H:%M:%S")
read_id = self.create(cr, uid, {'message_id' : mid, 'pos_id' : posid, 'date_read': date_now}, context=context)
return read_id
class inherit_pos_config(osv.Model):
_name = 'pos.config'
_inherit = 'pos.config'
_columns = {
'message_ids': fields.many2many('pos.message',
'pos_message_config_rel', 'config_id',
'message_id',
'Messages'),
}
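Stripped of the ORM plumbing, the re-display rule in get_available_message reduces to a date comparison. Here is a standalone sketch of that rule; the function name and arguments are illustrative, not part of the module:

from datetime import datetime, timedelta

def should_display_again(last_read, frequency, interval_hours, now=None):
    # frequency 1 ('Once'): never show again once a read record exists.
    if frequency == 1:
        return False
    # frequency 2 ('Every X hours'): show again after the interval elapses.
    now = now or datetime.now()
    return now >= last_read + timedelta(hours=interval_hours)

print(should_display_again(datetime(2013, 5, 1, 9, 0), 2, 2,
                           now=datetime(2013, 5, 1, 12, 0)))  # True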
|
Avanti Carpet Cleaning began offering limited cleaning services around 20 years ago. Situated in Denver, we started as a small dry-cleaning firm. But as time has passed, we have expanded our services, and today we provide a comprehensive range of cleaning services.
We’re a motley crew of service professionals here at Avanti Carpet Cleaning. There’s no doubt about that! But after 20 years in business, one thing remains unchanged: we all share a purpose, and that purpose is to provide the best experience for our customers we can. Life is hard enough. We try to make it a little easier.
Owner-operated since the beginning, The Carpet Care takes a local, personal approach to home cleaning that you won’t find anywhere else.
© 2019 Avanti Carpet Cleaning. All rights reserved. |
from tkinter import *
from tkinter.ttk import *
from random import random, shuffle, choice
from math import *
from time import time, sleep
from threading import Thread
import time_profile
from bisect import insort, bisect_left
from SimpleMaths import linear_map
from animation import AnimatedValue
class EndOfChainError(Exception):
pass
class MarkovNode():
def __lt__(self, other):
try:
return self.value.__lt__(other.value)
except AttributeError:
return self.value.__lt__(other)
def __init__(self, value, mode):
'''
Gets a Canvas object and places itself on the canvas.
value is the tuple of the string values of the node.
'''
self.value = value
self.destination_nodes = list() # List of all node occurences. May contain duplicates.
self.mode = mode
# Information getting methods
def get_seperator(self):
if self.mode == 'Word':
return " "
elif self.mode == 'Character':
return ""
elif self.mode == 'Line':
return "\n"
else:
print("ERROR - Unexpected Mode1")
exit()
def get_value_string(self):
return self.get_seperator().join(self.value).replace(" ", "_").replace("\n", "\\n")
def get_last_value(self, add_seperator=True):
return self.value[-1] + self.get_seperator()
def _unique_destinations(self):
return list(set(self.destination_nodes))
def _unique_destinations_with_occurences(self):
return [(i, self.destination_nodes.count(i)) for i in self._unique_destinations()]
def cache_sorted_unique_destination(self):
if hasattr(self, "cached_sorted_unique_destination"):
return
self.cached_sorted_unique_destination = self._unique_destinations_with_occurences()
self.cached_sorted_unique_destination.sort(key=lambda x: x[1])
self.cached_sorted_unique_destination.reverse()
try:
self.max_connections = self.cached_sorted_unique_destination[0][1]
except IndexError:
self.max_connections = 0
self.cached_sorted_unique_destination = [i[0] for i in self.cached_sorted_unique_destination]
def sorted_unique_destinations(self):
return self.cached_sorted_unique_destination
def get_max_connections(self):
return self.max_connections
# Chain creation/jumping methods
def connect(self, destination_node):
'''
Creates a new link from this node
to the destination_node(also a MarkovNode).
'''
self.destination_nodes.append(destination_node)
def select(self):
'''
Selects one of the connected nodes.
'''
try:
return choice(self.destination_nodes)
except IndexError:
raise EndOfChainError
class MarkovDraw:
active_color = "#FF0000"
inactive_color = "#000000"
line_color = "#808080"
active_line_color = "#FF8080"
text_font = "Ariel", 24, "bold"
@staticmethod
def change_font_size(size):
MarkovDraw.text_font = MarkovDraw.text_font[0], size, MarkovDraw.text_font[2]
    def __init__(self, markov_node, canvas, x=None, y=None):
        # Fresh random defaults per call (default args evaluate only once).
        if x is None:
            x = random() * 300
        if y is None:
            y = random() * 300
        self.node = markov_node
        self.coordinate = [x, y]
        self.animated_x = AnimatedValue(self.coordinate[0])
        self.animated_y = AnimatedValue(self.coordinate[1])
self.canvas = canvas
self.line_ids = dict()
self.text_id = canvas.create_text(self.coordinate[0], self.coordinate[1],
text=self.node.get_value_string(), fill=MarkovDraw.inactive_color,
font=MarkovDraw.text_font)
self.canvas.tag_raise(self.text_id) # Place the text at the topmost stack
def connections_to_width(self, num, mx):
'''
How thick should each line be, given the number of connections?
'''
global width_multiplier, max_connections_per_node
# return num/max_connections_per_node*width_multiplier
return num / mx * width_multiplier
def draw_lines(self, targets):
for destination_node in targets: # create a new line
self.line_ids[destination_node] = self.canvas.create_line(
self.coordinate[0], self.coordinate[1],
destination_node.coordinate[0], destination_node.coordinate[1], fill=MarkovDraw.line_color,
width=self.connections_to_width(self.node.destination_nodes.count(destination_node.node),
self.node.get_max_connections()))
self.canvas.tag_lower(self.line_ids[destination_node]) # Place the line at the bottommost stack
def max_connections(self):
mx = 0
for i in self.node.destination_nodes:
n = self.node.destination_nodes.count(i)
if n > mx:
mx = n
return mx
def update(self, current_time):
try:
self.canvas
except AttributeError:
return # Not yet drawn.
x = int(self.animated_x.get_value(current_time))
y = int(self.animated_y.get_value(current_time))
dx = -self.coordinate[0] + x
dy = -self.coordinate[1] + y
if dx != 0 or dy != 0:
self.canvas.move(self.text_id, dx, dy)
self.coordinate[0] = x
self.coordinate[1] = y
for i in self.line_ids:
try:
orig_coords = self.canvas.coords(self.line_ids[i])
if orig_coords != [self.coordinate[0], self.coordinate[1], i.coordinate[0], i.coordinate[1]]:
self.canvas.coords(self.line_ids[i], self.coordinate[0], self.coordinate[1], i.coordinate[0],
i.coordinate[1])
except KeyError: # Line not yet created.
pass
def activate(self):
try:
self.canvas
except AttributeError:
return # Not yet drawn.
self.canvas.itemconfigure(self.text_id, fill=MarkovDraw.active_color)
def activate_line_to(self, to):
try:
self.canvas.itemconfigure(self.line_ids[to], fill=MarkovDraw.active_line_color)
except KeyError:
print("KeyError on activate_line_to")
except AttributeError:
print("AttributeError on activate_line_to")
def deactivate(self):
try:
self.canvas
except AttributeError:
return # Not yet drawn.
self.canvas.itemconfigure(self.text_id, fill=MarkovDraw.inactive_color)
def remove_from_canvas(self):
try:
self.canvas
except AttributeError:
return # Not yet drawn.
for i in self.line_ids:
self.canvas.delete(self.line_ids[i])
self.canvas.delete(self.text_id)
del self.canvas
del self.text_id
def move_to(self, x, y, duration, ease_in, ease_out):
self.animated_x.animate(x, duration, ease_in, ease_out)
self.animated_y.animate(y, duration, ease_in, ease_out)
# Nodes List.
nodes = list()
active_node = None
first_node = None
last_node = None
active_node_draw = None
nodes_draw = []
max_connections_per_node = 1
# Node initialization functions.
def order_list(lst, order):
res = list()
for i in range(len(lst)):
res.append(tuple(lst[i - order + 1:i + 1]))
return res
def split_by(s, mode):
if mode == 'Word':
return s.split(" ")
elif mode == 'Character':
return list(s)
elif mode == 'Line':
return s.split("\n")
else:
print("ERROR - Unexpected Mode2")
exit()
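# A quick illustration of what the two helpers above produce for a
# second-order word chain:
#   split_by("the cat sat", 'Word')      -> ['the', 'cat', 'sat']
#   order_list(['the', 'cat', 'sat'], 2) -> [(), ('the', 'cat'), ('cat', 'sat')]
# The leading empty tuple comes from the slice lst[i - order + 1:i + 1]
# being empty for the first element; parse_and_generate() filters such
# blanks out before generate_chain() sees them.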
def generate_chain(lst, mode):
global nodes, active_node, first_node, last_node
global canvas
global input_options_progress
global tk
canvas.delete(ALL)
nodes = list()
active_node = None
prev_node = None
first_node = None
    last_node = None
percentage = 0
total = len(lst)
for i in range(len(lst)):
if i / total > percentage / 100:
percentage += 1
# print(percentage)
input_options_progress.set(i / total * 100)
tk.update()
try:
mn = nodes[bisect_left(nodes, lst[i])] # Is this element already in the list of nodes?
except IndexError:
mn = None
        if mn is None or lst[i] != mn.value:  # Not in the list yet.
            mn = MarkovNode(lst[i], mode)
            insort(nodes, mn)
        if first_node is None:
            first_node = mn
        if prev_node is not None:
            prev_node.connect(mn)
        last_node = mn
'''
for j in nodes: # TODO performance...
if j.value == lst[i]:
mn = j
if mn == None: # No Duplicates
mn = MarkovNode(lst[i], mode)
nodes.append(mn)
if prev_node != None:
prev_node.connect(mn)
'''
prev_node = mn
global chain_info_numnodes
chain_info_numnodes.set("Number of nodes: " + str(len(nodes)))
chain_info_connections.set("Number of connections:" + str(len(lst)))
chain_info_closed.set(["Chain is closed.", "Chain is open"][len(last_node.destination_nodes) == 0])
print("Finished Generating Node Graph.")
input_options_progress.set(0)
print("Caching Unique nodes...")
percentage = 0
total = len(nodes)
for i in range(len(nodes)):
# print(i,nodes[i].value)
if i / total > percentage / 100:
percentage += 1
# print(percentage)
input_options_progress.set(i / total * 100)
tk.update()
nodes[i].cache_sorted_unique_destination()
input_options_progress.set(0)
def parse_and_generate():
global input_options_strip_newlines, input_options_strip_spaces, input_options_case
print("Generating Chain...")
mode = input_options_split_vars.get()
order = int(input_options_order_vars.get())
inp = input_input_box.get("1.0", 'end-1c')
# print(input_options_strip_newlines.get(), input_options_strip_spaces.get())
if input_options_strip_newlines.get() == "1":
inp = inp.replace("\n", " ")
if input_options_strip_spaces.get() == "1":
inp = inp.replace(" ", "")
if input_options_case.get() == "1":
inp = inp.upper()
split = split_by(inp, mode)
# print("Split")
ordered = order_list(split, order)
# print("Ordered.")
trimmed = [i for i in ordered if i] # Remove blank elements.
# print("Trimmed.")
generate_chain(trimmed, mode)
generate = False
def start_generating_text():
global generate
generate = True
follow_node()
chain_options_generate.state(['disabled'])
chain_options_stop.state(['!disabled'])
def stop_generating_text():
global generate
generate = False
chain_options_generate.state(['!disabled'])
chain_options_stop.state(['disabled'])
def follow_node():
global generate, generate_delay
global active_node, nodes, chain_results_box, to_be_active, nodes_draw, first_node
global canvas
if not generate:
return
# First step
    if active_node is None:
to_be_active = first_node
else:
try:
to_be_active = active_node.node.select()
for i in nodes_draw:
if i.node == to_be_active:
i.activate()
active_node.activate_line_to(i)
active_node.deactivate()
except EndOfChainError:
stop_generating_text()
return
canvas.after(int(linear_map(0, 100, 0, 1500, generate_delay)), follow_node_part2)
def follow_node_part2():
global generate, generate_delay
global active_node, nodes, chain_results_box, to_be_active, nodes_draw, max_nodes
global canvas
global display_options_frame
prev = [0, 0]
for i in nodes_draw:
if i.node == to_be_active:
prev = i.coordinate
    if active_node is not None:
# Remove previous
active_node.remove_from_canvas()
for i in nodes_draw:
i.remove_from_canvas()
nodes_draw = list()
center = canvas_position_active()
# print("Prev coords:", prev)
active_node = MarkovDraw(to_be_active, canvas, prev[0], prev[1])
active_node.activate()
# print("Moving to:", center)
active_node.move_to(center[0], center[1], (linear_map(0, 100, 0, 1.5, generate_delay)), True, True)
destination_nodes = active_node.node.sorted_unique_destinations()[:max_nodes]
if display_options_sort.get() == "0":
shuffle(destination_nodes)
others = canvas_position_connected(len(destination_nodes))
others_outer = canvas_position_connected(len(destination_nodes), 3)
# print(others)
for i in range(len(destination_nodes)):
if i >= max_nodes:
break
# print("Drawing destination:",i)
# nodes_draw.append(MarkovDraw(destination_nodes[i],canvas, others_outer[i][0], others_outer[i][1]))
# nodes_draw[-1].move_to(others[i][0], others[i][1], (linearMap(0, 100, 0, 1.5, generate_delay)), False, True)
nodes_draw.append(MarkovDraw(destination_nodes[i], canvas, prev[0], prev[1]))
nodes_draw[-1].move_to(others[i][0], others[i][1], (linear_map(0, 100, 0, 1.5, generate_delay)), True, True)
nodes_draw[-1].deactivate()
active_node.draw_lines(nodes_draw)
chain_results_box.insert(END, active_node.node.get_last_value())
if generate:
tk.after(int(linear_map(0, 100, 0, 3000, generate_delay)), follow_node)
def update_canvas():
global canvas
global nodes_draw, active_node_draw
t = time()
for i in nodes_draw:
i.update(t)
    if active_node is not None:
active_node.update(t)
canvas.after(5, update_canvas)
# The position of the active node.
def canvas_position_active():
global canvas
w = canvas.winfo_width()
h = canvas.winfo_height()
# print(w,h)
return (w / 2, h / 2)
# Positions of the connected nodes.
def canvas_position_connected(num, r_multiplier=1):
w = canvas.winfo_width()
h = canvas.winfo_height()
r = min(h, w) / 3 * r_multiplier
res = []
for i in range(num):
# ang=pi*(i+1)/(num+1)-pi/2
ang = 2 * pi * i / num
res.append((w / 2 + r * cos(ang), h / 2 + r * sin(ang)))
return res
# Main UI Setup.
# Tk
tk = Tk()
tk.title("Markov Graphic Text Generator")
# Tk>Menu
menu = Notebook(tk, width=300, height=500)
menu.grid(column=1, row=1, sticky=(W, E, N, S))
tk.rowconfigure(1, weight=1)
# Tk>Menu>Input Tab
input_tab = Frame()
menu.add(input_tab, text="Input")
# Tk>Menu>Input Tab>Input
input_input_frame = LabelFrame(input_tab, text="Input")
input_input_frame.grid(column=1, row=1, sticky=(W, E, N, S))
input_tab.columnconfigure(1, weight=1)
input_tab.rowconfigure(1, weight=1)
# Tk>Menu>Input Tab>Input>Input Textbox
input_input_box = Text(input_input_frame, width=50)
input_input_box.grid(column=1, row=1, sticky=(W, E, N, S))
input_input_frame.columnconfigure(1, weight=1)
input_input_frame.rowconfigure(1, weight=1)
# Tk>Menu>Input Tab>Input>Input Clear Button
input_input_box_clear_btn = Button(input_input_frame, text="Clear",
command=lambda: input_input_box.delete("1.0", 'end'))
input_input_box_clear_btn.grid(column=1, columnspan=2, row=2, sticky=(W, E, N, S))
# Tk>Menu>Input Tab>Input>Input Scrollbox
input_input_box_scroller = Scrollbar(input_input_frame, orient=VERTICAL, command=input_input_box.yview)
input_input_box_scroller.grid(column=2, row=1, sticky=(W, E, N, S))
input_input_box['yscrollcommand'] = input_input_box_scroller.set
# Tk>Menu>Input Tab>Options
input_options_frame = LabelFrame(input_tab, text="Options")
input_options_frame.grid(column=1, row=2, sticky=(W, E))
input_tab.columnconfigure(1, weight=1)
# Tk>Menu>Input Tab>Options>Strip Spaces
input_options_strip_spaces = Variable()
input_options_strip_spaces.set(0)
input_options_strip_spaces_btn = Checkbutton(input_options_frame, text='Strip Spaces ( _ )',
variable=input_options_strip_spaces)
input_options_strip_spaces_btn.grid(column=1, row=2, columnspan=2, sticky=(W, E))
input_options_strip_newlines = Variable()
input_options_strip_newlines.set(0)
input_options_strip_newlines_btn = Checkbutton(input_options_frame, text='Newlines to Space ( \\n --> _ )',
variable=input_options_strip_newlines)
input_options_strip_newlines_btn.grid(column=1, row=1, columnspan=2, sticky=(W, E))
input_options_case = Variable()
input_options_case.set(0)
input_options_case_btn = Checkbutton(input_options_frame, text='Ignore case',
variable=input_options_case)
input_options_case_btn.grid(column=1, row=3, columnspan=2, sticky=(W, E))
# Tk>Menu>Input Tab>Options>Split-Label
input_options_split_label = Label(input_options_frame, text="Split By:")
input_options_split_label.grid(column=1, row=4, sticky=(W, E))
input_options_frame.columnconfigure(2, weight=1)
# Tk>Menu>Input Tab>Options>Split-RadioButton
input_options_split_vars = StringVar()
def input_options_split_vars_set():
global input_options_split_vars
global input_options_strip_spaces, input_options_strip_newlines
if input_options_split_vars.get() == 'Character':
pass
elif input_options_split_vars.get() == 'Word':
input_options_strip_spaces.set(0)
elif input_options_split_vars.get() == 'Line':
input_options_strip_spaces.set(0)
input_options_strip_newlines.set(0)
else:
print("ERROR - Unexpected Mode3")
exit()
input_options_split_char = Radiobutton(input_options_frame, text='Character', command=input_options_split_vars_set,
variable=input_options_split_vars, value='Character')
input_options_split_char.grid(column=2, row=4, sticky=(W, E))
input_options_split_word = Radiobutton(input_options_frame, text='Word', command=input_options_split_vars_set,
variable=input_options_split_vars, value='Word')
input_options_split_word.grid(column=2, row=5, sticky=(W, E))
input_options_split_line = Radiobutton(input_options_frame, text='Line', command=input_options_split_vars_set,
variable=input_options_split_vars, value='Line')
input_options_split_line.grid(column=2, row=6, sticky=(W, E))
input_options_split_vars.set("Character")
# Tk>Menu>Input Tab>Options>Order-Label
input_options_order_label = Label(input_options_frame, text="Chain Order:")
input_options_order_label.grid(column=1, row=7, sticky=(W, E))
# Tk>Menu>Input Tab>Options>Order-Spinbox
input_options_order_vars = StringVar()
input_options_order = Spinbox(input_options_frame, textvariable=input_options_order_vars)
input_options_order['values'] = ('1', '2', '3', '4', '5')
input_options_order.grid(column=2, row=7, sticky=(W, E))
# Tk>Menu>Input Tab>Options>Generate
input_options_generate = Button(input_options_frame, text="Generate Graph", command=parse_and_generate)
input_options_generate.grid(column=1, row=8, columnspan=2, sticky=(W, E))
# Tk>Menu>Input Tab>Options>Progreess bar
input_options_progress = Variable()
input_options_progress_bar = Progressbar(input_options_frame, orient=HORIZONTAL, length=200,
mode='determinate', variable=input_options_progress)
input_options_progress_bar.grid(column=1, row=9, columnspan=2, sticky=(W, E))
# Tk>Menu>Chain Tab
chain_tab = Frame()
menu.add(chain_tab, text="Chain")
# Tk>Menu>Chain Tab>Information
chain_info_frame = LabelFrame(chain_tab, text="Information")
chain_info_frame.grid(column=1, row=1, sticky=(W, E))
chain_tab.columnconfigure(1, weight=1)
# Tk>Menu>Chain Tab>Information>NumNodes
chain_info_numnodes = StringVar()
chain_info_numnodes_label = Label(chain_info_frame, textvariable=chain_info_numnodes)
chain_info_numnodes_label.grid(column=1, row=1, sticky=(W, E))
# Tk>Menu>Chain Tab>Information>NumNodes
chain_info_connections = StringVar()
chain_info_connections_label = Label(chain_info_frame, textvariable=chain_info_connections)
chain_info_connections_label.grid(column=1, row=2, sticky=(W, E))
# Tk>Menu>Chain Tab>Information>NumNodes
chain_info_closed = StringVar()
chain_info_closed_label = Label(chain_info_frame, textvariable=chain_info_closed)
chain_info_closed_label.grid(column=1, row=3, sticky=(W, E))
# Tk>Menu>Chain Tab>Options
chain_options_frame = LabelFrame(chain_tab, text="Options")
chain_options_frame.grid(column=1, row=2, sticky=(W, E))
chain_tab.columnconfigure(1, weight=1)
# Tk>Menu>Chain Tab>Options>Speed-Label
chain_options_speed_label = Label(chain_options_frame, text="Delay")
chain_options_speed_label.grid(column=1, row=1, sticky=(W, E))
# Tk>Menu>Chain Tab>Options>Speed-Slider
generate_delay = 1
def chain_options_speed_func(x):
global generate_delay
generate_delay = float(x)
chain_options_speed = Scale(chain_options_frame,
orient=HORIZONTAL, length=200, from_=1.0, to=100.0,
command=chain_options_speed_func)
chain_options_speed.set(30)
chain_options_speed.grid(column=2, row=1, sticky=(W, E))
chain_options_frame.columnconfigure(2, weight=1)
# Tk>Menu>Chain Tab>Options>Generate
chain_options_generate = Button(chain_options_frame, text="Generate Text", command=start_generating_text)
chain_options_generate.grid(column=1, row=3, columnspan=2, sticky=(W, E))
# Tk>Menu>Chain Tab>Options>Stop
chain_options_stop = Button(chain_options_frame, text="Stop", command=stop_generating_text)
chain_options_stop.grid(column=1, row=4, columnspan=2, sticky=(W, E))
# Tk>Menu>Chain Tab>Results
chain_results_frame = LabelFrame(chain_tab, text="Results")
chain_results_frame.grid(column=1, row=3, sticky=(W, E, N, S))
chain_tab.columnconfigure(1, weight=1)
chain_tab.rowconfigure(3, weight=1)
# Tk>Menu>Chain Tab>Results>Results Textbox
chain_results_box = Text(chain_results_frame, width=50)
chain_results_box.grid(column=1, row=1, sticky=(W, E, N, S))
chain_results_frame.columnconfigure(1, weight=1)
chain_results_frame.rowconfigure(1, weight=1)
# Tk>Menu>Chain Tab>Results>Results Scrollbox
chain_results_box_scroller = Scrollbar(chain_results_frame, orient=VERTICAL, command=chain_results_box.yview)
chain_results_box_scroller.grid(column=2, row=1, sticky=(W, E, N, S))
chain_results_box['yscrollcommand'] = chain_results_box_scroller.set
# Tk>Menu>Chain Tab>Results>Results Clear Btn
chain_results_box_clear_btn = Button(chain_results_frame, text="Clear",
command=lambda: chain_results_box.delete("1.0", 'end'))
chain_results_box_clear_btn.grid(column=1, columnspan=2, row=2, sticky=(W, E, N, S))
# Tk>Menu>Display Tab
display_tab = Frame()
menu.add(display_tab, text="Display")
# Tk>Menu>Display Tab>Options
display_options_frame = LabelFrame(display_tab, text="Options")
display_options_frame.grid(column=1, row=1, sticky=(W, E))
display_tab.columnconfigure(1, weight=1)
# Tk>Menu>Display Tab>Options>Strip Spaces
display_options_sort = Variable()
display_options_sort_btn = Checkbutton(display_options_frame, text='Sort nodes',
variable=display_options_sort)
display_options_sort_btn.grid(column=1, row=1, columnspan=3, sticky=(W, E))
display_options_sort.set("0")
# Tk>Menu>Display Tab>Options>Line Width-Label
display_options_line_width_label = Label(display_options_frame, text="Line Width")
display_options_line_width_label.grid(column=1, row=2, sticky=(W, E))
# Tk>Menu>Display Tab>Options>Line Width-Value
width_multiplier_str = StringVar()
display_options_max_nodes_label = Label(display_options_frame, textvariable=width_multiplier_str)
display_options_max_nodes_label.grid(column=2, row=2, sticky=(W, E))
# Tk>Menu>Display Tab>Options>Line Width-Slider
width_multiplier = 1
def set_line_width(x):
global width_multiplier
global width_multiplier_str
width_multiplier = float(x)
width_multiplier_str.set("{:.2f}".format(width_multiplier))
display_options_line_width = Scale(display_options_frame,
orient=HORIZONTAL, length=200, from_=1.0, to=30.0,
command=set_line_width)
display_options_line_width.set(15)
display_options_line_width.grid(column=3, row=2, sticky=(W, E))
display_options_frame.columnconfigure(3, weight=1)
# Tk>Menu>Display Tab>Options>Text Size-Label
display_options_text_size_label = Label(display_options_frame, text="Text Size")
display_options_text_size_label.grid(column=1, row=3, sticky=(W, E))
# Tk>Menu>Display Tab>Options>Text Size-Value
text_size_str = StringVar()
display_options_max_nodes_label = Label(display_options_frame, textvariable=text_size_str)
display_options_max_nodes_label.grid(column=2, row=3, sticky=(W, E))
# Tk>Menu>Display Tab>Options>Text Size-Slider
text_size = 1
def set_text_size(x):
global text_size
global text_size_str
text_size = int(round(float(x)))
text_size_str.set("{:.2f}".format(text_size))
MarkovDraw.change_font_size(text_size)
display_options_text_size = Scale(display_options_frame,
orient=HORIZONTAL, length=200, from_=1.0, to=100.0,
command=set_text_size)
display_options_text_size.grid(column=3, row=3, sticky=(W, E))
display_options_text_size.set(24)
# Tk>Menu>Display Tab>Options>Max Nodes Displayed-Label
display_options_max_nodes_label = Label(display_options_frame, text="Max. nodes")
display_options_max_nodes_label.grid(column=1, row=4, sticky=(W, E))
# Tk>Menu>Display Tab>Options>Max Nodes Displayed-Value
max_nodes_str = StringVar()
display_options_max_nodes_label = Label(display_options_frame, textvariable=max_nodes_str)
display_options_max_nodes_label.grid(column=2, row=4, sticky=(W, E))
# Tk>Menu>Display Tab>Options>Max Nodes Displayed-Slider
max_nodes = 1
def set_max_nodes(x):
global max_nodes
global max_nodes_str
max_nodes = int(round(float(x)))
max_nodes_str.set(max_nodes)
display_options_max_nodes = Scale(display_options_frame,
orient=HORIZONTAL, length=200, from_=1.0, to=300.0,
command=set_max_nodes)
display_options_max_nodes.grid(column=3, row=4, sticky=(W, E))
display_options_max_nodes.set(100)
# Tk>Canvas
canvas = Canvas(tk, background="#FFFFFF", width=500, height=500)
canvas.grid(column=2, row=1, sticky=(W, E, N, S))
tk.columnconfigure(2, weight=1)
# Tk>Size grip
Sizegrip(tk).grid(column=999, row=999, sticky=(S, E))
update_canvas()
tk.mainloop()
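The node classes can also be exercised without the GUI. Below is a minimal headless sketch using only names defined above; the input text and step count are illustrative:

def demo_chain(text, steps=20):
    # Build a first-order character chain by hand and walk it randomly.
    tokens = [t for t in order_list(split_by(text, 'Character'), 1) if t]
    by_value = {}
    prev = None
    for tok in tokens:
        node = by_value.setdefault(tok, MarkovNode(tok, 'Character'))
        if prev is not None:
            prev.connect(node)
        prev = node
    node, out = by_value[tokens[0]], []
    for _ in range(steps):
        out.append(node.get_last_value())
        try:
            node = node.select()
        except EndOfChainError:
            break
    return ''.join(out)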
|
This is the website of Cerberus Training Group, LLC.
We can be reached via e-mail at ssshields@gorge.net or you can reach us by telephone at 509-774-8252.
We have a continual commitment to our clients, that your privacy stays private. We hold the trust of our clients with the utmost respect.
We collect information volunteered by the consumer, such as survey information and/or site registrations, name and address, telephone number, payment information (e.g., credit card number and billing address).
The information we collect is used to improve the content of our Web page, to notify potential clients about updates to our Web site, and to contact potential clients for marketing purposes. It is disclosed when legally required to do so, at the request of governmental authorities conducting an investigation; to verify or enforce compliance with the policies governing our Website and applicable laws; to protect against misuse or unauthorized use of our Website; or to a successor entity in connection with a corporate merger, consolidation, sale of assets or other corporate change respecting the Website.
If you do not wish to receive such mailings, please let us know by calling us at the number provided above, sending us an e-mail or writing to us at the above address. Please provide us with your exact name and address. We will be sure your name is removed from our mailing list.
Customers may prevent their information from being used for purposes other than those for which it was originally collected by sending us an e-mail or writing to us at the above address.
Consumers can access this information by sending us an e-mail or writing to us at the above address.
Consumers can have this information corrected by sending us an e-mail or writing to us at the above address. |
from behave import *
import time
from clarify_python.helper import get_link_href, get_embedded
@when('I request a list of bundles without authentication')
def step_impl(context):
try:
context.result = context.customer.client().get_bundle_list()
except Exception as e:
context.exception = e
@when('I request a list of bundles')
def step_impl(context):
context.result = context.customer.client().get_bundle_list()
@when('I create a bundle named "{name}" with the media url "{url}"')
def step_impl(context, name, url):
name = context.names.translate(name)
url = context.url_table.resolve(url)
try:
context.my_bundle = context.customer.client().create_bundle(name=name, media_url=url)
except Exception as e:
print(e)
@then('my results should include a bundle named "{name}"')
def step_impl(context, name):
found = False
bundle_name = context.names.translate(name)
def check_bundle_name(client, bundle_href):
nonlocal found, bundle_name
bundle = client.get_bundle(bundle_href)
if bundle['name'] == bundle_name:
found = True
return False
context.customer.client().bundle_list_map(check_bundle_name, context.result)
assert found
@given('I have a bundle named "{name}"')
def step_impl(context, name):
name = context.names.translate(name)
context.my_bundle = context.customer.client().create_bundle(name=name)
@when('I delete my bundle')
def step_impl(context):
context.customer.client().delete_bundle(get_link_href(context.my_bundle, 'self'))
@then('the server should not list my bundle')
def step_impl(context):
found = False
my_bundle_href = get_link_href(context.my_bundle, 'self')
def check_bundle_href(client, bundle_href):
nonlocal my_bundle_href, found
if bundle_href == my_bundle_href:
found = True
context.customer.client().bundle_list_map(check_bundle_href)
assert not found
@then('My results should include a track with the URL "{url}"')
def step_impl(context, url):
found = False
url = context.url_table.resolve(url)
def check_bundle_track(client, bundle_href):
nonlocal found, url
bundle = client.get_bundle(bundle_href, embed_tracks=True)
tracks = get_embedded(bundle, 'clarify:tracks')
for track in tracks['tracks']:
if track['media_url'] == url:
found = True
return False
context.customer.client().bundle_list_map(check_bundle_track, context.result)
assert found
@when('I search my bundles for the text "{text}" in "{lang}"')
def step_impl(context, text, lang):
# Wait for the bundle to be indexed
time.sleep(4)
context.result = context.customer.client().search(query=text, language=lang)
@when('I wait until the bundle has the "{insight_rel}" insight')
def step_impl(context, insight_rel):
keywords_href = None
while keywords_href is None:
insights = context.customer.client().get_bundle(get_link_href(context.my_bundle, 'clarify:insights'))
keywords_href = get_link_href(insights, insight_rel)
if keywords_href is None:
time.sleep(3)
@then('I should receive "{count:d}" keywords including "{word}"')
def step_impl(context, count, word):
insights = context.customer.client().get_insights(get_link_href(context.my_bundle, 'clarify:insights'))
keywords = context.customer.client().get_insight(get_link_href(insights, 'insight:spoken_keywords'))
found = False
for kw in keywords['track_data'][0]['keywords']:
if kw['term'] == word:
found = True
break
assert len(keywords['track_data'][0]['keywords']) == count
assert found
@then('The spoken words insight should reveal "{count:d}" spoken words')
def step_impl(context, count):
insights = context.customer.client().get_insights(get_link_href(context.my_bundle, 'clarify:insights'))
spoken_words = context.customer.client().get_bundle(get_link_href(insights, 'insight:spoken_words'))
assert spoken_words['track_data'][0]['word_count'] == count
@given('My bundle should have exactly "{count:d}" tracks')
def step_impl(context, count):
tracks = context.customer.client().get_track_list(get_link_href(context.my_bundle, 'clarify:tracks'))
assert len(tracks['tracks']) == count
@then('My bundle should have exactly "{count:d}" tracks')
def step_impl(context, count):
tracks = context.customer.client().get_track_list(get_link_href(context.my_bundle, 'clarify:tracks'))
assert len(tracks['tracks']) == count
@when('I add a track with URL "{url}" to the bundle')
def step_impl(context, url):
url = context.url_table.resolve(url)
context.customer.client().create_track(get_link_href(context.my_bundle, 'clarify:tracks'), media_url=url)
@given('I add a track with URL "{url}" to the bundle')
def step_impl(context, url):
url = context.url_table.resolve(url)
context.customer.client().create_track(get_link_href(context.my_bundle, 'clarify:tracks'), media_url=url)
@when('I request a list of tracks')
def step_impl(context):
context.my_tracks = context.customer.client().get_track_list(get_link_href(context.my_bundle, 'clarify:tracks'))
@then('my tracks should include the URL "{url}"')
def step_impl(context, url):
found = False
url = context.url_table.resolve(url)
for track in context.my_tracks['tracks']:
if track['media_url'] == url:
found = True
break
assert found
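One more step in the same style, counting bundles with the bundle_list_map helper used above. This is a sketch: it assumes the same clarify_python client behaviour the existing steps rely on.

@then('the server should list at least "{count:d}" bundles')
def step_impl(context, count):
    seen = 0

    def count_bundle(client, bundle_href):
        # Returning None (not False) keeps the iteration going.
        nonlocal seen
        seen += 1

    context.customer.client().bundle_list_map(count_bundle, context.result)
    assert seen >= count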
|
In 1954, their first year in Baltimore, the Orioles went 11-11 against the Red Sox. It was the team’s best record against any opponent that season.
And so began the longest-running division rivalries in Orioles history.
Of the seven other teams in the 1954 American League, only New York and Boston have shared a division with Baltimore for the franchise’s entire modern history.
In other words, the Evil Empires of the American League East have been thorns in the Orioles’ side from the get-go.
-Overall, the Orioles have done slightly better against the Yankees than they have against the Red Sox. Unfortunately, they don’t hold a winning record against either franchise.
The Birds are 412-497 (.453) all-time against the Yankees and 408-495 (.452) all-time against the Red Sox.
-The O’s have had 18 winning seasons against the Yankees and three seasons (’61, ’73, ’07) where they earned a series split. Baltimore’s best showing against New York came in 1966 when the team went 15-3 in head-to-head games.
-The O’s have had 17 winning seasons against the Red Sox and five seasons (’54, ’68, ’71, ’75, and ’98) where they earned a series split. Baltimore’s best showing against Boston came in 1960 when the team went 16-6 in head-to-head games.
-The Orioles’ worst season against each franchise came at different times, but the record was the same each time. The O’s were 1-12 against the Yankees in 1985 and 1-12 against the Red Sox in 1987.
-Bring back the ’60s and ’70s. The Orioles posted winning percentages of .603 and .549 against the Yankees and .571 and .509 against the Red Sox in those successive, successful decades.
The O’s did not lose a season series against the Yankees for 11 straight seasons from 1964 through 1974. Meanwhile, the Birds did not lose a season series against the Red Sox for eight consecutive seasons from 1964 through 1971.
-The 1960s were the only decade when the Orioles won 100 or more games against both franchises. Conversely, the 2000s were the only decade when they lost 100 or more games against both franchises.
By the way, the 2000s were every bit as bad as you remember them: 62-116 (.348) against New York, 64-114 (.360) against Boston.
-Finally, as if you needed a reminder, Baltimore’s last winning season against the Yankees came in 1997 when they went 8-4 against New York (note: the O’s did split with the Yankees in 2007); Baltimore’s last winning season against the Red Sox came in 2004 when they went 10-9 against Boston.
Here’s to rivalries in the 2010s that look more like those of the ’60s and ’70s. Can you dig it, man? |
import DeepFried2 as df
import numpy as np
from examples.utils import make_progressbar


def train(Xtrain, ytrain, model, optimiser, criterion, epoch, batch_size, mode='train'):
    progress = make_progressbar('Training ({}) epoch #{}'.format(mode, epoch), len(Xtrain))
    progress.start()

    # Shuffle once per epoch, then carve the permutation into pairs of batches:
    # the first half of each slice feeds the "left" branch of the siamese
    # model, the second half the "right" branch.
    shuffle = np.random.permutation(len(Xtrain))

    for ibatch in range(len(Xtrain) // 2 // batch_size):
        indices = shuffle[ibatch*batch_size*2 : (ibatch+1)*batch_size*2]
        Xleft = Xtrain[indices[:batch_size]].astype(df.floatX)
        yleft = ytrain[indices[:batch_size]]
        Xright = Xtrain[indices[batch_size:]].astype(df.floatX)
        yright = ytrain[indices[batch_size:]]

        # The target is whether the two samples share a label. It needs to be
        # put into a column because of the way BCE works.
        y = (yleft == yright)[:, None].astype(df.floatX)

        if mode == 'train':
            model.zero_grad_parameters()
            model.accumulate_gradients((Xleft, Xright), y, criterion)
            optimiser.update_parameters(model)
        elif mode == 'stats':
            model.accumulate_statistics((Xleft, Xright))
        else:
            assert False, "Mode should be either 'train' or 'stats'"

        # Each iteration consumes two batches' worth of samples.
        progress.update((ibatch + 1) * batch_size * 2)

    progress.finish()
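# --- Usage sketch (illustrative; not part of the original example) ---
# Assumes `model`, `optimiser`, and `criterion` (e.g. a siamese network with a
# binary cross-entropy criterion) plus `Xtrain`/`ytrain` were built elsewhere;
# the exact constructors depend on the rest of the example.
batch_size = 128
for epoch in range(1, 11):
    # One pass in 'train' mode updates the weights, then a 'stats' pass
    # re-estimates aggregate statistics (e.g. for batch normalization).
    train(Xtrain, ytrain, model, optimiser, criterion, epoch, batch_size, mode='train')
    train(Xtrain, ytrain, model, optimiser, criterion, epoch, batch_size, mode='stats')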
Our Hazlet United Fall Classic is fast approaching, and so is a great opportunity to advertise your business or show support to over 100 teams from the tri-state area.
These teams comprise almost 2,000 players who, along with their families, will be at our Hazlet Complex Fields the weekend of October 5th - 7th, 2018.
If you are interested in placing an ad in our Ad Journal and supporting our local soccer players in the process, please click on the Ad Journal Form in the sidebar.
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
import pros.common.ui as ui
from pros.common import logger
from pros.config.cli_config import cli_config
from ..templates import BaseTemplate, Template
class Depot(object):
    def __init__(self, name: str, location: str, config: Dict[str, Any] = None,
                 update_frequency: timedelta = timedelta(minutes=1),
                 config_schema: Dict[str, Dict[str, Any]] = None):
        self.name: str = name
        self.location: str = location
        self.config: Dict[str, Any] = config or {}
        self.config_schema: Dict[str, Dict[str, Any]] = config_schema or {}
        self.remote_templates: List[BaseTemplate] = []
        self.last_remote_update: datetime = datetime(2000, 1, 1)  # long enough time ago to force re-check
        self.update_frequency: timedelta = update_frequency

    def update_remote_templates(self, **_):
        self.last_remote_update = datetime.now()

    def fetch_template(self, template: BaseTemplate, destination: str, **kwargs) -> Template:
        raise NotImplementedError()

    def get_remote_templates(self, auto_check_freq: Optional[timedelta] = None, force_check: bool = False, **kwargs):
        if auto_check_freq is None:
            auto_check_freq = getattr(self, 'update_frequency', cli_config().update_frequency)
        logger(__name__).info(f'Last check of {self.name} was {self.last_remote_update} '
                              f'({datetime.now() - self.last_remote_update} vs {auto_check_freq}).')
        if force_check or datetime.now() - self.last_remote_update > auto_check_freq:
            with ui.Notification():
                ui.echo(f'Updating {self.name}... ', nl=False)
                self.update_remote_templates(**kwargs)
                ui.echo('Done', color='green')
        for t in self.remote_templates:
            t.metadata['origin'] = self.name
        return self.remote_templates
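# --- Usage sketch (illustrative; not part of the original module) ---
# Assumes `my_depot` is an instance of a concrete Depot subclass that
# implements update_remote_templates() and fetch_template().
templates = my_depot.get_remote_templates(force_check=True)
for template in templates:
    # get_remote_templates() stamps each template's 'origin' metadata above.
    print(template.metadata['origin'], template)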
The State Government has provided Rs 510 crore under the Biju KBK Yojana (BKY) and to the Western Orissa Development Council (WODC) since 2005-06.
Out of this, Rs 260 crore has been provided under the BKY while Rs 250 crore was allocated for the WODC. However, funds from BKY were not distributed equally among all the districts.
While the lowest allocation of Rs 17 crore has gone to the Nuapada district during the last three years, the highest provision of Rs 45 crore each was made for Koraput and Balangir districts.
Official sources, however, dismissed this discrepancy, saying that the size of each district and the projects approved by its district planning committee are taken into account while finalising the allocations.
Provisions for other districts under BKY include Malkangiri (Rs 23 crore), Nabarangpur (Rs 32.5 crore), Rayagada (Rs 35.5 crore), Subarnapur (Rs 20 crore) and Kalahandi (Rs 42 crore).
The State Government has made a provision of Rs 28.5 crore for two similar programmes, the Biju Kandhamal and Biju Gajapati Yojanas, in the supplementary budgets. The allocation for the WODC has, however, increased over the years. While it was Rs 50 crore in 2006-07, this has increased to Rs 100 crore in 2008-09.
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import re
import psycopg2 as pg
from psycopg2 import extras as pgextras
from six.moves.urllib.parse import urlparse
from datadog_checks.base import AgentCheck, ConfigurationError, is_affirmative
from datadog_checks.pgbouncer.metrics import DATABASES_METRICS, POOLS_METRICS, STATS_METRICS
class ShouldRestartException(Exception):
    pass


class PgBouncer(AgentCheck):
    """Collects metrics from pgbouncer"""

    DB_NAME = 'pgbouncer'
    SERVICE_CHECK_NAME = 'pgbouncer.can_connect'

    def __init__(self, name, init_config, instances):
        super(PgBouncer, self).__init__(name, init_config, instances)
        self.host = self.instance.get('host', '')
        self.port = self.instance.get('port', '')
        self.user = self.instance.get('username', '')
        self.password = self.instance.get('password', '')
        self.tags = self.instance.get('tags', [])
        self.database_url = self.instance.get('database_url')
        self.use_cached = is_affirmative(self.instance.get('use_cached', True))

        if not self.database_url:
            if not self.host:
                raise ConfigurationError("Please specify a PgBouncer host to connect to.")
            if not self.user:
                raise ConfigurationError("Please specify a user to connect to PgBouncer as.")

        self.connection = None

    def _get_service_checks_tags(self):
        host = self.host
        port = self.port
        if self.database_url:
            parsed_url = urlparse(self.database_url)
            host = parsed_url.hostname
            port = parsed_url.port

        service_checks_tags = ["host:%s" % host, "port:%s" % port, "db:%s" % self.DB_NAME]
        service_checks_tags.extend(self.tags)
        service_checks_tags = list(set(service_checks_tags))
        return service_checks_tags

    def _collect_stats(self, db):
        """Query pgbouncer for various metrics"""
        metric_scope = [STATS_METRICS, POOLS_METRICS, DATABASES_METRICS]

        try:
            with db.cursor(cursor_factory=pgextras.DictCursor) as cursor:
                for scope in metric_scope:
                    descriptors = scope['descriptors']
                    metrics = scope['metrics']
                    query = scope['query']

                    try:
                        self.log.debug("Running query: %s", query)
                        cursor.execute(query)
                        rows = cursor.fetchall()
                    except Exception as e:
                        self.log.exception("Not all metrics may be available: %s", str(e))
                    else:
                        for row in rows:
                            self.log.debug("Processing row: %r", row)

                            # Skip the "pgbouncer" database
                            if row['database'] == self.DB_NAME:
                                continue

                            tags = list(self.tags)
                            tags += ["%s:%s" % (tag, row[column]) for (column, tag) in descriptors if column in row]
                            for (column, (name, reporter)) in metrics:
                                if column in row:
                                    reporter(self, name, row[column], tags)

                        if not rows:
                            self.log.warning("No results were found for query: %s", query)
        except pg.Error:
            self.log.exception("Connection error")
            raise ShouldRestartException

    def _get_connect_kwargs(self):
        """
        Get the params to pass to psycopg2.connect() based on passed-in vals
        from yaml settings file
        """
        if self.database_url:
            return {'dsn': self.database_url}

        if self.host in ('localhost', '127.0.0.1') and self.password == '':
            # Use ident method
            return {'dsn': "user={} dbname={}".format(self.user, self.DB_NAME)}

        args = {
            'host': self.host,
            'user': self.user,
            'password': self.password,
            'database': self.DB_NAME,
        }
        if self.port:
            args['port'] = self.port
        return args

    def _get_connection(self, use_cached=None):
        """Get and memoize connections to instances"""
        use_cached = use_cached if use_cached is not None else self.use_cached
        if self.connection and use_cached:
            return self.connection
        try:
            connect_kwargs = self._get_connect_kwargs()
            connection = pg.connect(**connect_kwargs)
            connection.set_isolation_level(pg.extensions.ISOLATION_LEVEL_AUTOCOMMIT)
        except Exception:
            redacted_url = self._get_redacted_dsn()
            message = u'Cannot establish connection to {}'.format(redacted_url)
            self.service_check(
                self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL, tags=self._get_service_checks_tags(), message=message
            )
            raise

        self.connection = connection
        return connection

    def _get_redacted_dsn(self):
        if not self.database_url:
            return u'pgbouncer://%s:******@%s:%s/%s' % (self.user, self.host, self.port, self.DB_NAME)

        parsed_url = urlparse(self.database_url)
        if parsed_url.password:
            return self.database_url.replace(parsed_url.password, '******')
        return self.database_url

    def check(self, instance):
        try:
            db = self._get_connection()
            self._collect_stats(db)
        except ShouldRestartException:
            self.log.info("Resetting the connection")
            db = self._get_connection(use_cached=False)
            self._collect_stats(db)

        redacted_dsn = self._get_redacted_dsn()
        message = u'Established connection to {}'.format(redacted_dsn)
        self.service_check(
            self.SERVICE_CHECK_NAME, AgentCheck.OK, tags=self._get_service_checks_tags(), message=message
        )
        self._set_metadata()

    def _set_metadata(self):
        if self.is_metadata_collection_enabled():
            pgbouncer_version = self.get_version()
            if pgbouncer_version:
                self.set_metadata('version', pgbouncer_version)

    def get_version(self):
        db = self._get_connection()
        regex = r'\d+\.\d+\.\d+'
        with db.cursor(cursor_factory=pgextras.DictCursor) as cursor:
            cursor.execute('SHOW VERSION;')
            # Some pgbouncer versions return the version as a notice rather
            # than as a result row.
            if db.notices:
                data = db.notices[0]
            else:
                data = cursor.fetchone()[0]
            res = re.findall(regex, data)
            if res:
                return res[0]
            self.log.debug("Couldn't detect version from %s", data)
Contents: CD-ROM. November 1997. Jewel case insert; back cover.
# euphoria-py
# Copyright (C) 2015 Emily A. Bellows
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Nickname and authentication state machines."""
import logging
from asyncio import AbstractEventLoop
from typing import Optional
import tiny_agent
from euphoria import Client, Packet
from tiny_agent import Agent
logger = logging.getLogger(__name__)
__all__ = ['NickAndAuth']
class NickAndAuth(Agent):
    @tiny_agent.init
    def __init__(self, client: Client, desired_nick: str, passcode: str = "", loop: AbstractEventLoop = None):
        super(NickAndAuth, self).__init__(loop=loop)
        self._client = client
        self._client.add_listener(self)
        self._desired_nick = desired_nick
        self._current_nick = ""
        self._passcode = passcode
        self._authorized = False

    @property
    def desired_nick(self) -> str:
        return self._desired_nick

    @property
    def current_nick(self) -> str:
        return self._current_nick

    @property
    def passcode(self) -> str:
        return self._passcode

    @property
    def authorized(self) -> bool:
        return self._authorized

    @tiny_agent.call
    async def set_desired_nick(self, new_nick: str) -> Optional[str]:
        self._desired_nick = new_nick
        packet = await self._client.send_nick(new_nick)
        if packet.error:
            return packet.error
        else:
            nick_reply = packet.nick_reply
            self._current_nick = nick_reply.to
            self._desired_nick = nick_reply.to
            return None

    @tiny_agent.call
    async def set_passcode(self, new_passcode: str) -> Optional[str]:
        self._passcode = new_passcode
        packet = await self._client.send_auth(new_passcode)
        if packet.error:
            return packet.error
        else:
            auth_reply = packet.auth_reply
            assert auth_reply.success
            self._authorized = True
            self.set_desired_nick(self._desired_nick)
            return None

    @tiny_agent.send
    async def on_packet(self, packet: Packet):
        hello_event = packet.hello_event
        if hello_event:
            self._current_nick = hello_event.session.name
            self._authorized = not hello_event.room_is_private
            if self._authorized:
                self.set_desired_nick(self._desired_nick)
            return

        bounce_event = packet.bounce_event
        if bounce_event:
            self._authorized = False
            self.set_passcode(self._passcode)
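# --- Usage sketch (illustrative; not part of the original module) ---
# Assumes `client` is an already-constructed euphoria Client attached to a
# running event loop, and that the @tiny_agent.call wrappers can be awaited
# for their reply value, as the return annotations above suggest.
async def claim_identity(client: Client) -> None:
    nick_and_auth = NickAndAuth(client, "my_bot", passcode="hunter2")
    error = await nick_and_auth.set_desired_nick("my_bot")
    if error:
        logger.warning("Could not claim nick: %s", error)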
Steve Fraze, chairman of the Texas Tech University Department of Agricultural Education and Communications, has been named the interim dean of the College of Agricultural Sciences and Natural Resources.
“I am pleased that Dr. Fraze will take over a position that is critical to the Texas Tech University System,” said Texas Tech President Lawrence Schovanec. “The College of Agricultural Sciences and Natural Resources will be a key contributor to the system’s plan to develop a College of Veterinary Medicine in Amarillo, and graduates of the college are leading the way in both academia and the agriculture industry around the world.”