# repo: tpeek/bike_safety | path: imagersite/imager_profile/migrations/0005_auto_20150802_0303.py | license: mit
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('imager_profile', '0004_auto_20150802_0153'),
]
operations = [
migrations.RemoveField(
model_name='imagerprofile',
name='name',
),
migrations.AddField(
model_name='imagerprofile',
name='nickname',
field=models.CharField(max_length=128, null=True, blank=True),
),
migrations.AlterField(
model_name='imagerprofile',
name='address',
field=models.TextField(null=True, blank=True),
),
migrations.AlterField(
model_name='imagerprofile',
name='camera',
field=models.CharField(help_text=b'What is the make and model of your camera?', max_length=128, null=True, blank=True),
),
migrations.AlterField(
model_name='imagerprofile',
name='photography_type',
field=models.CharField(blank=True, max_length=64, null=True, help_text=b'What is your photography type?', choices=[(b'H', b'Hobbist'), (b'A', b'Abstract'), (b'B', b'Black and White'), (b'P', b'Panorama'), (b'J', b'Journalism')]),
),
migrations.AlterField(
model_name='imagerprofile',
name='website_url',
field=models.URLField(null=True, blank=True),
),
]

# repo: iandennismiller/nexus-root | path: fabfile.py | license: mit
# -*- coding: utf-8 -*-
# nexus5-root (c) Ian Dennis Miller
from fabric.api import task, env
import shutil
import requests
import os.path
import time
import glob
from subprocess import call
adb_cmd = os.path.join(os.path.expanduser(env.sdk_path), "platform-tools", "adb")
fastboot_cmd = os.path.join(os.path.expanduser(env.sdk_path), "platform-tools", "fastboot")
def download_url(source_url, destination_filename):
if not os.path.isfile(destination_filename):
print("downloading {0}...".format(source_url))
r = requests.get(source_url, stream=True, headers={'referer': source_url})
if r.status_code == 200:
with open(destination_filename, 'wb') as f:
r.raw.decode_content = True
shutil.copyfileobj(r.raw, f)
print("image downloaded to {0}".format(destination_filename))
else:
print("already downloaded as {0}".format(destination_filename))
def install_app(apk_filename):
"install an app on the phone"
call([
adb_cmd, "install",
os.path.join(
env.working_path,
"download",
apk_filename)
])
time.sleep(1)
@task
def ensure_paths():
download_path = os.path.join(env.working_path, "download")
build_path = os.path.join(env.working_path, env.nexus_model)
if not os.path.isdir(download_path):
os.mkdir(download_path)
if not os.path.isdir(build_path):
os.mkdir(build_path)
def download_apps():
download_url("http://file.appsapk.com/wp-content/uploads/downloads/BusyBox.apk",
os.path.join(env.working_path, "download", "BusyBox.apk"))
download_url(
"https://drive.google.com/uc?export=download&confirm=no_antivirus&id=0B8muzPZAeiQ6RlFzMWM4ZUZKQ2s",
os.path.join(env.working_path, "download", "TitaniumBackup.apk"))
download_url(
"https://drive.google.com/uc?export=download&confirm=no_antivirus&id=0B8muzPZAeiQ6S293d2lqWE1rRlk",
os.path.join(env.working_path, "download", "AndroidTerminalEmulator.apk"))
@task
def download_sdk():
"download the Android SDK"
download_url(env.sdk_url, os.path.join(env.working_path, "download", "sdk.tgz"))
call(["tar", "-xvzf", os.path.join(env.working_path, "download", "sdk.tgz"),
"-C", os.path.expanduser(env.sdk_path)])
@task
def download_twrp():
"download TWRP"
download_url(env.bootloader_url, os.path.join(env.working_path, "download", "twrp.img"))
call(["cp",
os.path.join(env.working_path, "download", "twrp.img"),
os.path.join(env.working_path, env.nexus_model)
])
@task
def download_nexus_image():
"download the stock Nexus image"
download_url(env.image_url, os.path.join(env.working_path, "download", "nexus-image.tgz"))
call(["tar", "-xvzf", os.path.join(env.working_path, "download", "nexus-image.tgz"),
"-C", env.working_path])
call(["mv",
glob.glob(os.path.join(env.working_path, "{0}-*".format(env.nexus_model)))[0],
os.path.join(env.working_path, "nexus-image")
])
call(["mv",
os.path.join(env.working_path, "nexus-image"),
os.path.join(env.working_path, env.nexus_model)
])
@task
def adb_bootloader():
"reboot the phone into the bootloader using adb"
call([adb_cmd, "reboot", "bootloader"])
raw_input('Press ENTER after your phone has rebooted.')
@task
def fastboot_bootloader():
"reboot the phone into the bootloader using fastboot"
call([fastboot_cmd, "reboot-bootloader"])
raw_input('Press ENTER after your phone has rebooted.')
@task
def fastboot_recovery():
"reboot the phone into the recovery using fastboot"
call([fastboot_cmd, "reboot-recovery"])
raw_input('Press ENTER after your phone has rebooted.')
@task
def reboot():
"reboot the phone"
call([fastboot_cmd, "reboot"])
@task
def unlock():
"unlock the phone's bootloader. NB: This step will wipe all user data."
call([fastboot_cmd, "oem", "unlock"])
print("Now you must select 'yes' to wipe your user data and unlock the bootloader.")
raw_input('Press ENTER after you have unlocked the bootloader.')
reboot()
@task
def backup():
"copy backup from phone to local system"
call([adb_cmd, "pull", env.remote_backup_path, os.path.expanduser(env.local_backup_path)])
@task
def restore():
"restore backup from local system to phone"
call([adb_cmd, "push", os.path.expanduser(env.local_backup_path), env.remote_backup_path])
@task
def flash_bootloader():
"flash the stock bootloader"
call([
fastboot_cmd, "flash", "bootloader",
glob.glob(os.path.join(
env.working_path, env.nexus_model,
"nexus-image",
"bootloader-*.img"))[0]
])
time.sleep(1)
fastboot_bootloader()
time.sleep(5)
@task
def flash_radio():
"flash the radio image"
call([
fastboot_cmd, "flash", "radio",
glob.glob(os.path.join(
env.working_path, env.nexus_model,
"nexus-image",
"radio-*.img"))[0]
])
time.sleep(1)
fastboot_bootloader()
time.sleep(5)
@task
def flash_image():
"flash the nexus image"
call([
fastboot_cmd, "-w", "update",
glob.glob(os.path.join(
env.working_path, env.nexus_model,
"nexus-image",
"image-*.zip"))[0]
])
time.sleep(5)
@task
def flash_recovery():
"flash the recovery image"
call([
fastboot_cmd, "flash", "recovery",
os.path.join(
env.working_path, env.nexus_model,
"twrp.img")
])
time.sleep(1)
fastboot_recovery()
time.sleep(5)
@task
def install_apps():
"install several key APKs: BusyBox and TitaniumBackup"
install_app("TitaniumBackup.apk")
install_app("BusyBox.apk")
install_app("AndroidTerminalEmulator.apk")

# repo: hzruandd/AutobahnPython | path: examples/twisted/websocket/broadcast/server.py | license: mit
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import sys
from twisted.internet import reactor
from twisted.python import log
from twisted.web.server import Site
from twisted.web.static import File
from autobahn.twisted.websocket import WebSocketServerFactory, \
WebSocketServerProtocol, \
listenWS
class BroadcastServerProtocol(WebSocketServerProtocol):
def onOpen(self):
self.factory.register(self)
def onMessage(self, payload, isBinary):
if not isBinary:
msg = "{} from {}".format(payload.decode('utf8'), self.peer)
self.factory.broadcast(msg)
def connectionLost(self, reason):
WebSocketServerProtocol.connectionLost(self, reason)
self.factory.unregister(self)
class BroadcastServerFactory(WebSocketServerFactory):
"""
Simple broadcast server broadcasting any message it receives to all
currently connected clients.
"""
def __init__(self, url, debug=False, debugCodePaths=False):
WebSocketServerFactory.__init__(self, url, debug=debug, debugCodePaths=debugCodePaths)
self.clients = []
self.tickcount = 0
self.tick()
def tick(self):
self.tickcount += 1
self.broadcast("tick %d from server" % self.tickcount)
reactor.callLater(1, self.tick)
def register(self, client):
if client not in self.clients:
print("registered client {}".format(client.peer))
self.clients.append(client)
def unregister(self, client):
if client in self.clients:
print("unregistered client {}".format(client.peer))
self.clients.remove(client)
def broadcast(self, msg):
print("broadcasting message '{}' ..".format(msg))
for c in self.clients:
c.sendMessage(msg.encode('utf8'))
print("message sent to {}".format(c.peer))
class BroadcastPreparedServerFactory(BroadcastServerFactory):
"""
Functionally same as above, but optimized broadcast using
prepareMessage and sendPreparedMessage.
"""
def broadcast(self, msg):
print("broadcasting prepared message '{}' ..".format(msg))
preparedMsg = self.prepareMessage(msg)
for c in self.clients:
c.sendPreparedMessage(preparedMsg)
print("prepared message sent to {}".format(c.peer))
if __name__ == '__main__':
if len(sys.argv) > 1 and sys.argv[1] == 'debug':
log.startLogging(sys.stdout)
debug = True
else:
debug = False
ServerFactory = BroadcastServerFactory
# ServerFactory = BroadcastPreparedServerFactory
factory = ServerFactory(u"ws://127.0.0.1:9000",
debug=debug,
debugCodePaths=debug)
factory.protocol = BroadcastServerProtocol
factory.setProtocolOptions(allowHixie76=True)
listenWS(factory)
webdir = File(".")
web = Site(webdir)
reactor.listenTCP(8080, web)
reactor.run()

# repo: rdmorganiser/rdmo | path: rdmo/domain/migrations/0030_permissions.py | license: apache-2.0
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-02-28 12:34
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('domain', '0029_meta'),
]
operations = [
migrations.AlterModelOptions(
name='attribute',
options={'permissions': (('view_attribute', 'Can view Attribute'),), 'verbose_name': 'Attribute', 'verbose_name_plural': 'Attributes'},
),
migrations.AlterModelOptions(
name='attributeentity',
options={'ordering': ('uri',), 'permissions': (('view_attributeentity', 'Can view Attribute entity'),), 'verbose_name': 'Attribute entity', 'verbose_name_plural': 'Attribute entities'},
),
migrations.AlterModelOptions(
name='range',
options={'ordering': ('attribute',), 'permissions': (('view_range', 'Can view Range'),), 'verbose_name': 'Range', 'verbose_name_plural': 'Ranges'},
),
migrations.AlterModelOptions(
name='verbosename',
options={'permissions': (('view_verbosename', 'Can view Verbose name'),), 'verbose_name': 'Verbose name', 'verbose_name_plural': 'Verbose names'},
),
]

# repo: mpvillafranca/barestapas | path: tango_with_django_project/rango/views.py | license: gpl-3.0
from django.shortcuts import render
from django.http import HttpResponse
from django.template import RequestContext
from rango.models import Bar, Tapa
from rango.forms import TapaForm
from django.contrib.auth.decorators import login_required
from django.http import JsonResponse
# Home page
def index(request):
# Query the database for a list of ALL bares currently stored.
# Order the bares by no. likes in descending order.
# Retrieve the top 5 only - or all if less than 5.
# Place the list in our context_dict dictionary which will be passed to the template engine.
bares_list = Bar.objects.order_by('-n_visitas')[:5]
context_dict = {'bares': bares_list}
# Render the response and send it back!
return render(request, 'rango/index.html', context_dict)
# Bar detail page
def bar(request, bar_name_slug):
# Create a context dictionary which we can pass to the template rendering engine.
context_dict = {}
try:
# Can we find a bar name slug with the given name?
# If we can't, the .get() method raises a DoesNotExist exception.
# So the .get() method returns one model instance or raises an exception.
bar = Bar.objects.get(slug=bar_name_slug)
context_dict['bar_name'] = bar.nombre
# Retrieve all of the associated tapas.
# Note that filter returns >= 1 model instance.
tapas = Tapa.objects.filter(bar=bar)
# Adds our results list to the template context under name tapas.
context_dict['tapas'] = tapas
# We also add the bar object from the database to the context dictionary.
# We'll use this in the template to verify that the bar exists.
context_dict['bar'] = bar
        # New: increment the visit count each time the page is requested
bar.n_visitas += 1
bar.save()
except Bar.DoesNotExist:
# We get here if we didn't find the specified bar.
# Don't do anything - the template displays the "no bar" message for us.
pass
# Go render the response and return it to the client.
return render(request, 'rango/bar.html', context_dict)
# About page
def about(request):
# Create a context dictionary which we can pass to the template rendering engine.
context_dict = {}
return render(request, 'rango/about.html', context_dict)
# Add tapa page
@login_required
def add_tapa(request, bar_name_slug):
try:
ba = Bar.objects.get(slug=bar_name_slug)
    except Bar.DoesNotExist:
ba = None
if request.method == 'POST':
form = TapaForm(request.POST)
if form.is_valid():
if ba:
tapa = form.save(commit=False)
tapa.bar = ba
tapa.votos = 0
tapa.save()
# probably better to use a redirect here.
return bar(request, bar_name_slug)
else:
print form.errors
else:
form = TapaForm()
context_dict = {'form':form, 'bar': ba}
return render(request, 'rango/add_tapa.html', context_dict)
def reclama_datos (request):
bares = Bar.objects.order_by('-n_visitas')[:3]
datos={'bares':[bares[0].nombre,bares[1].nombre,bares[2].nombre],
'visitas':[bares[0].n_visitas,
bares[1].n_visitas,
bares[2].n_visitas
]
}
return JsonResponse(datos, safe=False)
def like_tapa(request):
context = RequestContext(request)
tapa_id = None
if request.method == 'GET':
tapa_id = request.GET['tapa_id']
votos = 0
if tapa_id:
tapa = Tapa.objects.get(id=int(tapa_id))
if tapa:
votos = tapa.votos + 1
tapa.votos = votos
tapa.save()
return HttpResponse(votos)

# repo: viper7882/PyMT4 | path: PyMT4_example.py | license: lgpl-2.1
"""
(C) Copyright 2013 Rob Watson rmawatson [at] hotmail.com and others.
All rights reserved. This program and the accompanying materials
are made available under the terms of the GNU Lesser General Public License
(LGPL) version 2.1 which accompanies this distribution, and is available at
http://www.gnu.org/licenses/lgpl-2.1.html
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
Contributors:
Rob Watson ( rmawatson [at] hotmail )
"""
from PyMT4 import *
from threading import Thread
import time
class OnOrderManager(object):
__tickets = []
__handlers = []
__runthread = None
__shutdown = False
@classmethod
def Initialize(cls):
tickets = [OrderSelect(index,SELECT_BY_POS,MODE_TRADES) for index in range(OrdersTotal())]
for ticket in tickets:
if OrderType(ticket) in (OP_BUY ,OP_SELL):
cls.__tickets.append(ticket)
cls.__runthread = Thread(target=OnOrderManager.__run)
cls.__runthread.start()
@classmethod
def RegisterOnOrderHandler(cls,callback):
cls.__handlers.append(callback)
@classmethod
def __run(cls):
count = 0
while not cls.__shutdown:
if count == 10:
count = 0
tickets = [OrderSelect(index,SELECT_BY_POS,MODE_TRADES) for index in range(OrdersTotal())]
for ticket in tickets:
if OrderType(ticket) in (OP_BUY ,OP_SELL) and ticket not in cls.__tickets:
for handler in cls.__handlers:
#try:
handler(ticket)
#except:pass
cls.__tickets.append(ticket)
count +=1
time.sleep(0.2)
@classmethod
def Shutdown(cls):
if cls.__runthread:
cls.__shutdown = True
cls.__runthread.join()
RegisterOnOrderHandler = OnOrderManager.RegisterOnOrderHandler
def OnOrderHandler(ticket):
orderSize = OrderLots(ticket)
orderStop = OrderStopLoss(ticket)
openPrice = OrderOpenPrice(ticket)
orderType = OrderType(ticket)
orderSymbol = OrderSymbol(ticket)
lotStepValue = MarketInfo(orderSymbol,MODE_POINT)
if not orderStop:
newStop = openPrice + ((-50*lotStepValue*10) if orderType == OP_BUY else (50*lotStepValue*10))
OrderModify(ticket,0.0,newStop,0.0,0,0)
def OnTickHandler(symbol,bid,ask):
print symbol,bid,ask
if __name__ == "__main__":
print Connect()
OnOrderManager.Initialize()
RegisterOnOrderHandler(OnOrderHandler)
RegisterOnTickHandler("*",OnTickHandler)
try:
while(True):
time.sleep(0.5)
except KeyboardInterrupt:
pass
OnOrderManager.Shutdown()
Disconnect()

# repo: openstack/manila | path: manila/share/drivers/netapp/dataontap/cluster_mode/data_motion.py | license: apache-2.0
# Copyright (c) 2016 Alex Meade. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
NetApp Data ONTAP data motion library.
This library handles transferring data from a source to a destination. Its
responsibility is to handle this as efficiently as possible given the
location of the data's source and destination. This includes cloning,
SnapMirror, and copy-offload as improvements to brute force data transfer.
"""
from oslo_config import cfg
from oslo_log import log
from oslo_utils import excutils
from manila import exception
from manila.i18n import _
from manila.share import configuration
from manila.share import driver
from manila.share.drivers.netapp.dataontap.client import api as netapp_api
from manila.share.drivers.netapp.dataontap.client import client_cmode
from manila.share.drivers.netapp import options as na_opts
from manila.share.drivers.netapp import utils as na_utils
from manila.share import utils as share_utils
from manila import utils
LOG = log.getLogger(__name__)
CONF = cfg.CONF
def get_backend_configuration(backend_name):
config_stanzas = CONF.list_all_sections()
if backend_name not in config_stanzas:
msg = _("Could not find backend stanza %(backend_name)s in "
"configuration which is required for replication or migration "
"workflows with the source backend. Available stanzas are "
"%(stanzas)s")
params = {
"stanzas": config_stanzas,
"backend_name": backend_name,
}
raise exception.BadConfigurationException(reason=msg % params)
config = configuration.Configuration(driver.share_opts,
config_group=backend_name)
if config.driver_handles_share_servers:
# NOTE(dviroel): avoid using a pre-create vserver on DHSS == True mode
# when retrieving remote backend configuration.
config.netapp_vserver = None
config.append_config_values(na_opts.netapp_cluster_opts)
config.append_config_values(na_opts.netapp_connection_opts)
config.append_config_values(na_opts.netapp_basicauth_opts)
config.append_config_values(na_opts.netapp_transport_opts)
config.append_config_values(na_opts.netapp_support_opts)
config.append_config_values(na_opts.netapp_provisioning_opts)
config.append_config_values(na_opts.netapp_data_motion_opts)
return config
def get_client_for_backend(backend_name, vserver_name=None):
config = get_backend_configuration(backend_name)
client = client_cmode.NetAppCmodeClient(
transport_type=config.netapp_transport_type,
ssl_cert_path=config.netapp_ssl_cert_path,
username=config.netapp_login,
password=config.netapp_password,
hostname=config.netapp_server_hostname,
port=config.netapp_server_port,
vserver=vserver_name or config.netapp_vserver,
trace=na_utils.TRACE_API)
return client
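# Illustrative sketch (not part of the original module): the two helpers above
# are typically combined to obtain a client bound to one backend/vserver pair.
# The stanza name below is a placeholder.
def _example_client_lookup():
    config = get_backend_configuration('netapp_backend_1')
    return get_client_for_backend('netapp_backend_1',
                                  vserver_name=config.netapp_vserver)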
class DataMotionSession(object):
def _get_backend_volume_name(self, config, share_obj):
"""Return the calculated backend name of the share.
Uses the netapp_volume_name_template configuration value for the
backend to calculate the volume name on the array for the share.
"""
volume_name = config.netapp_volume_name_template % {
'share_id': share_obj['id'].replace('-', '_')}
return volume_name
def _get_backend_qos_policy_group_name(self, share):
"""Get QoS policy name according to QoS policy group name template."""
__, config = self.get_backend_name_and_config_obj(share['host'])
return config.netapp_qos_policy_group_name_template % {
'share_id': share['id'].replace('-', '_')}
def _get_backend_snapmirror_policy_name_svm(self, share_server_id,
backend_name):
config = get_backend_configuration(backend_name)
return (config.netapp_snapmirror_policy_name_svm_template
% {'share_server_id': share_server_id.replace('-', '_')})
def get_vserver_from_share_server(self, share_server):
backend_details = share_server.get('backend_details')
if backend_details:
return backend_details.get('vserver_name')
def get_vserver_from_share(self, share_obj):
share_server = share_obj.get('share_server')
if share_server:
return self.get_vserver_from_share_server(share_server)
def get_backend_name_and_config_obj(self, host):
backend_name = share_utils.extract_host(host, level='backend_name')
config = get_backend_configuration(backend_name)
return backend_name, config
def get_backend_info_for_share(self, share_obj):
backend_name, config = self.get_backend_name_and_config_obj(
share_obj['host'])
vserver = (self.get_vserver_from_share(share_obj) or
config.netapp_vserver)
volume_name = self._get_backend_volume_name(config, share_obj)
return volume_name, vserver, backend_name
def get_client_and_vserver_name(self, share_server):
destination_host = share_server.get('host')
vserver = self.get_vserver_from_share_server(share_server)
backend, __ = self.get_backend_name_and_config_obj(destination_host)
client = get_client_for_backend(backend, vserver_name=vserver)
return client, vserver
def get_snapmirrors(self, source_share_obj, dest_share_obj):
dest_volume_name, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume_name, src_vserver, __ = self.get_backend_info_for_share(
source_share_obj)
snapmirrors = dest_client.get_snapmirrors(
source_vserver=src_vserver, dest_vserver=dest_vserver,
source_volume=src_volume_name, dest_volume=dest_volume_name,
desired_attributes=['relationship-status',
'mirror-state',
'source-vserver',
'source-volume',
'last-transfer-end-timestamp'])
return snapmirrors
def create_snapmirror(self, source_share_obj, dest_share_obj):
"""Sets up a SnapMirror relationship between two volumes.
1. Create SnapMirror relationship
2. Initialize data transfer asynchronously
"""
dest_volume_name, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume_name, src_vserver, __ = self.get_backend_info_for_share(
source_share_obj)
# 1. Create SnapMirror relationship
# TODO(ameade): Change the schedule from hourly to a config value
dest_client.create_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name,
schedule='hourly')
# 2. Initialize async transfer of the initial data
dest_client.initialize_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name)
def delete_snapmirror(self, source_share_obj, dest_share_obj,
release=True):
"""Ensures all information about a SnapMirror relationship is removed.
1. Abort snapmirror
2. Delete the snapmirror
3. Release snapmirror to cleanup snapmirror metadata and snapshots
"""
dest_volume_name, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume_name, src_vserver, src_backend = (
self.get_backend_info_for_share(source_share_obj))
# 1. Abort any ongoing transfers
try:
dest_client.abort_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name,
clear_checkpoint=False)
except netapp_api.NaApiError:
# Snapmirror is already deleted
pass
# 2. Delete SnapMirror Relationship and cleanup destination snapshots
try:
dest_client.delete_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name)
except netapp_api.NaApiError as e:
with excutils.save_and_reraise_exception() as exc_context:
if (e.code == netapp_api.EOBJECTNOTFOUND or
e.code == netapp_api.ESOURCE_IS_DIFFERENT or
"(entry doesn't exist)" in e.message):
LOG.info('No snapmirror relationship to delete')
exc_context.reraise = False
if release:
# If the source is unreachable, do not perform the release
try:
src_client = get_client_for_backend(src_backend,
vserver_name=src_vserver)
except Exception:
src_client = None
# 3. Cleanup SnapMirror relationship on source
try:
if src_client:
src_client.release_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name)
except netapp_api.NaApiError as e:
with excutils.save_and_reraise_exception() as exc_context:
if (e.code == netapp_api.EOBJECTNOTFOUND or
e.code == netapp_api.ESOURCE_IS_DIFFERENT or
"(entry doesn't exist)" in e.message):
# Handle the case where the snapmirror is already
# cleaned up
exc_context.reraise = False
def update_snapmirror(self, source_share_obj, dest_share_obj):
"""Schedule a snapmirror update to happen on the backend."""
dest_volume_name, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume_name, src_vserver, __ = self.get_backend_info_for_share(
source_share_obj)
# Update SnapMirror
dest_client.update_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name)
def quiesce_then_abort_svm(self, source_share_server, dest_share_server):
source_client, source_vserver = self.get_client_and_vserver_name(
source_share_server)
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
# 1. Attempt to quiesce, then abort
dest_client.quiesce_snapmirror_svm(source_vserver, dest_vserver)
dest_backend = share_utils.extract_host(dest_share_server['host'],
level='backend_name')
config = get_backend_configuration(dest_backend)
retries = config.netapp_snapmirror_quiesce_timeout / 5
@utils.retry(exception.ReplicationException, interval=5,
retries=retries, backoff_rate=1)
def wait_for_quiesced():
snapmirror = dest_client.get_snapmirrors_svm(
source_vserver=source_vserver, dest_vserver=dest_vserver,
desired_attributes=['relationship-status', 'mirror-state']
)[0]
if snapmirror.get('relationship-status') != 'quiesced':
raise exception.ReplicationException(
reason="Snapmirror relationship is not quiesced.")
try:
wait_for_quiesced()
except exception.ReplicationException:
dest_client.abort_snapmirror_svm(source_vserver,
dest_vserver,
clear_checkpoint=False)
def quiesce_then_abort(self, source_share_obj, dest_share_obj):
dest_volume, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume, src_vserver, __ = self.get_backend_info_for_share(
source_share_obj)
# 1. Attempt to quiesce, then abort
dest_client.quiesce_snapmirror_vol(src_vserver,
src_volume,
dest_vserver,
dest_volume)
config = get_backend_configuration(dest_backend)
retries = config.netapp_snapmirror_quiesce_timeout / 5
@utils.retry(exception.ReplicationException, interval=5,
retries=retries, backoff_rate=1)
def wait_for_quiesced():
snapmirror = dest_client.get_snapmirrors(
source_vserver=src_vserver, dest_vserver=dest_vserver,
source_volume=src_volume, dest_volume=dest_volume,
desired_attributes=['relationship-status', 'mirror-state']
)[0]
if snapmirror.get('relationship-status') != 'quiesced':
raise exception.ReplicationException(
reason="Snapmirror relationship is not quiesced.")
try:
wait_for_quiesced()
except exception.ReplicationException:
dest_client.abort_snapmirror_vol(src_vserver,
src_volume,
dest_vserver,
dest_volume,
clear_checkpoint=False)
def break_snapmirror(self, source_share_obj, dest_share_obj, mount=True):
"""Breaks SnapMirror relationship.
1. Quiesce any ongoing snapmirror transfers
2. Wait until snapmirror finishes transfers and enters quiesced state
3. Break snapmirror
4. Mount the destination volume so it is exported as a share
"""
dest_volume_name, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume_name, src_vserver, __ = self.get_backend_info_for_share(
source_share_obj)
# 1. Attempt to quiesce, then abort
self.quiesce_then_abort(source_share_obj, dest_share_obj)
# 2. Break SnapMirror
dest_client.break_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name)
# 3. Mount the destination volume and create a junction path
if mount:
dest_client.mount_volume(dest_volume_name)
def resync_snapmirror(self, source_share_obj, dest_share_obj):
"""Resync SnapMirror relationship. """
dest_volume_name, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume_name, src_vserver, __ = self.get_backend_info_for_share(
source_share_obj)
dest_client.resync_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name)
def resume_snapmirror(self, source_share_obj, dest_share_obj):
"""Resume SnapMirror relationship from a quiesced state."""
dest_volume_name, dest_vserver, dest_backend = (
self.get_backend_info_for_share(dest_share_obj))
dest_client = get_client_for_backend(dest_backend,
vserver_name=dest_vserver)
src_volume_name, src_vserver, __ = self.get_backend_info_for_share(
source_share_obj)
dest_client.resume_snapmirror_vol(src_vserver,
src_volume_name,
dest_vserver,
dest_volume_name)
def change_snapmirror_source(self, replica,
orig_source_replica,
new_source_replica, replica_list):
"""Creates SnapMirror relationship from the new source to destination.
1. Delete all snapmirrors involving the replica, but maintain
snapmirror metadata and snapshots for efficiency
2. For DHSS=True scenarios, creates a new vserver peer relationship if
it does not exists
3. Ensure a new source -> replica snapmirror exists
4. Resync new source -> replica snapmirror relationship
"""
replica_volume_name, replica_vserver, replica_backend = (
self.get_backend_info_for_share(replica))
replica_client = get_client_for_backend(replica_backend,
vserver_name=replica_vserver)
new_src_volume_name, new_src_vserver, new_src_backend = (
self.get_backend_info_for_share(new_source_replica))
# 1. delete
for other_replica in replica_list:
if other_replica['id'] == replica['id']:
continue
# We need to delete ALL snapmirror relationships
# involving this replica but do not remove snapmirror metadata
# so that the new snapmirror relationship is efficient.
self.delete_snapmirror(other_replica, replica, release=False)
self.delete_snapmirror(replica, other_replica, release=False)
# 2. vserver operations when driver handles share servers
replica_config = get_backend_configuration(replica_backend)
if (replica_config.driver_handles_share_servers
and replica_vserver != new_src_vserver):
# create vserver peering if does not exists
if not replica_client.get_vserver_peers(replica_vserver,
new_src_vserver):
new_src_client = get_client_for_backend(
new_src_backend, vserver_name=new_src_vserver)
# Cluster name is needed for setting up the vserver peering
new_src_cluster_name = new_src_client.get_cluster_name()
replica_cluster_name = replica_client.get_cluster_name()
replica_client.create_vserver_peer(
replica_vserver, new_src_vserver,
peer_cluster_name=new_src_cluster_name)
if new_src_cluster_name != replica_cluster_name:
new_src_client.accept_vserver_peer(new_src_vserver,
replica_vserver)
# 3. create
# TODO(ameade): Update the schedule if needed.
replica_client.create_snapmirror_vol(new_src_vserver,
new_src_volume_name,
replica_vserver,
replica_volume_name,
schedule='hourly')
# 4. resync
replica_client.resync_snapmirror_vol(new_src_vserver,
new_src_volume_name,
replica_vserver,
replica_volume_name)
@na_utils.trace
def remove_qos_on_old_active_replica(self, orig_active_replica):
old_active_replica_qos_policy = (
self._get_backend_qos_policy_group_name(orig_active_replica)
)
replica_volume_name, replica_vserver, replica_backend = (
self.get_backend_info_for_share(orig_active_replica))
replica_client = get_client_for_backend(
replica_backend, vserver_name=replica_vserver)
try:
replica_client.set_qos_policy_group_for_volume(
replica_volume_name, 'none')
replica_client.mark_qos_policy_group_for_deletion(
old_active_replica_qos_policy)
except exception.StorageCommunicationException:
LOG.exception("Could not communicate with the backend "
"for replica %s to unset QoS policy and mark "
"the QoS policy group for deletion.",
orig_active_replica['id'])
def create_snapmirror_svm(self, source_share_server,
dest_share_server):
"""Sets up a SnapMirror relationship between two vServers.
1. Create a SnapMirror policy for SVM DR
2. Create SnapMirror relationship
3. Initialize data transfer asynchronously
"""
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
src_vserver = self.get_vserver_from_share_server(source_share_server)
# 1: Create SnapMirror policy for SVM DR
dest_backend_name = share_utils.extract_host(dest_share_server['host'],
level='backend_name')
policy_name = self._get_backend_snapmirror_policy_name_svm(
dest_share_server['id'],
dest_backend_name,
)
dest_client.create_snapmirror_policy(policy_name)
# 2. Create SnapMirror relationship
dest_client.create_snapmirror_svm(src_vserver,
dest_vserver,
policy=policy_name,
schedule='hourly')
# 2. Initialize async transfer of the initial data
dest_client.initialize_snapmirror_svm(src_vserver,
dest_vserver)
def get_snapmirrors_svm(self, source_share_server, dest_share_server):
"""Get SnapMirrors between two vServers."""
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
src_vserver = self.get_vserver_from_share_server(source_share_server)
snapmirrors = dest_client.get_snapmirrors_svm(
source_vserver=src_vserver, dest_vserver=dest_vserver,
desired_attributes=['relationship-status',
'mirror-state',
'last-transfer-end-timestamp'])
return snapmirrors
def get_snapmirror_destinations_svm(self, source_share_server,
dest_share_server):
"""Get SnapMirrors between two vServers."""
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
src_vserver = self.get_vserver_from_share_server(source_share_server)
snapmirrors = dest_client.get_snapmirror_destinations_svm(
source_vserver=src_vserver, dest_vserver=dest_vserver)
return snapmirrors
def update_snapmirror_svm(self, source_share_server, dest_share_server):
"""Schedule a SnapMirror update to happen on the backend."""
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
src_vserver = self.get_vserver_from_share_server(source_share_server)
# Update SnapMirror
dest_client.update_snapmirror_svm(src_vserver, dest_vserver)
def quiesce_and_break_snapmirror_svm(self, source_share_server,
dest_share_server):
"""Abort and break a SnapMirror relationship between vServers.
1. Quiesce SnapMirror
2. Break SnapMirror
"""
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
src_vserver = self.get_vserver_from_share_server(source_share_server)
# 1. Attempt to quiesce, then abort
self.quiesce_then_abort_svm(source_share_server, dest_share_server)
# 2. Break SnapMirror
dest_client.break_snapmirror_svm(src_vserver, dest_vserver)
def cancel_snapmirror_svm(self, source_share_server, dest_share_server):
"""Cancels SnapMirror relationship between vServers."""
dest_backend = share_utils.extract_host(dest_share_server['host'],
level='backend_name')
dest_config = get_backend_configuration(dest_backend)
server_timeout = (
dest_config.netapp_server_migration_state_change_timeout)
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
snapmirrors = self.get_snapmirrors_svm(source_share_server,
dest_share_server)
if snapmirrors:
# 1. Attempt to quiesce and break snapmirror
self.quiesce_and_break_snapmirror_svm(source_share_server,
dest_share_server)
# NOTE(dviroel): Lets wait until the destination vserver be
# promoted to 'default' and state 'running', before starting
# shutting down the source
self.wait_for_vserver_state(dest_vserver, dest_client,
subtype='default', state='running',
operational_state='stopped',
timeout=server_timeout)
# 2. Delete SnapMirror
self.delete_snapmirror_svm(source_share_server, dest_share_server)
else:
dest_info = dest_client.get_vserver_info(dest_vserver)
if dest_info is None:
# NOTE(dviroel): Nothing to cancel since the destination does
# not exist.
return
if dest_info.get('subtype') == 'dp_destination':
# NOTE(dviroel): Can be a corner case where no snapmirror
# relationship was found but the destination vserver is stuck
# in DP mode. We need to convert it to 'default' to release
# its resources later.
self.convert_svm_to_default_subtype(dest_vserver, dest_client,
timeout=server_timeout)
def convert_svm_to_default_subtype(self, vserver_name, client,
is_dest_path=True, timeout=300):
interval = 10
retries = (timeout / interval or 1)
@utils.retry(exception.VserverNotReady, interval=interval,
retries=retries, backoff_rate=1)
def wait_for_state():
vserver_info = client.get_vserver_info(vserver_name)
if vserver_info.get('subtype') != 'default':
if is_dest_path:
client.break_snapmirror_svm(dest_vserver=vserver_name)
else:
client.break_snapmirror_svm(source_vserver=vserver_name)
raise exception.VserverNotReady(vserver=vserver_name)
try:
wait_for_state()
except exception.VserverNotReady:
msg = _("Vserver %s did not reach the expected state. Retries "
"exhausted. Aborting.") % vserver_name
raise exception.NetAppException(message=msg)
def delete_snapmirror_svm(self, src_share_server, dest_share_server,
release=True):
"""Ensures all information about a SnapMirror relationship is removed.
1. Abort SnapMirror
2. Delete the SnapMirror
3. Release SnapMirror to cleanup SnapMirror metadata and snapshots
"""
src_client, src_vserver = self.get_client_and_vserver_name(
src_share_server)
dest_client, dest_vserver = self.get_client_and_vserver_name(
dest_share_server)
# 1. Abort any ongoing transfers
try:
dest_client.abort_snapmirror_svm(src_vserver, dest_vserver)
except netapp_api.NaApiError:
# SnapMirror is already deleted
pass
# 2. Delete SnapMirror Relationship and cleanup destination snapshots
try:
dest_client.delete_snapmirror_svm(src_vserver, dest_vserver)
except netapp_api.NaApiError as e:
with excutils.save_and_reraise_exception() as exc_context:
if (e.code == netapp_api.EOBJECTNOTFOUND or
e.code == netapp_api.ESOURCE_IS_DIFFERENT or
"(entry doesn't exist)" in e.message):
LOG.info('No snapmirror relationship to delete')
exc_context.reraise = False
# 3. Release SnapMirror
if release:
src_backend = share_utils.extract_host(src_share_server['host'],
level='backend_name')
src_config = get_backend_configuration(src_backend)
release_timeout = (
src_config.netapp_snapmirror_release_timeout)
self.wait_for_snapmirror_release_svm(src_vserver,
dest_vserver,
src_client,
timeout=release_timeout)
def wait_for_vserver_state(self, vserver_name, client, state=None,
operational_state=None, subtype=None,
timeout=300):
interval = 10
retries = (timeout / interval or 1)
expected = {}
if state:
expected['state'] = state
if operational_state:
expected['operational_state'] = operational_state
if subtype:
expected['subtype'] = subtype
@utils.retry(exception.VserverNotReady, interval=interval,
retries=retries, backoff_rate=1)
def wait_for_state():
vserver_info = client.get_vserver_info(vserver_name)
if not all(item in vserver_info.items() for
item in expected.items()):
raise exception.VserverNotReady(vserver=vserver_name)
try:
wait_for_state()
except exception.VserverNotReady:
msg = _("Vserver %s did not reach the expected state. Retries "
"exhausted. Aborting.") % vserver_name
raise exception.NetAppException(message=msg)
def wait_for_snapmirror_release_svm(self, source_vserver, dest_vserver,
src_client, timeout=300):
interval = 10
retries = (timeout / interval or 1)
@utils.retry(exception.NetAppException, interval=interval,
retries=retries, backoff_rate=1)
def release_snapmirror():
snapmirrors = src_client.get_snapmirror_destinations_svm(
source_vserver=source_vserver, dest_vserver=dest_vserver)
if not snapmirrors:
LOG.debug("No snapmirrors to be released in source location.")
else:
try:
src_client.release_snapmirror_svm(source_vserver,
dest_vserver)
except netapp_api.NaApiError as e:
if (e.code == netapp_api.EOBJECTNOTFOUND or
e.code == netapp_api.ESOURCE_IS_DIFFERENT or
"(entry doesn't exist)" in e.message):
LOG.debug('Snapmirror relationship does not exists '
'anymore.')
msg = _('Snapmirror release sent to source vserver. We will '
'wait for it to be released.')
raise exception.NetAppException(vserver=msg)
try:
release_snapmirror()
except exception.NetAppException:
msg = _("Unable to release the snapmirror from source vserver %s. "
"Retries exhausted. Aborting") % source_vserver
raise exception.NetAppException(message=msg)
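# Illustrative sketch (not part of the original module): promoting a replica
# combines the session primitives above. The share objects are placeholders;
# in Manila they carry at least 'id', 'host' and an optional 'share_server'.
def _example_promote_replica(orig_active_replica, replica):
    session = DataMotionSession()
    # Stop replication and make the destination volume writable and exported.
    session.break_snapmirror(orig_active_replica, replica)
    # Discard the QoS policy that applied to the old active replica.
    session.remove_qos_on_old_active_replica(orig_active_replica)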

# repo: LuanZhanKuangGe/personal-terrible-crawler | path: ui/crawler_sisid.py | license: gpl-2.0
from urllib import request
from html.parser import HTMLParser
NAME = "HD-Jiggly"
COOKIE = 'cookie.txt'
class parseLinks(HTMLParser):
def __init__(self):
HTMLParser.__init__(self)
self.data = []
self.flag_tbody = False
self.flag_span = False
self.flag_a = False
self.flag_td = False
self.flag_a2 = False
self.link = ''
self.title = ''
self.name = ''
def handle_starttag(self, tag, attrs):
if tag == 'tbody':
for name, value in attrs:
if name == 'id' and value.find("normalthread_") == 0:
self.flag_tbody = True
if tag == 'span'and self.flag_tbody == True:
for name, value in attrs:
if name == 'id' and value.find("thread_") == 0:
self.flag_span = True
if tag == 'a' and self.flag_span == True:
for name, value in attrs:
if name == 'href' and value.find("thread-") == 0:
self.link = "http://174.127.195.166/forum/" + value
self.flag_a = True
if tag == 'td'and self.flag_tbody == True:
for name, value in attrs:
if name == 'class' and value.find("author") == 0:
self.flag_td = True
if tag == 'a' and self.flag_td == True:
self.flag_a2 = True
def handle_data(self, data):
if self.flag_span == True and self.flag_a == True:
self.title = data
self.flag_span = False
self.flag_a = False
if self.flag_td == True and self.flag_a2 == True:
self.name = data
if(self.name == NAME):
self.data.append(self.title)
self.data.append(self.link)
self.flag_td = False
self.flag_a2 = False
self.flag_tbody = False
class crawlerSISID():
def __init__(self, index):
url = "http://174.127.195.166/forum/forum-463-" + str(index) + ".html"
self.parse = parseLinks()
self.request = request.urlopen(url)
self.parse.feed(self.request.read().decode('gbk'))
def crawlerData(self):
self.request.close()
return self.parse.data
# print(crawlerSISID(1).crawlerData())
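# Illustrative usage (expanding the commented line above): crawl the first
# three index pages and print any threads started by NAME. The page count is
# arbitrary.
if __name__ == '__main__':
    for page in range(1, 4):
        print(crawlerSISID(page).crawlerData())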

# repo: lowinger42/ergotime | path: client/logger.py | license: mit
#!/usr/bin/env python3
"""
Handle logging
All logging is done througt QT signal/slots, so they can be used from other threads.
Copyright (C) 2020 Anders Lowinger, anders@abundo.se
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
"""
import datetime
import threading
import PyQt5.QtCore as QtCore
import PyQt5.QtWidgets as QtWidgets
class Log(QtCore.QObject):
"""
Log handler. Uses signals to be thread safe
Modeled so stdout/stderr can be directed to this class
"""
# Possible debug subsystems
DEBUG_FILES = 1 << 0
DEBUG_SETTINGS = 1 << 1
DEBUG_ACTIVITYMGR = 1 << 3
DEBUG_REPORTMGR = 1 << 4
DEBUG_MAINWIN = 1 << 5
DEBUG_OPTIONS = 1 << 6
DEBUG_SYSTRAY = 1 << 7
# Setup debug bitmask
DEBUG_LEVEL = 0
DEBUG_LEVEL |= DEBUG_FILES * 0
DEBUG_LEVEL |= DEBUG_SETTINGS * 0
DEBUG_LEVEL |= DEBUG_ACTIVITYMGR * 1
DEBUG_LEVEL |= DEBUG_REPORTMGR * 1
DEBUG_LEVEL |= DEBUG_MAINWIN * 1
DEBUG_LEVEL |= DEBUG_OPTIONS * 1
DEBUG_LEVEL |= DEBUG_SYSTRAY * 1
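    # Example: with the mask above, DEBUG_LEVEL & DEBUG_FILES == 0, so calls
    # like log.debugf(Log.DEBUG_FILES, ...) are suppressed, while
    # log.debugf(Log.DEBUG_MAINWIN, ...) is emitted.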
logTrigger = QtCore.pyqtSignal(int, str, str)
INFO = 0
WARNING = 1
ERROR = 2
DEBUG = 3
CONSOLE = 4
# Map from string to log level
level_dict = {
"info": INFO,
"warning": WARNING,
"error": ERROR,
"debug": DEBUG,
"console": CONSOLE,
}
def __init__(self):
super().__init__()
self.out = None # QT Widget for log output
self.levels = ["INFO", "WARNING", "ERROR", "DEBUG", "CONSOLE"]
self.level = self.CONSOLE
self.logTrigger.connect(self.log)
self._lines = [] # temp buffer until we have an output device
def add_row(self, line):
c = self.out.rowCount()
self.out.setRowCount(c + 1)
self.out.setItem(c, 0, QtWidgets.QTableWidgetItem(line[0]))
self.out.setItem(c, 1, QtWidgets.QTableWidgetItem(line[1]))
self.out.setItem(c, 2, QtWidgets.QTableWidgetItem(line[2]))
self.out.setItem(c, 3, QtWidgets.QTableWidgetItem(line[3]))
if c > 500:
self.out.removeRow(0)
self.out.resizeColumnsToContents()
self.out.scrollToBottom()
def setOut(self, out):
self.out = out
for line in self._lines:
self.add_row(line)
self._lines = []
def setLevel(self, level):
if isinstance(level, str):
level = self.level_dict[level]
self.level = level
def log(self, level, threadname, msg):
if level <= self.level:
now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
msg = str(msg).replace("\n", ", ")
line = [now, threadname, self.levels[level], msg]
if self.out is None:
self._lines.append(line)
print(" ".join(line))
else:
self.add_row(line)
def info(self, msg):
self.logTrigger.emit(self.INFO, threading.current_thread().getName(), msg)
def warning(self, msg):
self.logTrigger.emit(self.WARNING, threading.current_thread().getName(), msg)
def error(self, msg):
self.logTrigger.emit(self.ERROR, threading.current_thread().getName(), msg)
def debug(self, msg):
self.logTrigger.emit(self.DEBUG, threading.current_thread().getName(), msg)
def debugf(self, mask, msg):
"""
Show debug message, if debug for this type is enabled
"""
if self.DEBUG_LEVEL & mask:
self.logTrigger.emit(self.DEBUG, threading.current_thread().getName(), msg)
def write(self, msg):
msg = msg.strip()
if msg:
self.logTrigger.emit(self.CONSOLE, threading.current_thread().getName(), msg.strip())
def flush(self):
# this is defined so we can redirect stdout/stderr here without warnings
pass
log = Log()
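# Illustrative usage sketch (not part of the original module; assumes a Qt
# application context so signal delivery behaves as expected):
if __name__ == "__main__":
    import sys
    log.setLevel("debug")
    log.info("application starting")
    log.debugf(Log.DEBUG_ACTIVITYMGR, "activity manager initialized")
    sys.stdout = log  # print() output now arrives via Log.write()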

# repo: acislab/HuMaIN_Microservices | path: call_seg.py | license: apache-2.0
# -*- coding: utf-8 -*-
##########################################################################################
# Developer: Luan,Jingchao Project: HuMaIN (http://humain.acis.ufl.edu)
# Description:
# Script to invoke the OCRopus Segmentation microservice. Given the binarized images'
# directory or an image, for each image return a folder containing all segemnted
# single-line images.
##########################################################################################
# Copyright 2017 Advanced Computing and Information Systems (ACIS) Lab - UF
# (https://www.acis.ufl.edu/)
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################################
import requests, zipfile, StringIO
import sys, time, argparse, os, subprocess
import multiprocessing as mp
# Segmentation service URL
IP = "10.5.146.92"
PORT = "8102"
URL_SEG = "http://" + IP + ":" + PORT + "/segmentationapi"
SESSION = requests.Session()
def str2bool(v):
"""Transfer String to Boolean.
Normalizing all positive string to "True" and all negative string to "False".
Args:
v: original string.
Returns:
Return the original string related boolean. For example, return "True" if the original string is "yes".
"""
if v.lower() in ('yes', 'true', 't', 'y', '1'):
return True
elif v.lower() in ('no', 'false', 'f', 'n', '0'):
return False
else:
raise argparse.ArgumentTypeError('Boolean value expected.')
def arg_parse():
"""Parse argumentes input by user.
Returns:
A dictionary-like type viriable 'args' which contains all arguments input by user
"""
parser = argparse.ArgumentParser("Call OCRopy Segmentation Service")
	parser.add_argument('input', help="The path of an image file, or a folder containing all pre-processed images.")
# output parameters
parser.add_argument('-o', '--output', default=None, help="output directory, without the last slash")
# limits
group_limits = parser.add_argument_group('limits')
group_limits.add_argument('--minscale',type=float,default=argparse.SUPPRESS, help='minimum scale permitted')
group_limits.add_argument('--maxlines',type=float,default=argparse.SUPPRESS, help='maximum # lines permitted')
# scale parameters
group_scale = parser.add_argument_group('scale parameters')
group_scale.add_argument('--scale',type=float,default=argparse.SUPPRESS, help='the basic scale of the document (roughly, xheight) 0=automatic')
group_scale.add_argument('--hscale',type=float,default=argparse.SUPPRESS, help='non-standard scaling of horizontal parameters')
group_scale.add_argument('--vscale',type=float,default=argparse.SUPPRESS, help='non-standard scaling of vertical parameters')
# line parameters
group_line = parser.add_argument_group('line parameters')
group_line.add_argument('--threshold',type=float,default=argparse.SUPPRESS, help='baseline threshold')
group_line.add_argument('--noise',type=int,default=argparse.SUPPRESS, help="noise threshold for removing small components from lines")
group_line.add_argument('--usegauss', type=str2bool, default=argparse.SUPPRESS, help='use gaussian instead of uniform')
# column parameters
group_column = parser.add_argument_group('column parameters')
group_column.add_argument('--maxseps',type=int,default=argparse.SUPPRESS, help='maximum black column separators')
group_column.add_argument('--sepwiden',type=int,default=argparse.SUPPRESS, help='widen black separators (to account for warping)')
group_column.add_argument('--maxcolseps',type=int,default=argparse.SUPPRESS, help='maximum # whitespace column separators')
group_column.add_argument('--csminheight',type=float,default=argparse.SUPPRESS, help='minimum column height (units=scale)')
# output parameters
group_column = parser.add_argument_group('output parameters')
group_column.add_argument('--pad',type=int,default=argparse.SUPPRESS, help='adding for extracted lines')
group_column.add_argument('--expand',type=int,default=argparse.SUPPRESS, help='expand mask for grayscale extraction')
args = parser.parse_args()
# Set the default output folder
default_output = ""
if os.path.isfile(args.input):
default_output = os.path.dirname(args.input)
elif os.path.isdir(args.input):
default_output = args.input
else:
parser.print_help()
sys.exit(0)
# Verify or create the output folder
if args.output is None:
args.output = default_output
else:
if not os.path.isdir(args.output):
subprocess.call(["mkdir -p " + args.output], shell=True)
if not os.path.isdir(args.output):
print("Error: Destination folder %s could not be created" % (args.output))
sys.exit(0)
args = vars(args) # Convert the Namespace object "args" to a dict=like object
return args
def call_seg(job):
"""Call Segmentation Service.
Call the Segmentation service, and store the segmented result locally.
Args:
		job: a tuple (image path, local path to store the result, parameters customized by the user).
"""
imagepath, dst_dir, parameters = job
	# Image file to upload
image = {'image': open(imagepath, 'rb')}
# Call segmentation service and get response
resp = SESSION.get(URL_SEG, files=image, data=parameters)
	# Unpack the zip file returned by the segmentation service, and save it locally
if resp.status_code == 200:
# For python 3+, replace with io.BytesIO(resp.content)
z = zipfile.ZipFile(StringIO.StringIO(resp.content))
z.extractall(dst_dir)
print("[OK] '%s' segmentation success!" % os.path.basename(imagepath))
else:
print("[ERROR] '%s' segmentation error!" % os.path.basename(imagepath))
def main(args):
"""Main function.
	Call the Segmentation service for each image sequentially or in parallel.
"""
input_ = args['input']
output = args['output']
# Only keep the setable parameters
del args['input']
del args['output']
# Call segmentation service
if os.path.isfile(input_):
# one image using a single process
call_seg((input_, output, args))
SESSION.close()
elif os.path.isdir(input_):
# multiple images using multiple processes to call segmentation parallelly
jobs = []
for img in os.listdir(input_):
img_path = os.path.join(input_, img)
jobs.append((img_path, output, args))
pool = mp.Pool(processes=8) # #processes = #CPU by default
pool.map(call_seg, jobs)
# Close processes pool after it is finished
pool.close()
pool.join()
SESSION.close()
if __name__ == '__main__':
args = arg_parse()
	main(args)

# repo: adamrvfisher/TechnicalAnalysisLibrary | path: RMultipleTracker55DayOneUnit.py
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 11 09:04:55 2018
@author: AmatVictoriaCuramIII
"""
#Developed in Python 3.5
#R Multiple Finder; Trade Tracking
import numpy as np
import random as rand
import pandas as pd
import time as t
from DatabaseGrabber import DatabaseGrabber
from YahooGrabber import YahooGrabber
#Inputs - OHLC data
Ticker1 = 'VXX'
Asset1 = YahooGrabber(Ticker1)
##Tasty OHLC; ***ATTN*** insert path for OHLC data
#Asset1 = pd.read_pickle('C:\\Users\\Tasty\\Desktop\\WorkingDirectory\\UVXY')
#Declaration/Assignment
#Empty list
Empty = []
#Empty dataframe
Trades = pd.DataFrame()
##Timing statistics and iteration counter for optimization
#Start = t.time()
#Counter = 0
#start = t.time()
##The next 4 declarations are for use in fixed profit and loss based exits
##Exit stop loss - in percentages --------- however, looking to use ATR based stops
#LongStopLoss = .005
#ShortStopLoss = .005
##Exit profit take -------- However, looking to use other exits, time based, trailing, ATR, etc.
#LongProfitTake = .01
#ShortProfitTake = .01
#Constraints in percentages
Commission = .01
Slippage = .01
#Time series trimmer for in/out sample data
#Asset1a = Asset1[-1250:] #Out
Asset1 = Asset1[:] #In
#
#Numbered subindex
Asset1['SubIndex'] = range(1,len(Asset1)+1)
#Variable windows
donchianwindow = 15
exitwindow = 13
ATRwindow = 20
stopwindow = 13
Counter = 0
#Log Returns
Asset1['LogRet'] = np.log(Asset1['Adj Close']/Asset1['Adj Close'].shift(1))
Asset1['LogRet'] = Asset1['LogRet'].fillna(0)
Asset1['Method1'] = Asset1['High'] - Asset1['Low']
Asset1['Method2'] = abs((Asset1['High'] - Asset1['Close'].shift(1)))
Asset1['Method3'] = abs((Asset1['Low'] - Asset1['Close'].shift(1)))
Asset1['Method1'] = Asset1['Method1'].fillna(0)
Asset1['Method2'] = Asset1['Method2'].fillna(0)
Asset1['Method3'] = Asset1['Method3'].fillna(0)
Asset1['TrueRange'] = Asset1[['Method1','Method2','Method3']].max(axis = 1)
Asset1['ATR'] = Asset1['TrueRange'].rolling(window = ATRwindow,
center=False).mean()
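#The block above computes Wilder-style True Range / ATR:
#TrueRange = max(High - Low, |High - prevClose|, |Low - prevClose|), and ATR is
#its simple rolling mean over ATRwindow bars. Illustrative check with made-up
#numbers: High=11, Low=9, prevClose=10.5 -> TR = max(2, 0.5, 1.5) = 2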
##Market top and bottom calculation
Asset1['RollingMax'] = Asset1['High'].rolling(window=donchianwindow, center=False).max()
Asset1['RollingMin'] = Asset1['Low'].rolling(window=donchianwindow, center=False).min()
#Asset1[['RollingMax','RollingMin','Adj Close']].plot()
##Signal = price breakout vs. rolling min/max (Donchian channel)
##if price breaks above the rolling max, go long
##if price breaks below the rolling min, go short
Asset1['Signal'] = np.where(Asset1['High'] >= Asset1['RollingMax'].shift(1) , 1, 0)
Asset1['Signal'] = np.where(Asset1['Low'] <= Asset1['RollingMin'].shift(1) , -1, Asset1['Signal'])
#if Rolling Min/Max is still being computed, stay out of market
Asset1['Signal'] = np.where(Asset1['RollingMax'] == np.nan, 0, Asset1['Signal'])
#To help identify "regime changes" i.e. last signal switches from short to long, vice versa
#Asset1['FilledSignal'] = np.where(Asset1['Signal'] == 0, np.nan, Asset1['Signal'] )
#Asset1['FilledSignal'] = Asset1['FilledSignal'].ffill(inplace = False)
#Asset1['FilledSignal'] = Asset1['FilledSignal'].fillna(0)
#Signal sub index numbers for segmenting data for trade analysis
SignalDates = Asset1['SubIndex'].loc[((Asset1['Signal'] != 0))]
#Trade ATR for signal
Asset1['TradeATR'] = np.where(Asset1['Signal'] != 0, Asset1['ATR'].shift(1), np.nan)
#experimental exits
Asset1['LimitExitPrice'] = np.nan
Asset1['ShortExitPrice'] = Asset1['High'].rolling(window=stopwindow, center=False).max()
Asset1['LongExitPrice'] = Asset1['Low'].rolling(window=stopwindow, center=False).min()
#Find the first trade of the signal period, so we can document entry prices
#Declare columns to record entry price and stop for unit one
Asset1['EntryPriceUnitOne'] = np.nan
Asset1['StopPriceUnitOne'] = np.nan
#Be sure to check for gaps on first unit entry and later on exits.
#Default stops and entries
#Long entry first unit
Asset1['EntryPriceUnitOne'] = np.where(Asset1['Signal'] == 1,
Asset1['RollingMax'].shift(1) + .01, np.nan)
#Long gap entry first unit
Asset1['EntryPriceUnitOne'].loc[(Asset1['Signal'] == 1) & (
Asset1['Open'] > Asset1['EntryPriceUnitOne'])] = Asset1['Open']
#Short entry first unit
Asset1['EntryPriceUnitOne'] = np.where(Asset1['Signal'] == -1,
Asset1['RollingMin'].shift(1) - .01, Asset1['EntryPriceUnitOne'])
#Short gap entry first unit
Asset1['EntryPriceUnitOne'].loc[(Asset1['Signal'] == -1) & (
Asset1['Open'] < Asset1['EntryPriceUnitOne'])] = Asset1['Open']
#Long stop first unit
Asset1['StopPriceUnitOne'] = np.where(Asset1['Signal'] == 1,
Asset1['EntryPriceUnitOne'] - (Asset1['TradeATR'] * 2), np.nan)
#Short stop first unit
Asset1['StopPriceUnitOne'] = np.where(Asset1['Signal'] == -1,
Asset1['EntryPriceUnitOne'] + (Asset1['TradeATR'] * 2), Asset1['StopPriceUnitOne'])
#Experimental exits
Asset1['HybridShortExitPrice'] = np.where(Asset1['ShortExitPrice'] < Asset1['StopPriceUnitOne'],
Asset1['ShortExitPrice'], Asset1['StopPriceUnitOne'])
Asset1['HybridLongExitPrice'] = np.where(Asset1['LongExitPrice'] > Asset1['StopPriceUnitOne'],
Asset1['LongExitPrice'], Asset1['StopPriceUnitOne'])
Asset1['HybridShortExitPrice'] = Asset1['HybridShortExitPrice'].ffill()
Asset1['HybridLongExitPrice'] = Asset1['HybridLongExitPrice'].ffill()
#This is a profit target for long trades
Asset1['LimitExitPrice'] = np.where(Asset1['Signal'] == 1,
Asset1['EntryPriceUnitOne'] + (5 * Asset1['TradeATR']), np.nan)
#This is a profit target for short trades
Asset1['LimitExitPrice'] = np.where(Asset1['Signal'] == -1,
Asset1['EntryPriceUnitOne'] - (5 * Asset1['TradeATR']), Asset1['LimitExitPrice'])
#Begin loops for individual trade examination
#Novice indexing abilities
TradeRanger = range(0,len(SignalDates))
#for r in TradeRanger:
TradeSubset = Asset1.loc[(Asset1['SubIndex'] >= SignalDates[0])]
#TradeSubset = Asset1.loc[(Asset1['SubIndex'] >= 59) & (Asset1['SubIndex'] <= 87)]
TradeDirection = TradeSubset['Signal'][0]
TradeSubset['Exit'] = 0
#
#Short exit, 1 = yes, 0 = no
TradeSubset['ShortExit'] = 0
#Long exit, 1 = yes, 0 = no
TradeSubset['LongExit'] = 0
#Did the exit gap overnight? or hit after open
TradeSubset['GapShortExit'] = 0
#Did the exit gap overnight? or hit after open
TradeSubset['GapLongExit'] = 0
##Experimental exits
#TradeSubset['HybridShortExitPrice'] = np.where(TradeSubset['ShortExitPrice'] < TradeSubset['StopPriceUnitOne'],
# TradeSubset['ShortExitPrice'], TradeSubset['StopPriceUnitOne'])
#TradeSubset['HybridLongExitPrice'] = np.where(TradeSubset['LongExitPrice'] > TradeSubset['StopPriceUnitOne'],
# TradeSubset['LongExitPrice'], TradeSubset['StopPriceUnitOne'])
#TradeSubset['HybridShortExitPrice'] = TradeSubset['HybridShortExitPrice'].ffill()
#TradeSubset['HybridLongExitPrice'] = TradeSubset['HybridLongExitPrice'].ffill()
#
#
#
#
#
#1 = Short exit being hit starting the day of the signal.
if TradeDirection == -1:
TradeSubset['ShortExit'].loc[(TradeSubset['High'] > TradeSubset['HybridShortExitPrice'])] = 1
if TradeDirection == 1:
#1 = Long exit being hit starting the day of the signal.
TradeSubset['LongExit'].loc[(TradeSubset['Low'] < TradeSubset['HybridLongExitPrice'])] = 1
#Assess Gaps on days where trade closes
TradeSubset['GapShortExit'].loc[(TradeSubset['ShortExit'] == 1) & (
TradeSubset['Open'] > TradeSubset['HybridShortExitPrice'])] = 1
TradeSubset['GapLongExit'].loc[(TradeSubset['LongExit'] == 1) & (
TradeSubset['Open'] < TradeSubset['HybridLongExitPrice'])] = 1
#Types of exit
TradeSubset['Exit'].loc[(TradeSubset['ShortExit'] == 1)] = 1 #1 indicating short exit
TradeSubset['Exit'].loc[(TradeSubset['LongExit'] == 1)] = 2 #1 indicating long exit
TradeSubset['Exit'].loc[(TradeSubset['GapShortExit'] == 1)] = 3 #1 indicating short exit w/ gap
TradeSubset['Exit'].loc[(TradeSubset['GapLongExit'] == 1)] = 4 #1 indicating long exit w/ gap
while sum(abs(TradeSubset['Exit'])) != 0:
#while Counter < 1:
EntryPriceUnitOne = TradeSubset['EntryPriceUnitOne'][0]
StopPriceUnitOne = TradeSubset['StopPriceUnitOne'][0]
#TradeDirection
TradeDirection = TradeSubset['Signal'][0]
#I have to figure out how to add units..
#TradeSubset['Units'] = 1
#
TradeSubset['Exit'] = 0
#
#Short exit, 1 = yes, 0 = no
TradeSubset['ShortExit'] = 0
#Long exit, 1 = yes, 0 = no
TradeSubset['LongExit'] = 0
#Did the exit gap overnight? or hit after open
TradeSubset['GapShortExit'] = 0
#Did the exit gap overnight? or hit after open
TradeSubset['GapLongExit'] = 0
##Experimental exits
#TradeSubset['HybridShortExitPrice'] = np.where(TradeSubset['ShortExitPrice'] < TradeSubset['StopPriceUnitOne'],
# TradeSubset['ShortExitPrice'], TradeSubset['StopPriceUnitOne'])
#TradeSubset['HybridLongExitPrice'] = np.where(TradeSubset['LongExitPrice'] > TradeSubset['StopPriceUnitOne'],
# TradeSubset['LongExitPrice'], TradeSubset['StopPriceUnitOne'])
#TradeSubset['HybridShortExitPrice'] = TradeSubset['HybridShortExitPrice'].ffill()
#TradeSubset['HybridLongExitPrice'] = TradeSubset['HybridLongExitPrice'].ffill()
#
#
#
#
#
#1 = Short exit being hit starting the day of the signal.
if TradeDirection == -1:
TradeSubset['ShortExit'].loc[(TradeSubset['High'] > TradeSubset['HybridShortExitPrice'])] = 1
if TradeDirection == 1:
#1 = Long exit being hit starting the day of the signal.
TradeSubset['LongExit'].loc[(TradeSubset['Low'] < TradeSubset['HybridLongExitPrice'])] = 1
#Assess Gaps on days where trade closes
TradeSubset['GapShortExit'].loc[(TradeSubset['ShortExit'] == 1) & (
TradeSubset['Open'] > TradeSubset['HybridShortExitPrice'])] = 1
TradeSubset['GapLongExit'].loc[(TradeSubset['LongExit'] == 1) & (
TradeSubset['Open'] < TradeSubset['HybridLongExitPrice'])] = 1
#Types of exit
TradeSubset['Exit'].loc[(TradeSubset['ShortExit'] == 1)] = 1 #1 indicating short exit
TradeSubset['Exit'].loc[(TradeSubset['LongExit'] == 1)] = 2 #1 indicating long exit
TradeSubset['Exit'].loc[(TradeSubset['GapShortExit'] == 1)] = 3 #1 indicating short exit w/ gap
TradeSubset['Exit'].loc[(TradeSubset['GapLongExit'] == 1)] = 4 #1 indicating long exit w/ gap
#
#
#
#
#List comprehension to find exit taken for subset.
#The next function gives a position on the TradeSubset index
ExitTaken = TradeSubset['Exit'][next((n for n, x in enumerate(TradeSubset['Exit']) if x), 0)]
#The length of the trade
LengthOfTrade = int(next((n for n, x in enumerate(TradeSubset['Exit']) if x), 0))
    #The SubIndex of the exit date is for continuing to look for re-entry in a new subset
SubIndexOfEntry = TradeSubset['SubIndex'][0]
SubIndexOfExit = TradeSubset['SubIndex'][next((n for n, x in enumerate(TradeSubset['Exit']) if x), 0)]
OpenPriceOnGap = TradeSubset['Open'][LengthOfTrade]
if ExitTaken == 1: # if exiting short trade, exit during market day
TradeReturn = (EntryPriceUnitOne - StopPriceUnitOne)/EntryPriceUnitOne
    elif ExitTaken == 2: # if exiting long trade, exit during market day
TradeReturn = (StopPriceUnitOne - EntryPriceUnitOne)/EntryPriceUnitOne
elif ExitTaken == 3: # if exiting short trade with gap
TradeReturn = (EntryPriceUnitOne - OpenPriceOnGap)/EntryPriceUnitOne
elif ExitTaken == 4: # if exiting long trade with gap
TradeReturn = (OpenPriceOnGap - EntryPriceUnitOne)/EntryPriceUnitOne
#Log Trade details in Trade dataframe
Empty.append(ExitTaken)
Empty.append(LengthOfTrade)
Empty.append(EntryPriceUnitOne)
Empty.append(StopPriceUnitOne)
Empty.append(SubIndexOfEntry)
Empty.append(SubIndexOfExit)
Empty.append(TradeDirection)
Empty.append(OpenPriceOnGap)
Empty.append(TradeReturn)
#Empty.append(RMultiple)
Emptyseries = pd.Series(Empty)
Trades[Counter] = Emptyseries.values
Empty[:] = []
#This trimmer trims the Trade out of the TradeSubset, then trims to the next signal!
TradeSubset = TradeSubset[(LengthOfTrade + 1):]
SignalTrim = next((n for n, x in enumerate(TradeSubset['Signal']) if x), 0)
TradeSubset = TradeSubset[SignalTrim:]
#
Counter = Counter + 1
print(Counter)
#The last trade that is still open
if sum(abs(TradeSubset['Signal'])) != 0:
EntryPriceUnitOne = TradeSubset['EntryPriceUnitOne'][0]
StopPriceUnitOne = TradeSubset['StopPriceUnitOne'][0]
ExitTaken = 0
LengthOfTrade = len(TradeSubset)
TradeDirection = TradeSubset['Signal'][0]
if TradeDirection == 1:
TradeReturn = (TradeSubset['HybridLongExitPrice'][-1] - EntryPriceUnitOne)/EntryPriceUnitOne
# etc..
    elif TradeDirection == -1:
        #short trades are marked against the short-side exit price
        TradeReturn = (EntryPriceUnitOne - TradeSubset['HybridShortExitPrice'][-1])/EntryPriceUnitOne
SubIndexOfEntry = TradeSubset['SubIndex'][0]
SubIndexOfExit = np.nan
OpenPriceOnGap = np.nan
Empty.append(ExitTaken)
Empty.append(LengthOfTrade)
Empty.append(EntryPriceUnitOne)
Empty.append(StopPriceUnitOne)
Empty.append(SubIndexOfEntry)
Empty.append(SubIndexOfExit)
Empty.append(TradeDirection)
Empty.append(OpenPriceOnGap)
Empty.append(TradeReturn)
Emptyseries = pd.Series(Empty)
Trades[Counter] = Emptyseries.values
Empty[:] = []
Trades = Trades.rename(index={0: "ExitTaken", 1: "LengthOfTrade", 2: "EntryPriceUnitOne",
3: "StopPriceUnitOne", 4: "SubIndexOfEntry", 5: "SubIndexOfExit",
6: "TradeDirection", 7: "OpenPriceOnGap", 8: "TradeReturn"})
Asset1['Brackets'] = 1
for d in Trades:
Asset1['Brackets'].loc[(Asset1['SubIndex'] == Trades[d]['SubIndexOfExit'])] = 1 + Trades[d]['TradeReturn']
NumWinningTrades = len(Asset1['Brackets'][Asset1['Brackets'] > 1])
NumLosingTrades = len(Asset1['Brackets'][Asset1['Brackets'] < 1])
AvgWin = Asset1['Brackets'][Asset1['Brackets'] > 1].mean()
AvgLoss = Asset1['Brackets'][Asset1['Brackets'] < 1].mean()
RewardRisk = AvgWin/AvgLoss
WinRate = NumWinningTrades / (NumWinningTrades + NumLosingTrades)
LossRate = NumLosingTrades / (NumWinningTrades + NumLosingTrades)
Expectancy = (WinRate * RewardRisk) - (LossRate)
Asset1['Multiplier'] = Asset1['Brackets'].cumprod()
Asset1['Multiplier'].plot()
print(Expectancy)
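#Illustrative expectancy arithmetic (made-up numbers, not from this data):
#with WinRate = 0.40, LossRate = 0.60 and RewardRisk = 2.0,
#Expectancy = (0.40 * 2.0) - 0.60 = 0.20, i.e. a positive average edge per trade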
#for a in Trades:
# if Trades[a][8] == np.nan:
# Trades = Trades.drop(a)
#In the event that we are stopped out, we want to continue to look for re-entry.
#while sum(abs(TradeSubset['Signal'])) != 0:
# EntryPriceUnitOne = TradeSubset['EntryPriceUnitOne'][0]
# StopPriceUnitOne = TradeSubset['StopPriceUnitOne'][0]
#
# #I have to figure out how to add units..
# TradeSubset['Units'] = 1
#
# TradeSubset['Exit'] = 0
#
# #Short exit, 1 = yes, 0 = no
# TradeSubset['ShortExit'] = 0
# #Long exit, 1 = yes, 0 = no
# TradeSubset['LongExit'] = 0
# #Did the exit gap overnight? or hit after open
# TradeSubset['GapShortExit'] = 0
# #Did the exit gap overnight? or hit after open
# TradeSubset['GapLongExit'] = 0
#
# #1 = Short exit being hit starting the day of the signal.
# TradeSubset['ShortExit'].loc[(Asset1['FilledSignal'] == -1) & (
# TradeSubset['High'] > TradeSubset['HybridShortExitPrice'])] = 1
# #1 = Long exit being hit starting the day of the signal.
# TradeSubset['LongExit'].loc[(Asset1['FilledSignal'] == 1) & (
# TradeSubset['Low'] < TradeSubset['HybridLongExitPrice'])] = 1
#
#
# #Assess Gaps on days where trade closes
# TradeSubset['GapShortExit'].loc[(TradeSubset['ShortExit'] == 1) & (
# TradeSubset['Open'] > TradeSubset['HybridShortExitPrice'])] = 1
#
# TradeSubset['GapLongExit'].loc[(TradeSubset['LongExit'] == 1) & (
# TradeSubset['Open'] < TradeSubset['HybridLongExitPrice'])] = 1
#
# #Types of exit
# TradeSubset['Exit'].loc[(TradeSubset['ShortExit'] == 1)] = 1 #1 indicating short exit
# TradeSubset['Exit'].loc[(TradeSubset['LongExit'] == 1)] = 2 #1 indicating long exit
# TradeSubset['Exit'].loc[(TradeSubset['GapShortExit'] == 1)] = 3 #1 indicating short exit w/ gap
# TradeSubset['Exit'].loc[(TradeSubset['GapLongExit'] == 1)] = 4 #1 indicating long exit w/ gap
#
# #List comprehension to find exit taken for subset.
# #The next function gives a position on the TradeSubset index
# ExitTaken = TradeSubset['Exit'][next((n for n, x in enumerate(TradeSubset['Exit']) if x), 0)]
# #The length of the trade
# LengthOfTrade = int(next((n for n, x in enumerate(TradeSubset['Exit']) if x), 0))
# #The SubIndex of the exit date is for continuing looking for rentry in new subset
# SubIndexOfExit = TradeSubset['SubIndex'][next((n for n, x in enumerate(TradeSubset['Exit']) if x), 0)]
# SubIndexOfEntry = TradeSubset['SubIndex'][0]
# #TradeDirection
# TradeDirection = TradeSubset['Signal'][0]
# OpenPriceOnGap = TradeSubset['Open'][LengthOfTrade]
# if ExitTaken == 1: # if exiting short trade, exit during market day
# TradeReturn = (EntryPriceUnitOne - StopPriceUnitOne)/EntryPriceUnitOne
#    elif ExitTaken == 2: # if exiting long trade, exit during market day
# TradeReturn = (StopPriceUnitOne - EntryPriceUnitOne)/EntryPriceUnitOne
# elif ExitTaken == 3: # if exiting short trade with gap
# TradeReturn = (EntryPriceUnitOne - OpenPriceOnGap)/EntryPriceUnitOne
# elif ExitTaken == 4: # if exiting long trade with gap
# TradeReturn = (OpenPriceOnGap - EntryPriceUnitOne)/EntryPriceUnitOne
#
#    #In the event that we are stopped out, we want to continue to look for re-entry.
# TradeSubset = TradeSubset[(LengthOfTrade + 1):]
#Create individual trade subsets for examination
#TradeSubIndex = Asset1['SubIndex'].loc[(Asset1['OriginalSignal'] != 0)]
#TradeDates = pd.DataFrame()
#try:
# for i in range(0, len(TradeSubIndex)):
# TradeDates[i] = TradeSubIndex[i]-1,TradeSubIndex[i+1]
#except IndexError:
# pass
#quick reference matrix for exits
#ExitReturns = pd.Series(index=range(0,10))
#ExitReturns[0] = 0
#ExitReturns[1] = 1 + LongProfitTake
#ExitReturns[2] = 1 + ShortProfitTake
#ExitReturns[3] = 0
#ExitReturns[4] = 0
#ExitReturns[5] = 1 - LongStopLoss
#ExitReturns[6] = 1 - ShortStopLoss
#ExitReturns[7] = 1 - LongStopLoss
#ExitReturns[8] = 1 - ShortStopLoss
#ExitReturns[9] = 0
#ExitReturns[10] = 0
#Short, units added
#TradeSubset['Units'].loc[(TradeSubset['FilledSignal'][0] == -1) & (Asset1['Low'] < EntryPriceUnitTwo)] = 2
#TradeSubset['Units'].loc[(TradeSubset['FilledSignal'][0] == -1) & (Asset1['Low'] < EntryPriceUnitThree)] = 3
#TradeSubset['Units'].loc[(TradeSubset['FilledSignal'][0] == -1) & (Asset1['Low'] < EntryPriceUnitFour)] = 4
#Long, units added
#TradeSubset['Units'].loc[(TradeSubset['FilledSignal'][0] == 1) & (Asset1['High'] > EntryPriceUnitTwo)] = 2
#TradeSubset['Units'].loc[(TradeSubset['FilledSignal'][0] == 1) & (Asset1['High'] > EntryPriceUnitThree)] = 3
#TradeSubset['Units'].loc[(TradeSubset['FilledSignal'][0] == 1) & (Asset1['High'] > EntryPriceUnitFour)] = 4
#for l in range(0,len(TradeSubset['Units'])):
# TradeSubset['Units'].loc[(TradeSubset['Units'] < TradeSubset['Units'].shift(1))] = TradeSubset['Units'].shift(1)
#TradeSubset['Units'].loc[(TradeSubset['Units'] < TradeSubset['Units'].shift(1))] = TradeSubset['Units'].shift(1)
##If it's the original signal, record entry price
#Asset1['EntryPrice'].loc[(Asset1['OriginalSignal'] != 0)] = Asset1['Adj Close']
#
##Assess spread/unfavorable fills here!
##Asset1['EntryPriceSlippage'] = Asset1['EntryPrice']
##Long slippage
##Asset1['EntryPriceSlippage'].loc[(Asset1['EntryPrice'] != 0) & (
## Asset1['Signal'] == 1)] = Asset1['EntryPrice'] * (1 + Slippage)
##Short slippage
##Asset1['EntryPriceSlippage'].loc[(Asset1['EntryPrice'] != 0) & (
## Asset1['Signal'] == -1)] = Asset1['EntryPrice'] * (1 - Slippage)
##
##Run the entry price DOWN the column until new position is taken
##Asset1['EntryPriceSlippage'] = Asset1['EntryPriceSlippage'].ffill(inplace=False)
##Fill nan with 0 for entry price
##Asset1['EntryPriceSlippage'] = Asset1['EntryPriceSlippage'].fillna(0)
#
##Declare StopPrice column
#Asset1['StopPrice'] = np.nan
##Long stop calculation
#Asset1['StopPrice'].loc[(Asset1['EntryPrice'] != 0) & (
# Asset1['OriginalSignal'] == 1)] = Asset1['EntryPrice'] * (1 - LongStopLoss)
##Short stop calculation
#Asset1['StopPrice'].loc[(Asset1['EntryPrice'] != 0) & (
# Asset1['OriginalSignal'] == -1)] = Asset1['EntryPrice'] * (1 + ShortStopLoss)
##Forward fill
#Asset1['StopPrice'] = Asset1['StopPrice'].ffill(inplace=False)
#Asset1['StopPrice'] = Asset1['StopPrice'].fillna(0)
#
##Declare ProfitPrice column
#Asset1['ProfitPrice'] = np.nan
##Long stop calculation
#Asset1['ProfitPrice'].loc[(Asset1['EntryPrice'] != 0) & (
# Asset1['OriginalSignal'] == 1)] = Asset1['EntryPrice'] * (1 + LongProfitTake)
##Short stop calculation
#Asset1['ProfitPrice'].loc[(Asset1['EntryPrice'] != 0) & (
# Asset1['OriginalSignal'] == -1)] = Asset1['EntryPrice'] * (1 - ShortProfitTake)
##Forward fill
#Asset1['ProfitPrice'] = Asset1['ProfitPrice'].ffill(inplace=False)
#Asset1['ProfitPrice'] = Asset1['ProfitPrice'].fillna(0)
#
#Asset1['Exit'] = 0
##This will be the final return stream. Generally I use a regime of
##(-1, or 0, or +1) multiplied by the next day's log return to get equity curve
#Asset1['BracketReturns'] = 1
#
##Short Take Gain exit, 1 = yes, 0 = no
#Asset1['STG'] = 0
##Short Take Gain exit, 1 = yes, 0 = no
#Asset1['SSL'] = 0
##Short Stop Loss exit, 1 = yes, 0 = no
#Asset1['LTG'] = 0
##Long Stop Loss exit, 1 = yes, 0 = no
#Asset1['LSL'] = 0
#
##For initial exits
#Asset1['OriginalSTG'] = 0
#Asset1['OriginalSSL'] = 0
#Asset1['OriginalLTG'] = 0
#Asset1['OriginalLSL'] = 0
#
#Asset1['GapSTG'] = 0
#Asset1['GapSSL'] = 0
#Asset1['GapLTG'] = 0
#Asset1['GapLSL'] = 0
#
##1 = STG being hit starting the day after the signal. After initial hit, 1s
##will run down the column even though the trade should be closed
#Asset1['STG'].loc[(Asset1['Signal'] == -1) & (
# Asset1['OriginalSignal'] == 0) & (Asset1['Low'] < Asset1['ProfitPrice'])] = 1
##find initial exit
##Asset1['OriginalSTG'].loc[Asset1['STG'] != Asset1['STG'].shift(1)] = Asset1['STG']
#
#Asset1['LTG'].loc[(Asset1['Signal'] == 1) & (
# Asset1['OriginalSignal'] == 0) & (Asset1['High'] > Asset1['ProfitPrice'])] = 1
##Asset1['OriginalLTG'].loc[Asset1['LTG'] != Asset1['LTG'].shift(1)] = Asset1['LTG']
#
#Asset1['SSL'].loc[(Asset1['Signal'] == -1) & (
# Asset1['OriginalSignal'] == 0) & (Asset1['High'] > Asset1['StopPrice'])] = 1
##Asset1['OriginalSSL'].loc[Asset1['STG'] != Asset1['SSL'].shift(1)] = Asset1['SSL']
#
#Asset1['LSL'].loc[(Asset1['Signal'] == 1) & (
# Asset1['OriginalSignal'] == 0) & (Asset1['Low'] < Asset1['StopPrice'])] = 1
##Asset1['OriginalLSL'].loc[Asset1['LSL'] != Asset1['LSL'].shift(1)] = Asset1['LSL']
#
##Assess Gaps on days where trade closes
#Asset1['GapSTG'].loc[(Asset1['STG'] == 1) & (
# Asset1['Open'] < Asset1['ProfitPrice'])] = 1
#Asset1['GapSSL'].loc[(Asset1['SSL'] == 1) & (
# Asset1['Open'] > Asset1['StopPrice'])] = 1
#Asset1['GapLTG'].loc[(Asset1['LTG'] == 1) & (
# Asset1['Open'] > Asset1['ProfitPrice'])] = 1
#Asset1['GapLSL'].loc[(Asset1['LSL'] == 1) & (
# Asset1['Open'] < Asset1['StopPrice'])] = 1
#
##Days where StopPrice and ProfitPrice are both touched
#Asset1['LongDD'] = np.where((Asset1['LTG'] + Asset1['LSL']) == 2, 1, 0)
#Asset1['ShortDD'] = np.where((Asset1['STG'] + Asset1['SSL']) == 2, 1, 0)
#Asset1['DoubleDay'] = Asset1['LongDD'] + Asset1['ShortDD']
#
##Exit on DoubleDays - 1 & 2; LTG - 3; LSL - 4; STG - 5, SSL - 6.
##Preference given to stoploss on 'expensive' days
#Asset1['Exit'].loc[(Asset1['LTG'] == 1)] = 1 #exit as gain
#Asset1['Exit'].loc[(Asset1['STG'] == 1)] = 2 #exit as gain
#Asset1['Exit'].loc[(Asset1['GapSTG'] == 1)] = 3 #exit as gain
#Asset1['Exit'].loc[(Asset1['GapLTG'] == 1)] = 4 #exit as gain
#Asset1['Exit'].loc[(Asset1['LSL'] == 1)] = 5 #exit as loss
#Asset1['Exit'].loc[(Asset1['SSL'] == 1)] = 6 #exit as loss
#Asset1['Exit'].loc[(Asset1['LongDD'] == 1)] == 7 #exit long position at loss
#Asset1['Exit'].loc[(Asset1['ShortDD'] == 1)] == 8 #exit as short position at loss
#Asset1['Exit'].loc[(Asset1['GapSSL'] == 1)] = 9 #exit as loss
#Asset1['Exit'].loc[(Asset1['GapLSL'] == 1)] = 10 #exit as loss
#
##Create individual trade subsets for examination
#TradeSubIndex = Asset1['SubIndex'].loc[(Asset1['OriginalSignal'] != 0)]
#TradeDates = pd.DataFrame()
#try:
# for i in range(0, len(TradeSubIndex)):
# TradeDates[i] = TradeSubIndex[i]-1,TradeSubIndex[i+1]
#except IndexError:
# pass
#
##quick reference matrix for exits
#ExitReturns = pd.Series(index=range(0,10))
#ExitReturns[0] = 0
#ExitReturns[1] = 1 + LongProfitTake
#ExitReturns[2] = 1 + ShortProfitTake
#ExitReturns[3] = 0
#ExitReturns[4] = 0
#ExitReturns[5] = 1 - LongStopLoss
#ExitReturns[6] = 1 - ShortStopLoss
#ExitReturns[7] = 1 - LongStopLoss
#ExitReturns[8] = 1 - ShortStopLoss
#ExitReturns[9] = 0
#ExitReturns[10] = 0
#
##Trade Analysis from 0th trade
#for ii in TradeDates.columns:
# TradeData = Asset1[TradeDates[ii][0]:TradeDates[ii][1]]
# #the 'next' function yields index position of first non 0 exit
# ExitTaken = TradeData['Exit'][next((n for n, x in enumerate(TradeData['Exit']) if x), 0)]
# SubIndexOfExit = TradeData['SubIndex'][next((n for n, x in enumerate(TradeData['Exit']) if x), 0)]
# TradeDuration = len(TradeData) - 1
# TradeDirection = TradeData['Signal'][0]
# TradeReturn = ExitReturns[ExitTaken]
# RMultiple = (1 - TradeReturn)/ShortStopLoss
##If no stops are hit and there is a signal change, take P/L and switch position
# if ExitTaken == 0:
# SubIndexOfExit = TradeData['SubIndex'][-1]
# if TradeDirection == 1:
# TradeReturn = 1 + ((TradeData['Adj Close'][-1] - TradeData['Adj Close'][0])/TradeData['Adj Close'][0])
# elif TradeDirection == -1:
# TradeReturn = 1 + ((TradeData['Adj Close'][0] - TradeData['Adj Close'][-1])/TradeData['Adj Close'][0])
# else:
# pass
##Assess Gaps
# #GAP STG
# if ExitTaken == 3:
# TradeReturn = 1 + ((TradeData['Adj Close'][0] - TradeData['Open'][TradeDuration])/TradeData['Adj Close'][0])
# else:
# pass
# #GAP LTG
# if ExitTaken == 4:
# TradeReturn = 1 + ((TradeData['Open'][TradeDuration] - TradeData['Adj Close'][0])/TradeData['Adj Close'][0])
# else:
# pass
# #GAP SSL
# if ExitTaken == 9:
# TradeReturn = 1 + ((TradeData['Adj Close'][0] - TradeData['Open'][TradeDuration])/TradeData['Adj Close'][0])
# else:
# pass
# #GAP LSL
# if ExitTaken == 10:
# TradeReturn = 1 + ((TradeData['Open'][TradeDuration] - TradeData['Adj Close'][0])/TradeData['Adj Close'][0])
# else:
# pass
# RMultiple = (TradeReturn - 1)/ShortStopLoss
# Empty.append(ExitTaken)
# Empty.append(SubIndexOfExit)
# Empty.append(TradeDuration)
# Empty.append(TradeDirection)
# Empty.append(TradeReturn)
# Empty.append(RMultiple)
# Emptyseries = pd.Series(Empty)
# Dataset[ii] = Emptyseries.values
# Empty[:] = []
##
#Dataset = Dataset.rename(index={0: "ExitTaken", 1: "SubIndex", 2: "TradeDuration",
# 3: "TradeDirection", 4: "TradeReturn", 5: "RMultiple"})
#
#Asset1['Brackets'] = 1
#Asset1['SlippageCommissionBrackets'] = 1
#for d in Dataset:
# Asset1['SlippageCommissionBrackets'].loc[(Asset1['SubIndex'] == Dataset[d]['SubIndex'])] = Dataset[d]['TradeReturn'] - Slippage - Commission
#for d in Dataset:
# Asset1['Brackets'].loc[(Asset1['SubIndex'] == Dataset[d]['SubIndex'])] = Dataset[d]['TradeReturn']
#NumWinningTrades = len(Asset1['Brackets'][Asset1['Brackets'] > 1])
#NumLosingTrades = len(Asset1['Brackets'][Asset1['Brackets'] < 1])
#AvgWin = Asset1['Brackets'][Asset1['Brackets'] > 1].mean()
#AvgLoss = Asset1['Brackets'][Asset1['Brackets'] < 1].mean()
#RewardRisk = AvgWin/AvgLoss
#WinRate = NumWinningTrades / (NumWinningTrades + NumLosingTrades)
#LossRate = NumLosingTrades / (NumWinningTrades + NumLosingTrades)
#Expectancy = (WinRate * RewardRisk) - (LossRate)
#
#Asset1['Multiplier'] = Asset1['Brackets'].cumprod().plot()
#print(Expectancy)
#
##TradeData = Asset1[TradeDates[0][0]:TradeDates[0][1]]
###the 'next' function yields index position of first non 0 exit
##TradeData['ReIndex'] = range(0,len(TradeData))
##ExitTaken = TradeData['Exit'][next((n for n, x in enumerate(TradeData['Exit']) if x), 0)]
##SubIndexOfExit = TradeData['SubIndex'][next((n for n, x in enumerate(TradeData['Exit']) if x), 0)]
##TradeDuration = TradeData['ReIndex'][next((n for n, x in enumerate(TradeData['Exit']) if x), 0)]
##TradeDirection = TradeData['Signal'][0]
##TradeReturn = ExitReturns[ExitTaken]
##
###If no stops are hit and there is a signal change, take P/L and switch position
##if ExitTaken == 0:
## SubIndexOfExit = TradeData['SubIndex'][-1]
## if TradeDirection == 1:
## TradeReturn = 1 + ((TradeData['Adj Close'][-1] - TradeData['Adj Close'][0])/TradeData['Adj Close'][0])
## elif TradeDirection == -1:
## TradeReturn = 1 + ((TradeData['Adj Close'][0] - TradeData['Adj Close'][-1])/TradeData['Adj Close'][0])
##else:
## pass
###Assess Gaps
###GAP STG
##if ExitTaken == 3:
## TradeReturn = 1 + ((TradeData['Adj Close'][0] - TradeData['Open'][TradeDuration])/TradeData['Adj Close'][0])
##else:
## pass
###GAP LTG
##if ExitTaken == 4:
## TradeReturn = 1 + ((TradeData['Open'][TradeDuration] - TradeData['Adj Close'][0])/TradeData['Adj Close'][0])
##else:
## pass
###GAP SSL
##if ExitTaken == 9:
## TradeReturn = 1 + ((TradeData['Adj Close'][0] - TradeData['Open'][TradeDuration])/TradeData['Adj Close'][0])
##else:
## pass
###GAP LSL
##if ExitTaken == 10:
## TradeReturn = 1 + ((TradeData['Open'][TradeDuration] - TradeData['Adj Close'][0])/TradeData['Adj Close'][0])
##else:
## pass
##Empty.append(ExitTaken)
##Empty.append(SubIndexOfExit)
##Empty.append(TradeDuration)
##Empty.append(TradeDirection)
##Empty.append(TradeReturn)
##Emptyseries = pd.Series(Empty)
###Dataset[ii] = Emptyseries.values
###Empty[:] = []
##print(Emptyseries)
| apache-2.0 | -5,094,780,257,082,969,000 | 42.565957 | 145 | 0.646752 | false | 3.01131 | false | false | false
PeterSurda/PyBitmessage | src/bitmessageqt/messageview.py | 3 | 6476 | """
Custom message viewer with support for switching between HTML and plain
text rendering, HTML sanitization, lazy rendering (as you scroll down),
zoom and URL click warning popup
"""
from PyQt4 import QtCore, QtGui
from safehtmlparser import SafeHTMLParser
from tr import _translate
class MessageView(QtGui.QTextBrowser):
"""Message content viewer class, can switch between plaintext and HTML"""
MODE_PLAIN = 0
MODE_HTML = 1
def __init__(self, parent=0):
super(MessageView, self).__init__(parent)
self.mode = MessageView.MODE_PLAIN
self.html = None
self.setOpenExternalLinks(False)
self.setOpenLinks(False)
self.anchorClicked.connect(self.confirmURL)
self.out = ""
self.outpos = 0
self.document().setUndoRedoEnabled(False)
self.rendering = False
self.defaultFontPointSize = self.currentFont().pointSize()
self.verticalScrollBar().valueChanged.connect(self.lazyRender)
self.setWrappingWidth()
def resizeEvent(self, event):
"""View resize event handler"""
super(MessageView, self).resizeEvent(event)
self.setWrappingWidth(event.size().width())
def mousePressEvent(self, event):
"""Mouse press button event handler"""
if event.button() == QtCore.Qt.LeftButton and self.html and self.html.has_html and self.cursorForPosition(
event.pos()).block().blockNumber() == 0:
if self.mode == MessageView.MODE_PLAIN:
self.showHTML()
else:
self.showPlain()
else:
super(MessageView, self).mousePressEvent(event)
def wheelEvent(self, event):
"""Mouse wheel scroll event handler"""
# super will actually automatically take care of zooming
super(MessageView, self).wheelEvent(event)
if (
QtGui.QApplication.queryKeyboardModifiers() & QtCore.Qt.ControlModifier
) == QtCore.Qt.ControlModifier and event.orientation() == QtCore.Qt.Vertical:
zoom = self.currentFont().pointSize() * 100 / self.defaultFontPointSize
QtGui.QApplication.activeWindow().statusBar().showMessage(_translate(
"MainWindow", "Zoom level %1%").arg(str(zoom)))
def setWrappingWidth(self, width=None):
"""Set word-wrapping width"""
self.setLineWrapMode(QtGui.QTextEdit.FixedPixelWidth)
if width is None:
width = self.width()
self.setLineWrapColumnOrWidth(width)
def confirmURL(self, link):
"""Show a dialog requesting URL opening confirmation"""
if link.scheme() == "mailto":
window = QtGui.QApplication.activeWindow()
window.ui.lineEditTo.setText(link.path())
if link.hasQueryItem("subject"):
window.ui.lineEditSubject.setText(
link.queryItemValue("subject"))
if link.hasQueryItem("body"):
window.ui.textEditMessage.setText(
link.queryItemValue("body"))
window.setSendFromComboBox()
window.ui.tabWidgetSend.setCurrentIndex(0)
window.ui.tabWidget.setCurrentIndex(
window.ui.tabWidget.indexOf(window.ui.send)
)
window.ui.textEditMessage.setFocus()
return
reply = QtGui.QMessageBox.warning(
self,
QtGui.QApplication.translate(
"MessageView",
"Follow external link"),
QtGui.QApplication.translate(
"MessageView",
"The link \"%1\" will open in a browser. It may be a security risk, it could de-anonymise you"
" or download malicious data. Are you sure?").arg(unicode(link.toString())),
QtGui.QMessageBox.Yes,
QtGui.QMessageBox.No)
if reply == QtGui.QMessageBox.Yes:
QtGui.QDesktopServices.openUrl(link)
def loadResource(self, restype, name):
"""
        Callback for loading referenced objects, such as an image.
        For security reasons it currently doesn't do anything.
"""
pass
def lazyRender(self):
"""
Partially render a message. This is to avoid UI freezing when loading huge messages. It continues loading as
you scroll down.
"""
if self.rendering:
return
self.rendering = True
position = self.verticalScrollBar().value()
cursor = QtGui.QTextCursor(self.document())
while self.outpos < len(self.out) and self.verticalScrollBar().value(
) >= self.document().size().height() - 2 * self.size().height():
startpos = self.outpos
self.outpos += 10240
# find next end of tag
if self.mode == MessageView.MODE_HTML:
pos = self.out.find(">", self.outpos)
if pos > self.outpos:
self.outpos = pos + 1
cursor.movePosition(QtGui.QTextCursor.End, QtGui.QTextCursor.MoveAnchor)
cursor.insertHtml(QtCore.QString(self.out[startpos:self.outpos]))
self.verticalScrollBar().setValue(position)
self.rendering = False
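    # Note on lazyRender() above: it appends the pending output in ~10 KB
    # chunks and, in HTML mode, extends each chunk to the next '>' so a tag is
    # never split across insertHtml() calls; scrolling triggers further
    # rendering through the scrollbar's valueChanged signal set in __init__.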
def showPlain(self):
"""Render message as plain text."""
self.mode = MessageView.MODE_PLAIN
out = self.html.raw
if self.html.has_html:
out = "<div align=\"center\" style=\"text-decoration: underline;\"><b>" + unicode(
QtGui.QApplication.translate(
"MessageView", "HTML detected, click here to display")) + "</b></div><br/>" + out
self.out = out
self.outpos = 0
self.setHtml("")
self.lazyRender()
def showHTML(self):
"""Render message as HTML"""
self.mode = MessageView.MODE_HTML
out = self.html.sanitised
out = "<div align=\"center\" style=\"text-decoration: underline;\"><b>" + unicode(
QtGui.QApplication.translate("MessageView", "Click here to disable HTML")) + "</b></div><br/>" + out
self.out = out
self.outpos = 0
self.setHtml("")
self.lazyRender()
def setContent(self, data):
"""Set message content from argument"""
self.html = SafeHTMLParser()
self.html.reset()
self.html.reset_safe()
self.html.allow_picture = True
self.html.feed(data)
self.html.close()
self.showPlain()
| mit | 4,740,693,595,252,166,000 | 38.730061 | 116 | 0.601452 | false | 4.30299 | false | false | false |
pombredanne/SourceForge-Allura | ForgeBlog/forgeblog/tests/functional/test_root.py | 2 | 7471 | import datetime
from ming.orm.ormsession import ThreadLocalORMSession
from mock import patch
from alluratest.controller import TestController
from allura import model as M
#---------x---------x---------x---------x---------x---------x---------x
# RootController methods exposed:
# index, new_page, search
# PageController methods exposed:
# index, edit, history, diff, raw, revert, update
# CommentController methods exposed:
# reply, delete
class TestRootController(TestController):
def _post(self, slug='', **kw):
d = {
'title':'My Post',
'text':'Nothing to see here',
'labels':'',
'state':'published'}
d.update(kw)
r = self.app.post('/blog%s/save' % slug, params=d)
return r
def _blog_date(self):
return datetime.datetime.utcnow().strftime('%Y/%m')
@patch('forgeblog.model.blog.g.director.create_activity')
def test_activity(self, create_activity):
self._post(state='draft')
assert create_activity.call_count == 0
slug = '/%s/my-post' % self._blog_date()
self._post(slug)
assert create_activity.call_count == 1, create_activity.call_count
assert create_activity.call_args[0][1] == 'created'
create_activity.reset_mock()
self._post(slug, text='new text')
assert create_activity.call_count == 1
assert create_activity.call_args[0][1] == 'modified'
create_activity.reset_mock()
self._post(slug, title='new title')
assert create_activity.call_count == 1
assert create_activity.call_args[0][1] == 'renamed'
def test_root_index(self):
self._post()
d = self._blog_date()
response = self.app.get('/blog/')
assert 'Recent posts' in response
assert 'Nothing to see here' in response
assert '/blog/%s/my-post/edit' % d in response
anon_r = self.app.get('/blog/',
extra_environ=dict(username='*anonymous'))
# anonymous user can't see Edit links
assert 'Nothing to see here' in anon_r
assert '/blog/%s/my-post/edit' % d not in anon_r
def test_root_index_draft(self):
self._post(state='draft')
d = self._blog_date()
response = self.app.get('/blog/')
assert 'Recent posts' in response
assert 'Nothing to see here' in response
assert 'Draft' in response
assert '/blog/%s/my-post/edit' % d in response
anon_r = self.app.get('/blog/',
extra_environ=dict(username='*anonymous'))
# anonymous user can't see draft posts
assert 'Nothing to see here' not in anon_r
def test_root_new_post(self):
response = self.app.get('/blog/new')
assert '<option selected value="published">Published</option>' in response
assert 'Enter your title here' in response
def test_validation(self):
r = self._post(title='')
assert 'You must provide a Title' in r
def test_root_new_search(self):
self._post()
response = self.app.get('/blog/search?q=see')
assert 'Search' in response
def test_paging(self):
[self._post() for i in range(3)]
r = self.app.get('/blog/?limit=1&page=0')
assert 'Newer Entries' not in r
assert 'Older Entries' in r
r = self.app.get('/blog/?limit=1&page=1')
assert 'Newer Entries' in r
assert 'Older Entries' in r
r = self.app.get('/blog/?limit=1&page=2')
assert 'Newer Entries' in r
assert 'Older Entries' not in r
def test_discussion_admin(self):
r = self.app.get('/blog/')
r = self.app.get('/admin/blog/options', validate_chunk=True)
assert 'Allow discussion/commenting on posts' in r
# Turn discussion on
r = self.app.post('/admin/blog/set_options',
params=dict(show_discussion='1'))
self._post()
d = self._blog_date()
r = self.app.get('/blog/%s/my-post/' % d)
assert '<div class="markdown_edit">' in r
# Turn discussion off
r = self.app.post('/admin/blog/set_options')
r = self.app.get('/blog/%s/my-post/' % d)
assert '<div class="markdown_edit">' not in r
def test_post_index(self):
self._post()
d = self._blog_date()
response = self.app.get('/blog/%s/my-post/' % d)
assert 'Nothing to see here' in response
assert '/blog/%s/my-post/edit' % d in response
anon_r = self.app.get('/blog/%s/my-post/' % d,
extra_environ=dict(username='*anonymous'))
# anonymous user can't see Edit links
assert 'Nothing to see here' in anon_r
assert '/blog/%s/my-post/edit' % d not in anon_r
self.app.get('/blog/%s/no-my-post' % d, status=404)
def test_post_index_draft(self):
self._post(state='draft')
d = self._blog_date()
response = self.app.get('/blog/%s/my-post/' % d)
assert 'Nothing to see here' in response
assert 'Draft' in response
assert '/blog/%s/my-post/edit' % d in response
anon_r = self.app.get('/blog/%s/my-post/' % d,
extra_environ=dict(username='*anonymous'))
# anonymous user can't get to draft posts
assert 'Nothing to see here' not in anon_r
def test_post_edit(self):
self._post()
d = self._blog_date()
response = self.app.get('/blog/%s/my-post/edit' % d)
assert 'Nothing' in response
# anon users can't edit
response = self.app.get('/blog/%s/my-post/edit' % d,
extra_environ=dict(username='*anonymous'))
assert 'Nothing' not in response
def test_post_history(self):
self._post()
d = self._blog_date()
self._post('/%s/my-post' % d)
self._post('/%s/my-post' % d)
response = self.app.get('/blog/%s/my-post/history' % d)
assert 'My Post' in response
# two revisions are shown
assert '2 by Test Admin' in response
assert '1 by Test Admin' in response
self.app.get('/blog/%s/my-post?version=1' % d)
self.app.get('/blog/%s/my-post?version=foo' % d, status=404)
def test_post_diff(self):
self._post()
d = self._blog_date()
self._post('/%s/my-post' % d, text='sometext')
self.app.post('/blog/%s/my-post/revert' % d, params=dict(version='1'))
response = self.app.get('/blog/%s/my-post/' % d)
response = self.app.get('/blog/%s/my-post/diff?v1=0&v2=0' % d)
assert 'My Post' in response
def test_feeds(self):
self.app.get('/blog/feed.rss')
self.app.get('/blog/feed.atom')
def test_post_feeds(self):
self._post()
d = self._blog_date()
response = self.app.get('/blog/%s/my-post/feed.rss' % d)
assert 'Nothing to see' in response
response = self.app.get('/blog/%s/my-post/feed.atom' % d)
assert 'Nothing to see' in response
def test_related_artifacts(self):
self._post(title='one')
d = self._blog_date()
self._post(title='two', text='[blog:%s/one]' % d)
M.MonQTask.run_ready()
ThreadLocalORMSession.flush_all()
        r = self.app.get('/blog/%s/one/' % d)
assert 'Related' in r
assert 'Blog Post: %s/two' % d in r
| apache-2.0 | 8,516,946,714,885,063,000 | 37.510309 | 82 | 0.566725 | false | 3.578065 | true | false | false |
greyfenrir/taurus | bzt/utils.py | 1 | 79242 | # coding=utf-8
"""
Every project needs its trash heap of miscellaneous functions and classes
Copyright 2015 BlazeMeter Inc.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import codecs
import copy
import csv
import fnmatch
import ipaddress
import itertools
import json
import locale
import logging
import mimetypes
import operator
import os
import platform
import random
import re
import shlex
import shutil
import signal
import subprocess
import sys
import tarfile
import tempfile
import time
import traceback
import webbrowser
import zipfile
from abc import abstractmethod
from collections import defaultdict, Counter
from contextlib import contextmanager
from distutils.version import LooseVersion
from subprocess import CalledProcessError, PIPE, check_output, STDOUT
from webbrowser import GenericBrowser
import math
import psutil
import requests
import requests.adapters
from io import IOBase
from lxml import etree
from progressbar import ProgressBar, Percentage, Bar, ETA
from urllib import parse
from urllib.request import url2pathname
from urwid import BaseScreen
from bzt import TaurusInternalException, TaurusNetworkError, ToolError, TaurusConfigError
LOG = logging.getLogger("")
CALL_PROBLEMS = (CalledProcessError, OSError)
numeric_types = (int, float, complex)
viewvalues = operator.methodcaller("values")
def unicode_decode(string, errors="strict"):
if isinstance(string, bytes):
return string.decode("utf-8", errors)
else:
return string
def communicate(proc): # todo: replace usage of it with sync_run()
out, err = proc.communicate()
out = unicode_decode(out, errors="ignore")
err = unicode_decode(err, errors="ignore")
return out, err
def iteritems(dictionary, **kw):
return iter(dictionary.items(**kw))
def b(string):
return string.encode("latin-1")
def get_stacktrace(exc):
return ''.join(traceback.format_tb(exc.__traceback__)).rstrip()
def reraise(exc_info, exc=None):
_type, message, stacktrace = exc_info
if exc is None:
exc = _type(message)
exc.__traceback__ = stacktrace
raise exc
def stream_decode(string):
if not isinstance(string, str):
return string.decode()
else:
return string
def sync_run(args, env=None):
output = check_output(args, env=env, stderr=STDOUT)
return stream_decode(output).rstrip()
def temp_file(suffix="", prefix="tmp", dir=None):
""" Creates temporary file, returns name of it. User is responsible for deleting the file """
fd, fname = tempfile.mkstemp(suffix=suffix, prefix=prefix, dir=dir)
os.close(fd)
return fname
def simple_body_dict(dic):
""" body dict must have just one level for sending with form params"""
if isinstance(dic, dict):
for key in dic:
if not isinstance(dic[key], (str, numeric_types)):
return False
return True
return False
def get_full_path(path, default=None, step_up=0):
"""
    Expand '~' and resolve a relative path against the current working directory.
    The target doesn't have to exist.
:param path:
:param default:
:param step_up:
:return:
"""
if not path:
return default
res = os.path.abspath(os.path.expanduser(path))
for _ in range(step_up):
res = os.path.dirname(res)
return res
BZT_DIR = get_full_path(__file__, step_up=1)
RESOURCES_DIR = os.path.join(BZT_DIR, "resources")
def get_files_recursive(dir_name, exclude_mask=''):
for root, _, files in os.walk(dir_name):
for _file in files:
if not fnmatch.fnmatch(_file, exclude_mask):
yield os.path.join(root, _file)
def parse_java_version(versions):
if versions:
version = versions[0]
if LooseVersion(version) > LooseVersion("6"): # start of openjdk naming
major = re.findall("^([\d]*)", version)
else:
major = re.findall("\.([\d]*)", version)
if major:
return major[0]
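# Illustrative results for the helper above (version strings are examples):
#     parse_java_version(["9.0.4"])     -> "9"  (openjdk-style naming)
#     parse_java_version(["1.8.0_181"]) -> "8"  (legacy "1.x" naming)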
def run_once(func):
"""
A decorator to run function only once
:type func: __builtin__.function
:return:
"""
def wrapper(*args, **kwargs):
"""
:param kwargs:
:param args:
"""
if not wrapper.has_run:
wrapper.has_run = True
return func(*args, **kwargs)
wrapper.has_run = False
return wrapper
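# Minimal illustration of the decorator above (the function name is an example):
#     @run_once
#     def print_banner():
#         print("started")
#     print_banner()  # prints "started"
#     print_banner()  # no-op afterwards, returns None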
def replace_in_config(config, samples, substitutes, log=None):
def file_replacer(value, key, container):
if value in samples:
container[key] = substitutes[samples.index(value)]
if container[key] != value and log:
log.debug("Replaced %s with %s", value, container[key])
BetterDict.traverse(config, file_replacer)
def dehumanize_time(str_time):
"""
Convert value like 1d4h33m12s103ms into seconds
Also, incidentally translates strings like "inf" into float("inf")
:param str_time: string to convert
:return: float value in seconds
:raise TaurusInternalException: in case of unsupported unit
"""
if not str_time:
return 0
parser = re.compile(r'([\d\.\-infa]+)([a-zA-Z]*)')
parts = parser.findall(str(str_time).replace(' ', ''))
if len(parts) == 0:
msg = "String format not supported: %s"
raise TaurusInternalException(msg % str_time)
result = 0.0
for value, unit in parts:
try:
value = float(value)
except ValueError:
raise TaurusInternalException("Unsupported float string: %r" % value)
unit = unit.lower()
if unit == 'ms':
result += value / 1000.0
continue
elif unit == 's' or unit == '':
result += value
continue
elif unit == 'm':
result += value * 60
continue
elif unit == 'h':
result += value * 60 * 60
continue
elif unit == 'd':
result += value * 60 * 60 * 24
continue
else:
msg = "String contains unsupported unit %s: %s"
raise TaurusInternalException(msg % (unit, str_time))
return result
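# Illustrative conversions for dehumanize_time() above:
#     dehumanize_time("1m30s") -> 90.0
#     dehumanize_time("150ms") -> 0.15
#     dehumanize_time("inf")   -> float("inf")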
def get_bytes_count(str_bytes):
if not str_bytes:
return 0
parser = re.compile(r'([\d\.]+)([a-zA-Z]*)')
parts = parser.findall(str(str_bytes).replace(' ', ''))
if len(parts) != 1:
msg = "String format not supported: %s"
raise TaurusConfigError(msg % str_bytes)
value, unit = parts[0]
try:
value = float(value)
except ValueError:
raise TaurusConfigError("Unsupported float string: %r" % value)
unit = unit.lower()
if unit in ('', 'b'):
result = value
elif unit in ('k', 'kb', 'kib'):
result = value * 1024
elif unit in ('m', 'mb', 'mib'):
result = value * 1024 * 1024
else:
msg = "String contains unsupported unit %s: %s"
raise TaurusConfigError(msg % (unit, str_bytes))
return result
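# Illustrative conversions for get_bytes_count() above:
#     get_bytes_count("512")  -> 512.0
#     get_bytes_count("2kb")  -> 2048.0
#     get_bytes_count("1mib") -> 1048576.0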
class BetterDict(defaultdict):
"""
Wrapper for defaultdict that able to deep merge other dicts into itself
"""
@classmethod
def from_dict(cls, orig):
"""
# https://stackoverflow.com/questions/50013768/how-can-i-convert-nested-dictionary-to-defaultdict/50013806
"""
if isinstance(orig, dict):
return cls(lambda: None, {k: cls.from_dict(v) for k, v in orig.items()})
elif isinstance(orig, list):
return [cls.from_dict(e) for e in orig]
else:
return orig
def get(self, key, default=defaultdict, force_set=False):
"""
        Like dict.get(), but with force_set=True it acts as setdefault()
:param force_set:
:type key: object
:type default: object
"""
if default == defaultdict:
default = BetterDict()
if isinstance(default, BaseException) and key not in self:
raise default
if force_set:
value = self.setdefault(key, default)
else:
value = defaultdict.get(self, key, default)
return value
def merge(self, src):
"""
Deep merge other dict into current
:type src: dict
"""
if not isinstance(src, dict):
raise TaurusInternalException("Loaded object is not dict [%s]: %s" % (src.__class__, src))
for key, val in iteritems(src):
prefix = ""
            if key[0] in ("^", "~", "$"):  # modifier found
prefix = key[0]
key = key[1:]
if prefix == "^": # eliminate flag
# TODO: improve logic - use val contents to see what to eliminate
if key in self:
self.pop(key)
continue
elif prefix == "~": # overwrite flag
if key in self:
self.pop(key)
if isinstance(val, dict):
self.__add_dict(key, val)
elif isinstance(val, list):
self.__add_list(key, val, merge_list_items=(prefix == "$"))
else:
self[key] = val
return self
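    # Illustrative semantics of the merge modifiers handled above
    # (all values are examples):
    #     base = BetterDict.from_dict({"a": {"x": 1}, "lst": [1, 2]})
    #     base.merge({"a": {"y": 2}})  # deep merge: a == {"x": 1, "y": 2}
    #     base.merge({"$lst": [10]})   # element-wise merge: lst == [10, 2]
    #     base.merge({"~lst": [5]})    # overwrite: lst == [5]
    #     base.merge({"^a": None})     # eliminate: key "a" removed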
def __add_dict(self, key, val):
dst = self.get(key, force_set=True)
if isinstance(dst, BetterDict):
dst.merge(val)
elif isinstance(dst, Counter):
self[key] += val
elif isinstance(dst, dict):
raise TaurusInternalException("Mix of DictOfDict and dict is forbidden")
else:
self[key] = BetterDict.from_dict(val)
def __add_list(self, key, val, merge_list_items):
self.__ensure_list_type(val)
if key not in self:
self[key] = []
if not isinstance(self[key], list):
self[key] = val
return
if merge_list_items:
left = self[key]
right = val
for index, righty in enumerate(right):
if index < len(left):
lefty = left[index]
if isinstance(lefty, BetterDict) and isinstance(righty, BetterDict):
lefty.merge(righty)
else:
# todo: should we log all overwriting cases?
LOG.warning("Overwriting the value of %r when merging configs", key)
left[index] = righty
else:
left.insert(index, righty)
else:
self[key].extend(val)
def __ensure_list_type(self, values):
"""
Ensure that values is a list, convert if needed
:param values: dict or list
:return:
"""
for idx, obj in enumerate(values):
if isinstance(obj, dict):
values[idx] = BetterDict.from_dict(obj)
elif isinstance(obj, list):
self.__ensure_list_type(obj)
@classmethod
def traverse(cls, obj, visitor):
"""
Deep traverse dict with visitor. If visitor returns any value, don't traverse into
:type obj: list or dict or object
:type visitor: callable
"""
if isinstance(obj, dict):
for key, val in iteritems(obj):
if not visitor(val, key, obj):
cls.traverse(obj[key], visitor)
elif isinstance(obj, list):
for idx, val in enumerate(obj):
if not visitor(val, idx, obj):
cls.traverse(obj[idx], visitor)
def filter(self, rules, black_list=False):
keys = set(self.keys())
for key in keys:
ikey = "!" + key
if (key in rules) or (ikey in rules): # we have rule for this key
current_black_list = black_list if key in rules else not black_list
rkey = key if key in rules else ikey
if isinstance(rules.get(rkey), dict):
if isinstance(self.get(key), BetterDict): # need to go deeper
self.get(key).filter(rules[rkey], black_list=current_black_list)
elif not current_black_list:
del self[key]
elif current_black_list:
del self[key] # must be blacklisted
elif not black_list:
del self[key] # remove unknown key
current = self.get(key, None)
if isinstance(current, (dict, list)) and not current:
del self[key] # clean empty
def __repr__(self):
return dict(self).__repr__()
def get_uniq_name(directory, prefix, suffix="", forbidden_names=()):
base = os.path.join(directory, prefix)
diff = ""
num = 0
while os.path.exists(base + diff + suffix) or base + diff + suffix in forbidden_names:
num += 1
diff = "-%s" % num
return base + diff + suffix
class TaurusCalledProcessError(CalledProcessError):
def __init__(self, *args, **kwargs):
""" join output and stderr for compatibility """
output = ""
if "output" in kwargs:
output += u"\n>>> {out_start} >>>\n{out}\n<<< {out_end} <<<\n".format(
out_start="START OF STDOUT", out=kwargs["output"], out_end="END OF STDOUT")
if "stderr" in kwargs:
output += u"\n>>> {err_start} >>>\n{err}\n<<< {err_end} <<<\n".format(
err_start="START OF STDERR", err=kwargs.pop("stderr"), err_end="END OF STDERR")
if output:
kwargs["output"] = output
super(TaurusCalledProcessError, self).__init__(*args, **kwargs)
def __str__(self):
base_str = super(TaurusCalledProcessError, self).__str__()
if self.output:
base_str += '\n' + self.output
return base_str
def exec_and_communicate(*args, **kwargs):
process = shell_exec(*args, **kwargs)
out, err = communicate(process)
if process.returncode != 0:
raise TaurusCalledProcessError(process.returncode, cmd=args[0], output=out, stderr=err)
return out, err
def shell_exec(args, cwd=None, stdout=PIPE, stderr=PIPE, stdin=PIPE, shell=False, env=None, pgrp=True):
"""
Wrapper for subprocess starting
"""
if stdout and not isinstance(stdout, (int, IOBase)):
LOG.warning("stdout is not IOBase: %s", stdout)
stdout = None
if stderr and not isinstance(stderr, (int, IOBase)):
LOG.warning("stderr is not IOBase: %s", stderr)
stderr = None
if isinstance(args, str) and not shell:
args = shlex.split(args, posix=not is_windows())
LOG.debug("Executing shell: %s at %s", args, cwd or os.curdir)
kwargs = {
"stdout": stdout,
"stderr": stderr,
"stdin": stdin,
"bufsize": 0,
"cwd": cwd,
"shell": shell,
"env": env
}
if is_windows():
if pgrp:
kwargs["creationflags"] = subprocess.CREATE_NEW_PROCESS_GROUP
return psutil.Popen(args, **kwargs)
else:
kwargs["close_fds"] = True
if pgrp:
kwargs["preexec_fn"] = os.setpgrp
return psutil.Popen(args, **kwargs)
# FIXME: shouldn't we bother closing opened descriptors?
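# Illustrative pairing of shell_exec() with communicate() above
# (the command line is an example only):
#     proc = shell_exec(["python", "--version"])
#     out, err = communicate(proc)  # decoded stdout/stderr strings
#     code = proc.poll()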
class Environment(object):
def __init__(self, log=None, parent=None):
self.data = {}
self._queue = []
log = log or LOG
self.log = log.getChild(self.__class__.__name__)
if parent:
self._queue.extend(
[(self.__getattribute__(method), args, kwargs) for method, args, kwargs in parent.get_queue()])
def get_queue(self):
return [(method.__name__, args, kwargs) for method, args, kwargs in self._queue]
def set(self, *args, **kwargs):
self._add_to_queue(self._set, *args, **kwargs)
def add_path(self, *args, **kwargs):
self._add_to_queue(self._add_path, *args, **kwargs)
def add_java_param(self, *args, **kwargs):
self._add_to_queue(self._add_java_param, *args, **kwargs)
def update(self, *args, **kwargs):
self._add_to_queue(self._update, *args, **kwargs)
def _add_to_queue(self, *args, **kwargs):
self._queue.append((args[0], args[1:], kwargs))
def _set(self, env):
"""
:type env: dict
"""
for key in env:
key = str(key)
val = env[key]
if is_windows():
key = key.upper()
if key in self.data:
if val is None:
self.log.debug("Remove '%s' from environment", key)
self.data.pop(key)
else:
self.log.debug("Replace '%s' in environment", key)
self.data[key] = str(val)
else:
self._add({key: val}, '', finish=False)
def _add_path(self, pair, finish=False):
self._add(pair, os.pathsep, finish)
def _add_java_param(self, pair, finish=False):
self._add(pair, " ", finish)
def _update(self, env): # compatibility with taurus-server
self.set(env)
def _add(self, pair, separator, finish):
for key in pair:
val = pair[key]
key = str(key)
if is_windows():
key = key.upper()
if val is None:
self.log.debug("Skip empty variable '%s'", key)
return
val = str(val)
if key in self.data:
if finish:
self.data[key] += separator + val # add to the end
else:
self.data[key] = val + separator + self.data[key] # add to the beginning
else:
self.data[key] = str(val)
def get(self, key=None):
self._apply_queue()
if key:
key = str(key)
if is_windows():
key = key.upper()
return self.data.get(key, None)
else:
# full environment
return copy.deepcopy(self.data)
def _apply_queue(self):
self.data = {}
self._set(os.environ)
for method, args, kwargs in self._queue:
method(*args, **kwargs)
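# Illustrative usage of the queued Environment above (names/paths are examples):
#     env = Environment()
#     env.set({"JAVA_HOME": "/opt/java"})      # queued, applied lazily
#     env.add_path({"PATH": "/opt/tool/bin"})  # prepended when applied
#     merged = env.get()  # replays the queue on top of os.environ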
class FileReader(object):
SYS_ENCODING = locale.getpreferredencoding()
def __init__(self, filename="", file_opener=None, parent_logger=None):
self.fds = None
if parent_logger:
self.log = parent_logger.getChild(self.__class__.__name__)
else:
self.log = logging.getLogger(self.__class__.__name__)
if file_opener:
self.file_opener = file_opener # external method for opening of file
else:
self.file_opener = lambda f: open(f, mode='rb') # default mode is binary
        # for non-trivial openers the filename must be empty (more complicated than just open())
# it turns all regular file checks off, see is_ready()
self.name = filename
self.cp = 'utf-8' # default code page is utf-8
self.decoder = codecs.lookup(self.cp).incrementaldecoder()
self.fallback_decoder = codecs.lookup(self.SYS_ENCODING).incrementaldecoder(errors='ignore')
self.offset = 0
def _readlines(self, hint=None):
# get generator instead of list (in regular readlines())
length = 0
for line in self.fds:
yield line
if hint and hint > 0:
length += len(line)
if length >= hint:
return
def is_ready(self):
if not self.fds:
if self.name:
if not os.path.isfile(self.name):
self.log.debug("File not appeared yet: %s", self.name)
return False
if not os.path.getsize(self.name):
self.log.debug("File is empty: %s", self.name)
return False
self.log.debug("Opening file: %s", self.name)
# call opener regardless of the name value as it can use empty name as flag
self.fds = self.file_opener(self.name)
if self.fds:
self.name = self.fds.name
return True
def _decode(self, line, last_pass=False):
try:
return self.decoder.decode(line, final=last_pass)
except UnicodeDecodeError:
self.log.warning("Content encoding of '%s' doesn't match %s", self.name, self.cp)
self.cp = self.SYS_ENCODING
self.decoder = self.fallback_decoder
self.decoder.reset()
self.log.warning("Proposed code page: %s", self.cp)
return self.decoder.decode(line, final=last_pass)
def get_lines(self, size=-1, last_pass=False):
if self.is_ready():
if last_pass:
size = -1
self.fds.seek(self.offset)
for line in self._readlines(hint=size):
self.offset += len(line)
yield self._decode(line, last_pass)
def get_line(self):
line = ""
if self.is_ready():
self.fds.seek(self.offset)
line = self.fds.readline()
self.offset += len(line)
return self._decode(line)
def get_bytes(self, size=-1, last_pass=False, decode=True):
if self.is_ready():
if last_pass:
size = -1
self.fds.seek(self.offset)
_bytes = self.fds.read(size)
self.offset += len(_bytes)
if decode:
return self._decode(_bytes, last_pass)
else:
return _bytes
def __del__(self):
self.close()
def close(self):
if self.fds:
self.fds.close()
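# Illustrative incremental-reading pattern for FileReader above
# (the file name and handler are examples only):
#     reader = FileReader("results.log")
#     for line in reader.get_lines():  # yields only data added since last call
#         handle(line)                 # hypothetical per-line handler
#     reader.close()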
def ensure_is_dict(container, key, sub_key):
"""
Ensure that dict item is dict, convert if needed
:type container: dict or list
:type key: basestring or int
:type sub_key: basestring
:return:
"""
if isinstance(container, BetterDict):
container.get(key, force_set=True)
elif isinstance(container, dict): # todo: remove after fixing merge
container[key] = BetterDict()
if not isinstance(container[key], dict): # todo: replace dict with BetterDict after fixing merge
container[key] = BetterDict.from_dict({sub_key: container[key]})
return container[key]
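# Usage sketch (values are illustrative; BetterDict is this module's config dict):
#   cfg = BetterDict.from_dict({"execution": "quick-scenario"})
#   ensure_is_dict(cfg, "execution", "scenario")
#   # cfg is now {"execution": {"scenario": "quick-scenario"}}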
class MultiPartForm(object):
"""
Accumulate the data to be used when posting a form.
http://blog.doughellmann.com/2009/07/
pymotw-urllib2-library-for-opening-urls.html
:type form_fields: list[str,str]
"""
def __init__(self):
self.form_fields = []
self.files = []
self.boundary = make_boundary()
def get_content_type(self):
""" returns content type """
return 'multipart/form-data; boundary=%s' % self.boundary
def add_field(self, name, value):
"""
Add a simple field to the form data.
:type name: str
:type value: str
"""
self.form_fields.append((name, value))
def add_file_as_string(self, fieldname, filename, body, mimetype=None):
""" add raw string file
:type fieldname: str
:type filename: str
:type body: str | bytes
:type mimetype: str
"""
default = 'application/octet-stream'
if mimetype is None:
mimetype = mimetypes.guess_type(filename)[0] or default
self.files.append((fieldname, filename, mimetype, body))
def add_file(self, fieldname, filename, file_handle=None, mimetype=None):
"""Add a file to be uploaded.
:type mimetype: str
:type file_handle: file
:type filename: str
:type fieldname: str
"""
if not file_handle:
with open(filename, 'rb') as fds:
body = fds.read()
filename = os.path.basename(filename)
else:
body = file_handle.read()
self.add_file_as_string(fieldname, filename, body, mimetype)
def __convert_to_list(self):
"""Return a string representing the form, including attached files."""
# Build a list of lists, each containing "lines" of the
# request. Each part is separated by a boundary string.
# Once the list is built, return a string where each
# line is separated by '\r\n'.
parts = []
part_boundary = '--' + self.boundary
# Add the form fields
parts.extend(
[part_boundary, 'Content-Disposition: form-data; name="%s"' % name, '', value, ]
for name, value in self.form_fields
)
# Add the files to upload
parts.extend(
[part_boundary,
'Content-Disposition: file; name="%s"; filename="%s"' % (field_name, filename),
'Content-Type: %s' % content_type, '', body]
for field_name, filename, content_type, body in self.files
)
# Flatten the list and add closing boundary marker,
# then return CR+LF separated data
flattened = list(itertools.chain(*parts))
flattened.append('--' + self.boundary + '--')
return flattened
def form_as_bytes(self):
"""
represents form contents as bytes
"""
result_list = []
for item in self.__convert_to_list():
# bytes (Python 3.x) are appended as-is; str values are encoded first
if isinstance(item, bytes):
result_list.append(item)
elif isinstance(item, str):
result_list.append(item.encode())
else:
raise TaurusInternalException("Unhandled form data type: %s" % type(item))
res_bytes = b("\r\n").join(result_list)
res_bytes += b("\r\n")
return res_bytes
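# Minimal usage sketch for MultiPartForm (field and file names are illustrative):
#   form = MultiPartForm()
#   form.add_field("project", "taurus")
#   form.add_file_as_string("report", "report.txt", "all good")
#   body = form.form_as_bytes()  # send with Content-Type from form.get_content_type()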
def to_json(obj, indent=True):
"""
Convert object into indented json
:param indent: whether to generate indented JSON
:param obj: object to convert
:return:
"""
# NOTE: you can set allow_nan=False to fail when serializing NaN/Infinity
return json.dumps(obj, indent=indent, cls=ComplexEncoder)
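# Example (sketch):
#   to_json({"a": 1})               # indented JSON string
#   to_json({"a": 1}, indent=None)  # compact single-line JSON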
class JSONDumpable(object):
"""
Marker class for json dumpable classes
"""
pass
class JSONConvertible(object):
@abstractmethod
def __json__(self):
"Convert class instance into JSON-dumpable structure (e.g. dict)"
pass
class ComplexEncoder(json.JSONEncoder):
"""
Magic class to help serialize in JSON any object.
"""
# todo: should we add complex type?
TYPES = (dict, list, tuple, str, int, float, bool, type(None))
def default(self, obj): # pylint: disable=method-hidden
"""
Filters out protected and private fields
:param obj:
:return:
"""
if self.__dumpable(obj):
res = {}
for key, val in iteritems(obj.__dict__):
if not self.__dumpable(val):
# logging.debug("Filtered out: %s.%s", key, val)
pass
elif key.startswith('_'):
# logging.debug("Filtered out: %s", key)
pass
else:
res[key] = val
return res
elif ComplexEncoder.__convertible(obj):
return obj.__json__()
else:
return None
@classmethod
def __dumpable(cls, obj):
"""
Returns true if obj is of a JSON-dumpable type
:param obj:
:rtype: bool
"""
dumpable_types = tuple(cls.TYPES + (JSONDumpable,))
return isinstance(obj, dumpable_types)
@staticmethod
def __convertible(obj):
return isinstance(obj, JSONConvertible)
@classmethod
def of_basic_type(cls, val):
"""
Returns true if val is of basic type
:param val:
:return:
"""
return isinstance(val, cls.TYPES)
def humanize_time(secs):
"""
taken from http://testingreflections.com/node/6534
:param secs:
:return:
"""
mins, secs = divmod(secs, 60)
hours, mins = divmod(mins, 60)
return '%02d:%02d:%02d' % (hours, mins, secs)
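# Examples: humanize_time(3661) -> '01:01:01', humanize_time(59) -> '00:00:59'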
def guess_csv_dialect(header, force_doublequote=False):
""" completely arbitrary fn to detect the delimiter
:param force_doublequote: bool
:type header: str
:rtype: csv.Dialect
"""
possible_delims = ",;\t"
dialect = csv.Sniffer().sniff(header, delimiters=possible_delims)
if force_doublequote:
dialect.doublequote = True
return dialect
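# Example (sketch): the sniffer picks the delimiter from the sample header line:
#   guess_csv_dialect("label,elapsed,success").delimiter  -> ','
#   guess_csv_dialect("label;elapsed;success").delimiter  -> ';'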
def load_class(full_name):
"""
Load class by its full name like bzt.cli.CLI
:type full_name: str
:return:
:rtype: callable
"""
module_name = full_name[:full_name.rfind('.')]
class_name = full_name[full_name.rfind('.') + 1:]
LOG.debug("Importing module: %s", module_name)
module = __import__(module_name)
for mod in module_name.split('.')[1:]:
module = getattr(module, mod)
LOG.debug("Loading class: '%s' from %s", class_name, module)
return getattr(module, class_name)
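# Example: cli_class = load_class("bzt.cli.CLI")
# is equivalent to: from bzt.cli import CLI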
def unzip(source_filename, dest_dir, rel_path=None):
"""
:param source_filename:
:param dest_dir:
:param rel_path:
:return:
"""
LOG.debug("Extracting %s to %s", source_filename, dest_dir)
with zipfile.ZipFile(source_filename) as zfd:
for member in zfd.infolist():
if rel_path:
if not member.filename.startswith(rel_path):
continue
else:
member.filename = member.filename[len(rel_path) + 1:]
if not member.filename:
continue
# Path traversal defense copied from
# http://hg.python.org/cpython/file/tip/Lib/http/server.py#l789
LOG.debug("Writing %s%s%s", dest_dir, os.path.sep, member.filename)
zfd.extract(member, dest_dir)
def untar(source_filename, dest_dir, rel_path=None):
with tarfile.open(source_filename, "r|*") as tar:
for member in tar:
if member.isfile():
if member.name is None:
continue
if rel_path is not None and not member.name.startswith(rel_path):
continue
filename = os.path.basename(member.name)
destination = os.path.join(dest_dir, filename)
with open(destination, "wb") as output:
shutil.copyfileobj(tar.extractfile(member), output, member.size)
def make_boundary(text=None):
"""
Generate boundary id
:param text:
:return:
"""
_width = len(repr(sys.maxsize - 1))
_fmt = '%%0%dd' % _width
token = random.randrange(sys.maxsize)
boundary = ('=' * 15) + (_fmt % token) + '=='
if text is None:
return boundary
bnd = boundary
counter = 0
while True:
cre = re.compile(r'^--' + re.escape(bnd) + '(--)?$', re.MULTILINE)
if not cre.search(text):
break
bnd = boundary + '.' + str(counter)
counter += 1
return bnd
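# Example (sketch): make_boundary() returns something like
#   '===============8089796485077004866=='
# Passing text guarantees the boundary does not collide with any line
# already present in that text (a '.N' suffix is appended until unique).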
def is_int(str_val):
"""
Check if str_val is int type
:param str_val: str
:return: bool
"""
if str_val.startswith('-') and str_val[1:].isdigit():
return True
elif str_val.isdigit():
return True
else:
return False
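# Examples: is_int("42") -> True, is_int("-7") -> True, is_int("3.5") -> False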
def shutdown_process(process_obj, log_obj):
count = 60
while process_obj and process_obj.poll() is None:
time.sleep(1)
count -= 1
kill_signal = signal.SIGTERM if count > 0 else signal.SIGKILL
log_obj.info("Terminating process PID %s with signal %s (%s tries left)", process_obj.pid, kill_signal, count)
try:
if is_windows():
cur_pids = psutil.pids()
if process_obj.pid in cur_pids:
jm_proc = psutil.Process(process_obj.pid)
for child_proc in jm_proc.children(recursive=True):
log_obj.debug("Terminating child process: %d", child_proc.pid)
child_proc.send_signal(kill_signal)
os.kill(process_obj.pid, kill_signal)
else:
os.killpg(process_obj.pid, kill_signal)
except OSError as exc:
log_obj.debug("Failed to terminate process: %s", exc)
class LocalFileAdapter(requests.adapters.BaseAdapter):
"""
Protocol Adapter to allow HTTPClient to GET file:// URLs
"""
@staticmethod
def _chkpath(method, path):
"""Return an HTTP status for the given filesystem path."""
if method.lower() in ('put', 'delete'):
return 501, "Not Implemented" # TODO
elif method.lower() not in ('get', 'head'):
return 405, "Method Not Allowed"
elif os.path.isdir(path):
return 400, "Path Not A File"
elif not os.path.isfile(path):
return 404, "File Not Found"
elif not os.access(path, os.R_OK):
return 403, "Access Denied"
else:
return 200, "OK"
def send(self, req, **kwargs): # pylint: disable=unused-argument
"""Return the file specified by the given request
"""
path = os.path.normcase(os.path.normpath(url2pathname(req.path_url)))
response = requests.Response()
response.status_code, response.reason = self._chkpath(req.method, path)
if response.status_code == 200 and req.method.lower() != 'head':
try:
response.raw = open(path, 'rb')
except (OSError, IOError) as err:
response.status_code = 500
response.reason = str(err)
if isinstance(req.url, bytes):
response.url = req.url.decode('utf-8')
else:
response.url = req.url
response.request = req
response.connection = self
return response
def close(self):
pass
class HTTPClient(object):
def __init__(self):
self.session = requests.Session()
self.session.mount('file://', LocalFileAdapter())
self.log = logging.getLogger(self.__class__.__name__)
self.proxy_settings = None
def add_proxy_settings(self, proxy_settings):
if proxy_settings and proxy_settings.get("address"):
self.proxy_settings = proxy_settings
proxy_addr = proxy_settings.get("address")
self.log.info("Using proxy %r", proxy_addr)
proxy_url = parse.urlsplit(proxy_addr)
self.log.debug("Using proxy settings: %s", proxy_url)
username = proxy_settings.get("username")
pwd = proxy_settings.get("password")
scheme = proxy_url.scheme if proxy_url.scheme else 'http'
if username and pwd:
proxy_uri = "%s://%s:%s@%s" % (scheme, username, pwd, proxy_url.netloc)
else:
proxy_uri = "%s://%s" % (scheme, proxy_url.netloc)
self.session.proxies = {"https": proxy_uri, "http": proxy_uri}
self.session.verify = proxy_settings.get('ssl-cert', True)
self.session.cert = proxy_settings.get('ssl-client-cert', None)
def get_proxy_props(self):
props = {}
if not self.proxy_settings or not self.proxy_settings.get("address"):
return props
proxy_url = parse.urlsplit(self.proxy_settings.get("address"))
username = self.proxy_settings.get("username")
pwd = self.proxy_settings.get("password")
for protocol in ["http", "https"]:
props[protocol + '.proxyHost'] = proxy_url.hostname
props[protocol + '.proxyPort'] = proxy_url.port or 80
if username and pwd:
props[protocol + '.proxyUser'] = username
props[protocol + '.proxyPass'] = pwd
return props
@staticmethod
def _save_file_from_connection(conn, filename, reporthook=None):
if not conn.ok:
raise TaurusNetworkError("Connection failed, status code %s" % conn.status_code)
total = int(conn.headers.get('content-length', 0))
block_size = 1024
count = 0
with open(filename, 'wb') as f:
for chunk in conn.iter_content(chunk_size=block_size):
if chunk:
f.write(chunk)
count += 1
if reporthook:
reporthook(count, block_size, total)
def download_file(self, url, filename, reporthook=None, data=None, timeout=None):
headers = None
try:
with self.session.get(url, stream=True, data=data, timeout=timeout) as conn:
self._save_file_from_connection(conn, filename, reporthook=reporthook)
headers = conn.headers
except requests.exceptions.RequestException as exc:
resp = exc.response
self.log.debug("File download resulted in exception: %s", traceback.format_exc())
msg = "Unsuccessful download from %s" % url
if resp is not None:
msg += ": %s - %s" % (resp.status_code, resp.reason)
raise TaurusNetworkError(msg)
except BaseException:
self.log.debug("File download resulted in exception: %s", traceback.format_exc())
raise TaurusNetworkError("Unsuccessful download from %s" % url)
return filename, headers
def request(self, method, url, *args, **kwargs):
self.log.debug('Making HTTP request %s %s', method, url)
try:
return self.session.request(method, url, *args, **kwargs)
except requests.exceptions.RequestException as exc:
resp = exc.response
self.log.debug("Request resulted in exception: %s", traceback.format_exc())
msg = "Request to %s failed" % url
if resp is not None:
msg += ": %s - %s" % (resp.status_code, resp.reason)
raise TaurusNetworkError(msg)
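# Usage sketch for HTTPClient (URLs and filenames are placeholders):
#   client = HTTPClient()
#   resp = client.request("GET", "http://example.com/status")
#   client.download_file("http://example.com/build.zip", "build.zip")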
class ExceptionalDownloader(object):
def __init__(self, http_client):
"""
:type http_client: HTTPClient
"""
super(ExceptionalDownloader, self).__init__()
self.http_client = http_client
def get(self, url, filename=None, reporthook=None, data=None, suffix="", timeout=5.0):
if os.getenv("TAURUS_DISABLE_DOWNLOADS", ""):
raise TaurusInternalException("Downloads are disabled by TAURUS_DISABLE_DOWNLOADS env var")
try:
if not filename:
filename = temp_file(suffix)
result = self.http_client.download_file(url, filename, reporthook=reporthook, data=data, timeout=timeout)
except BaseException:
os.remove(filename)
raise
return result
class RequiredTool(object):
"""
Abstract required tool
"""
def __init__(self, log=None, tool_path="", download_link="", http_client=None,
env=None, version=None, installable=True, mandatory=True):
self.http_client = http_client
self.tool_path = os.path.expanduser(tool_path)
self.download_link = download_link
self.mirror_manager = None
self.mandatory = mandatory
self.version = None
if version is not None:
self.version = str(version)
self.installable = installable
self.tool_name = self.__class__.__name__
# for browsermobproxy compatibility, remove it later
if not isinstance(log, logging.Logger):
log = None
log = log or LOG
self.log = log.getChild(self.tool_name)
self.env = env or Environment(self.log)
def _get_version(self, output):
return
def call(self, *args, **kwargs):
mixed_env = self.env.get()
mixed_env.update(kwargs.get("env", {}))
kwargs["env"] = mixed_env
return exec_and_communicate(*args, **kwargs)
def check_if_installed(self):
if os.path.exists(self.tool_path):
return True
self.log.debug("File not exists: %s", self.tool_path)
return False
def install(self):
if not self.installable:
msg = "%s isn't found, automatic installation isn't implemented" % self.tool_name
if self.mandatory:
raise ToolError(msg)
else:
self.log.warning(msg)
return
with ProgressBarContext() as pbar:
if not os.path.exists(os.path.dirname(self.tool_path)):
os.makedirs(os.path.dirname(self.tool_path))
downloader = ExceptionalDownloader(self.http_client)
self.log.info("Downloading %s", self.download_link)
downloader.get(self.download_link, self.tool_path, reporthook=pbar.download_callback)
if self.check_if_installed():
return self.tool_path
else:
raise ToolError("Unable to run %s after installation!" % self.tool_name)
def _download(self, suffix=".zip", use_link=False):
if use_link:
links = [self.download_link]
else:
links = self.mirror_manager.mirrors()
downloader = ExceptionalDownloader(self.http_client)
for link in links:
self.log.info("Downloading: %s", link)
with ProgressBarContext() as pbar:
try:
return downloader.get(link, reporthook=pbar.download_callback, suffix=suffix)[0]
except KeyboardInterrupt:
raise
except BaseException as exc:
self.log.error("Error while downloading %s: %s" % (link, exc))
raise TaurusInternalException("%s download failed: No more links to try" % self.tool_name)
class JavaVM(RequiredTool):
def __init__(self, **kwargs):
if "mandatory" not in kwargs:
kwargs["mandatory"] = False
super(JavaVM, self).__init__(installable=False, tool_path="java", **kwargs)
def _get_version(self, output):
versions = re.findall(r'version\ "([_\d\.]*)', output)
version = parse_java_version(versions)
if not version:
self.log.warning("Tool version parsing error: %s", output)
return version
def check_if_installed(self):
cmd = [self.tool_path, '-version']
self.log.debug("Trying %s: %s", self.tool_name, cmd)
try:
out, err = self.call(cmd)
except CALL_PROBLEMS as exc:
self.log.debug("Failed to check %s: %s", self.tool_name, exc)
return False
self.version = self._get_version(err)
if err:
out += err
self.log.debug("%s output: %s", self.tool_name, out)
return True
class ProgressBarContext(ProgressBar):
def __init__(self, maxval=0):
widgets = [Percentage(), ' ', Bar(marker='=', left='[', right=']'), ' ', ETA()]
super(ProgressBarContext, self).__init__(widgets=widgets, maxval=maxval, fd=sys.stdout)
def __enter__(self):
if not sys.stdout.isatty():
LOG.debug("No progressbar for non-tty output: %s", sys.stdout)
self.start()
return self
def update(self, value=None):
if sys.stdout.isatty():
super(ProgressBarContext, self).update(value)
def __exit__(self, exc_type, exc_val, exc_tb):
del exc_type, exc_val, exc_tb
if sys.stdout.isatty():
self.finish()
def download_callback(self, block_count, blocksize, totalsize):
if totalsize > 0:
self.maxval = totalsize
progress = block_count * blocksize
self.update(progress if progress <= totalsize else totalsize)
class IncrementableProgressBar(ProgressBarContext):
def __init__(self, maxval):
super(IncrementableProgressBar, self).__init__(maxval=maxval)
def increment(self):
incremented = self.currval + 1
if incremented < self.maxval:
super(IncrementableProgressBar, self).update(incremented)
def catchup(self, started_time=None, current_value=None):
super(IncrementableProgressBar, self).start()
if started_time:
self.start_time = started_time
if current_value and current_value < self.maxval:
self.update(current_value)
class TclLibrary(RequiredTool):
ENV_NAME = "TCL_LIBRARY"
INIT_TCL = "init.tcl"
FOLDER = "tcl"
def check_if_installed(self):
"""
Check if tcl is available
:return:
"""
if is_windows():
self.log.debug("Checking if %s variable is present in environment", TclLibrary.ENV_NAME)
if not os.environ.get(TclLibrary.ENV_NAME, None):
self.log.debug("%s environment variable is not present", TclLibrary.ENV_NAME)
return False
else:
self.log.debug("%s environment variable is present", TclLibrary.ENV_NAME)
return True
else:
self.log.debug("We don't need to check tcl library on this platform")
return True
@staticmethod
def _find_tcl_dir():
lib_dirs = [os.path.dirname(_x) for _x in sys.path if _x.lower().endswith('lib')]
for lib_dir in lib_dirs:
base_dir = os.path.join(lib_dir, TclLibrary.FOLDER)
if os.path.exists(base_dir):
for root, _, files in os.walk(base_dir):
if TclLibrary.INIT_TCL in files:
return root
def _set_env_variable(self, value):
self.log.debug("Setting environment %s=%s", TclLibrary.ENV_NAME, value)
os.environ[TclLibrary.ENV_NAME] = value
def install(self):
"""
:return:
"""
tcl_dir = self._find_tcl_dir()
if tcl_dir:
self.log.debug("Tcl directory was found: %s", tcl_dir)
self._set_env_variable(tcl_dir)
if not self.check_if_installed():
self.log.warning("No Tcl library was found")
class Node(RequiredTool):
def __init__(self, **kwargs):
super(Node, self).__init__(installable=False, **kwargs)
def check_if_installed(self):
node_candidates = ["node", "nodejs"]
for candidate in node_candidates:
try:
self.log.debug("Trying '%r' as Node Tool...", candidate)
out, err = self.call([candidate, '--version'])
except CALL_PROBLEMS as exc:
self.log.debug("%r is not installed: %s", candidate, exc)
continue
if err:
out += err
self.log.debug("%s output: %s", candidate, out)
self.tool_path = candidate
return True
return False
class MirrorsManager(object):
def __init__(self, http_client, base_link, parent_logger):
"""
:type base_link: str
:type http_client: HTTPClient
"""
self.base_link = base_link
self.log = parent_logger.getChild(self.__class__.__name__)
self.http_client = http_client
self.page_source = None
@abstractmethod
def _parse_mirrors(self):
return []
def mirrors(self):
self.log.debug("Retrieving mirrors from page: %s", self.base_link)
downloader = ExceptionalDownloader(self.http_client)
try:
tmp_file = downloader.get(self.base_link)[0]
with open(tmp_file) as fds:
self.page_source = fds.read()
except BaseException:
self.log.debug("Exception: %s", traceback.format_exc())
self.log.error("Can't fetch %s", self.base_link)
return self._parse_mirrors()
@contextmanager
def log_std_streams(logger=None, stdout_level=logging.DEBUG, stderr_level=logging.DEBUG):
"""
redirect standard output/error to taurus logger
"""
out_descriptor = os.dup(1)
err_descriptor = os.dup(2)
stdout = tempfile.SpooledTemporaryFile(mode='w+')
stderr = tempfile.SpooledTemporaryFile(mode='w+')
sys.stdout = stdout
sys.stderr = stderr
os.dup2(stdout.fileno(), 1)
os.dup2(stderr.fileno(), 2)
try:
yield
finally:
stdout.seek(0)
stderr.seek(0)
stdout_str = stdout.read().strip()
stderr_str = stderr.read().strip()
stdout.close()
stderr.close()
sys.stdout = sys.__stdout__
sys.stderr = sys.__stderr__
os.dup2(out_descriptor, 1)
os.dup2(err_descriptor, 2)
os.close(out_descriptor)
os.close(err_descriptor)
if logger:
if stdout_str:
logger.log(stdout_level, "STDOUT: " + stdout_str)
if stderr_str:
logger.log(stderr_level, "STDERR: " + stderr_str)
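# Usage sketch: any stdout/stderr produced inside the block is captured and
# re-emitted through the given logger once the block exits:
#   with log_std_streams(logger=LOG):
#       print("captured")   # ends up as "STDOUT: captured" at DEBUG level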
def open_browser(url):
try:
browser = webbrowser.get()
if type(browser) != GenericBrowser: # pylint: disable=unidiomatic-typecheck
with log_std_streams(logger=LOG):
webbrowser.open(url)
except BaseException as exc:
LOG.warning("Can't open link in browser: %s", exc)
def is_windows():
return platform.system() == 'Windows'
def is_linux():
return 'linux' in sys.platform.lower()
def is_mac():
return 'darwin' in sys.platform.lower()
def platform_bitness():
return 64 if sys.maxsize > 2 ** 32 else 32
EXE_SUFFIX = ".bat" if is_windows() else ".sh"
class DummyScreen(BaseScreen):
"""
Null-object for Screen on non-tty output
"""
def __init__(self, rows=120, cols=40):
super(DummyScreen, self).__init__()
self.size = (rows, cols)
self.ansi_escape = re.compile(r'\x1b[^m]*m')
def get_cols_rows(self):
"""
Dummy cols and rows
:return:
"""
return self.size
def draw_screen(self, size, canvas):
"""
:param size:
:type canvas: urwid.Canvas
"""
data = ""
for char in canvas.content():
line = ""
for part in char:
if isinstance(part[2], str):
line += part[2]
else:
line += part[2].decode()
data += "%s│\n" % line
data = self.ansi_escape.sub('', data)
LOG.info("Screen %sx%s chars:\n%s", size[0], size[1], data)
def which(filename):
"""unix-style `which` implementation"""
locations = os.environ.get("PATH").split(os.pathsep)
candidates = []
for location in locations:
candidate = os.path.join(location, filename)
if os.path.isfile(candidate):
candidates.append(candidate)
return candidates
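# Example (sketch): which("java") returns every matching PATH entry, e.g.
#   ['/usr/bin/java'] on a typical Linux box, or [] when nothing is found.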
class PythonGenerator(object):
IMPORTS = ''
INDENT_STEP = 4
def __init__(self, scenario):
self.root = etree.Element("PythonCode")
self.tree = etree.ElementTree(self.root)
self.log = scenario.engine.log.getChild(self.__class__.__name__)
self.scenario = scenario
def add_imports(self):
imports = etree.Element("imports")
imports.text = self.IMPORTS
return imports
@abstractmethod
def build_source_code(self):
pass
@staticmethod
def gen_class_definition(class_name, inherits_from, indent=0):
def_tmpl = "class {class_name}({inherits_from}):"
class_def_element = etree.Element("class_definition", indent=str(indent))
class_def_element.text = def_tmpl.format(class_name=class_name, inherits_from="".join(inherits_from))
return class_def_element
@staticmethod
def gen_method_definition(method_name, params, indent=None):
if indent is None:
indent = PythonGenerator.INDENT_STEP
def_tmpl = "def {method_name}({params}):"
method_def_element = etree.Element("method_definition", indent=str(indent))
method_def_element.text = def_tmpl.format(method_name=method_name, params=",".join(params))
return method_def_element
@staticmethod
def gen_decorator_statement(decorator_name, indent=None):
if indent is None:
indent = PythonGenerator.INDENT_STEP
def_tmpl = "@{decorator_name}"
decorator_element = etree.Element("decorator_statement", indent=str(indent))
decorator_element.text = def_tmpl.format(decorator_name=decorator_name)
return decorator_element
@staticmethod
def gen_statement(statement, indent=None):
if indent is None:
indent = PythonGenerator.INDENT_STEP * 2
statement_elem = etree.Element("statement", indent=str(indent))
statement_elem.text = statement
return statement_elem
def gen_comment(self, comment, indent=None):
return self.gen_statement("# %s" % comment, indent=indent)
def save(self, filename):
with codecs.open(filename, 'w', encoding='utf-8') as fds:
for child in self.root.iter():
if child.text is not None:
indent = int(child.get('indent', "0"))
fds.write(" " * indent + child.text + "\n")
def gen_new_line(self, indent=0):
return self.gen_statement("", indent=indent)
def str_representer(dumper, data):
""" Representer for PyYAML that dumps multiline strings as | scalars """
if len(data.splitlines()) > 1:
return dumper.represent_scalar('tag:yaml.org,2002:str', data, style='|')
return dumper.represent_scalar('tag:yaml.org,2002:str', data)
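# Registration sketch (standard PyYAML API):
#   yaml.add_representer(str, str_representer)
# After that, yaml.dump("a\nb") emits the multiline string as a literal '|' block.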
def humanize_bytes(byteval):
# from http://stackoverflow.com/questions/1094841/reusable-library-to-get-human-readable-version-of-file-size/
# 25613067#25613067
_suffixes = [' ', 'K', 'M', 'G', 'T', 'P']
# determine binary order in steps of size 10
# (coerce to int, // still returns a float)
order = int(math.log(byteval, 2) / 10.0) if byteval else 0
# format file size
# (.4g results in rounded numbers for exact matches and max 3 decimals,
# should never resort to exponent values)
return '{:.4g}{}'.format(byteval / (1 << (order * 10)), _suffixes[order])
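# Examples: humanize_bytes(0) -> '0 ', humanize_bytes(2048) -> '2K',
#   humanize_bytes(3 * 1024 ** 2) -> '3M'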
class LDJSONReader(object):
def __init__(self, filename, parent_log):
self.log = parent_log.getChild(self.__class__.__name__)
self.file = FileReader(filename=filename,
file_opener=lambda f: open(f, 'rb'),
parent_logger=self.log)
self.partial_buffer = ""
def read(self, last_pass=False):
lines = self.file.get_lines(size=1024 * 1024, last_pass=last_pass)
for line in lines:
if not last_pass and not line.endswith("\n"):
self.partial_buffer += line
continue
line = "%s%s" % (self.partial_buffer, line)
self.partial_buffer = ""
yield json.loads(line)
def get_host_ips(filter_loopbacks=True):
"""
Returns a list of all IP addresses assigned to this host.
:param filter_loopbacks: filter out loopback addresses
"""
ips = []
for _, interfaces in iteritems(psutil.net_if_addrs()):
for iface in interfaces:
addr = str(iface.address)
try:
ip = ipaddress.ip_address(addr)
if filter_loopbacks and ip.is_loopback:
continue
except ValueError:
continue
ips.append(iface.address)
return ips
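# Example (sketch; addresses are illustrative, loopbacks filtered by default):
#   get_host_ips() -> ['192.168.1.10', '10.0.0.5']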
def is_url(url):
return parse.urlparse(url).scheme in ["https", "http"]
def guess_delimiter(path):
with open(path) as fhd:
header = fhd.read(4096) # 4KB is enough for header
try:
delimiter = guess_csv_dialect(header).delimiter
except BaseException as exc:
LOG.debug(traceback.format_exc())
LOG.warning('CSV dialect detection failed (%s), default delimiter selected (",")', exc)
delimiter = "," # default value
return delimiter
def get_assembled_value(configs, key, protect=False):
"""
Joins values from several configs, "the last is the most important" (strings, lists or dictionaries).
:param configs: list of dicts with target configs
:param key: name of target config
:param protect: deepcopy values so the source configs are not mutated
"""
target_configs = []
for config in configs:
target_config = config.get(key)
if target_config:
if protect:
target_config = copy.deepcopy(target_config)
target_configs.append(target_config)
if not target_configs:
return
res = target_configs.pop(0)
if all(isinstance(config, dict) for config in target_configs):
for config in target_configs:
res.merge(config)
elif all(isinstance(config, list) for config in target_configs):
for config in target_configs:
res.extend(config)
elif all(isinstance(config, (numeric_types, str)) for config in target_configs):
res = target_configs[-1]
else:
raise TaurusConfigError("Incorrect type of '%s' found." % key)
return res
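# Example (sketch; assumes BetterDict configs so merge() is available). Later
# configs win: dicts are merged, lists extended, scalars overridden:
#   get_assembled_value([BetterDict.from_dict({"modules": {"a": 1}}),
#                        BetterDict.from_dict({"modules": {"b": 2}})], "modules")
#   # -> merged dict containing both "a" and "b"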
def parse_think_time(think_time, full=False):
distributions = ["uniform", "gaussian", "poisson"]
format_str = "^(%s)\(([\wd.]+)[,\s]+([\wd.]+)\)$"
expr = re.compile(format_str % '|'.join(distributions), re.IGNORECASE)
res = expr.match(str(think_time))
if not res: # constant timer
return think_time
if not full:
return res.group(2).lower() # make it simple!
else:
return [res.group(i + 1).lower() for i in range(3)]
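# Examples:
#   parse_think_time("uniform(5s, 1s)")            -> '5s' (the mean part only)
#   parse_think_time("uniform(5s, 1s)", full=True) -> ['uniform', '5s', '1s']
#   parse_think_time("3s")                         -> '3s' (constant timer)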
class SoapUIScriptConverter(object):
NAMESPACES = dict(con="http://eviware.com/soapui/config")
def __init__(self, parent_log):
self.log = parent_log.getChild(self.__class__.__name__)
self.tree = None
self.interface = None
def load(self, path):
try:
self.tree = etree.ElementTree()
self.tree.parse(path)
except BaseException as exc:
msg = "XML parsing failed for file %s: %s"
raise TaurusInternalException(msg % (path, exc))
def _extract_headers(self, config_elem):
headers_settings = config_elem.find(
'.//con:settings/con:setting[@id="com.eviware.soapui.impl.wsdl.WsdlRequest@request-headers"]',
namespaces=self.NAMESPACES)
if headers_settings is None:
return None
headers = etree.fromstring(headers_settings.text)
if "{" + self.NAMESPACES['con'] + "}" + "entry" == headers.tag:
entries = [headers]
else:
entries = headers.findall(".//con:entry", namespaces=self.NAMESPACES)
headers = {entry.get('key'): entry.get('value')
for entry in entries}
return headers
def _extract_assertions(self, config_elem):
assertions = []
assertion_tags = config_elem.findall('.//con:assertion', namespaces=self.NAMESPACES)
for assertion in assertion_tags:
# TODO: XPath assertions / JSONPath assertions ?
if assertion.get('type') in ('Simple Contains', 'Simple NotContains'):
subject = assertion.findtext('./con:configuration/token', namespaces=self.NAMESPACES)
use_regex = assertion.findtext('./con:configuration/useRegEx', namespaces=self.NAMESPACES)
negate = assertion.get('type') == 'Simple NotContains'
assertions.append({"contains": [subject],
"subject": "body",
"regexp": use_regex == "true",
"not": negate,
})
return assertions
def _extract_http_request(self, test_step):
config = test_step.find('./con:config', namespaces=self.NAMESPACES)
request = {
"label": test_step.get('name'),
"url": config.find('.//con:endpoint', namespaces=self.NAMESPACES).text}
method = config.get('method')
if method is not None and method != "GET":
request["method"] = method
headers = self._extract_headers(config)
assertions = self._extract_assertions(config)
if headers:
request["headers"] = headers
if assertions:
request["assert"] = assertions
body = config.findtext('./con:request', namespaces=self.NAMESPACES)
if body is not None:
request["body"] = body
params = config.findall('./con:parameters/con:parameter', namespaces=self.NAMESPACES)
if params:
body = {}
for param in params:
key = param.findtext("./con:name", namespaces=self.NAMESPACES)
value = param.findtext("./con:value", namespaces=self.NAMESPACES)
body[key] = value
request["body"] = body
return request
def _extract_soap_endpoint(self, interface_name, operation_name):
interface = self.tree.find("//con:interface[@name='%s']" % interface_name, namespaces=self.NAMESPACES)
if interface is None:
self.log.warning("Can't find intreface %s for operation %s, skipping", interface_name, operation_name)
return None
interface_endpoint = interface.findtext("./con:endpoints/con:endpoint", namespaces=self.NAMESPACES)
operation = interface.find(".//con:operation[@name='%s']" % operation_name, namespaces=self.NAMESPACES)
if operation is None:
self.log.warning("Can't find operation %s for interface %s, skipping", operation_name, interface_name)
return None
operation_endpoint = operation.findtext(".//con:endpoint", namespaces=self.NAMESPACES)
if operation_endpoint is not None:
return operation_endpoint
elif interface_endpoint is not None:
return interface_endpoint
else:
self.log.warning("Can't find endpoint for %s:%s", interface_name, operation_name)
return None
def _extract_soap_request(self, test_step):
label = test_step.get('name')
config = test_step.find('./con:config', namespaces=self.NAMESPACES)
body = config.findtext('./con:request/con:request', namespaces=self.NAMESPACES)
interface = config.findtext('./con:interface', namespaces=self.NAMESPACES)
operation = config.findtext('./con:operation', namespaces=self.NAMESPACES)
self.log.debug("Extracting SOAP request, interface=%r, operation=%r", interface, operation)
endpoint = self._extract_soap_endpoint(interface, operation)
if endpoint is None:
return
request = {
"url": endpoint,
"label": label,
"method": "POST",
"headers": {
"Content-Type": "text/xml; charset=utf-8",
}
}
if body:
request["body"] = body
return request
def _calc_base_address(self, test_step):
config = test_step.find('./con:config', namespaces=self.NAMESPACES)
service = config.get('service')
interfaces = self.tree.xpath('//con:interface', namespaces=self.NAMESPACES)
for interface in interfaces:
if interface.get("name") == service:
endpoint = interface.find('.//con:endpoints/con:endpoint', namespaces=self.NAMESPACES)
if endpoint is not None:
service = endpoint.text
break
return service
def _extract_rest_request(self, test_step):
config = test_step.find('./con:config', namespaces=self.NAMESPACES)
method = config.get('method')
params = self._parse_parent_resources(config)
url = self._calc_base_address(test_step) + config.get('resourcePath')
for param_name in copy.copy(list(params.keys())):
template = "{" + param_name + "}"
if template in url:
param_value = params.pop(param_name)
url = url.replace(template, param_value)
request = {"url": url, "label": test_step.get('name')}
if method is not None and method != "GET":
request["method"] = method
headers = self._extract_headers(config)
assertions = self._extract_assertions(config)
if headers:
request["headers"] = headers
if assertions:
request["assert"] = assertions
body = {}
for key, value in iteritems(params):
body[key] = value
if body:
request["body"] = body
return request
def _parse_parent_resources(self, config):
method_name = config.get('methodName')
for interface in self.interface:
method_obj = interface.find('.//con:method[@name="%s"]' % method_name, namespaces=self.NAMESPACES)
if method_obj is not None:
break
params = BetterDict()
if method_obj is not None:
parent = method_obj.getparent()
while parent.tag.endswith('resource'):
for param in parent.findall('./con:parameters/con:parameter', namespaces=self.NAMESPACES):
param_name = param.findtext('./con:name', namespaces=self.NAMESPACES)
param_value = param.findtext('./con:value', namespaces=self.NAMESPACES)
def_value = param.findtext('./con:default', namespaces=self.NAMESPACES)
if param_value:
params[param_name] = param_value
elif def_value:
params[param_name] = def_value
parent = parent.getparent()
for entry in config.findall('./con:restRequest/con:parameters/con:entry', namespaces=self.NAMESPACES):
params.merge({entry.get("key"): entry.get("value")})
return params
def _extract_properties(self, block, key_prefix=""):
properties = block.findall('./con:properties/con:property', namespaces=self.NAMESPACES)
prop_map = {}
for prop in properties:
key = key_prefix + prop.findtext('./con:name', namespaces=self.NAMESPACES)
value = prop.findtext('./con:value', namespaces=self.NAMESPACES)
prop_map[key] = value
return prop_map
def _extract_execution(self, test_case):
load_exec = {}
load_test = test_case.find('./con:loadTest', namespaces=self.NAMESPACES)
if load_test is not None:
load_exec['concurrency'] = int(load_test.find('./con:threadCount', self.NAMESPACES).text)
load_exec['hold-for'] = int(load_test.find('./con:testLimit', self.NAMESPACES).text)
else:
load_exec['concurrency'] = 1
return load_exec
def _validate_transfer(self, source_type, source_step_name, transfer_type, target_step_name):
source_step = self.tree.find("//con:testStep[@name='%s']" % source_step_name, namespaces=self.NAMESPACES)
if source_step is None:
self.log.warning("Can't find source step (%s) for Property Transfer. Skipping", source_step_name)
return False
source_step_type = source_step.get("type")
if source_step_type not in ["httprequest", "restrequest", "request"]:
self.log.warning("Unsupported source step type for Property Transfer (%s). Skipping", source_step_type)
return False
if source_type != "Response":
self.log.warning("Found Property Transfer with non-response source (%s). Skipping", source_type)
return False
if transfer_type not in ["JSONPATH", "XPATH"]:
self.log.warning("Found Property Transfer with unsupported type (%s). Skipping", transfer_type)
return False
target_step = self.tree.find("//con:testStep[@name='%s']" % target_step_name, namespaces=self.NAMESPACES)
if target_step is None:
self.log.warning("Can't find target step (%s) for Property Transfer. Skipping", target_step_name)
return False
target_step_type = target_step.get("type")
if target_step_type != "properties":
self.log.warning("Unsupported target step type for Property Transfer (%s). Skipping", target_step_type)
return False
return True
def _extract_transfer(self, transfer):
source_type = transfer.findtext('./con:sourceType', namespaces=self.NAMESPACES)
source_step_name = transfer.findtext('./con:sourceStep', namespaces=self.NAMESPACES)
query = transfer.findtext('./con:sourcePath', namespaces=self.NAMESPACES)
transfer_type = transfer.findtext('./con:type', namespaces=self.NAMESPACES)
target_step_name = transfer.findtext('./con:targetStep', namespaces=self.NAMESPACES)
target_prop = transfer.findtext('./con:targetType', namespaces=self.NAMESPACES)
if source_step_name.startswith("#") and source_step_name.endswith("#"):
source_step_name = source_step_name[1:-1]
if not self._validate_transfer(source_type, source_step_name, transfer_type, target_step_name):
return None
extractor = BetterDict()
if transfer_type == "JSONPATH":
extractor.merge({
'extract-jsonpath': {
target_prop: {
'jsonpath': query,
'default': 'NOT_FOUND',
}
}
})
elif transfer_type == "XPATH":
extractor.merge({
'extract-xpath': {
target_prop: {
'xpath': query,
'default': 'NOT_FOUND',
}
}
})
return {source_step_name: extractor}
def _extract_property_transfers(self, test_step):
extractors = BetterDict() # label -> {extract-xpath: ..., extract-jsonpath: ...}
transfers = test_step.findall('./con:config/con:transfers', namespaces=self.NAMESPACES)
if not transfers:
return None
for transfer in transfers:
extracted_transfer = self._extract_transfer(transfer)
if extracted_transfer is not None:
extractors.merge(extracted_transfer)
return extractors
def _extract_scenario(self, test_case, case_level_props):
variables = BetterDict.from_dict(case_level_props)
requests = []
extractors = BetterDict()
steps = test_case.findall('.//con:testStep', namespaces=self.NAMESPACES)
for step in steps:
request = None
if step.get("type") == "httprequest":
request = self._extract_http_request(step)
elif step.get("type") == "restrequest":
request = self._extract_rest_request(step)
elif step.get("type") == "request":
request = self._extract_soap_request(step)
elif step.get("type") == "properties":
config_block = step.find('./con:config', namespaces=self.NAMESPACES)
if config_block is not None:
props = self._extract_properties(config_block)
variables.merge(props)
elif step.get("type") == "transfer":
extracted_extractors = self._extract_property_transfers(step) # label -> extractor
if extracted_extractors:
extractors.merge(extracted_extractors)
elif step.get("type") == "groovy":
request = self._extract_script(step)
if request is not None:
requests.append(request)
for request in requests:
label = request["label"]
if label in extractors:
request.update(extractors[label])
scenario = {
"test-case": test_case.get("name"),
"requests": requests
}
if variables:
scenario["variables"] = variables
return scenario
def _extract_script(self, test_step):
label = test_step.get("name", "Script")
script = test_step.find('./con:config/script', namespaces=self.NAMESPACES).text
if script is not None:
script = script.strip()
return {
"label": label,
"action": "pause",
"target": "current-thread",
"pause-duration": "0ms",
"jsr223": [{
"language": "groovy",
"script-text": script,
}]
}
def _extract_test_case(self, test_case, test_suite, suite_level_props):
case_name = test_case.get("name")
scenario_name = test_suite.get("name") + "-" + case_name
case_properties = self._extract_properties(test_case)
case_properties = {
"#TestCase#" + key: value
for key, value in iteritems(case_properties)
}
case_level_props = BetterDict.from_dict(suite_level_props)
case_level_props.merge(case_properties)
scenario = self._extract_scenario(test_case, case_level_props)
scenario['test-suite'] = test_suite.get("name")
return scenario_name, scenario
def _extract_config(self, project, test_suites, target_test_case=None):
execution = []
scenarios = {}
project_properties = self._extract_properties(project, key_prefix="#Project#")
project_name = project.get("name")
interface_exec, interface_scen = self._extract_interface(project_name, self.interface)
execution.append(interface_exec)
scenarios.update(interface_scen)
for suite in test_suites:
suite_props = BetterDict.from_dict(project_properties)
suite_props.merge(self._extract_properties(suite, key_prefix="#TestSuite#"))
test_cases = suite.findall('.//con:testCase', namespaces=self.NAMESPACES)
for case in test_cases:
case_name = case.get("name")
scenario_name, scenario = self._extract_test_case(case, suite, suite_props)
load_exec = self._extract_execution(case)
load_exec['scenario'] = scenario_name
self.log.debug("Extracted execution for scenario %s", scenario_name)
if not scenario["requests"]:
self.log.warning("No requests extracted for scenario %s, skipping it" % scenario_name)
continue
if target_test_case is None or target_test_case == case_name:
self.log.debug("Extracted scenario: %s", scenario_name)
scenarios[scenario_name] = scenario
execution.append(load_exec)
return {
"execution": execution,
"scenarios": scenarios,
}
def convert_script(self, script_path, target_test_case=None):
if not os.path.exists(script_path):
raise ValueError("SoapUI script %s doesn't exist" % script_path)
self.load(script_path)
self.log.debug("Found namespaces: %s", self.NAMESPACES)
projects = self.tree.xpath('//con:soapui-project', namespaces=self.NAMESPACES)
self.log.debug("Found projects: %s", projects)
project = projects[0]
self.interface = project.findall('.//con:interface', namespaces=self.NAMESPACES)
self.log.debug("Found interface: %s", self.interface)
test_suites = project.findall('.//con:testSuite', namespaces=self.NAMESPACES)
self.log.debug("Found test suites: %s", test_suites)
config = self._extract_config(project, test_suites, target_test_case=target_test_case)
if not config["scenarios"]:
self.log.warning("No scenarios were extracted")
if not config["execution"]:
self.log.warning("No load tests were extracted")
return config
def _extract_interface(self, project_name, interfaces):
execution = {
"concurrency": 1,
"iterations": 1,
"ramp-up": "10s",
"scenario": project_name
}
scenarios = {}
interface_requests = []
for interface in interfaces:
try:
endpoint = interface.find('.//con:endpoint', namespaces=self.NAMESPACES).text
resources = interface.findall('.//con:resource', namespaces=self.NAMESPACES)
if not resources:
interface_requests.append({
"url": endpoint
})
continue
except AttributeError:
continue
for resource in resources:
path = resource.get("path")
url = endpoint + path
methods = resource.findall('.//con:method', namespaces=self.NAMESPACES)
for method in methods:
method_type = method.get("method")
requests = method.findall('con:request', namespaces=self.NAMESPACES)
for request in requests:
request_body = request.find('.//con:request', namespaces=self.NAMESPACES).text
interface_requests.append({
"body": request_body,
"method": method_type,
"url": url
})
scenarios.update({project_name: {"requests": interface_requests}})
return execution, scenarios
def find_soapui_test_case(self, test_case, scenarios):
matching_scenarios = [
(name, scen)
for name, scen in iteritems(scenarios)
if scen.get("test-case") == test_case
]
if len(matching_scenarios) == 0:
sorted_scenarios = sorted((name, scen) for name, scen in iteritems(scenarios))
scenario_name, scenario = next(iter(sorted_scenarios))
if test_case is None:
self.log.warning("No `test-case` specified for SoapUI project, will use '%s'",
scenario.get("test-case"))
else:
msg = "No matching test cases found for name '%s', using the '%s'"
self.log.warning(msg, test_case, scenario.get("test-case"))
elif len(matching_scenarios) > 1:
scenario_name, scenario = next(iter(matching_scenarios))
msg = "Multiple test cases found for name '%s', using case '%s' from suite '%s'"
self.log.warning(msg, test_case, scenario.get('test-case'), scenario.get('test-suite'))
else:
scenario_name, scenario = next(iter(matching_scenarios))
return scenario_name, scenario
| apache-2.0 | 8,118,429,725,673,698,000 | 32.776641 | 118 | 0.573624 | false | 4.07781 | true | false | false |
zelros/bunt | tools/comparator.py | 1 | 1525 | # -*- coding: utf-8 -*-
from api_managers import api_builder
from tools import loader
import logging
logger = logging.getLogger(__name__)
class Comparator:
def __init__(self, criteria, apis, scorer, fallback_name):
self.criteria = criteria
self.apis = apis
self.scorer = scorer
self.fallback_name = fallback_name
self.results = {}
def compare(self):
results = {}
logger.info('Comparator :')
for language in self.criteria:
logger.info('\tlanguage: {}'.format(language))
results[language] = {}
for criterion in self.criteria[language]:
logger.info('\t\tcriterion: {}'.format(criterion))
# get data_frame
df = loader.load(language, criterion)
logger.info('\t\t\tdata ready')
results[language][criterion] = {}
for api in self.apis:
api_manager = api_builder.build_api(api_name=api, fallback_name=self.fallback_name,
language=language, params={})
self.scorer.fit(api_manager, df)
logger.info('\t\t\tscoring {}'.format(api))
self.scorer.score()
results[language][criterion][str(api_manager)] = {'scores': self.scorer.scores,
'risk_rate': self.scorer.risk_rate}
self.results = results
| mit | -6,137,808,317,277,305,000 | 35.309524 | 105 | 0.518689 | false | 4.593373 | false | false | false |
edwardsnj/rmidb2 | rmidb2/secpwhash.py | 1 | 1268 | import os
from hashlib import sha256
from hmac import HMAC
import random
# def random_bytes(num_bytes):
# return "".join(chr(random.randrange(256)) for i in xrange(num_bytes))
def random_bytes(num_bytes):
return os.urandom(num_bytes)
def pbkdf_sha256(password, salt, iterations):
result = password
for i in xrange(iterations):
result = HMAC(result, salt, sha256).digest() # use HMAC to apply the salt
return result
NUM_ITERATIONS = 5000
def hash_password(plain_password, salt=None):
if not salt:
salt = random_bytes(8) # 64 bits
plain_password = str(plain_password)
hashed_password = pbkdf_sha256(plain_password, salt, NUM_ITERATIONS)
# return the salt and hashed password, encoded in base64 and split with ","
return salt.encode("base64").strip() + "," + hashed_password.encode("base64").strip()
def check_password(saved_password_entry, plain_password):
salt, hashed_password = saved_password_entry.split(",")
salt = salt.decode("base64")
return saved_password_entry == hash_password(plain_password, salt)
# password_entry = hash_password("mysecret")
# print password_entry # will print, for example: 8Y1ZO8Y1pi4=,r7Acg5iRiZ/x4QwFLhPMjASESxesoIcdJRSDkqWYfaA=
# check_password(password_entry, "mysecret") # returns True
| mit | -1,856,692,794,521,895,200 | 32.368421 | 107 | 0.731861 | false | 3.276486 | false | false | false |
jjgoings/nfl-colley-method | getdata.py | 1 | 1594 | import sys
import urllib
from numpy import genfromtxt
import numpy as np
def get_teams():
f = urllib.urlopen("http://www.masseyratings.com/scores.php?s=199229&sub=199229&all=1&mode=3&exhib=on&format=2")
s = f.read().split()
my_teamnames = {}
for i in range(0,len(s)/2):
my_teamnames.update({i : s[i*2 + 1]})
return my_teamnames
def get_games(year,exhibition):
if year == 2013:
if exhibition == True:
f = urllib.urlopen('http://www.masseyratings.com/scores.php?s=199229&sub=199229&all=1&mode=3&exhib=on&format=1')
elif exhibition == False:
f = urllib.urlopen('http://www.masseyratings.com/scores.php?s=199229&sub=199229&all=1&mode=3&format=1')
else:
sys.exit('"exhibition" must be "True" or "False"')
elif year == 2012:
if exhibition == True:
f = urllib.urlopen('http://www.masseyratings.com/scores.php?s=181613&sub=181613&all=1&mode=3&exhib=on&format=1')
elif exhibition == False:
f = urllib.urlopen('http://www.masseyratings.com/scores.php?s=181613&sub=181613&all=1&mode=3&format=1')
else:
sys.exit('"exhibition" must be "True" or "False"')
else:
sys.exit('Not a valid year')
s = f.read()
if exhibition == False:
file_name = str('games_'+str(year)+'.txt')
elif exhibition == True:
file_name = str('games_'+str(year)+'_exhib.txt')
k = open(file_name,'w')
k.write(s)
k.close()
f.close()
my_games = genfromtxt(file_name, dtype = None, delimiter=',')
return my_games
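# Usage sketch (year/flag values are examples):
#   teams = get_teams()
#   games = get_games(2013, exhibition=False)  # also writes games_2013.txt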
| gpl-3.0 | 306,782,854,921,786,430 | 34.422222 | 124 | 0.605395 | false | 2.924771 | false | false | false |
Azure/azure-sdk-for-python | sdk/aks/azure-mgmt-devspaces/azure/mgmt/devspaces/operations/_controllers_operations.py | 1 | 32241 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class ControllersOperations(object):
"""ControllersOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~dev_spaces_management_client.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.Controller"
"""Gets an Azure Dev Spaces Controller.
Gets the properties for an Azure Dev Spaces Controller.
:param resource_group_name: Resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the resource.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Controller, or the result of cls(response)
:rtype: ~dev_spaces_management_client.models.Controller
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Controller"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'name': self._serialize.url("name", name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]([_-]*[a-zA-Z0-9])*$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DevSpacesErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('Controller', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers/{name}'} # type: ignore
def _create_initial(
self,
resource_group_name, # type: str
name, # type: str
controller, # type: "_models.Controller"
**kwargs # type: Any
):
# type: (...) -> "_models.Controller"
cls = kwargs.pop('cls', None) # type: ClsType["_models.Controller"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'name': self._serialize.url("name", name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]([_-]*[a-zA-Z0-9])*$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(controller, 'Controller')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DevSpacesErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Controller', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Controller', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers/{name}'} # type: ignore
def begin_create(
self,
resource_group_name, # type: str
name, # type: str
controller, # type: "_models.Controller"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.Controller"]
"""Creates an Azure Dev Spaces Controller.
Creates an Azure Dev Spaces Controller with the specified create parameters.
:param resource_group_name: Resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the resource.
:type name: str
:param controller: Controller create parameters.
:type controller: ~dev_spaces_management_client.models.Controller
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either Controller or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~dev_spaces_management_client.models.Controller]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Controller"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_initial(
resource_group_name=resource_group_name,
name=name,
controller=controller,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Controller', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'name': self._serialize.url("name", name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]([_-]*[a-zA-Z0-9])*$'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers/{name}'} # type: ignore
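    # Illustrative usage sketch (assumes a configured management client with
    # this operations group mounted as `controllers`):
    #   poller = client.controllers.begin_create(rg_name, name, controller)
    #   controller = poller.result()  # blocks until the LRO completes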
def _delete_initial(
self,
resource_group_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'name': self._serialize.url("name", name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]([_-]*[a-zA-Z0-9])*$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DevSpacesErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers/{name}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes an Azure Dev Spaces Controller.
Deletes an existing Azure Dev Spaces Controller.
:param resource_group_name: Resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the resource.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
name=name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'name': self._serialize.url("name", name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]([_-]*[a-zA-Z0-9])*$'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers/{name}'} # type: ignore
def update(
self,
resource_group_name, # type: str
name, # type: str
controller_update_parameters, # type: "_models.ControllerUpdateParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.Controller"
"""Updates an Azure Dev Spaces Controller.
Updates the properties of an existing Azure Dev Spaces Controller with the specified update
parameters.
:param resource_group_name: Resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the resource.
:type name: str
:param controller_update_parameters: Parameters for updating the Azure Dev Spaces Controller.
:type controller_update_parameters: ~dev_spaces_management_client.models.ControllerUpdateParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Controller, or the result of cls(response)
:rtype: ~dev_spaces_management_client.models.Controller
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Controller"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'name': self._serialize.url("name", name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]([_-]*[a-zA-Z0-9])*$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(controller_update_parameters, 'ControllerUpdateParameters')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DevSpacesErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Controller', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Controller', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers/{name}'} # type: ignore
def list_by_resource_group(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ControllerList"]
"""Lists the Azure Dev Spaces Controllers in a resource group.
Lists all the Azure Dev Spaces Controllers with their properties in the specified resource
group and subscription.
:param resource_group_name: Resource group to which the resource belongs.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ControllerList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~dev_spaces_management_client.models.ControllerList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ControllerList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_resource_group.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ControllerList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DevSpacesErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers'} # type: ignore
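    # Illustrative usage sketch (assumes a configured management client):
    #   for ctrl in client.controllers.list_by_resource_group('my-rg'):
    #       print(ctrl.name)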
def list(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ControllerList"]
"""Lists the Azure Dev Spaces Controllers in a subscription.
Lists all the Azure Dev Spaces Controllers with their properties in the subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ControllerList or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~dev_spaces_management_client.models.ControllerList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ControllerList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ControllerList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
error = self._deserialize.failsafe_deserialize(_models.DevSpacesErrorResponse, response)
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.DevSpaces/controllers'} # type: ignore
def list_connection_details(
self,
resource_group_name, # type: str
name, # type: str
list_connection_details_parameters, # type: "_models.ListConnectionDetailsParameters"
**kwargs # type: Any
):
# type: (...) -> "_models.ControllerConnectionDetailsList"
"""Lists connection details for an Azure Dev Spaces Controller.
Lists connection details for the underlying container resources of an Azure Dev Spaces
Controller.
:param resource_group_name: Resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the resource.
:type name: str
:param list_connection_details_parameters: Parameters for listing connection details of Azure
Dev Spaces Controller.
:type list_connection_details_parameters: ~dev_spaces_management_client.models.ListConnectionDetailsParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ControllerConnectionDetailsList, or the result of cls(response)
:rtype: ~dev_spaces_management_client.models.ControllerConnectionDetailsList
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ControllerConnectionDetailsList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.list_connection_details.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'name': self._serialize.url("name", name, 'str', max_length=63, min_length=1, pattern=r'^[a-zA-Z0-9]([_-]*[a-zA-Z0-9])*$'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(list_connection_details_parameters, 'ListConnectionDetailsParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DevSpacesErrorResponse, response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('ControllerConnectionDetailsList', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_connection_details.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DevSpaces/controllers/{name}/listConnectionDetails'} # type: ignore
# Source: camillemonchicourt/Geotrek :: geotrek/common/factories.py
import factory
from django.contrib.contenttypes.models import ContentType
from paperclip.models import Attachment
from geotrek.authent.factories import UserFactory
from geotrek.common.utils.testdata import get_dummy_uploaded_file
from . import models
class OrganismFactory(factory.Factory):
FACTORY_FOR = models.Organism
organism = factory.Sequence(lambda n: u"Organism %s" % n)
class FileTypeFactory(factory.Factory):
FACTORY_FOR = models.FileType
type = factory.Sequence(lambda n: u"FileType %s" % n)
class AttachmentFactory(factory.Factory):
"""
    Create an attachment. You must provide an 'obj' keyword,
the object (saved in db) to which the attachment will be bound.
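    Illustrative usage (a sketch; `some_saved_instance` is hypothetical):
        AttachmentFactory(obj=some_saved_instance)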
"""
FACTORY_FOR = Attachment
attachment_file = get_dummy_uploaded_file()
filetype = factory.SubFactory(FileTypeFactory)
creator = factory.SubFactory(UserFactory)
title = factory.Sequence(u"Title {0}".format)
legend = factory.Sequence(u"Legend {0}".format)
# date_insert, date_update
@classmethod
def _prepare(cls, create, obj=None, **kwargs):
kwargs['content_type'] = ContentType.objects.get_for_model(obj)
kwargs['object_id'] = obj.pk
return super(AttachmentFactory, cls)._prepare(create, **kwargs)
# Source: ginogervasio/pytolab-trends :: trends/db.py
#!/usr/bin/python
# -*- coding: utf-8 -*-
import json
import logging
import time
import redis
import redis.exceptions
import MySQLdb
import config
import data
import exceptions
import log
class Db(object):
db_mem = None
db_mem_posts = None
db_disk_posts = None
db_cursor = None
retries = 360
retry_wait = 10
cmd_retries = 10
cmd_retry_wait = 10
def __init__(self):
c = config.Config()
self.config = c.cfg
self.log = logging.getLogger('db')
self.dir_root = self.config.get('trends', 'root')
def setup(self):
"""
Setup the connection to Redis DB and to MySQL DB.
"""
self.setup_redis()
self.setup_mysql_loop()
# Get marker to know if a post id is in Redis or MySQL.
self.posts_tid = int(self.get('posts_tid'))
def setup_redis(self):
"""Connections to Redis."""
host = self.config.get('redis', 'host')
port = self.config.getint('redis', 'port')
self.db_mem = redis.Redis(host=host, port=port, db=0)
self.db_mem_posts = redis.Redis(host=host, port=port, db=1)
def setup_mysql_loop(self):
"""Setup connection to Redis until it succeeds"""
retry = 0
while retry < self.retries:
try:
self.setup_mysql()
return
except exceptions.DbError:
if retry < self.retries:
time.sleep(self.retry_wait)
retry += 1
self.log.error(
'%d retries to connect to MySQL failed', self.retries)
raise exceptions.DbError()
def setup_mysql(self):
"""Setup connections to MySQL"""
user = self.config.get('mysql', 'user')
password = self.config.get('mysql', 'password')
db = self.config.get('mysql', 'db')
host = self.config.get('mysql', 'host')
try:
self.db_disk_posts = MySQLdb.connect(host=host,
user=user, passwd=password, db=db,
use_unicode=True, charset='utf8')
self.db_cursor = self.db_disk_posts.cursor()
except MySQLdb.Error:
self.log.error('Problem to connect to MySQL host %s', host)
raise exceptions.DbError()
def redis_cmd(self, cmd, *args):
"""Redis command to DB index 0"""
return self.redis_command(0, cmd, *args)
def redis_cmd_db_1(self, cmd, *args):
"""Redis command to DB index 1"""
return self.redis_command(1, cmd, *args)
def redis_command(self, db, cmd, *args):
"""Command to Redis.
Try cmd_retries times.
"""
if db == 0:
dbr = self.db_mem
else:
dbr = self.db_mem_posts
retry = 0
while retry < self.cmd_retries:
try:
return getattr(dbr, cmd)(*args)
except redis.exceptions.RedisError:
self.log.error('Redis cmd %s error', cmd)
retry += 1
if retry <= self.cmd_retries:
time.sleep(self.cmd_retry_wait)
except AttributeError:
self.log.error('Redis cmd %s does not exist', cmd)
raise exceptions.DbError()
raise exceptions.DbError()
    def get(self, key, db=0):
if db == 0:
return self.redis_cmd('get', key)
else:
return self.redis_cmd_db_1('get', key)
def set(self, key, value, db=0):
if db == 0:
return self.redis_cmd('set', key, value)
else:
return self.redis_cmd_db_1('set', key, value)
def delete(self, key):
return self.redis_cmd('delete', key)
def exists(self, key):
return self.redis_cmd('exists', key)
def incr(self, key):
return self.redis_cmd('incr', key)
def rpush(self, key, value):
return self.redis_cmd('rpush', key, value)
def lrange(self, key, start, stop):
return self.redis_cmd('lrange', key, start, stop)
def lset(self, key, index, value):
return self.redis_cmd('lset', key, index, value)
def lindex(self, key, index):
return self.redis_cmd('lindex', key, index)
def mysql_command(self, cmd, sql, writer, commit, *args):
"""Command to MySQL.
Try cmd_retries times."""
retry = 0
while retry < self.cmd_retries:
try:
r = getattr(self.db_cursor, cmd)(sql, args)
if writer:
if commit:
self.db_disk_posts.commit()
return r
else:
return self.db_cursor.fetchall()
except (MySQLdb.OperationalError, MySQLdb.InternalError):
self.log.error('MySQL cmd %s DB error', cmd)
# reconnect
self.setup_mysql_loop()
retry = 0
except MySQLdb.Error:
self.log.error('MySQL cmd %s sql %s failed', cmd, sql)
retry += 1
if retry <= self.cmd_retries:
time.sleep(self.cmd_retry_wait)
except AttributeError:
self.log.error('MySQL cmd %s does not exist', cmd)
raise exceptions.DbError()
raise exceptions.DbError()
def sql_read(self, sql, *args):
"""Read command to MySQL."""
return self.mysql_command('execute', sql, False, False, *args)
def sql_write(self, sql, *args):
"""Write command to MySQL."""
return self.mysql_command('execute', sql, True, True, *args)
def sql_write_no_commit(self, sql, *args):
"""Write command to MySQL but no commit."""
return self.mysql_command('execute', sql, True, False, *args)
def sql_commit(self):
"""Commit changes to disk"""
self.db_disk_posts.commit()
def set_post(self, post_id, value):
"""Add/Update post value in Redis or MySQL based on posts id marker...
"""
if post_id >= self.posts_tid:
self.set('post:%d' % (post_id,), value, db=1)
else:
sql = 'insert into tp_post(post_id, post) values(%s, %s)'\
'on duplicate key update post=%s'
self.sql_write(sql, post_id, value, value)
def add_post(self, post_id, value):
"""Add post in MySQL
"""
sql = 'insert into tp_post(post_id, post) values(%s, %s)'
self.sql_write(sql, post_id, value)
def get_post(self, post_id):
"""Get post value from Redis or MySQL based on posts id marker...
"""
if post_id >= self.posts_tid:
r = self.get('post:%d' % (post_id,), db=1)
else:
try:
sql = 'select post from tp_post where post_id=%s'
r = self.sql_read(sql, post_id)
except exceptions.DbError:
r = None
return r
def set_person_score(self, post_id, person_id, score):
"""Set the person's sentiment score based on the tweet
"""
sql = 'insert into tp_person_score(post_id, person_id, score) values(%s, %s, %s)'
self.sql_write(sql, post_id, person_id, str(score))
def get_persons(self):
"""
Get list of persons from db
"""
names = self.redis_cmd('lrange', 'persons', 0, -1)
persons = []
for n in names:
s = n.split(':')
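            # Expected line format (an assumption, inferred from the indexing
            # below): '<id>:<first_name>:<name>:<nickname>:<group>:<json words>'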
person = {}
person['id'] = int(s[0])
person['first_name'] = s[1]
person['name'] = s[2]
person['nickname'] = s[3]
person['group'] = int(s[4])
person['words'] = json.loads(s[5])
pc = self.lindex('person:%d:posts_count' % int(s[0]), -1)
posts_count = int((pc if pc else 0))
person['posts_count'] = (posts_count if posts_count > 0 else 0)
rels = self.lindex('person:%d:rel' % int(s[0]), -1)
person['rel'] = json.loads((rels if rels else '{}'))
sentiment = self.lindex('person:%d:sentiment' % int(s[0]), -1)
person['sentiment'] = float((sentiment if sentiment else 0))
sentiment_avg = self.get('person:%d:sentiment_avg' % int(s[0]))
person['sentiment_avg'] = float((sentiment_avg if sentiment_avg else 0.0))
sentiment_total = self.get('person:%d:sentiment_total_count' % int(s[0]))
person['sentiment_total_count'] = int((sentiment_total if sentiment_total else 0))
persons.append(person)
return persons
def set_persons(self):
"""
Set list of persons in db
"""
key = 'persons'
self.redis_cmd('delete', key)
with open('%s/names.txt' % (self.dir_root), 'r') as f:
for line in f:
self.redis_cmd('rpush', key, line.rstrip('\n'))
def iter_posts(self):
post_id_start = 108673
post_id_end = 8561087
last_id = post_id_start
while True:
sql = 'select post_id, post from tp_post'\
' where post_id > %s and post_id <= %s order by post_id'\
' limit 1000'
rows = self.sql_read(sql, last_id, post_id_end)
if not rows:
break
last_id = rows[-1][0]
r = []
for row in rows:
d = data.parse_post(row[1])
d['post_id'] = row[0]
r.append(d)
yield r
def get_person_ids_from_post_id(self, post_id):
sql = 'select person_id from tp_person_post where post_id = %s'
rows = self.sql_read(sql, post_id)
return [row[0] for row in rows]
| mit | -2,490,943,459,274,629,000 | 32.965035 | 94 | 0.525736 | false | 3.717566 | true | false | false |
hechi/rooftop | rooftop/forms.py | 1 | 1043 | # -*- coding: utf-8 -*-
from django import forms
class UserprofileForm(forms.Form):
oldPassword = forms.CharField(label='Old Password',widget=forms.PasswordInput())
newPassword = forms.CharField(label='New Password',widget=forms.PasswordInput())
confirmPassword = forms.CharField(label='Confirm Password',widget=forms.PasswordInput())
class AddUserForm(forms.Form):
username = forms.CharField(label='Username')
firstname = forms.CharField(label='Firstname')
lastname = forms.CharField(label='Lastname')
email = forms.CharField(label='Email')
password = forms.CharField(label='Password',widget=forms.PasswordInput())
class EditUserForm(forms.Form):
firstname = forms.CharField(label='Firstname')
lastname = forms.CharField(label='Lastname')
email = forms.CharField(label='Email')
password = forms.CharField(label='Password',widget=forms.PasswordInput())
class AddGroupForm(forms.Form):
groupname = forms.CharField(label='Groupname')
description = forms.CharField(label='Description')
# Source: eneldoserrata/marcos_openerp :: marcos_addons/hotel-7.0/hotel_reservation/report/hotel_reservation_report.py
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-Today Serpent Consulting Services Pvt. Ltd. (<http://www.serpentcs.com>)
# Copyright (C) 2004 OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import time
from openerp.report import report_sxw
class reservation_detail_report(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(reservation_detail_report, self).__init__(cr, uid, name, context)
        self.localcontext.update({
'time': time,
'get_data': self.get_data,
'get_checkin': self.get_checkin,
'get_checkout': self.get_checkout,
# 'get_room':self.get_room,
'get_room_type':self._get_room_type,
'get_room_nos':self._get_room_nos,
'get_room_used_detail':self._get_room_used_detail,
})
self.context=context
def _get_room_type(self, reservation_line):
room_types = ''
for line in reservation_line:
if line.categ_id:
room_types += line.categ_id.name
room_types += ' '
return room_types
def _get_room_nos(self, reservation_line):
room_nos = ''
for line in reservation_line:
for room in line.reserve:
room_nos += room.name
room_nos += ' '
return room_nos
def get_data(self, date_start, date_end):
reservation_obj = self.pool.get('hotel.reservation')
tids = reservation_obj.search(self.cr, self.uid, [('checkin', '>=', date_start), ('checkout', '<=', date_end)])
res = reservation_obj.browse(self.cr, self.uid, tids)
return res
def get_checkin(self, date_start, date_end):
reservation_obj = self.pool.get('hotel.reservation')
tids = reservation_obj.search(self.cr, self.uid, [('checkin', '>=', date_start), ('checkin', '<=', date_end)])
res = reservation_obj.browse(self.cr, self.uid, tids)
return res
    def get_checkout(self, date_start, date_end):
        reservation_obj = self.pool.get('hotel.reservation')
        tids = reservation_obj.search(self.cr, self.uid, [('checkout', '>=', date_start), ('checkout', '<=', date_end)])
        res = reservation_obj.browse(self.cr, self.uid, tids)
        return res
def _get_room_used_detail(self, date_start, date_end):
room_used_details = []
hotel_room_obj = self.pool.get('hotel.room')
room_ids = hotel_room_obj.search(self.cr, self.uid, [])
for room in hotel_room_obj.browse(self.cr, self.uid, room_ids):
counter = 0
details = {}
if room.room_reservation_line_ids:
for room_resv_line in room.room_reservation_line_ids:
if room_resv_line.check_in >= date_start and room_resv_line.check_in <= date_end:
counter += 1
if counter >= 1:
details.update({'name': room.name or '', 'no_of_times_used': counter})
room_used_details.append(details)
return room_used_details
# def get_room(self, date_start, date_end):
# self.cr.execute("select pt.name,count(pt.name) as No_of_times from hotel_reservation as hr " \
# "inner join hotel_reservation_line as hrl on hrl.line_id=hr.id " \
# "inner join hotel_reservation_line_room_rel as hrlrr on hrlrr.room_id=hrl.id " \
# "inner join product_product as pp on pp.product_tmpl_id=hrlrr.hotel_reservation_line_id " \
# "inner join product_template as pt on pt.id=pp.product_tmpl_id " \
# "where hr.state<>'draft' and hr.checkin >= %s and hr.checkout <= %s group by pt.name " \
# ,(date_start,date_end))
# res2=self.cr.dictfetchall()
# return res2
report_sxw.report_sxw('report.reservation.detail', 'hotel.reservation', 'addons/hotel_reservation/report/room_res.rml', parser=reservation_detail_report)
report_sxw.report_sxw('report.checkin.detail', 'hotel.reservation', 'addons/hotel_reservation/report/checkinlist.rml', parser=reservation_detail_report)
report_sxw.report_sxw('report.checkout.detail', 'hotel.reservation', 'addons/hotel_reservation/report/checkoutlist.rml', parser=reservation_detail_report)
report_sxw.report_sxw('report.maxroom.detail', 'hotel.reservation', 'addons/hotel_reservation/report/maxroom.rml', parser=reservation_detail_report)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# Source: tensorflow/lingvo :: lingvo/tasks/car/input_preprocessors.py
# Lint as: python3
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Input preprocessors."""
from lingvo import compat as tf
from lingvo.core import base_layer
from lingvo.core import py_utils
from lingvo.core import schedule
from lingvo.tasks.car import car_lib
from lingvo.tasks.car import detection_3d_lib
from lingvo.tasks.car import geometry
from lingvo.tasks.car import ops
import numpy as np
# pylint:disable=g-direct-tensorflow-import
from tensorflow.python.ops import inplace_ops
# pylint:enable=g-direct-tensorflow-import
def _ConsistentShuffle(tensors, seed):
"""Shuffle multiple tensors with the same shuffle order."""
shuffled_idx = tf.range(tf.shape(tensors[0])[0])
shuffled_idx = tf.random.shuffle(shuffled_idx, seed=seed)
return tuple([tf.gather(t, shuffled_idx) for t in tensors])
def _GetApplyPointMaskFn(points_mask):
"""Returns a function that applies a mask to one of our points tensors."""
def _ApplyPointMaskFn(points_tensor):
"""Applies a mask to the points tensor."""
if points_tensor is None:
return points_tensor
return tf.boolean_mask(points_tensor, points_mask)
return _ApplyPointMaskFn
def _Dense(sparse):
return tf.sparse_to_dense(
sparse_indices=sparse.indices,
output_shape=sparse.dense_shape,
sparse_values=sparse.values,
default_value=0)
class Preprocessor(base_layer.BaseLayer):
"""Base class for input preprocessor.
Input preprocessors expect the combined output of all extractors and performs
a transformation on them. Input preprocessors can add/edit/remove fields
from the NestedMap of features.
  Note: Features correspond to a single example (no batch dimension).
Sub-classes need to implement the following three functions:
1) TransformFeatures(features): Given a NestedMap of features representing the
output of all the extractors, apply a transformation on the features.
2) TransformShapes(shapes): Given a corresponding NestedMap of shapes,
produce a NestedMap of shapes that corresponds to the transformation of the
features after TransformFeatures.
3) TransformDTypes(dtypes): Given a corresponding NestedMap of dtypes,
produce a NestedMap of dtypes that corresponds to the transformation of the
features after TransformFeatures.
The preprocessor is expected to explicitly pass through untouched fields.
For example, a preprocessor that does data augmentation should modify the
features NestedMap on the fields it cares about augmenting, and then return
the features NestedMap.
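  Example (a minimal, illustrative sketch):
    class ScaleFeatures(Preprocessor):
      def TransformFeatures(self, features):
        features.lasers.points_feature *= 2.0
        return features
      def TransformShapes(self, shapes):
        return shapes
      def TransformDTypes(self, dtypes):
        return dtypes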
"""
@classmethod
def Params(cls):
"""Default params."""
p = super().Params()
p.name = cls.__name__
return p
def FProp(self, theta, features):
"""Performs TransformFeatures."""
del theta # unused
return self.TransformFeatures(features)
def TransformFeatures(self, features):
"""Transforms the features for one example.
Args:
features: A `NestedMap` of tensors.
Returns:
A `NestedMap` of tensors corresponding.
"""
raise NotImplementedError()
def TransformBatchedFeatures(self, features):
"""Transforms the features for a batch of examples.
Args:
features: A `NestedMap` of batched tensors.
Returns:
A `NestedMap` of tensors corresponding.
"""
dtypes = features.Transform(lambda v: v.dtype)
dtypes = self.TransformDTypes(dtypes)
# Default impl uses map_fn.
result = tf.map_fn(
self.TransformFeatures, elems=features, dtype=dtypes, back_prop=False)
return result
def TransformShapes(self, shapes):
"""Sets correct shapes corresponding to TransformFeatures.
Args:
shapes: A `NestedMap` of TensorShapes, corresponding to the
pre-transformed features.
Returns:
A `NestedMap` of TensorShapes corresponding to the transformed features.
"""
raise NotImplementedError()
def TransformDTypes(self, dtypes):
"""Sets correct dtypes corresponding to TransformFeatures.
Args:
dtypes: A `NestedMap` of DTypes, corresponding to the pre-transformed
features.
Returns:
A `NestedMap` of DTypes corresponding to the transformed features.
"""
raise NotImplementedError()
class EntryPreprocessor(Preprocessor):
"""A Preprocessor that transforms a NestedMap sub-structure.
Some preprocessors want to apply a function to any NestedMap whose key matches
a specific prefix. An EntryPreprocessor provides an interface for specifying
the function transformation for a NestedMap of inputs, adding, modifying, or
deleting the entries in that NestedMap.
For example, if an input contains a nested structure such as:
- lasers.front.xyz
.features
- lasers.side.xyz
.features
and one wants to apply a transform that modifies the .xyz features
on both structures, one can define an EntryPreprocessor that implements:
UpdateEntry(entry):
UpdateEntryShape(shapes):
UpdateEntryDType(dtypes):
and set self.params.prefixes = ['lasers.front', 'lasers.side']
where the prefixes refer to a fully-qualified NestedMap sub-structure.
The arguments to these functions will contain just the NestedMap structure
whose key prefix can be found in self.params.prefixes. One can then modify
these structures as desired.
Example:
def UpdateEntry(self, entry):
# entry is a NestedMap.
assert 'xyz' in entry
entry.xyz = self._ApplyFn(entry.xyz)
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('prefixes', ['pseudo_ri'], 'List of keys to apply to.')
return p
def _ApplyToMatchingStructure(self, nested_map, fn):
"""Apply fn to any NestedMap sub-structure whose prefix is in p.prefixes."""
p = self.params
# Don't mutate the original.
nested_map = nested_map.DeepCopy()
updated_entries = []
for prefix in p.prefixes:
entry = nested_map.GetItem(prefix)
if not isinstance(entry, py_utils.NestedMap):
raise TypeError('Prefix key {} selected a {}, not a NestedMap!'.format(
prefix, type(entry)))
fn(entry)
updated_entries.append(entry)
return nested_map, updated_entries
def UpdateEntry(self, entry):
"""Update the Tensors in a NestedMap entry.
Args:
entry: A NestedMap of Tensors.
"""
raise NotImplementedError()
def UpdateEntryShape(self, shapes):
"""Update the shapes in a NestedMap entry.
Args:
shapes: A NestedMap of TensorShapes.
"""
raise NotImplementedError()
def UpdateEntryDType(self, dtypes):
"""Transform the dtypes in a NestedMap entry.
Args:
dtypes: A NestedMap of dtypes.
"""
raise NotImplementedError()
def TransformFeatures(self, features):
features, _ = self._ApplyToMatchingStructure(features, self.UpdateEntry)
return features
def TransformShapes(self, shapes):
shapes, _ = self._ApplyToMatchingStructure(shapes, self.UpdateEntryShape)
return shapes
def TransformDTypes(self, dtypes):
dtypes, _ = self._ApplyToMatchingStructure(dtypes, self.UpdateEntryDType)
return dtypes
class CreateDecoderCopy(Preprocessor):
"""Creates references to current lasers, images, and labels.
This is useful if the data is further transformed.
If desired, the keys that are copied can be customized by overriding the
default keys param.
This preprocessor expects features to optionally contain the following keys:
- lasers - a NestedMap of tensors
- images - a NestedMap of tensors
- labels - a NestedMap of tensors
Adds the following features (if the features existed):
- decoder_copy.lasers - a copy of the lasers NestedMap
- decoder_copy.images - a copy of the images NestedMap
- decoder_copy.labels - a copy of the labels NestedMap
The processor also by default pads the laser features; this can be disabled
by setting the pad_lasers param to None.
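  For example (illustrative): with p.keys = ['labels'], an input containing
  labels.bboxes_3d gains a decoder_copy.labels.bboxes_3d copy.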
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('keys', ['lasers', 'labels', 'images'],
'Keys to look for and copy if exists.')
p.Define('parent_key', 'decoder_copy', 'The key to nest the copies under.')
p.Define('pad_lasers', PadLaserFeatures.Params(),
'Params for a layer that pads the laser features.')
p.name = 'create_decoder_copy'
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.pad_lasers is not None:
self.CreateChild('pad_lasers', p.pad_lasers)
def _DeepCopyIfExists(self, keys, nested_map, parent_key):
"""Deep copy a specific key to a parent key if it exists."""
for key in keys:
if key in nested_map:
if parent_key not in nested_map:
nested_map[parent_key] = py_utils.NestedMap()
nested_map[parent_key][key] = nested_map[key].DeepCopy()
return nested_map
def TransformFeatures(self, features):
p = self.params
features = self._DeepCopyIfExists(p.keys, features, p.parent_key)
if p.pad_lasers is not None:
features[p.parent_key] = self.pad_lasers.TransformFeatures(
features[p.parent_key])
return features
def TransformShapes(self, shapes):
p = self.params
shapes = self._DeepCopyIfExists(p.keys, shapes, p.parent_key)
if p.pad_lasers is not None:
shapes[p.parent_key] = self.pad_lasers.TransformShapes(
shapes[p.parent_key])
return shapes
def TransformDTypes(self, dtypes):
p = self.params
dtypes = self._DeepCopyIfExists(p.keys, dtypes, p.parent_key)
if p.pad_lasers is not None:
dtypes[p.parent_key] = self.pad_lasers.TransformDTypes(
dtypes[p.parent_key])
return dtypes
class FilterByKey(Preprocessor):
"""Filters features to keep only specified keys.
This keeps only feature entries that are specified. This allows us to reduce
the number of fields returned. For example, during training, one may not
  need the actual laser points if training with a pillars-based model that
  has a preprocessor that already maps the points to a grid.
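  For example (illustrative): with keep_key_prefixes = ['labels'], only
  features whose (dotted) key starts with 'labels' are kept.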
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'keep_key_prefixes', [''], 'Prefixes of keys to keep. If this '
'contains the empty string, then it will keep all the keys.')
return p
def _FilterFn(self, key, entry):
"""Filter a nested map."""
del entry # unused
p = self.params
for prefix in p.keep_key_prefixes:
if key.startswith(prefix):
return True
return False
def TransformFeatures(self, features):
return features.FilterKeyVal(self._FilterFn)
def TransformShapes(self, shapes):
return shapes.FilterKeyVal(self._FilterFn)
def TransformDTypes(self, dtypes):
return dtypes.FilterKeyVal(self._FilterFn)
class FilterGroundTruthByNumPoints(Preprocessor):
"""Removes ground truth boxes with less than params.min_num_points points.
This preprocessor expects features to contain the following keys::
labels.labels of shape [..., L]
labels.bboxes_3d of shape [..., L, 7]
labels.bboxes_3d_mask of shape [..., L]
labels.unfiltered_bboxes_3d_mask of shape [..., L]
labels.bboxes_3d_num_points of shape [..., L].
Modifies the bounding box data to turn off ground truth objects that don't
meet the params.min_num_points point filter:
labels.labels: Boxes with less than params.min_num_points have their label
set to params.background_id (defaults to 0).
labels.bboxes_3d_mask: Boxes with less than params.min_num_points are set
to 0.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'min_num_points', 1, 'The minimum number of points allowed before '
'the associated ground truth box is turned off. Defaults to 1.')
p.Define(
'background_id', 0, 'The ID of the background class we set '
'filtered boxes to. Defaults to 0.')
return p
def TransformFeatures(self, features):
p = self.params
bbox_is_valid = tf.greater_equal(features.labels.bboxes_3d_num_points,
p.min_num_points)
features.labels.labels = tf.where(
bbox_is_valid, features.labels.labels,
p.background_id * tf.ones_like(features.labels.labels))
features.labels.bboxes_3d_mask *= tf.cast(bbox_is_valid, tf.float32)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class FilterGroundTruthByDifficulty(Preprocessor):
"""Removes groundtruth boxes based on detection difficulty.
This preprocessor expects features to contain the following keys::
labels.single_frame_detection_difficulties of shape [..., L]
labels.labels of shape [..., L]
labels.bboxes_3d_mask of shape [..., L]
labels.unfiltered_bboxes_3d_mask of shape [..., L]
The preprocessor masks out the bboxes_3d_mask / labels based on whether
single_frame_detection_difficulties is greater than p.difficulty_threshold.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'background_id', 0, 'The ID of the background class we set '
'filtered boxes to. Defaults to 0.')
p.Define(
'difficulty_threshold', 1,
'Filter groundtruth bounding boxes whose detection difficulty is '
'greater than `difficulty_threshold`')
return p
def TransformFeatures(self, features):
p = self.params
bbox_is_valid = tf.less_equal(
features.labels.single_frame_detection_difficulties,
p.difficulty_threshold)
features.labels.labels = tf.where(
bbox_is_valid, features.labels.labels,
p.background_id * tf.ones_like(features.labels.labels))
features.labels.bboxes_3d_mask *= tf.cast(bbox_is_valid, tf.float32)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class CountNumberOfPointsInBoxes3D(Preprocessor):
"""Computes bboxes_3d_num_points.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- labels.bboxes_3d of shape [L, 7]
- labels.bboxes_3d_mask of shape [L]
and optionally points_padding of shape [P] corresponding to the padding.
if points_padding is None, then all points are considered valid.
Adds the following features:
labels.bboxes_3d_num_points: [L] - integer tensor containing the number of
laser points for each corresponding bbox.
"""
def TransformFeatures(self, features):
points_xyz = features.lasers.points_xyz
if 'points_padding' in features.lasers:
points_mask = 1 - features.lasers.points_padding
points_xyz = tf.boolean_mask(points_xyz, points_mask)
points_in_bboxes_mask = geometry.IsWithinBBox3D(points_xyz,
features.labels.bboxes_3d)
bboxes_3d_num_points = tf.reduce_sum(
tf.cast(points_in_bboxes_mask, tf.int32), axis=0, keepdims=False)
bboxes_3d_num_points *= tf.cast(features.labels.bboxes_3d_mask, tf.int32)
features.labels.bboxes_3d_num_points = bboxes_3d_num_points
return features
def TransformShapes(self, shapes):
num_bboxes = shapes.labels.bboxes_3d[0]
shapes.labels.bboxes_3d_num_points = tf.TensorShape([num_bboxes])
return shapes
def TransformDTypes(self, dtypes):
dtypes.labels.bboxes_3d_num_points = tf.int32
return dtypes
class AddPerPointLabels(Preprocessor):
"""Computes the class and bbox id of each point.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- labels.bboxes_3d of shape [L, 7]
- labels.labels of shape [L]
  This assumes that each point is in at most one box, which should almost
  always be true in 3D. In cases where this is not true, the largest
label integer and largest bbox_id will be assigned.
  NOTE: Be careful to run this only after any operator that modifies the
  semantic labels of each point in the pointcloud, e.g. after
  GroundTruthAugmentation or DropBoxesOutOfRange.
Adds the following features:
lasers.points_label: [P] - integer tensor containing the class id of each
point.
lasers.points_bbox_id: [P] - integer tensor containing box id of each
point from 0 to num_bboxes, where an id of num_bboxes indicates a
background point.
lasers.points_bbox_3d: [P, 7] - float tensor containing bounding box of
each point.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'per_dimension_adjustment', None,
'A list of len 3 of floats with the amount (in meters) to add to '
'each dimension of the box before using it to select points. '
'If enabled, this is designed to protect against overly tight box '
'annotations that appear in KITTI.')
return p
def TransformFeatures(self, features):
p = self.params
points_xyz = features.lasers.points_xyz
bboxes_3d = features.labels.bboxes_3d
num_points, _ = py_utils.GetShape(points_xyz)
num_bboxes, _ = py_utils.GetShape(bboxes_3d)
if p.per_dimension_adjustment:
if len(p.per_dimension_adjustment) != 3:
raise ValueError(
'param `per_dimension_adjustment` expected to be len 3.')
dims_adjustment = tf.constant([0, 0, 0] + p.per_dimension_adjustment +
[0])
bboxes_3d = bboxes_3d + dims_adjustment
# Find which points are in each box and what class each box is.
points_in_bboxes_mask = geometry.IsWithinBBox3D(points_xyz, bboxes_3d)
points_in_bboxes_mask = tf.cast(points_in_bboxes_mask, tf.int32)
points_in_bboxes_mask = py_utils.HasShape(points_in_bboxes_mask,
[num_points, num_bboxes])
# points_in_bboxes_mask is a [num_points, num_bboxes] 0/1 tensor
# indicating whether that point is in a given box.
# Each point should only be in one box, so after broadcasting the label
# across the binary mask, we do a reduce_max to get the max label id
# for each point. Since each point only belongs to one box, it will be
# the only non-zero (background) label in that box.
# Note: We assume background to be class_id == 0
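    # Worked example (illustrative): with labels [2, 5] and a point inside
    # box 1 only, its mask row [0, 1] broadcasts to [0, 5] and the reduce_max
    # below yields label 5 for that point.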
points_label = tf.reduce_max(
points_in_bboxes_mask * features.labels.labels, axis=1)
points_bbox_id = tf.argmax(
points_in_bboxes_mask, axis=1, output_type=tf.int32)
# If the class is background, make its id == num_bboxes
points_bbox_id = tf.where(points_label > 0, points_bbox_id,
tf.broadcast_to(num_bboxes, [num_points]))
# For each point, get the bbox_3d data.
dummy_bbox = tf.constant([[0, 0, 0, 0, 0, 0, 0]], dtype=tf.float32)
bboxes_3d = tf.concat([bboxes_3d, dummy_bbox], axis=0)
points_bbox_3d = tf.gather(bboxes_3d, points_bbox_id)
points_label = tf.reshape(points_label, [num_points])
points_bbox_id = tf.reshape(points_bbox_id, [num_points])
features.lasers.points_label = points_label
features.lasers.points_bbox_id = points_bbox_id
features.lasers.points_bbox_3d = points_bbox_3d
return features
def TransformShapes(self, shapes):
num_points = shapes.lasers.points_xyz[0]
shapes.lasers.points_label = tf.TensorShape([num_points])
shapes.lasers.points_bbox_id = tf.TensorShape([num_points])
shapes.lasers.points_bbox_3d = tf.TensorShape([num_points, 7])
return shapes
def TransformDTypes(self, dtypes):
dtypes.lasers.points_label = tf.int32
dtypes.lasers.points_bbox_id = tf.int32
dtypes.lasers.points_bbox_3d = tf.float32
return dtypes
class PointsToGrid(Preprocessor):
"""Bins points to a 3D-grid using custom op: ops.point_to_grid.
Expects features to have keys:
- lasers.points_xyz of shape [P, 3]
and optionally points_padding of shape [P] corresponding to the padding.
if points_padding is None, then all points are considered valid.
If normalizing the labels is enabled, then also expects:
- labels.weights
- labels.bboxes_td
- labels.bboxes_td_mask
- labels.bboxes_3d_mask
Let:
gx, gy, gz = p.grid_size
F = 3 + num_laser_features
Adds the following features:
grid_centers: [gx, gy, gz, 3]: For each grid cell, the (x,y,z)
floating point coordinate of its center.
grid_num_points: [gx, gy, gz]: The number of points in each grid
cell (integer).
laser_grid: [gx, gy, gz, num_points_per_cell, F] - A 5D floating
point Tensor containing the laser data placed into a fixed grid.
Modifies the bboxes in labels to also be within the grid range x/y by default.
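  For example (illustrative): with the default grid_size = (40, 40, 1) and
  num_points_per_cell = 100, laser_grid has shape [40, 40, 1, 100, F].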
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('num_points_per_cell', 100,
'The maximum number of points per cell.')
p.Define('grid_size', (40, 40, 1), 'Grid size along x,y,z axis.')
# The max range of x and y is [-80, 80].
p.Define('grid_range_x', (-80, 80), 'The X-axis Range covered by the grid')
p.Define('grid_range_y', (-80, 80), 'The Y-axis Range covered by the grid')
p.Define('grid_range_z', (-2, 4), 'The Z-axis Range covered by the grid')
p.Define('normalize_td_labels', True,
'Whether to clip the labels to the grid limits.')
return p
def _NormalizeLabels(self, ymin, xmin, ymax, xmax, x_range, y_range):
"""Normalizes the bboxes within a given range."""
assert x_range, 'Must specify x_range if clipping.'
assert y_range, 'Must specify y_range if clipping.'
assert len(x_range) == 2, 'x_range %s must be 2 elements.' % x_range
assert len(y_range) == 2, 'y_range %s must be 2 elements.' % y_range
x_range_min = x_range[0]
x_range_len = x_range[1] - x_range[0]
y_range_min = y_range[0]
y_range_len = y_range[1] - y_range[0]
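    # Map each coordinate into [0, 1] relative to the grid range, i.e.
    # xmin_norm = (xmin - x_range[0]) / (x_range[1] - x_range[0]), and
    # similarly for the other three coordinates.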
xmin = tf.cast(xmin - x_range_min, tf.float32) / tf.cast(
x_range_len, tf.float32)
xmax = tf.cast(xmax - x_range_min, tf.float32) / tf.cast(
x_range_len, tf.float32)
ymin = tf.cast(ymin - y_range_min, tf.float32) / tf.cast(
y_range_len, tf.float32)
ymax = tf.cast(ymax - y_range_min, tf.float32) / tf.cast(
y_range_len, tf.float32)
return ymin, xmin, ymax, xmax
def TransformFeatures(self, features):
p = self.params
points_xyz = features.lasers.points_xyz
points_feature = features.lasers.points_feature
if ('points_padding' in features.lasers and
features.lasers.points_padding is not None):
points_mask = 1 - features.lasers.points_padding
points_xyz = tf.boolean_mask(points_xyz, points_mask)
points_feature = tf.boolean_mask(points_feature, points_mask)
points_full = tf.concat([points_xyz, points_feature], axis=-1)
points_grid_full, grid_centers, num_points = ops.point_to_grid(
points_full, p.num_points_per_cell, p.grid_size[0], p.grid_size[1],
p.grid_size[2], p.grid_range_x, p.grid_range_y, p.grid_range_z)
features.laser_grid = points_grid_full
features.grid_centers = grid_centers
features.grid_num_points = num_points
if p.normalize_td_labels:
# Normalize bboxes_td w.r.t grid range.
obb = features.labels
x_range = p.grid_range_x
y_range = p.grid_range_y
ymin, xmin, ymax, xmax = tf.unstack(obb.bboxes_td[..., :4], axis=-1)
ymin, xmin, ymax, xmax = self._NormalizeLabels(
ymin, xmin, ymax, xmax, x_range=x_range, y_range=y_range)
obb.bboxes_td = tf.concat(
[tf.stack([ymin, xmin, ymax, xmax], axis=-1), obb.bboxes_td[..., 4:]],
axis=-1)
return features
def TransformShapes(self, shapes):
p = self.params
shapes.grid_centers = tf.TensorShape(list(p.grid_size) + [3])
shapes.grid_num_points = tf.TensorShape(list(p.grid_size))
shapes.laser_grid = tf.TensorShape(
list(p.grid_size) +
[p.num_points_per_cell, 3 + shapes.lasers.points_feature[-1]])
return shapes
def TransformDTypes(self, dtypes):
dtypes.grid_centers = tf.float32
dtypes.grid_num_points = tf.int32
dtypes.laser_grid = tf.float32
return dtypes
class _PointPillarGridSettings:
"""Settings for PointPillars model defined in paper.
https://arxiv.org/abs/1812.05784
"""
# Chooses grid sizes that are a multiple of 16 to support point pillars
# model requirements. These also happen to match the values
# in the PointPillars paper (voxel width of 0.16m in x, y)
GRID_X = 432
GRID_Y = 496
GRID_Z = 1
# These fields are set in the subclasses.
GRID_X_RANGE = None
GRID_Y_RANGE = None
GRID_Z_RANGE = None
@classmethod
def UpdateGridParams(cls, grid_params):
"""Apply PointPillars settings to grid_params."""
grid_params.grid_size = (cls.GRID_X, cls.GRID_Y, cls.GRID_Z)
grid_params.grid_range_x = cls.GRID_X_RANGE
grid_params.grid_range_y = cls.GRID_Y_RANGE
grid_params.grid_range_z = cls.GRID_Z_RANGE
@classmethod
def UpdateAnchorGridParams(cls, anchor_params, output_stride=2):
"""Apply PointPillars settings to anchor_params."""
# Set anchor settings to match grid settings.
# Grid size for anchors is half the resolution.
anchor_params.grid_size = (cls.GRID_X // output_stride,
cls.GRID_Y // output_stride, cls.GRID_Z)
anchor_params.grid_range_x = cls.GRID_X_RANGE
anchor_params.grid_range_y = cls.GRID_Y_RANGE
# Grid along z axis should be pinned to 0.
anchor_params.grid_range_z = (0, 0)
def MakeGridSettings(grid_x_range, grid_y_range, grid_z_range, grid_x, grid_y,
grid_z):
"""Returns configured class for PointPillar grid settings."""
class GridSettings(_PointPillarGridSettings):
GRID_X_RANGE = grid_x_range
GRID_Y_RANGE = grid_y_range
GRID_Z_RANGE = grid_z_range
GRID_X = grid_x
GRID_Y = grid_y
GRID_Z = grid_z
return GridSettings
PointPillarGridCarSettings = MakeGridSettings(
grid_x_range=(0, 69.12),
grid_y_range=(-39.68, 39.68),
grid_z_range=(-3, 1),
grid_x=432,
grid_y=496,
grid_z=1)
PointPillarGridPedCycSettings = MakeGridSettings(
grid_x_range=(0, 47.36),
grid_y_range=(-19.84, 19.84),
grid_z_range=(-2.5, 0.5),
grid_x=432,
grid_y=496,
grid_z=1)
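# Example usage (sketch): apply a settings class to the Params() of the grid
# extractor above and to GridAnchorCenters (defined later in this file).
# `grid_p` is a placeholder for the grid extractor's Params().
#
#   grid_p = ...  # Params() with grid_size/grid_range_* fields.
#   PointPillarGridCarSettings.UpdateGridParams(grid_p)
#   anchor_p = GridAnchorCenters.Params()
#   PointPillarGridCarSettings.UpdateAnchorGridParams(anchor_p, output_stride=2)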
class GridToPillars(Preprocessor):
"""Create pillars from a grid of points.
Expects features to have keys:
grid_centers: [gx, gy, gz, 3]
grid_num_points: [gx, gy, gz]
laser_grid: [gx, gy, gz, num_points_per_cell, F]
Adds the following features:
point_count: [num_pillars]. The number of points in the pillar.
point_locations: [num_pillars, 3]. The grid location of each pillar.
pillar_points: [num_pillars, num_points_per_cell, F]. Points of each
pillar.
Drops the following features by default:
laser_grid
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('num_points_per_cell', 100,
'The maximum number of points per cell.')
p.Define('num_pillars', 12000, 'The maximum number of pillars to produce.')
p.Define('drop_laser_grid', True, 'Whether to drop the laser_grid feature.')
    # The density-based sampler is more expensive.
p.Define('use_density_sampler', False,
'Use a density based sampler during pillar selection.')
return p
def _GumbelTransform(self, probs):
"""Adds gumbel noise to log probabilities for multinomial sampling.
This enables fast sampling from a multinomial distribution without
replacement. See https://arxiv.org/abs/1611.01144 for details.
A colab that demonstrates this in practice is here:
http://colab/drive/1iuMt2n_r7dKPQG9T0UVMuK3fkbBayKjd
Args:
probs: A 1-D float tensor containing probabilities, summing to 1.
Returns:
A 1-D float tensor of the same size of probs, with gumbel noise added to
log probabilities. Taking the top k elements from this provides a
multinomial sample without replacement.
"""
p = self.params
log_prob = tf.math.log(probs)
probs_shape = tf.shape(probs)
uniform_samples = tf.random.uniform(
shape=probs_shape,
dtype=probs.dtype,
seed=p.random_seed,
name='uniform_samples')
gumbel_noise = -tf.math.log(-tf.math.log(uniform_samples))
return gumbel_noise + log_prob
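  # A minimal standalone sketch of the Gumbel-top-k trick used above: taking
  # the top k entries of log(p) + Gumbel noise yields a size-k multinomial
  # sample from p without replacement.
  #
  #   probs = tf.constant([0.1, 0.2, 0.3, 0.4])
  #   gumbel = -tf.math.log(-tf.math.log(tf.random.uniform(tf.shape(probs))))
  #   _, idx = tf.nn.top_k(tf.math.log(probs) + gumbel, k=2)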
def _DensitySample(self, num_points):
p = self.params
# Flatten to [nx * ny * nz] for convenience during sampling.
num_grid_points = np.prod(p.grid_size)
flattened_num_points = tf.reshape(num_points, [num_grid_points])
# Normalize flattened_num_points to sum to 1.
flattened_num_points = tf.cast(flattened_num_points, tf.float32)
flattened_num_points /= tf.reduce_sum(flattened_num_points)
# TODO(jngiam): Consider generalizing this to enable other methods of
# sampling: e.g., use largest deviation in z-axis. The gumbel transform
# can still be applied regardless.
# Add gumbel noise for multinomial sampling.
sampling_logits = self._GumbelTransform(flattened_num_points)
_, locations = tf.nn.top_k(
sampling_logits, k=min(p.num_pillars, num_grid_points))
# Unravel coordinates back to grid locations.
locations = tf.unravel_index(locations, p.grid_size)
# Unravel index will return a 3 x num_locations tensor, this needs to be
# transposed so that we have it as num_locations x 3.
locations = py_utils.HasShape(locations, [3, -1])
locations = tf.transpose(locations)
return locations
def TransformFeatures(self, features):
p = self.params
num_points = features.grid_num_points
if p.use_density_sampler:
locations = self._DensitySample(num_points)
else:
# Select non-empty cells uniformly at random.
locations = tf.random.shuffle(tf.cast(tf.where(num_points > 0), tf.int32))
num_features = py_utils.GetShape(features.laser_grid)[-1]
# [nx, ny, nz, np, 4] (x, y, z, f)
points = features.laser_grid
# [K, np, 4] (x, y, z, f)
points = tf.gather_nd(points, locations)
# [nx, ny, nz, 1, 3] (cx, cy, cz)
centers = features.grid_centers[..., tf.newaxis, :]
# [K, 1, 3] (cx, cy, cz)
centers = tf.gather_nd(centers, locations)
# NOTE: If there are fewer pillars than p.num_pillars, the following
# padding creates many 'fake' pillars at grid cell (0, 0, 0) with
# an all-zero pillar. Hopefully, the model can learn to ignore these.
#
# pillar_points[i, :, :] is the pillar located at pillar_locations[i, :3],
# and pillar_points[i, :, :] == points_grid_full[pillar_locations[i, :3]].
# for 0 <= i < pillar_count;
# pillar_locations[i, :3] are zero-ed, for i >= pillar_count.
features.pillar_count = tf.shape(locations)[0]
features.pillar_locations = py_utils.PadOrTrimTo(locations,
[p.num_pillars, 3])
features.pillar_points = py_utils.PadOrTrimTo(
points, [p.num_pillars, p.num_points_per_cell, num_features])
features.pillar_centers = py_utils.PadOrTrimTo(centers,
[p.num_pillars, 1, 3])
if p.drop_laser_grid:
del features['laser_grid']
return features
def TransformShapes(self, shapes):
p = self.params
num_features = shapes.laser_grid[-1]
shapes.pillar_count = tf.TensorShape([])
shapes.pillar_locations = tf.TensorShape([p.num_pillars, 3])
shapes.pillar_points = tf.TensorShape(
[p.num_pillars, p.num_points_per_cell, num_features])
shapes.pillar_centers = tf.TensorShape([p.num_pillars, 1, 3])
if p.drop_laser_grid:
del shapes['laser_grid']
return shapes
def TransformDTypes(self, dtypes):
p = self.params
dtypes.pillar_count = tf.int32
dtypes.pillar_locations = tf.int32
dtypes.pillar_points = tf.float32
dtypes.pillar_centers = tf.float32
if p.drop_laser_grid:
del dtypes['laser_grid']
return dtypes
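# Example configuration (sketch; the values below are illustrative, not
# recommendations):
#
#   pillars_p = GridToPillars.Params().Set(
#       num_pillars=12000, use_density_sampler=True)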
class GridAnchorCenters(Preprocessor):
"""Create anchor centers on a grid.
Anchors are placed in the middle of each grid cell. For example, on a 2D grid
range (0 -> 10, 0 -> 10) with a 10 x 5 grid size, the anchors will be placed
at [(0.5, 1), (0.5, 3), ... , (9.5, 7), (9.5, 9)].
Adds the following features:
anchor_centers: [num_locations, 3] - Floating point output containing the
center (x, y, z) locations for tiling anchor boxes.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'grid_size', (20, 20, 1), 'Grid size along x,y,z axis. This will '
'be used to generate the anchor center locations. Note that this '
'would likely be different from the grid_* parameters in '
'LaserGridExtractor: the grid extractor may choose to extract '
'points more densely. Instead, this should correspond to the '
'model\'s prediction layer: the predicted anchor box residuals '
'should match this grid.')
p.Define('grid_range_x', (-25, 25), 'The x-axis range covered by the grid.')
p.Define('grid_range_y', (-25, 25), 'The y-axis range covered by the grid.')
p.Define('grid_range_z', (0, 0), 'The z-axis range covered by the grid.')
return p
def TransformFeatures(self, features):
p = self.params
utils_3d = detection_3d_lib.Utils3D()
# Compute the grid cell size and adjust the range sent to dense coordinates
# by half a cell size so as to ensure that the anchors are placed in the
# center of each grid cell.
grid_size_x, grid_size_y, grid_size_z = p.grid_size
grid_cell_sizes = [
float(p.grid_range_x[1] - p.grid_range_x[0]) / grid_size_x,
float(p.grid_range_y[1] - p.grid_range_y[0]) / grid_size_y,
float(p.grid_range_z[1] - p.grid_range_z[0]) / grid_size_z,
]
half_size_x, half_size_y, half_size_z = np.asarray(grid_cell_sizes) / 2.0
grid_shape = list(p.grid_size) + [3]
anchor_centers = utils_3d.CreateDenseCoordinates([
[
p.grid_range_x[0] + half_size_x,
p.grid_range_x[1] - half_size_x,
grid_size_x
],
[
p.grid_range_y[0] + half_size_y,
p.grid_range_y[1] - half_size_y,
grid_size_y
],
[
p.grid_range_z[0] + half_size_z,
p.grid_range_z[1] - half_size_z,
grid_size_z
],
]) # pyformat: disable
features.anchor_centers = tf.reshape(anchor_centers, grid_shape)
return features
def TransformShapes(self, shapes):
p = self.params
shapes.anchor_centers = tf.TensorShape(list(p.grid_size) + [3])
return shapes
def TransformDTypes(self, dtypes):
dtypes.anchor_centers = tf.float32
return dtypes
class SparseCenterSelector(Preprocessor):
"""Select centers for anchors and cells.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
and optionally points_padding of shape [P] corresponding to the padding.
  If points_padding is None, then all points are considered valid.
If lasers.num_seeded_points of shape [] is provided, it indicates that the
first num_seeded_points of lasers.points_xyz should be used as seeds for
  farthest point sampling (i.e., they are always chosen). Currently the
  concept of seeding is implemented only for farthest point sampling.
Adds the following features:
anchor_centers: [num_cell_centers, 3] - Floating point output containing the
center (x, y, z) locations for tiling anchor boxes.
cell_center_xyz: [num_cell_centers, 3] - Floating point output containing
the center (x, y, z) locations for each cell to featurize.
"""
_SAMPLING_METHODS = ['farthest_point', 'random_uniform']
@classmethod
def Params(cls):
p = super().Params()
p.Define('num_cell_centers', 256, 'Number of centers.')
p.Define(
'features_preparation_layers', [],
'A list of Params for layers to run on the features before '
'performing farthest point sampling. For example, one may wish to '
'drop points out of frustum for KITTI before selecting centers. '
'Note that these layers will not mutate the original features, '
'instead, a copy will be made.')
p.Define(
'sampling_method', 'farthest_point',
'Which sampling method to use. One of {}'.format(cls._SAMPLING_METHODS))
p.Define(
'fix_z_to_zero', True, 'Whether to fix z to 0 when retrieving the '
'center xyz coordinates.')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.sampling_method not in self._SAMPLING_METHODS:
raise ValueError('Param `sampling_method` must be one of {}.'.format(
self._SAMPLING_METHODS))
if p.features_preparation_layers is not None:
self.CreateChildren('features_preparation_layers',
p.features_preparation_layers)
def _FarthestPointSampleCenters(self, points_xyz, num_seeded_points):
"""Samples centers with Farthest Point Sampling.
Args:
points_xyz: An unpadded tf.float32 Tensor of shape [P, 3] with per point
(x, y, z) locations. We expect any padded points to be removed before
this function is called.
num_seeded_points: integer indicating how many of the first
num_seeded_points points in points_xyz should be considered
as seeds for FPS (always chosen).
Returns:
A tf.float32 Tensor of shape [p.num_cell_centers, 3] with selected centers
to use as anchors.
"""
p = self.params
num_points = tf.shape(points_xyz)[0]
points_padding = tf.zeros((num_points,), dtype=tf.float32)
padded_num_points = tf.maximum(num_points, p.num_cell_centers)
    # Pad both the points and padding if for some reason the input pointcloud
    # has fewer points than p.num_cell_centers.
points_xy = py_utils.PadOrTrimTo(points_xyz[:, :2], [padded_num_points, 2])
points_padding = py_utils.PadOrTrimTo(
points_padding, [padded_num_points], pad_val=1.0)
sampled_idx, _ = car_lib.FarthestPointSampler(
points_xy[tf.newaxis, ...],
points_padding[tf.newaxis, ...],
p.num_cell_centers,
num_seeded_points=num_seeded_points,
random_seed=p.random_seed)
sampled_idx = sampled_idx[0, :]
# Gather centers.
if p.fix_z_to_zero:
centers = tf.concat([
tf.gather(points_xy, sampled_idx),
tf.zeros((p.num_cell_centers, 1)),
], axis=-1) # pyformat: disable
else:
centers = tf.gather(points_xyz, sampled_idx)
return centers
def _RandomUniformSampleCenters(self, points_xyz):
"""Samples centers with Random Uniform Sampling.
Args:
points_xyz: An unpadded tf.float32 Tensor of shape [P, 3] with per point
(x, y, z) locations. We expect any padded points to be removed before
this function is called.
Returns:
A tf.float32 Tensor of shape [p.num_cell_centers, 3] with selected centers
to use as anchors.
"""
p = self.params
    # We want the center z value to be 0, so exclude z here and append zeros
    # below.
centers_xy = tf.random.shuffle(points_xyz[:, :2], seed=p.random_seed)
selected_centers_xy = py_utils.PadOrTrimTo(centers_xy,
[p.num_cell_centers, 2])
return tf.concat([selected_centers_xy,
tf.zeros((p.num_cell_centers, 1))],
axis=-1)
def _SampleCenters(self, points_xyz, num_seeded_points):
p = self.params
if p.sampling_method == 'farthest_point':
return self._FarthestPointSampleCenters(points_xyz, num_seeded_points)
elif p.sampling_method == 'random_uniform':
if num_seeded_points > 0:
raise NotImplementedError(
'Random sampling with seeded points not yet implemented.')
return self._RandomUniformSampleCenters(points_xyz)
else:
raise ValueError('Param `sampling_method` must be one of {}.'.format(
self._SAMPLING_METHODS))
def TransformFeatures(self, features):
p = self.params
prepared_features = features.DeepCopy()
for prep_layer in self.features_preparation_layers:
prepared_features = prep_layer.FPropDefaultTheta(prepared_features)
num_seeded_points = prepared_features.lasers.get('num_seeded_points', 0)
points_data = prepared_features.lasers
points_xyz = points_data.points_xyz
if 'points_padding' in points_data:
points_padding = points_data.points_padding
points_mask = 1 - points_padding
points_xyz = tf.boolean_mask(points_xyz, points_mask)
centers = self._SampleCenters(points_xyz, num_seeded_points)
centers = py_utils.HasShape(centers, [p.num_cell_centers, 3])
features.anchor_centers = centers
features.cell_center_xyz = centers
return features
def TransformShapes(self, shapes):
p = self.params
shapes.anchor_centers = tf.TensorShape([p.num_cell_centers, 3])
shapes.cell_center_xyz = tf.TensorShape([p.num_cell_centers, 3])
return shapes
def TransformDTypes(self, dtypes):
dtypes.anchor_centers = tf.float32
dtypes.cell_center_xyz = tf.float32
return dtypes
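# Example configuration (sketch; illustrative values):
#
#   centers_p = SparseCenterSelector.Params().Set(
#       num_cell_centers=256, sampling_method='farthest_point')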
class SparseCellGatherFeatures(Preprocessor):
"""Select local features for each cell.
This preprocessor expects features to contain:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
- cell_center_xyz of shape [C, 3]
and optionally points_padding of shape [P] corresponding to the padding.
  If points_padding is None, then all points are considered valid.
Adds the following features:
cell_points_xyz: [num_centers, num_points_per_cell, 3] - Floating point
output containing the (x, y, z) locations for each point for a given
center.
cell_feature: [num_centers, num_points_per_cell, F] - Floating point output
containing the features for each point for a given center.
cell_points_padding: [num_centers, num_points_per_cell] - 0/1 padding
for the points in each cell.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('num_points_per_cell', 128, 'The number of points per cell.')
p.Define('max_distance', 3.0, 'Max distance of point to cell center.')
p.Define(
'sample_neighbors_uniformly', False,
'Whether to sample the neighbor points for every cell center '
'uniformly at random. If False, this will default to selecting by '
'distance.')
return p
def TransformFeatures(self, features):
p = self.params
num_centers = py_utils.GetShape(features.cell_center_xyz, 1)[0]
num_features = py_utils.GetShape(features.lasers.points_feature)[-1]
points_xyz = features.lasers.points_xyz
points_feature = features.lasers.points_feature
if 'points_padding' in features.lasers:
points_mask = 1 - features.lasers.points_padding
points_xyz = tf.boolean_mask(points_xyz, points_mask)
points_feature = tf.boolean_mask(points_feature, points_mask)
    # Note: points_xyz and points_feature must be unpadded, since we pass
    # points_padding=None to NeighborhoodIndices. Keeping them unpadded also
    # helps improve performance.
# Get nearby points using kNN.
sample_indices, sample_indices_padding = car_lib.NeighborhoodIndices(
tf.expand_dims(points_xyz, 0),
tf.expand_dims(features.cell_center_xyz, 0),
p.num_points_per_cell,
points_padding=None,
max_distance=p.max_distance,
sample_neighbors_uniformly=p.sample_neighbors_uniformly)
    # Take the first example since NeighborhoodIndices expects a batch
    # dimension.
sample_indices = sample_indices[0, :, :]
sample_indices_padding = sample_indices_padding[0, :, :]
sample_indices = py_utils.HasShape(sample_indices,
[num_centers, p.num_points_per_cell])
cell_points_xyz = tf.gather(points_xyz, sample_indices)
cell_points_xyz = py_utils.HasShape(cell_points_xyz,
[num_centers, p.num_points_per_cell, 3])
cell_feature = tf.gather(points_feature, sample_indices)
cell_feature = py_utils.HasShape(
cell_feature, [num_centers, p.num_points_per_cell, num_features])
cell_points_padding = py_utils.HasShape(
sample_indices_padding, [num_centers, p.num_points_per_cell])
features.update({
'cell_points_xyz': cell_points_xyz,
'cell_feature': cell_feature,
'cell_points_padding': cell_points_padding,
})
return features
def TransformShapes(self, shapes):
p = self.params
num_centers = shapes.cell_center_xyz[0]
base_shape = [num_centers, p.num_points_per_cell]
num_features = shapes.lasers.points_feature[-1]
shapes.cell_points_xyz = tf.TensorShape(base_shape + [3])
shapes.cell_feature = tf.TensorShape(base_shape + [num_features])
shapes.cell_points_padding = tf.TensorShape(base_shape)
return shapes
def TransformDTypes(self, dtypes):
dtypes.cell_points_xyz = tf.float32
dtypes.cell_feature = tf.float32
dtypes.cell_points_padding = tf.float32
return dtypes
class SparseCellCentersTopK(Preprocessor):
"""Given selected centers and gathered points/features, apply a filter.
This preprocessor expects features to contain `cell_center_xyz` and all
  entries in params.features_to_modify, and that their leading dimensions
  should all be the same (num_cell_centers from SparseCenterSelector).
We then modify all values in features that are specified in
params.features_to_modify by sorting them with the specified sort function
(specified by params.sort_by) operating on features.cell_center_xyz, and then
taking the top K (specified by params.num_cell_centers) along the first
dimension.
"""
_REGISTERED_SORT_FUNCTIONS = ['distance']
@classmethod
def Params(cls):
p = super().Params()
p.Define('num_cell_centers', 512, 'The number of centers after filtering.')
p.Define(
'sort_by', 'distance', 'A string specifying which sort function '
        'to use. Currently only `distance` is supported.')
p.Define('features_to_modify', [
'cell_center_xyz', 'anchor_centers', 'cell_points_xyz', 'cell_feature',
'cell_points_padding'
], 'A list of keys from the features dict to modify.')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.sort_by not in self._REGISTERED_SORT_FUNCTIONS:
raise ValueError('{} not supported. We only support {}.'.format(
p.sort_by, self._REGISTERED_SORT_FUNCTIONS))
if len(p.features_to_modify) < 1:
raise ValueError('Need to modify at least one feature.')
def _SortByDistance(self, features):
dist = tf.linalg.norm(features.cell_center_xyz, axis=-1)
return tf.argsort(dist, axis=-1, direction='ASCENDING')
def _Sort(self, features):
p = self.params
if p.sort_by == 'distance':
return self._SortByDistance(features)
else:
raise ValueError('Unsupported sort function: {}.'.format(p.sort_by))
def TransformFeatures(self, features):
p = self.params
sort_indices = self._Sort(features)
sort_indices_top_k = sort_indices[:p.num_cell_centers, ...]
# Gather each of the relevant items
for key in p.features_to_modify:
shape = py_utils.GetShape(features[key])
output_shape = [p.num_cell_centers] + shape[1:]
features[key] = py_utils.PadOrTrimTo(
tf.gather(features[key], sort_indices_top_k), output_shape)
return features
def TransformShapes(self, shapes):
p = self.params
for key in p.features_to_modify:
shapes[key] = tf.TensorShape([p.num_cell_centers] + shapes[key][1:])
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class TileAnchorBBoxes(Preprocessor):
"""Creates anchor_bboxes given anchor_centers.
This preprocessor expects features to contain the following keys:
- anchor_centers of shape [...base shape..., 3]
Adds the following features:
anchor_bboxes: base_shape + [7] - Floating point anchor box
output containing the anchor boxes and the 7 floating point
values for each box that define the box (x, y, z, dx, dy, dz, phi).
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('anchor_box_dimensions', [],
'List of anchor box sizes per center.')
p.Define('anchor_box_offsets', [], 'List of anchor box offsets per center.')
p.Define('anchor_box_rotations', [],
'List of anchor box rotations per center.')
return p
def TransformFeatures(self, features):
p = self.params
utils_3d = detection_3d_lib.Utils3D()
assert p.anchor_box_dimensions
assert p.anchor_box_offsets
assert p.anchor_box_rotations
base_shape = py_utils.GetShape(features.anchor_centers)[:-1]
num_box_per_center = len(p.anchor_box_dimensions)
anchor_centers = tf.reshape(features.anchor_centers, [-1, 3])
anchor_bboxes = utils_3d.MakeAnchorBoxes(
anchor_centers, tf.identity(p.anchor_box_dimensions),
tf.identity(p.anchor_box_offsets), tf.identity(p.anchor_box_rotations))
features.anchor_bboxes = tf.reshape(anchor_bboxes,
base_shape + [num_box_per_center, 7])
return features
def TransformShapes(self, shapes):
p = self.params
base_shape = shapes.anchor_centers[:-1]
num_box_per_center = len(p.anchor_box_dimensions)
shapes.anchor_bboxes = base_shape.concatenate([num_box_per_center, 7])
return shapes
def TransformDTypes(self, dtypes):
dtypes.anchor_bboxes = tf.float32
return dtypes
class _AnchorBoxSettings:
"""Helper class to parameterize and update anchor box settings."""
# Implementations should fill out the following class members.
DIMENSION_PRIORS = []
ROTATIONS = []
CENTER_X_OFFSETS = []
CENTER_Y_OFFSETS = []
CENTER_Z_OFFSETS = []
@classmethod
def NumAnchors(cls):
return np.prod([
len(cls.DIMENSION_PRIORS),
len(cls.ROTATIONS),
len(cls.CENTER_X_OFFSETS),
len(cls.CENTER_Y_OFFSETS),
len(cls.CENTER_Z_OFFSETS)
])
@classmethod
def GenerateAnchorSettings(cls):
"""Generate anchor settings.
Returns:
A `NestedMap` containing three lists of the same length:
- anchor_box_dimensions
- anchor_box_rotations
- anchor_box_offsets
These can be used with the TileAnchorBBoxes preprocessor.
"""
anchor_box_dimensions = []
anchor_box_rotations = []
anchor_box_offsets = []
    # The following is equivalent to itertools.product, but is written out
    # explicitly for readability.
# *Please note*: The ordering is important for ModelV2, which makes
# assumptions that the offset dimensions come first.
for cx in cls.CENTER_X_OFFSETS:
for cy in cls.CENTER_Y_OFFSETS:
for cz in cls.CENTER_Z_OFFSETS:
for rot in cls.ROTATIONS:
for dims in cls.DIMENSION_PRIORS:
anchor_box_dimensions += [dims]
anchor_box_rotations += [rot]
anchor_box_offsets += [(cx, cy, cz)]
    # Check that at least one of the lists has entries.
assert anchor_box_dimensions
return py_utils.NestedMap(
anchor_box_dimensions=anchor_box_dimensions,
anchor_box_rotations=anchor_box_rotations,
anchor_box_offsets=anchor_box_offsets)
@classmethod
def Update(cls, params):
"""Updates anchor box settings from input configuration lists.
Given dimensions priors, rotations, and offsets, computes the cartesian
product of the settings.
Args:
params: The KITTIAnchorExtractorBase.Params() object to update.
Returns:
Params updated with the anchor settings.
In total there are N combinations, where each (anchor_box_dimensions[i],
anchor_box_rotations[i], anchor_box_offsets[i]) for i in range(N) is an
option.
"""
p = params
settings = cls.GenerateAnchorSettings()
p.anchor_box_dimensions = settings.anchor_box_dimensions
p.anchor_box_rotations = settings.anchor_box_rotations
p.anchor_box_offsets = settings.anchor_box_offsets
return p
def MakeAnchorBoxSettings(dimension_priors, rotations, center_x_offsets,
center_y_offsets, center_z_offsets):
"""Returns a configured class for setting anchor box settings."""
class CustomAnchorBoxSettings(_AnchorBoxSettings):
DIMENSION_PRIORS = dimension_priors
ROTATIONS = rotations
CENTER_X_OFFSETS = center_x_offsets
CENTER_Y_OFFSETS = center_y_offsets
CENTER_Z_OFFSETS = center_z_offsets
return CustomAnchorBoxSettings
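# Example usage (sketch): build a small anchor configuration and apply it to
# a TileAnchorBBoxes preprocessor. The dimension/offset values below are
# illustrative only.
#
#   MySettings = MakeAnchorBoxSettings(
#       dimension_priors=[(1.6, 3.9, 1.56)],
#       rotations=[0., np.pi / 2],
#       center_x_offsets=[0.],
#       center_y_offsets=[0.],
#       center_z_offsets=[-1.])
#   tile_p = MySettings.Update(TileAnchorBBoxes.Params())
#   assert MySettings.NumAnchors() == 2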
class SparseCarV1AnchorBoxSettings(_AnchorBoxSettings):
"""Anchor box settings for training on Cars for Sparse models."""
# Borrowed from PointPillar dimension prior for cars.
DIMENSION_PRIORS = [(1.6, 3.9, 1.56)]
  # 4 rotations: axis-aligned plus both diagonals.
ROTATIONS = [0, np.pi / 2, np.pi / 4, 3 * np.pi / 4]
# 25 offsets per anchor box with fixed z offset at -1.
CENTER_X_OFFSETS = np.linspace(-1.5, 1.5, 5)
CENTER_Y_OFFSETS = np.linspace(-1.5, 1.5, 5)
CENTER_Z_OFFSETS = [-1.]
class PointPillarAnchorBoxSettingsCar(_AnchorBoxSettings):
DIMENSION_PRIORS = [(1.6, 3.9, 1.56)]
ROTATIONS = [0, np.pi / 2]
  # Fixed offsets for every anchor box, based on a reading of the paper/code:
  # 0 for x and y, and -1 for z.
CENTER_X_OFFSETS = [0.]
CENTER_Y_OFFSETS = [0.]
CENTER_Z_OFFSETS = [-1.]
class PointPillarAnchorBoxSettingsPed(PointPillarAnchorBoxSettingsCar):
DIMENSION_PRIORS = [(0.6, 0.8, 1.73)]
CENTER_Z_OFFSETS = [-0.6]
class PointPillarAnchorBoxSettingsCyc(PointPillarAnchorBoxSettingsCar):
DIMENSION_PRIORS = [(0.6, 1.76, 1.73)]
CENTER_Z_OFFSETS = [-0.6]
class PointPillarAnchorBoxSettingsPedCyc(PointPillarAnchorBoxSettingsCar):
DIMENSION_PRIORS = [(0.6, 0.8, 1.7), (0.6, 1.76, 1.73)]
CENTER_Z_OFFSETS = [-0.6]
class AnchorAssignment(Preprocessor):
"""Perform anchor assignment on the features.
This preprocessor expects features to contain the following keys:
- anchor_bboxes of shape [...base shape..., 7]
- labels.bboxes_3d
- labels.labels
- labels.bboxes_3d_mask
Adds the following features:
anchor_localization_residuals: base_shape + [7] floating point tensor of
residuals. The model is expected to regress against these residuals as
targets. The residuals can be converted back into bboxes using
detection_3d_lib.Utils3D.ResidualsToBBoxes.
assigned_gt_idx: base_shape - The corresponding index of the ground
      truth bounding box for each anchor box in anchor_bboxes; anchors that
      are not assigned have idx set to -1.
assigned_gt_bbox: base_shape + [7] - The corresponding ground
truth bounding box for each anchor box in anchor_bboxes.
assigned_gt_labels: base_shape - The assigned groundtruth label
for each anchor box.
assigned_gt_similarity_score: base_shape - The similarity score
for each assigned anchor box.
assigned_cls_mask: base_shape mask for classification loss per anchor.
This should be 1.0 if the anchor has a foreground or background
assignment; otherwise, it will be assigned to 0.0.
assigned_reg_mask: base_shape mask for regression loss per anchor.
This should be 1.0 if the anchor has a foreground assignment;
otherwise, it will be assigned to 0.0.
Note: background anchors do not have regression targets.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'foreground_assignment_threshold', 0.5,
'Score (usually IOU) threshold for assigning a box as foreground.')
p.Define(
'background_assignment_threshold', 0.35,
'Score (usually IOU) threshold for assigning a box as background.')
return p
def TransformFeatures(self, features):
p = self.params
utils_3d = detection_3d_lib.Utils3D()
    # anchor_bboxes is returned with shape [#centers, #boxes_per_center, 7];
    # flatten the boxes here for matching.
base_shape = py_utils.GetShape(features.anchor_bboxes)[:-1]
anchor_bboxes = tf.reshape(features.anchor_bboxes, [-1, 7])
assigned_anchors = utils_3d.AssignAnchors(
anchor_bboxes,
features.labels.bboxes_3d,
features.labels.labels,
features.labels.bboxes_3d_mask,
foreground_assignment_threshold=p.foreground_assignment_threshold,
background_assignment_threshold=p.background_assignment_threshold)
# Add new features.
features.assigned_gt_idx = tf.reshape(assigned_anchors.assigned_gt_idx,
base_shape)
features.assigned_gt_bbox = tf.reshape(assigned_anchors.assigned_gt_bbox,
base_shape + [7])
features.assigned_gt_labels = tf.reshape(
assigned_anchors.assigned_gt_labels, base_shape)
features.assigned_gt_similarity_score = tf.reshape(
assigned_anchors.assigned_gt_similarity_score, base_shape)
features.assigned_cls_mask = tf.reshape(assigned_anchors.assigned_cls_mask,
base_shape)
features.assigned_reg_mask = tf.reshape(assigned_anchors.assigned_reg_mask,
base_shape)
# Compute residuals.
features.anchor_localization_residuals = utils_3d.LocalizationResiduals(
features.anchor_bboxes, features.assigned_gt_bbox)
return features
def TransformShapes(self, shapes):
base_shape = shapes.anchor_bboxes[:-1]
box_shape = base_shape.concatenate([7])
shapes.anchor_localization_residuals = box_shape
shapes.assigned_gt_idx = base_shape
shapes.assigned_gt_bbox = box_shape
shapes.assigned_gt_labels = base_shape
shapes.assigned_gt_similarity_score = base_shape
shapes.assigned_cls_mask = base_shape
shapes.assigned_reg_mask = base_shape
return shapes
def TransformDTypes(self, dtypes):
dtypes.anchor_localization_residuals = tf.float32
dtypes.assigned_gt_idx = tf.int32
dtypes.assigned_gt_bbox = tf.float32
dtypes.assigned_gt_labels = tf.int32
dtypes.assigned_gt_similarity_score = tf.float32
dtypes.assigned_cls_mask = tf.float32
dtypes.assigned_reg_mask = tf.float32
return dtypes
class DropLaserPointsOutOfRange(Preprocessor):
"""Drops laser points that are out of pre-defined x/y/z ranges.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
and optionally points_padding of shape [P] corresponding to the padding.
  If points_padding is None, then all points are considered valid.
Modifies the following features:
Removes or sets padding to 1 for all points outside a given range. Modifies
all items in the lasers subdictionary like lasers.points_xyz,
lasers.points_feature, lasers.points_padding, and optionally
lasers.points_label, lasers.points_bbox_id.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('keep_x_range', (-np.inf, np.inf),
'Only points that have x coordinates within this range are kept.')
p.Define('keep_y_range', (-np.inf, np.inf),
'Only points that have y coordinates within this range are kept.')
p.Define(
'keep_z_range', (-np.inf, np.inf),
'Only points that have z coordinates within this range are kept. '
'Approximate ground-removal can be performed by specifying a '
'lower-bound on the z-range.')
return p
def TransformFeatures(self, features):
p = self.params
points_xyz = features.lasers.points_xyz
if 'points_padding' in features.lasers:
points_mask = tf.cast(1 - features.lasers.points_padding, tf.bool)
else:
      # All points are real; we keep them unpadded by applying boolean_mask
      # with points_mask later.
points_mask = tf.ones_like(points_xyz[:, 0], dtype=tf.bool)
min_x, max_x = p.keep_x_range
min_y, max_y = p.keep_y_range
min_z, max_z = p.keep_z_range
# Short-circuit if all ranges are set to -inf, inf.
if (np.all(np.isneginf([min_x, min_y, min_z])) and
np.all(np.isposinf([max_x, max_y, max_z]))):
return features
if min_x != -np.inf:
points_mask &= points_xyz[:, 0] >= min_x
if min_y != -np.inf:
points_mask &= points_xyz[:, 1] >= min_y
if min_z != -np.inf:
points_mask &= points_xyz[:, 2] >= min_z
if max_x != np.inf:
points_mask &= points_xyz[:, 0] <= max_x
if max_y != np.inf:
points_mask &= points_xyz[:, 1] <= max_y
if max_z != np.inf:
points_mask &= points_xyz[:, 2] <= max_z
if 'points_padding' in features.lasers:
# Suffices to just update the padding.
features.lasers.points_padding = 1. - tf.cast(points_mask, tf.float32)
else:
features.lasers = features.lasers.Transform(
_GetApplyPointMaskFn(points_mask))
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
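# Example (sketch): approximate ground removal, as noted in the keep_z_range
# help string, by dropping points below an illustrative z threshold.
#
#   range_p = DropLaserPointsOutOfRange.Params().Set(
#       keep_z_range=(-1.0, np.inf))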
class KITTIDropPointsOutOfFrustum(Preprocessor):
"""Drops laser points that are outside of the camera frustum.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
- images.velo_to_image_plane of shape [3, 4]
- images.width of shape [1]
- images.height of shape [1]
and optionally points_padding of shape [P] corresponding to the padding.
  If points_padding is None, then all points are considered valid.
Modifies the following features:
lasers.points_xyz, lasers.points_feature, lasers.points_padding, and
optionally lasers.points_label, lasers.points_bbox_id so that
points outside the frustum have padding set to 1 or are removed.
"""
def TransformFeatures(self, features):
    # Drop points behind the car (points with x < 0).
images = features.images
front_indices = features.lasers.points_xyz[:, 0] >= 0
if 'points_padding' not in features.lasers:
# Keep tensors unpadded and small using boolean_mask.
features.lasers.points_xyz = tf.boolean_mask(features.lasers.points_xyz,
front_indices)
features.lasers.points_feature = tf.boolean_mask(
features.lasers.points_feature, front_indices)
# Drop those points outside the image plane.
points_image = geometry.PointsToImagePlane(features.lasers.points_xyz,
images.velo_to_image_plane)
in_image_plane = (
(points_image[:, 0] >= 0) &
(points_image[:, 0] <= tf.cast(images.width, tf.float32)) &
(points_image[:, 1] >= 0) &
(points_image[:, 1] <= tf.cast(images.height, tf.float32)))
if 'points_padding' in features.lasers:
# Update padding to only include front indices and in image plane.
points_mask = tf.cast(1 - features.lasers.points_padding, tf.bool)
points_mask &= front_indices
points_mask &= in_image_plane
features.lasers.points_padding = 1. - tf.cast(points_mask, tf.float32)
else:
features.lasers = features.lasers.Transform(
_GetApplyPointMaskFn(in_image_plane))
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class RandomWorldRotationAboutZAxis(Preprocessor):
"""Rotates the world randomly as a form of data augmentation.
  Rotations are performed around the *z-axis*. This assumes that the car is
  always level. Ideally we would instead rotate the car on the spot, which
  would also make sense for cases where the car is on a slope.
When there are leading dimensions, this will rotate the boxes with the same
transformation across all the frames. This is useful when the input is a
sequence of frames from the same run segment.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [..., 3]
- labels.bboxes_3d of shape [..., 7]
Modifies the following features:
lasers.points_xyz, labels.bboxes_3d with the same rotation applied to both.
Adds the following features:
world_rot_z which contains the rotation applied to the example.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'max_rotation', None,
'The rotation amount will be randomly picked from '
'[-max_rotation, max_rotation).')
p.Define(
'include_world_rot_z', True,
'Whether to include the applied rotation as an additional tensor. '
'It can be helpful to disable this when using the preprocessor in a '
'way that expects the structure of the features to be the same '
'(e.g., as a branch in tf.cond).')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.max_rotation is None:
raise ValueError('max_rotation needs to be specified, instead of None.')
def TransformFeatures(self, features):
p = self.params
rot = tf.random.uniform((),
minval=-p.max_rotation,
maxval=p.max_rotation,
seed=p.random_seed)
    # Rotating about the z-axis corresponds to a change in yaw.
pose = [0., 0., 0., rot, 0., 0.]
# Rotate points.
features.lasers.points_xyz = geometry.CoordinateTransform(
features.lasers.points_xyz, pose)
    # Rotate bboxes; note that the heading needs a special correction.
bboxes_xyz = features.labels.bboxes_3d[..., :3]
bboxes_dims = features.labels.bboxes_3d[..., 3:6]
bboxes_rot = features.labels.bboxes_3d[..., 6:]
bboxes_xyz = geometry.CoordinateTransform(bboxes_xyz, pose)
# The heading correction should subtract rot from the bboxes rotations.
bboxes_rot = geometry.WrapAngleRad(bboxes_rot - rot)
features.labels.bboxes_3d = tf.concat([bboxes_xyz, bboxes_dims, bboxes_rot],
axis=-1)
if p.include_world_rot_z:
features.world_rot_z = rot
return features
def TransformShapes(self, shapes):
if self.params.include_world_rot_z:
shapes.world_rot_z = tf.TensorShape([])
return shapes
def TransformDTypes(self, dtypes):
if self.params.include_world_rot_z:
dtypes.world_rot_z = tf.float32
return dtypes
class DropPointsOutOfFrustum(Preprocessor):
"""Drops points outside of pre-defined theta / phi ranges.
  Note that keep_phi_range may contain negative values; this is because phi
  wraps around 2*pi. Thus, a valid range that filters the 90 deg frontal field
  of view of the car can be specified as [-pi/4, pi/4].
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
Modifies the following features:
- lasers.points_xyz removing any points out of frustum.
- lasers.points_feature removing any points out of frustum.
Note: We expect a downstream processor that filters out boxes with few points
to drop the corresponding bboxes.
"""
@classmethod
def Params(cls):
p = super().Params()
    p.Define('keep_theta_range', (0., np.pi),
             'Only points whose theta coordinates are within this range are '
             'kept.')
    p.Define('keep_phi_range', (0., 2. * np.pi),
             'Only points whose phi coordinates are within this range are '
             'kept.')
return p
def TransformFeatures(self, features):
p = self.params
if 'points_padding' in features.lasers:
raise ValueError('DropPointsOutOfFrustum preprocessor does not support '
'padded lasers.')
points_xyz = features.lasers.points_xyz
points_feature = features.lasers.points_feature
min_theta, max_theta = p.keep_theta_range
if (min_theta < 0. or min_theta > np.pi or max_theta < 0. or
max_theta > np.pi):
raise ValueError('Valid values for theta are between 0 and pi, '
'keep_theta_range={}'.format(p.keep_theta_range))
if min_theta > max_theta:
raise ValueError('min_theta must be <= max_theta, '
'keep_theta_range={}'.format(p.keep_theta_range))
min_phi, max_phi = p.keep_phi_range
if (min_phi < -2. * np.pi or min_phi > 2. * np.pi or
max_phi < -2. * np.pi or max_phi > 2. * np.pi):
      raise ValueError('Valid values for phi are between -2*pi and 2*pi, '
                       'keep_phi_range={}'.format(p.keep_phi_range))
if min_phi > max_phi:
raise ValueError('min_phi must be <= max_phi, '
'keep_phi_range={}'.format(p.keep_phi_range))
_, theta, phi = tf.unstack(
geometry.SphericalCoordinatesTransform(points_xyz), axis=-1)
    # phi is returned in the range [-pi, pi]; we shift values in [-pi, 0) into
    # [pi, 2pi) to make the logic below easier to follow.
    # Hence, all phi values after this will be in [0, 2pi].
phi = tf.where(phi >= 0., phi, 2. * np.pi + phi)
# Theta does not have circular boundary conditions, a simple check suffices.
points_mask = (theta >= min_theta) & (theta <= max_theta)
if min_phi < 0. and max_phi < 0.:
      # Both are less than zero; we just add 2*pi and will use the regular
      # check.
min_phi += 2. * np.pi
max_phi += 2. * np.pi
if min_phi < 0.:
# The minimum threshold is below 0, so we split into checking between
# (0 to min_phi) and (0 to max_phi). Note that min_phi is negative, but
# phi is always positive, so we take 2*pi + min_phi to get the range of
# appropriate values.
points_mask &= (phi >= (2. * np.pi + min_phi)) | (phi <= max_phi)
else:
      # Both must be non-negative if we reach this condition.
assert min_phi >= 0.
assert max_phi >= 0.
points_mask &= (phi >= min_phi) & (phi <= max_phi)
features.lasers.points_xyz = tf.boolean_mask(points_xyz, points_mask)
features.lasers.points_feature = tf.boolean_mask(points_feature,
points_mask)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
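# Example (sketch): keep the 90-degree frontal field of view described in the
# class docstring, i.e. phi in [-pi/4, pi/4], with theta unrestricted.
#
#   frustum_p = DropPointsOutOfFrustum.Params().Set(
#       keep_theta_range=(0., np.pi),
#       keep_phi_range=(-np.pi / 4, np.pi / 4))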
class DropBoxesOutOfRange(Preprocessor):
"""Drops boxes outside of pre-defined x/y/z ranges (boundaries inclusive).
This preprocessor expects features to contain the following keys:
- labels.bboxes_3d of shape [N, 7]
- labels.bboxes_3d_mask of shape [N]
Modifies the following features:
- labels.bboxes_3d_mask to mask out any additional boxes.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('keep_x_range', (-np.inf, np.inf),
'Only boxes that have x coordinates within this range are kept.')
p.Define('keep_y_range', (-np.inf, np.inf),
'Only boxes that have y coordinates within this range are kept.')
p.Define('keep_z_range', (-np.inf, np.inf),
'Only boxes that have z coordinates within this range are kept.')
return p
def TransformFeatures(self, features):
p = self.params
min_x, max_x = p.keep_x_range
min_y, max_y = p.keep_y_range
min_z, max_z = p.keep_z_range
# Short-circuit if all ranges are set to -inf, inf.
if (np.all(np.isneginf([min_x, min_y, min_z])) and
np.all(np.isposinf([max_x, max_y, max_z]))):
return features
# For each bounding box, compute whether any of its extrema
# fall outside of the range.
bboxes_3d_corners = geometry.BBoxCorners(
features.labels.bboxes_3d[tf.newaxis, ...])[0]
bboxes_3d_corners = py_utils.HasShape(bboxes_3d_corners, [-1, 8, 3])
min_bbox_x = tf.reduce_min(bboxes_3d_corners[:, :, 0], axis=-1)
max_bbox_x = tf.reduce_max(bboxes_3d_corners[:, :, 0], axis=-1)
min_bbox_y = tf.reduce_min(bboxes_3d_corners[:, :, 1], axis=-1)
max_bbox_y = tf.reduce_max(bboxes_3d_corners[:, :, 1], axis=-1)
min_bbox_z = tf.reduce_min(bboxes_3d_corners[:, :, 2], axis=-1)
max_bbox_z = tf.reduce_max(bboxes_3d_corners[:, :, 2], axis=-1)
mask = (
tf.math.logical_and(min_bbox_x >= min_x, max_bbox_x <= max_x)
& tf.math.logical_and(min_bbox_y >= min_y, max_bbox_y <= max_y)
& tf.math.logical_and(min_bbox_z >= min_z, max_bbox_z <= max_z))
max_num_boxes = py_utils.GetShape(features.labels.bboxes_3d_mask)
mask = py_utils.HasShape(mask, max_num_boxes)
features.labels.bboxes_3d_mask *= tf.cast(mask, tf.float32)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class PadLaserFeatures(Preprocessor):
"""Pads laser features so that the dimensions are fixed.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
and optionally points_padding of shape [P] corresponding to the padding.
  If points_padding is None, then all points are considered valid.
Modifies the following features:
lasers.points_xyz and lasers.points_feature to add padding.
Optionally also modifies lasers.points_label and lasers.points_bbox_id
if they exist to add padding.
Modifies/adds the following features:
labels.points_padding of shape [P] representing the padding.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('max_num_points', 128500,
'Max number of points to pad the points to.')
return p
def TransformFeatures(self, features):
p = self.params
if 'points_padding' in features.lasers:
points_mask = 1 - features.lasers.points_padding
points_mask = tf.cast(points_mask, tf.bool)
features.lasers = features.lasers.Transform(
_GetApplyPointMaskFn(points_mask))
    npoints = tf.shape(features.lasers.points_xyz)[0]
    # Temporarily store a validity mask (1 = valid) in points_padding so that
    # PadOrTrimTo below pads with zeros (invalid); it is inverted back to the
    # padding convention (1 = padded) at the end of this function.
    features.lasers.points_padding = tf.ones([npoints])
shuffled_idx = tf.range(npoints)
shuffled_idx = tf.random.shuffle(shuffled_idx, seed=p.random_seed)
def _PadOrTrimFn(points_tensor):
      # Shuffle before trimming so that we keep a random sample of points.
points_tensor = tf.gather(points_tensor, shuffled_idx)
return py_utils.PadOrTrimTo(points_tensor, [p.max_num_points] +
points_tensor.shape[1:].as_list())
features.lasers = features.lasers.Transform(_PadOrTrimFn)
features.lasers.points_padding = 1.0 - features.lasers.points_padding
return features
def TransformShapes(self, shapes):
p = self.params
def _TransformShape(points_shape):
return tf.TensorShape([p.max_num_points] + points_shape[1:].as_list())
shapes.lasers = shapes.lasers.Transform(_TransformShape)
shapes.lasers.points_padding = tf.TensorShape([p.max_num_points])
return shapes
def TransformDTypes(self, dtypes):
dtypes.lasers.points_padding = tf.float32
return dtypes
class WorldScaling(Preprocessor):
"""Scale the world randomly as a form of data augmentation.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- labels.bboxes_3d of shape [L, 7]
Modifies the following features:
lasers.points_xyz, labels.bboxes_3d with the same scaling applied to both.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('scaling', None, 'The scaling range.')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.scaling is None:
raise ValueError('scaling needs to be specified, instead of None.')
if len(p.scaling) != 2:
raise ValueError('scaling needs to be a list of two elements.')
def TransformFeatures(self, features):
p = self.params
scaling = tf.random.uniform((),
minval=p.scaling[0],
maxval=p.scaling[1],
seed=p.random_seed,
dtype=features.lasers.points_xyz.dtype)
# Scale points [num_points, 3].
features.lasers.points_xyz *= scaling
# Scaling bboxes (location and dimensions).
bboxes_xyz = features.labels.bboxes_3d[..., :3] * scaling
bboxes_dims = features.labels.bboxes_3d[..., 3:6] * scaling
bboxes_rot = features.labels.bboxes_3d[..., 6:]
features.labels.bboxes_3d = tf.concat([bboxes_xyz, bboxes_dims, bboxes_rot],
axis=-1)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class RandomDropLaserPoints(Preprocessor):
"""Randomly dropout laser points and the corresponding features.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
Modifies the following features:
lasers.points_xyz, lasers.points_feature.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('keep_prob', 0.95, 'Probability for keeping points.')
return p
def TransformFeatures(self, features):
p = self.params
num_points, _ = py_utils.GetShape(features.lasers.points_xyz)
pts_keep_sample_prob = tf.random.uniform([num_points],
minval=0,
maxval=1,
seed=p.random_seed)
pts_keep_mask = pts_keep_sample_prob < p.keep_prob
if 'points_padding' in features.lasers:
# Update points_padding so that where pts_keep_mask is True,
# points_padding remains 0.
points_mask = 1 - features.lasers.points_padding
points_mask *= tf.cast(pts_keep_mask, tf.float32)
features.lasers.points_padding = 1 - points_mask
else:
features.lasers.points_xyz = tf.boolean_mask(features.lasers.points_xyz,
pts_keep_mask)
features.lasers.points_feature = tf.boolean_mask(
features.lasers.points_feature, pts_keep_mask)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class RandomFlipY(Preprocessor):
"""Flip the world along axis Y as a form of data augmentation.
When there are leading dimensions, this will flip the boxes with the same
transformation across all the frames. This is useful when the input is a
sequence of frames from the same run segment.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [..., 3]
- labels.bboxes_3d of shape [..., 7]
Modifies the following features:
lasers.points_xyz, labels.bboxes_3d with the same flipping applied to both.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('flip_probability', 0.5, 'Probability of flipping.')
return p
def TransformFeatures(self, features):
p = self.params
threshold = 1. - p.flip_probability
choice = tf.random.uniform(
(), minval=0.0, maxval=1.0, seed=p.random_seed) >= threshold
# Flip points
points_xyz = features.lasers.points_xyz
points_y = tf.where(choice, -points_xyz[..., 1:2], points_xyz[..., 1:2])
features.lasers.points_xyz = tf.concat(
[points_xyz[..., 0:1], points_y, points_xyz[..., 2:3]], axis=-1)
# Flip boxes
bboxes_xyz = features.labels.bboxes_3d[..., :3]
bboxes_y = tf.where(choice, -bboxes_xyz[..., 1:2], bboxes_xyz[..., 1:2])
bboxes_xyz = tf.concat(
[bboxes_xyz[..., 0:1], bboxes_y, bboxes_xyz[..., 2:3]], axis=-1)
# Compensate rotation.
bboxes_dims = features.labels.bboxes_3d[..., 3:6]
bboxes_rot = features.labels.bboxes_3d[..., 6:]
bboxes_rot = tf.where(choice, geometry.WrapAngleRad(-bboxes_rot),
bboxes_rot)
features.labels.bboxes_3d = tf.concat([bboxes_xyz, bboxes_dims, bboxes_rot],
axis=-1)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class GlobalTranslateNoise(Preprocessor):
"""Add global translation noise of xyz coordinates to points and boxes.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- labels.bboxes_3d of shape [L, 7]
Modifies the following features:
lasers.points_xyz, labels.bboxes_3d with the same
random translation noise applied to both.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('noise_std', [0.2, 0.2, 0.2],
'Standard deviation of translation noise per axis.')
return p
def TransformFeatures(self, features):
p = self.params
    # Derive three distinct seeds from the same base seed so that the three
    # noise values are different.
base_seed = p.random_seed
x_seed = base_seed
y_seed = None if base_seed is None else base_seed + 1
z_seed = None if base_seed is None else base_seed + 2
random_translate_x = tf.random.normal((),
mean=0.0,
stddev=p.noise_std[0],
seed=x_seed)
random_translate_y = tf.random.normal((),
mean=0.0,
stddev=p.noise_std[1],
seed=y_seed)
random_translate_z = tf.random.normal((),
mean=0.0,
stddev=p.noise_std[2],
seed=z_seed)
pose = tf.stack([
random_translate_x, random_translate_y, random_translate_z, 0.0, 0.0,
0.0
],
axis=0)
# Translate points.
points_xyz = features.lasers.points_xyz
features.lasers.points_xyz = geometry.CoordinateTransform(points_xyz, pose)
# Translate boxes
bboxes_xyz = features.labels.bboxes_3d[..., :3]
bboxes_xyz = geometry.CoordinateTransform(bboxes_xyz, pose)
features.labels.bboxes_3d = tf.concat(
[bboxes_xyz, features.labels.bboxes_3d[..., 3:]], axis=-1)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class RandomBBoxTransform(Preprocessor):
"""Randomly transform bounding boxes and the points inside them.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
- lasers.points_padding of shape [P]
- labels.bboxes_3d of shape [L, 7]
- labels.bboxes_3d_mask of shape [L]
Modifies the following features:
lasers.points_{xyz,feature,padding}, labels.bboxes_3d with the
transformed bounding boxes and points.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'max_rotation', None,
'The rotation amount will be randomly picked from '
'[-max_rotation, max_rotation).')
# At the moment we don't use this because it can cause boxes to collide with
# each other. We need to compute box intersections when deciding whether to
# apply the translation jitter. Theoretically we should also do this for
# rotation.
p.Define('noise_std', [0.0, 0.0, 0.0],
'Standard deviation of translation noise per axis.')
p.Define(
'max_scaling', None,
'An optional float list of length 3. When max_scaling is not none, '
'delta parameters s_x, s_y, s_z are drawn from '
'[-max_scaling[i], max_scaling[i]] where i is in [0, 2].')
p.Define(
'max_shearing', None,
'An optional float list of length 6. When max_shearing is not none, '
        'shearing parameters sh_x^y, sh_x^z, sh_y^x, sh_y^z, sh_z^x, sh_z^y '
        'are drawn from [-max_shearing[i], max_shearing[i]], where i is in '
        '[0, 5].')
p.Define(
'max_num_points_per_bbox', 16384,
'The maximum number of points that fall within a bounding box. '
'Bounding boxes with more points than this value will '
        'have some points dropped.')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.max_rotation is None:
raise ValueError('max_rotation needs to be specified, instead of None.')
if p.max_scaling is not None:
if len(p.max_scaling) != 3:
raise ValueError('max_scaling needs to be specified as either None or '
'list of 3 floating point numbers, instead of {}.'
''.format(p.max_scaling))
if p.max_shearing is not None:
if len(p.max_shearing) != 6:
raise ValueError('max_shearing needs to be specified as either None or '
'list of 6 floating point numbers, instead of {}.'
''.format(p.max_shearing))
def _Foreground(self, features, points_xyz, points_feature, real_bboxes_3d,
points_in_bbox_mask, rotation, translate_pose, transform_fn):
"""Extract and transform foreground points and features."""
out_bbox_xyz, out_bbox_feature, out_bbox_mask = self._ForLoopBuffers(
features)
# Only iterate over the actual number of boxes in the scene.
actual_num_bboxes = tf.reduce_sum(
tf.cast(features.labels.bboxes_3d_mask, tf.int32))
ret = py_utils.ForLoop(
body=transform_fn,
start=0,
limit=actual_num_bboxes,
delta=1,
loop_state=py_utils.NestedMap(
points_xyz=points_xyz,
points_feature=points_feature,
bboxes_3d=real_bboxes_3d,
points_in_bbox_mask=points_in_bbox_mask,
rotation=rotation,
translate_pose=translate_pose,
out_bbox_points=out_bbox_xyz,
out_bbox_feature=out_bbox_feature,
out_bbox_mask=out_bbox_mask))
# Gather all of the transformed points and features
out_bbox_xyz = tf.reshape(ret.out_bbox_points, [-1, 3])
num_features = features.lasers.points_feature.shape[-1]
out_bbox_feature = tf.reshape(ret.out_bbox_feature, [-1, num_features])
out_bbox_mask = tf.cast(tf.reshape(ret.out_bbox_mask, [-1]), tf.bool)
fg_xyz = tf.boolean_mask(out_bbox_xyz, out_bbox_mask)
fg_feature = tf.boolean_mask(out_bbox_feature, out_bbox_mask)
return fg_xyz, fg_feature
def _Background(self, points_xyz, points_feature, points_in_bbox_mask):
# If a point is in any bounding box, it is a foreground point.
foreground_points_mask = tf.reduce_any(points_in_bbox_mask, axis=-1)
# All others are background. We rotate all of the foreground points to
    # final_points_* and keep the background points unchanged.
background_points_mask = tf.math.logical_not(foreground_points_mask)
background_points_xyz = tf.boolean_mask(points_xyz, background_points_mask)
background_points_feature = tf.boolean_mask(points_feature,
background_points_mask)
return background_points_xyz, background_points_feature
def _ForLoopBuffers(self, features):
"""Create and return the buffers for the for loop."""
p = self.params
bboxes_3d = features.labels.bboxes_3d
# Compute the shapes and create the buffers for the For loop.
max_num_bboxes = tf.shape(bboxes_3d)[0]
per_box_shape = [max_num_bboxes, p.max_num_points_per_bbox, 3]
out_bbox_points = inplace_ops.empty(
per_box_shape, dtype=tf.float32, init=True)
num_features = features.lasers.points_feature.shape[-1]
bbox_feature_shape = [
max_num_bboxes, p.max_num_points_per_bbox, num_features
]
out_bbox_feature = inplace_ops.empty(
bbox_feature_shape, dtype=tf.float32, init=True)
per_box_mask_shape = [max_num_bboxes, p.max_num_points_per_bbox]
out_bbox_mask = inplace_ops.empty(
per_box_mask_shape, dtype=tf.float32, init=True)
return out_bbox_points, out_bbox_feature, out_bbox_mask
def TransformFeatures(self, features):
p = self.params
num_features = features.lasers.points_feature.shape[-1]
def Transform(i, state):
"""Transform the points in bounding box `i`."""
state.points_xyz = tf.reshape(state.points_xyz, [-1, 3])
bbox_mask = tf.reshape(state.points_in_bbox_mask[:, i], [-1])
# Fetch only the points in the bounding box.
points_xyz_masked = tf.boolean_mask(state.points_xyz, bbox_mask)
points_feature_masked = tf.boolean_mask(state.points_feature, bbox_mask)
num_points = tf.shape(points_xyz_masked)[0]
# TODO(vrv): Fold the following into a single transformation
# matrix.
#
# Translate the box to the origin, then rotate the desired
# rotation angle.
translation_vec = state.bboxes_3d[i, 0:3]
rotation_vec = [state.rotation[i], 0., 0.]
pose = tf.concat([-translation_vec, rotation_vec], axis=0)
points_xyz_adj = geometry.CoordinateTransform(points_xyz_masked, pose)
if p.max_scaling is not None or p.max_shearing is not None:
        # Shift the points in the bounding box up by dz/2 so that the bottom
        # of the box sits at z = 0 whenever either max_scaling or max_shearing
        # is set.
translation_scale_or_shear = tf.stack(
[0., 0., state.bboxes_3d[i, 5] / 2], axis=0)
pose1 = tf.concat([translation_scale_or_shear, [0., 0., 0.]], axis=0)
points_xyz_adj = geometry.CoordinateTransform(points_xyz_adj, pose1)
else:
translation_scale_or_shear = tf.stack([0., 0., 0.], axis=0)
if p.max_scaling is not None:
# Perform scaling to the point cloud
# Scaling matrix
# [[s_x+1 0 0]
# [ 0 s_y+1 0]
# [ 0 0 s_z+1]]
sx = tf.random.uniform([],
minval=-p.max_scaling[0],
maxval=p.max_scaling[0],
seed=p.random_seed)
sy = tf.random.uniform([],
minval=-p.max_scaling[1],
maxval=p.max_scaling[1],
seed=p.random_seed)
sz = tf.random.uniform([],
minval=-p.max_scaling[2],
maxval=p.max_scaling[2],
seed=p.random_seed)
scaling_matrix = tf.stack(
[[sx + 1., 0., 0.], [0., sy + 1., 0.], [0., 0., sz + 1.]], axis=0)
points_xyz_adj = tf.einsum('ij,kj->ki', scaling_matrix, points_xyz_adj)
if p.max_shearing is not None:
# Perform shearing to the point cloud
# Shearing matrix
# [[1 sh_x^y sh_x^z]
# [sh_y^x 1 sh_y^z]
# [sh_z^x sh_z^y 1 ]]
sxy = tf.random.uniform([],
minval=-p.max_shearing[0],
maxval=p.max_shearing[0],
seed=p.random_seed)
sxz = tf.random.uniform([],
minval=-p.max_shearing[1],
maxval=p.max_shearing[1],
seed=p.random_seed)
syx = tf.random.uniform([],
minval=-p.max_shearing[2],
maxval=p.max_shearing[2],
seed=p.random_seed)
syz = tf.random.uniform([],
minval=-p.max_shearing[3],
maxval=p.max_shearing[3],
seed=p.random_seed)
szx = tf.random.uniform([],
minval=-p.max_shearing[4],
maxval=p.max_shearing[4],
seed=p.random_seed)
szy = tf.random.uniform([],
minval=-p.max_shearing[5],
maxval=p.max_shearing[5],
seed=p.random_seed)
shearing_matrix = tf.stack(
[[1., sxy, sxz], [syx, 1., syz], [szx, szy, 1.]], axis=0)
points_xyz_adj = tf.einsum('ij,kj->ki', shearing_matrix, points_xyz_adj)
# Translate the points back, adding noise if needed.
translation_with_noise = (
translation_vec - translation_scale_or_shear +
state.translate_pose[i])
pose2 = tf.concat([translation_with_noise, [0., 0., 0.]], axis=0)
final_points_xyz = geometry.CoordinateTransform(points_xyz_adj, pose2)
# final_points_xyz is an [M, 3] Tensor where M is the number of points in
# the box.
points_mask = tf.ones([num_points], dtype=tf.float32)
final_points_xyz = py_utils.PadOrTrimTo(final_points_xyz,
[p.max_num_points_per_bbox, 3])
final_points_feature = py_utils.PadOrTrimTo(
points_feature_masked, [p.max_num_points_per_bbox, num_features])
points_mask = py_utils.PadOrTrimTo(points_mask,
[p.max_num_points_per_bbox])
state.out_bbox_points = inplace_ops.alias_inplace_update(
state.out_bbox_points, [i], tf.expand_dims(final_points_xyz, 0))
state.out_bbox_feature = inplace_ops.alias_inplace_update(
state.out_bbox_feature, [i], tf.expand_dims(final_points_feature, 0))
state.out_bbox_mask = inplace_ops.alias_inplace_update(
state.out_bbox_mask, [i], tf.expand_dims(points_mask, 0))
return state
# Get the points and features that reside in boxes.
if 'points_padding' in features.lasers:
points_mask = 1 - features.lasers.points_padding
points_xyz = tf.boolean_mask(features.lasers.points_xyz, points_mask)
points_feature = tf.boolean_mask(features.lasers.points_feature,
points_mask)
else:
points_xyz = features.lasers.points_xyz
points_feature = features.lasers.points_feature
# Fetch real bounding boxes and compute point mask.
real_bboxes_3d = tf.boolean_mask(features.labels.bboxes_3d,
features.labels.bboxes_3d_mask)
points_in_bbox_mask = geometry.IsWithinBBox3D(points_xyz, real_bboxes_3d)
# Choose a random rotation for every real box.
num_boxes = tf.shape(real_bboxes_3d)[0]
rotation = tf.random.uniform([num_boxes],
minval=-p.max_rotation,
maxval=p.max_rotation,
seed=p.random_seed)
base_seed = p.random_seed
x_seed = base_seed
y_seed = None if base_seed is None else base_seed + 1
z_seed = None if base_seed is None else base_seed + 2
random_translate_x = tf.random.normal([num_boxes],
mean=0.0,
stddev=p.noise_std[0],
seed=x_seed)
random_translate_y = tf.random.normal([num_boxes],
mean=0.0,
stddev=p.noise_std[1],
seed=y_seed)
random_translate_z = tf.random.normal([num_boxes],
mean=0.0,
stddev=p.noise_std[2],
seed=z_seed)
translate_pose = tf.stack(
[random_translate_x, random_translate_y, random_translate_z], axis=1)
fg_xyz, fg_feature = self._Foreground(features, points_xyz, points_feature,
real_bboxes_3d, points_in_bbox_mask,
rotation, translate_pose, Transform)
# Concatenate them with the background points and features.
bg_xyz, bg_feature = self._Background(points_xyz, points_feature,
points_in_bbox_mask)
all_points = tf.concat([bg_xyz, fg_xyz], axis=0)
all_features = tf.concat([bg_feature, fg_feature], axis=0)
# Shuffle the points/features randomly.
all_points, all_features = _ConsistentShuffle((all_points, all_features),
p.random_seed)
# Padding should technically be unnecessary: the number of points before and
# after should be the same, but in practice we sometimes seem to drop a few
# points, and so we pad to make the shape fixed.
#
# TODO(vrv): Identify the source of this problem and then assert a shape
# matching check.
if 'points_padding' in features.lasers:
features.lasers.points_xyz = py_utils.PadOrTrimTo(
all_points, tf.shape(features.lasers.points_xyz))
features.lasers.points_feature = py_utils.PadOrTrimTo(
all_features, tf.shape(features.lasers.points_feature))
total_points = tf.shape(all_points)[0]
features.lasers.points_padding = 1.0 - py_utils.PadOrTrimTo(
tf.ones([total_points]), tf.shape(features.lasers.points_padding))
else:
features.lasers.points_xyz = all_points
features.lasers.points_feature = all_features
# Translate noise.
bboxes_xyz = real_bboxes_3d[..., :3]
bboxes_xyz += translate_pose[..., :3]
bboxes_dim = real_bboxes_3d[..., 3:6]
# Rotate bboxes by their corresponding rotation.
bboxes_rot = real_bboxes_3d[..., 6:]
bboxes_rot -= rotation[:, tf.newaxis]
features.labels.bboxes_3d = py_utils.PadOrTrimTo(
tf.concat([bboxes_xyz, bboxes_dim, bboxes_rot], axis=-1),
tf.shape(features.labels.bboxes_3d))
features.labels.bboxes_3d_mask = py_utils.PadOrTrimTo(
tf.ones(tf.shape(real_bboxes_3d)[0]),
tf.shape(features.labels.bboxes_3d_mask))
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class GroundTruthAugmentor(Preprocessor):
"""Augment bounding box labels and points from a database.
This preprocessor expects features to contain the following keys:
lasers.points_xyz of shape [P, 3]
lasers.points_feature of shape [P, F]
lasers.points_padding of shape [P]
labels.bboxes_3d of shape [L, 7]
labels.bboxes_3d_mask of shape [L]
labels.labels of shape [L]
Modifies the above features so that additional objects from
a groundtruth database are added.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'groundtruth_database', None,
'If not None, loads groundtruths from this database and adds '
'them to the current scene. Groundtruth database is expected '
'to be a TFRecord of KITTI or Waymo crops.')
p.Define(
'num_db_objects', None,
'Number of objects in the database. Because we use TFRecord '
'we cannot easily query the number of objects efficiently.')
p.Define('max_num_points_per_bbox', 2048,
'Maximum number of points in each bbox to augment with.')
p.Define(
'filter_min_points', 0,
'Minimum number of points each database object must have '
'to be included in an example.')
p.Define(
'filter_max_points', None,
'Maximum number of points each database object must have '
'to be included in an example.')
p.Define(
'difficulty_sampling_probability', None,
'Probability for sampling ground truth example whose difficulty '
'equals {0, 1, 2, 3, ...}. Example: [1.0, 1.0, 1.0, 1.0] for '
'uniformly sampling 4 different difficulties. Default value is '
'None = uniform sampling for all difficulties.')
p.Define(
'class_sampling_probability', None,
'Probability for sampling ground truth example based on its class index'
' Example: For KITTI classes are [Background, Car, Van, Truck, '
'Pedestrian, Person_sitting, Cyclist, Tram, Misc, DontCare], using '
'probability vector [0., 1.0, 1.0, 0., 0., 0., 0.,0., 0., 0.], we '
'uniformly sample Car and Van. Default value is None: Uses '
'label_filter flag and does not sample based on class.')
p.Define('filter_min_difficulty', 0,
'Filter ground truth boxes whose difficulty is < this value.')
p.Define('max_augmented_bboxes', 15,
'Maximum number of augmented bounding boxes per scene.')
p.Define(
'label_filter', [],
'A list where if specified, only examples of these label integers will '
'be included in an example.')
p.Define(
'batch_mode', False, 'Bool value to control whether the whole '
'groundtruth database is loaded or partially loaded to save memory '
'usage. Setting to False loads the whole ground truth database into '
'memory. Otherwise, only a fraction of the data will be loaded into '
'the memory.')
return p
def _ReadDB(self, file_patterns):
"""Read the groundtruth database and return as a NestedMap of Tensors."""
p = self.params
def Process(record):
"""Process a groundtruth record."""
feature_map = {
'num_points': tf.io.FixedLenFeature((), tf.int64, 0),
'points': tf.io.VarLenFeature(dtype=tf.float32),
'points_feature': tf.io.VarLenFeature(dtype=tf.float32),
'bbox_3d': tf.io.VarLenFeature(dtype=tf.float32),
'label': tf.io.FixedLenFeature((), tf.int64, 0),
'difficulty': tf.io.FixedLenFeature((), tf.int64, 0),
'text': tf.io.VarLenFeature(dtype=tf.string),
}
example_data = tf.io.parse_single_example(record, feature_map)
num_points = example_data['num_points']
points = tf.reshape(_Dense(example_data['points']), [num_points, 3])
features = tf.reshape(
_Dense(example_data['points_feature']), [num_points, 1])
points_mask = tf.ones(num_points, dtype=tf.bool)
# TODO(vrv): Use random selection instead of first N points.
points = py_utils.PadOrTrimTo(points, [p.max_num_points_per_bbox, 3])
features = py_utils.PadOrTrimTo(features, [p.max_num_points_per_bbox, 1])
points_mask = py_utils.PadOrTrimTo(points_mask,
[p.max_num_points_per_bbox])
bboxes_3d = tf.reshape(_Dense(example_data['bbox_3d']), [7])
label = tf.cast(example_data['label'], tf.int32)
difficulty = tf.cast(example_data['difficulty'], tf.int32)
return (points, features, points_mask, bboxes_3d, label, difficulty)
if p.batch_mode:
# Prepare dataset for ground truth bounding boxes. Randomly shuffle the
# file patterns.
file_count = len(tf.io.gfile.glob(file_patterns))
dataset = tf.stateless_list_files(file_patterns)
dataset = dataset.apply(tf.stateless_cache_dataset())
dataset = dataset.apply(
tf.stateless_shuffle_dataset(
buffer_size=file_count, reshuffle_each_iteration=True))
dataset = dataset.interleave(
tf.data.TFRecordDataset, cycle_length=10, num_parallel_calls=10)
dataset = dataset.repeat()
# Only prefetch a few objects from the database to reduce memory
# consumption.
dataset = dataset.map(Process, num_parallel_calls=10)
# We need more bboxes than max_augmented_bboxes in a batch, because some
# of the boxes are filtered out.
dataset = dataset.batch(p.max_augmented_bboxes * 10)
dataset = dataset.apply(tf.stateless_cache_dataset()).prefetch(
p.max_augmented_bboxes * 30)
else:
# Prepare dataset for ground truth bounding boxes.
dataset = tf.stateless_list_files(file_patterns)
dataset = dataset.interleave(
tf.data.TFRecordDataset, cycle_length=10, num_parallel_calls=10)
# Read the entire dataset into memory.
dataset = dataset.take(p.num_db_objects)
dataset = dataset.map(Process, num_parallel_calls=10)
# We batch the output of the dataset into a very large Tensor, then cache
# it in memory.
dataset = dataset.batch(p.num_db_objects)
dataset = dataset.apply(tf.stateless_cache_dataset()).repeat()
iterator = dataset.make_one_shot_iterator()
input_batch = iterator.get_next()
(db_points_xyz, db_points_feature, db_points_mask, db_bboxes, db_labels,
db_difficulties) = input_batch
return py_utils.NestedMap(
points_xyz=db_points_xyz,
points_feature=db_points_feature,
points_mask=db_points_mask,
bboxes_3d=db_bboxes,
labels=db_labels,
difficulties=db_difficulties)
def _CreateExampleFilter(self, db):
"""Construct db example filter.
Args:
db: NestedMap of the following Tensors: points_mask - [N, P] - The points
mask for every object in the database, where N is the number of objects
and P is the maximum number of points per object. labels - [N] - int32
Label for each object in the database. difficulties - [N] - int32
Difficulty for each label in the database.
Returns:
A [N] boolean Tensor for each object in the database, True if
that corresponding object passes the filter.
"""
p = self.params
db_points_mask = db.points_mask
db_label = db.labels
db_difficulty = db.difficulties
num_objects_in_database = tf.shape(db_points_mask)[0]
# Filter number of objects.
points_per_object = tf.reduce_sum(tf.cast(db_points_mask, tf.int32), axis=1)
example_filter = points_per_object >= p.filter_min_points
if p.filter_max_points:
example_filter = tf.math.logical_and(
example_filter, points_per_object <= p.filter_max_points)
if p.difficulty_sampling_probability is not None:
# Sample db based on difficulty of each example.
sampling_prob = p.difficulty_sampling_probability
db_difficulty_probability = tf.zeros_like(db_difficulty, dtype=tf.float32)
for difficulty_idx, difficulty_prob in enumerate(sampling_prob):
db_difficulty_probability += (
tf.cast(tf.equal(db_difficulty, difficulty_idx), tf.float32) *
difficulty_prob)
sampled_filter = tf.random.uniform(
tf.shape(example_filter),
minval=0,
maxval=1,
dtype=tf.float32,
seed=p.random_seed)
sampled_filter = sampled_filter < db_difficulty_probability
example_filter &= sampled_filter
else:
# Filter out db examples below min difficulty
example_filter = tf.math.logical_and(
example_filter, db_difficulty >= p.filter_min_difficulty)
example_filter = tf.reshape(example_filter, [num_objects_in_database])
db_label = tf.reshape(db_label, [num_objects_in_database])
if p.class_sampling_probability is not None:
# Sample example based on its class probability.
sampling_prob = p.class_sampling_probability
db_class_probability = tf.zeros_like(db_label, dtype=tf.float32)
for class_idx, class_prob in enumerate(sampling_prob):
db_class_probability += (
tf.cast(tf.equal(db_label, class_idx), tf.float32) * class_prob)
sampled_filter = tf.random.uniform(
tf.shape(example_filter),
minval=0,
maxval=1,
dtype=tf.float32,
seed=p.random_seed)
sampled_filter = sampled_filter < db_class_probability
example_filter &= sampled_filter
elif p.label_filter:
# Filter based on labels.
# Create a label filter where all is false
valid_labels = tf.constant(p.label_filter)
label_mask = tf.reduce_any(
tf.equal(db_label[..., tf.newaxis], valid_labels), axis=1)
example_filter = tf.math.logical_and(example_filter, label_mask)
return example_filter
# TODO(vrv): Create an overlap filter that also ensures that boxes don't
# overlap with groundtruth points, so that the scenes are more plausible.
def _FilterIndices(self, gt_bboxes_3d, db_bboxes, db_idx):
"""Identify database boxes that don't overlap with other boxes."""
# We accomplish overlap filtering by first computing the pairwise 3D IoU of
# all boxes (concatenated) as a way of computing pairwise box overlaps.
num_gt_bboxes = tf.shape(gt_bboxes_3d)[0]
filtered_bboxes = tf.gather(db_bboxes, db_idx)
all_bboxes = tf.concat([gt_bboxes_3d, filtered_bboxes], axis=0)
pairwise_overlap = ops.pairwise_iou3d(all_bboxes, all_bboxes)
# We now have an M x M matrix with 1s on the diagonal and non-zero entries
# whenever a box collides with another.
#
# To increase the number of boxes selected, we filter the upper triangular
# entries so that the boxes are chosen greedily: boxes with smaller indices
# will be selected before later boxes, because earlier boxes will not appear
# to collide with later boxes, but later boxes may collide with earlier
# ones.
pairwise_overlap = tf.linalg.band_part(pairwise_overlap, -1, 0)
# We compute the sum of the IoU overlaps for all database boxes.
db_overlap_sums = tf.reduce_sum(pairwise_overlap[num_gt_bboxes:], axis=1)
# Those boxes that don't overlap with any other boxes will only have
# a 1.0 IoU with itself.
non_overlapping_boxes = tf.reshape(db_overlap_sums <= 1., [-1])
# Filter to select only those object ids that pass this filter.
db_idx = tf.boolean_mask(db_idx, non_overlapping_boxes)
return db_idx
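# Worked sketch of the greedy filter above, with made-up IoU values for
# boxes [gt, db1, db2]:
#
#   pairwise_overlap = [[1.0, 0.0, 0.3],
#                       [0.0, 1.0, 0.2],
#                       [0.3, 0.2, 1.0]]
#
# After tf.linalg.band_part(., -1, 0) the db row sums are [1.0, 1.5]:
# db1 overlaps nothing earlier and is kept; db2 overlaps both earlier boxes
# and is dropped by the `db_overlap_sums <= 1.` test.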
def TransformFeatures(self, features):
p = self.params
tf.logging.info('Loading groundtruth database at %s' %
(p.groundtruth_database))
db = self._ReadDB(p.groundtruth_database)
original_features_shape = tf.shape(features.lasers.points_feature)
# Compute the number of bboxes to augment.
num_bboxes_in_scene = tf.reduce_sum(
tf.cast(features.labels.bboxes_3d_mask, tf.int32))
max_bboxes = tf.shape(features.labels.bboxes_3d_mask)[0]
num_augmented_bboxes = tf.minimum(max_bboxes - num_bboxes_in_scene,
p.max_augmented_bboxes)
# Compute an object index over all objects in the database.
num_objects_in_database = tf.shape(db.points_xyz)[0]
db_idx = tf.range(num_objects_in_database)
# Find those indices whose examples pass the filters, and select only those
# indices.
example_filter = self._CreateExampleFilter(db)
db_idx = tf.boolean_mask(db_idx, example_filter)
# At this point, we might still have a large number of object candidates,
# from which we only need a sample.
# To reduce the amount of computation, we randomly subsample to slightly
# more than we want to augment.
db_idx = tf.random.shuffle(
db_idx, seed=p.random_seed)[0:num_augmented_bboxes * 5]
# After filtering, further filter out the db boxes that would occlude with
# other boxes (including other database boxes).
#
# Gather the filtered ground truth bounding boxes according to the mask, so
# we can compute overlaps below.
gt_bboxes_3d_mask = tf.cast(features.labels.bboxes_3d_mask, tf.bool)
gt_bboxes_3d = tf.boolean_mask(features.labels.bboxes_3d, gt_bboxes_3d_mask)
gt_bboxes_3d = py_utils.HasShape(gt_bboxes_3d, [num_bboxes_in_scene, 7])
db_idx = self._FilterIndices(gt_bboxes_3d, db.bboxes_3d, db_idx)
# From the filtered object ids, select only as many boxes as we need.
shuffled_idx = db_idx[0:num_augmented_bboxes]
num_augmented_bboxes = tf.shape(shuffled_idx)[0]
# Gather based off the indices.
sampled_points_xyz = tf.gather(db.points_xyz, shuffled_idx)
sampled_points_feature = tf.gather(db.points_feature, shuffled_idx)
sampled_mask = tf.reshape(
tf.gather(db.points_mask, shuffled_idx),
[num_augmented_bboxes, p.max_num_points_per_bbox])
sampled_bboxes = tf.gather(db.bboxes_3d, shuffled_idx)
sampled_labels = tf.gather(db.labels, shuffled_idx)
# Mask points/features.
sampled_points_xyz = tf.boolean_mask(sampled_points_xyz, sampled_mask)
sampled_points_feature = tf.boolean_mask(sampled_points_feature,
sampled_mask)
# Flatten before concatenation with ground truths.
sampled_points_xyz = tf.reshape(sampled_points_xyz, [-1, 3])
sampled_points_feature = tf.reshape(sampled_points_feature,
[-1, original_features_shape[-1]])
sampled_bboxes = tf.reshape(sampled_bboxes, [-1, 7])
# Concatenate the samples with the ground truths.
if 'points_padding' in features.lasers:
points_mask = tf.cast(1. - features.lasers.points_padding, tf.bool)
# Densify the original points.
dense_points_xyz = tf.boolean_mask(features.lasers.points_xyz,
points_mask)
dense_points_feature = tf.boolean_mask(features.lasers.points_feature,
points_mask)
# Concatenate the dense original points with our new sampled points.
points_xyz = tf.concat([dense_points_xyz, sampled_points_xyz], axis=0)
points_feature = tf.concat([dense_points_feature, sampled_points_feature],
axis=0)
original_points_shape = tf.shape(features.lasers.points_xyz)
features.lasers.points_xyz = py_utils.PadOrTrimTo(points_xyz,
original_points_shape)
features.lasers.points_feature = py_utils.PadOrTrimTo(
points_feature, original_features_shape)
# Compute the modified mask / padding.
final_points_mask = py_utils.PadOrTrimTo(
tf.ones(tf.shape(points_xyz)[0]),
tf.shape(features.lasers.points_padding))
features.lasers.points_padding = 1. - final_points_mask
else:
points_xyz = tf.concat([features.lasers.points_xyz, sampled_points_xyz],
axis=0)
points_feature = tf.concat(
[features.lasers.points_feature, sampled_points_feature], axis=0)
features.lasers.points_xyz = points_xyz
features.lasers.points_feature = points_feature
# Reconstruct a new, dense, bboxes_3d vector that includes the filtered
# groundtruth bounding boxes followed by the database augmented boxes.
bboxes_3d = tf.concat([gt_bboxes_3d, sampled_bboxes], axis=0)
bboxes_3d = py_utils.PadOrTrimTo(bboxes_3d, [max_bboxes, 7])
features.labels.bboxes_3d = bboxes_3d
bboxes_3d_mask = tf.ones(
num_bboxes_in_scene + num_augmented_bboxes, dtype=tf.float32)
features.labels.bboxes_3d_mask = py_utils.PadOrTrimTo(
bboxes_3d_mask, [max_bboxes])
gt_labels = tf.boolean_mask(features.labels.labels, gt_bboxes_3d_mask)
gt_labels = py_utils.HasShape(gt_labels, [num_bboxes_in_scene])
labels = tf.concat([gt_labels, sampled_labels], axis=0)
features.labels.labels = py_utils.PadOrTrimTo(labels, [max_bboxes])
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class FrustumDropout(Preprocessor):
"""Randomly drops out points in a frustum.
All points are first converted to spherical coordinates, and then a point
is randomly selected. All points in the frustum around that point within
a given phi, theta angle width and distance to the origin greater than
a given value are dropped with probability = 1 - keep_prob.
Here, we can specify whether the dropped frustum is the union or intersection
of the phi and theta angle filters.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
Optionally points_padding of shape [P] corresponding to the padding.
If points_padding is None, then all points are considered valid.
Modifies the following features:
lasers.points_xyz, lasers.points_feature, lasers.points_padding with points
randomly dropped out.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('theta_width', 0.03, 'Theta angle width for dropping points.')
p.Define('phi_width', 0.0, 'Phi angle width for dropping points.')
p.Define(
'distance', 0.0, 'Drop points that have a larger distance to the '
'origin than the value given here.')
p.Define(
'keep_prob', 0.0, 'keep_prob: 1. = drop no points in the frustum, '
'0 = drop all points, between 0 and 1 = down sample the points.')
p.Define(
'drop_type', 'union', 'Drop either the union or intersection of '
'phi width and theta width.')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.phi_width < 0:
raise ValueError('phi_width must be >= 0, phi_width={}'.format(
p.phi_width))
if p.theta_width < 0:
raise ValueError('theta_width must be >= 0, theta_width={}'.format(
p.theta_width))
if p.distance < 0:
raise ValueError('distance must be >= 0, distance={}'.format(p.distance))
if p.keep_prob < 0 or p.keep_prob > 1:
raise ValueError('keep_prob must be >= 0 and <=1, keep_prob={}'.format(
p.keep_prob))
if p.drop_type not in ['union', 'intersection']:
raise ValueError('drop_type must be union or intersection, '
'drop_type={}'.format(p.drop_type))
def TransformFeatures(self, features):
p = self.params
points_xyz = features.lasers.points_xyz
points_feature = features.lasers.points_feature
if 'points_padding' in features.lasers:
points_padding = features.lasers.points_padding
else:
points_padding = None
if points_padding is not None:
points_mask = tf.cast(1 - points_padding, tf.bool)
num_total_points = py_utils.GetShape(points_mask)[0]
real_points_idx = tf.boolean_mask(
tf.range(0, num_total_points, dtype=tf.int32), points_mask)
num_points = py_utils.GetShape(real_points_idx)[0]
else:
points_mask = tf.ones_like(points_xyz[:, 0], dtype=tf.bool)
num_total_points = py_utils.GetShape(points_mask)[0]
num_points = py_utils.GetShape(points_xyz)[0]
r, theta, phi = tf.unstack(
geometry.SphericalCoordinatesTransform(points_xyz), axis=-1)
def _PickRandomPoint():
point_idx = tf.random.uniform((),
minval=0,
maxval=num_points,
dtype=tf.int32)
if points_padding is not None:
point_idx = real_points_idx[point_idx]
return point_idx
# Pick a point at random and drop all points in the frustum around it whose
# distance to the origin exceeds p.distance; do this for theta and phi
# independently.
if p.theta_width > 0:
theta_half_width = p.theta_width / 2.
point_idx = _PickRandomPoint()
# Points within theta width and further than distance will be dropped.
theta_drop_filter = ((theta < (theta[point_idx] + theta_half_width)) &
(theta > (theta[point_idx] - theta_half_width)) &
(r > p.distance))
else:
theta_drop_filter = tf.zeros_like(points_mask, dtype=tf.bool)
if p.phi_width > 0:
phi_half_width = p.phi_width / 2.
point_idx = _PickRandomPoint()
# Points within phi width and further than distance will be dropped.
phi_drop_filter = ((phi < (phi[point_idx] + phi_half_width)) &
(phi >
(phi[point_idx] - phi_half_width)) & (r > p.distance))
else:
phi_drop_filter = tf.zeros_like(points_mask, dtype=tf.bool)
# Create drop_filter by combining filters. This contains a filter for the
# points to be removed. One can use the intersection method to limit the
# dropped points to those within both the phi and theta ranges.
if p.drop_type == 'union':
drop_filter = theta_drop_filter | phi_drop_filter
elif p.drop_type == 'intersection':
drop_filter = theta_drop_filter & phi_drop_filter
if p.keep_prob == 0:
# Drop all points in drop_filter.
down_sampling_filter = drop_filter
else:
# Randomly drop points in drop_filter based on keep_prob.
sampling_drop_filter = tf.random.uniform([num_total_points],
minval=0,
maxval=1,
dtype=tf.float32)
# Points greater than the threshold (keep_prob) will be dropped.
sampling_drop_filter = sampling_drop_filter > p.keep_prob
# Instead of dropping all points in the frustum, we drop out points
# that are in the selected frustum (drop_filter).
down_sampling_filter = drop_filter & sampling_drop_filter
points_mask &= ~down_sampling_filter
if points_padding is not None:
features.lasers.points_padding = 1 - tf.cast(points_mask, tf.float32)
else:
features.lasers.points_xyz = tf.boolean_mask(points_xyz, points_mask)
features.lasers.points_feature = tf.boolean_mask(points_feature,
points_mask)
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class RepeatPreprocessor(Preprocessor):
"""Repeat a preprocessor multiple times.
This preprocessor takes a preprocessor as a subprocessor and apply the
subprocessor to features multiple times (repeat_count).
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('repeat_count', 1, 'Number of times the subprocessor is applied to'
' features.')
p.Define('subprocessor', None, 'One of the input preprocessors.')
return p
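# Usage sketch (MyAugmentor is a hypothetical Preprocessor subclass):
#
#   p = RepeatPreprocessor.Params().Set(
#       repeat_count=3, subprocessor=MyAugmentor.Params())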
def __init__(self, params):
super().__init__(params)
p = self.params
if p.subprocessor is None:
raise ValueError('No subprocessor was specified for RepeatPreprocessor.')
if p.repeat_count < 0 or not isinstance(p.repeat_count, int):
raise ValueError(
'repeat_count must be >= 0 and int, repeat_count={}'.format(
p.repeat_count))
self.CreateChild('subprocessor', p.subprocessor)
def TransformFeatures(self, features):
p = self.params
for _ in range(p.repeat_count):
features = self.subprocessor.FPropDefaultTheta(features)
return features
def TransformShapes(self, shapes):
p = self.params
for _ in range(p.repeat_count):
shapes = self.subprocessor.TransformShapes(shapes)
return shapes
def TransformDTypes(self, dtypes):
p = self.params
for _ in range(p.repeat_count):
dtypes = self.subprocessor.TransformDTypes(dtypes)
return dtypes
class RandomApplyPreprocessor(Preprocessor):
"""Randomly apply a preprocessor with certain probability.
This preprocessor takes a preprocessor as a subprocessor and apply the
subprocessor to features with certain probability.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('prob', 1.0, 'The probability the subprocessor being executed.')
p.Define('subprocessor', None, 'Params for an input preprocessor.')
return p
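# Usage sketch (MyAugmentor is a hypothetical Preprocessor subclass): apply
# the subprocessor to roughly half of the examples.
#
#   p = RandomApplyPreprocessor.Params().Set(
#       prob=0.5, subprocessor=MyAugmentor.Params())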
def __init__(self, params):
super().__init__(params)
p = self.params
if p.subprocessor is None:
raise ValueError(
'No subprocessor was specified for RandomApplyPreprocessor.')
if p.prob < 0 or p.prob > 1 or not isinstance(p.prob, float):
raise ValueError(
'prob must be >= 0 and <=1 and float type, prob={}'.format(p.prob))
self.CreateChild('subprocessor', p.subprocessor)
def TransformFeatures(self, features):
p = self.params
choice = tf.random.uniform(
(), minval=0.0, maxval=1.0, seed=p.random_seed) <= p.prob
# Features is passed downstream and may be modified; we make deep copies
# here to use with tf.cond to avoid having tf.cond access updated
# versions. Note that we need one copy for each branch in case the branches
# further modify features.
features_0, features_1 = features.DeepCopy(), features.DeepCopy()
features = tf.cond(choice,
lambda: self.subprocessor.TransformFeatures(features_0),
lambda: features_1)
return features
def TransformShapes(self, shapes):
shapes_transformed = self.subprocessor.TransformShapes(shapes)
if not shapes.IsCompatible(shapes_transformed):
raise ValueError(
'NestedMap structures are different between shapes and transformed '
'shapes. Original shapes: {}. Transformed shapes: {}'.format(
shapes, shapes_transformed))
def IsCompatibleWith(a, b):
return a.is_compatible_with(b)
if not all(
py_utils.Flatten(
py_utils.Transform(IsCompatibleWith, shapes, shapes_transformed))):
raise ValueError(
'Shapes after transformation - {} are different from original '
'shapes - {}.'.format(shapes_transformed, shapes))
return shapes
def TransformDTypes(self, dtypes):
transformed_dtypes = self.subprocessor.TransformDTypes(dtypes)
if transformed_dtypes != dtypes:
raise ValueError(
'DTypes after transformation of preprocessor - {} should be '
'the same as {}, but got {}.'.format(self.params.subprocessor, dtypes,
transformed_dtypes))
return dtypes
class ConstantPreprocessor(Preprocessor):
"""Preprocessor that produces specified constant values in a nested output."""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'constants', py_utils.NestedMap(),
'Map of key names to numpy arrays of constant values to use. '
'Must be a NestedMap or dict convertible to NestedMap.')
return p
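# Usage sketch: inject a fixed array under a new key in every example
# (key name and values are illustrative):
#
#   p = ConstantPreprocessor.Params().Set(
#       constants=py_utils.NestedMap(class_weights=np.array([1., 2., 3.])))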
def TransformFeatures(self, features):
constants = py_utils.NestedMap(self.params.constants)
features.update(constants.Transform(tf.constant))
return features
def TransformShapes(self, shapes):
constants = py_utils.NestedMap(self.params.constants)
shapes.update(
constants.Transform(lambda x: tf.TensorShape(np.array(x).shape)))
return shapes
def TransformDTypes(self, dtypes):
constants = py_utils.NestedMap(self.params.constants)
dtypes.update(constants.Transform(lambda x: tf.as_dtype(np.array(x).dtype)))
return dtypes
class IdentityPreprocessor(Preprocessor):
"""Preprocessor that passes all inputs through.
This may be useful for situations where one wants a 'no-op' preprocessor, such
as being able to randomly choose to do nothing among a set of preprocessor
choices.
"""
def TransformFeatures(self, features):
return features
def TransformShapes(self, shapes):
return shapes
def TransformDTypes(self, dtypes):
return dtypes
class RandomChoicePreprocessor(Preprocessor):
"""Randomly applies a preprocessor with specified weights.
Each subprocessor is paired with a weight schedule; at every step the
schedule values are evaluated and interpreted as relative weights for
selecting which subprocessor to apply.
For example, if p.subprocessors = [preprocessor1, preprocessor2] and the
weights are [1., 2.], then preprocessor1 will be applied with probability 1/3,
and preprocessor2 will be applied with probability 2/3.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'subprocessors', [],
'Params for preprocessors. Each value should be a tuple of '
'(Preprocessor.Params(), BaseSchedule.Params()), where the schedule '
'defines the weights to use over time.')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if not p.subprocessors:
raise ValueError('No subprocessors were specified.')
subprocessors, schedules = zip(*p.subprocessors)
def _FilterNonSchedules(v):
return not issubclass(getattr(v, 'cls', False), schedule.BaseSchedule)
invalid_values = [_FilterNonSchedules(s) for s in schedules]
if any(invalid_values):
raise TypeError('Not all schedule values were schedules: '
f'{invalid_values}')
self.CreateChildren('subprocessors', list(subprocessors))
self.CreateChildren('schedules', list(schedules))
def TransformFeatures(self, features):
p = self.params
choice_list = []
weight_list = []
# Pass a unique copy of the input to each branch, in case the
# subprocessor destructively modifies the features in unexpected ways.
for subp, sched in zip(self.subprocessors, self.schedules):
choice_list.append(
lambda subp=subp: subp.TransformFeatures(features.DeepCopy()))
weight_list.append(sched.Value())
weight_tensor = tf.stack(weight_list)
chosen_bin = tf.random.categorical(
tf.math.log(weight_tensor[tf.newaxis]),
1,
seed=p.random_seed,
dtype=tf.int32)[0, 0]
features = tf.switch_case(chosen_bin, branch_fns=choice_list)
return features
def TransformShapes(self, shapes):
transformed_shapes = [
subp.TransformShapes(shapes.DeepCopy()) for subp in self.subprocessors
]
if not all(transformed_shapes[0] == curr for curr in transformed_shapes):
raise ValueError('Shapes after transformations were not identical: '
f'{transformed_shapes}')
return transformed_shapes[0]
def TransformDTypes(self, dtypes):
transformed_dtypes = [
subp.TransformDTypes(dtypes.DeepCopy()) for subp in self.subprocessors
]
if not all(transformed_dtypes[0] == curr for curr in transformed_dtypes):
raise ValueError('DTypes after transformations were not identical: '
f'{transformed_dtypes}')
return transformed_dtypes[0]
class Sequence(Preprocessor):
"""Packages a sequence of preprocessors as one preprocessor."""
@classmethod
def Params(cls):
p = super().Params()
p.Define(
'preprocessors', [], 'A list of preprocessors. '
'Each should be of type Preprocessor.Params().')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
self.CreateChildren('preprocessors', p.preprocessors)
def TransformFeatures(self, features):
for preprocessor in self.preprocessors:
features = preprocessor.TransformFeatures(features)
return features
def TransformShapes(self, shapes):
for preprocessor in self.preprocessors:
shapes = preprocessor.TransformShapes(shapes)
return shapes
def TransformDTypes(self, dtypes):
for preprocessor in self.preprocessors:
dtypes = preprocessor.TransformDTypes(dtypes)
return dtypes
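# Usage sketch (PrepA/PrepB are hypothetical Preprocessor subclasses):
#
#   p = Sequence.Params().Set(
#       preprocessors=[PrepA.Params(), PrepB.Params()])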
class SparseSampler(Preprocessor):
"""Fused SparseCenterSelector and SparseCellGatherFeatures.
This preprocessor expects features to contain the following keys:
- lasers.points_xyz of shape [P, 3]
- lasers.points_feature of shape [P, F]
Adds the following features:
anchor_centers - [num_centers, 3] - Floating point output containing the
center (x, y, z) locations for tiling anchor boxes.
cell_center_xyz - [num_centers, 3] - Floating point output containing
the center (x, y, z) locations for each cell to featurize.
cell_center_padding - [num_centers] - 0/1 padding for each center.
cell_points_xyz - [num_centers, num_neighbors, 3] - Floating point
output containing the (x, y, z) locations for each point for a given
center.
cell_feature - [num_centers, num_neighbors, F] - Floating point output
containing the features for each point for a given center.
cell_points_padding - [num_centers, num_neighbors] - 0/1 padding
for the points in each cell.
"""
@classmethod
def Params(cls):
p = super().Params()
p.Define('center_selector', 'farthest', 'Method to sample centers. '
'Valid options - uniform, farthest.')
p.Define('neighbor_sampler', 'uniform', 'Method to select neighbors. '
'Valid options - uniform, closest.')
p.Define('num_centers', 16, 'The number of centers to sample.')
p.Define(
'features_preparation_layers', [],
'A list of Params for layers to run on the features before '
'performing farthest point sampling. For example, one may wish to '
'drop points out of frustum for KITTI before selecting centers. '
'Note that these layers will not mutate the original features, '
'instead, a copy will be made.')
p.Define(
'keep_z_range', (-np.inf, np.inf),
'Only points that have z coordinates within this range are kept. '
'Approximate ground-removal can be performed by specifying a '
'lower-bound on the z-range.')
p.Define('num_neighbors', 64, 'Sample this many points within the '
'neighborhood.')
p.Define(
'max_distance', 1.0, 'Points with L2 distances from a center '
'larger than this threshold are not considered to be in the '
'neighborhood.')
return p
def __init__(self, params):
super().__init__(params)
p = self.params
if p.features_preparation_layers:
self.CreateChildren('features_preparation_layers',
p.features_preparation_layers)
def TransformFeatures(self, features):
p = self.params
n, m = p.num_centers, p.num_neighbors
prepared_features = features.DeepCopy()
if p.features_preparation_layers:
for prep_layer in self.features_preparation_layers:
prepared_features = prep_layer.FPropDefaultTheta(prepared_features)
points_data = prepared_features.lasers
points = py_utils.HasShape(points_data.points_xyz, [-1, 3])
if 'points_padding' in points_data:
points_mask = 1 - points_data.points_padding
points = tf.boolean_mask(points, points_mask)
# If num_points < num_centers, pad points to have at least num_centers
# points.
num_points = tf.shape(points)[0]
required_num_points = tf.maximum(num_points, p.num_centers)
zeros = tf.zeros([required_num_points - num_points, 3])
points = tf.concat([points, zeros], axis=0)
num_seeded_points = points_data.get('num_seeded_points', 0)
neighbor_algorithm = 'auto'
# Based on benchmarks, the hash solution works better when the number of
# centers is >= 16 and there are at least 10k points per point cloud.
if p.num_centers >= 16:
neighbor_algorithm = 'hash'
centers, center_paddings, indices, indices_paddings = ops.sample_points(
points=tf.expand_dims(points, 0),
points_padding=tf.zeros([1, required_num_points], tf.float32),
num_seeded_points=num_seeded_points,
center_selector=p.center_selector,
neighbor_sampler=p.neighbor_sampler,
neighbor_algorithm=neighbor_algorithm,
num_centers=p.num_centers,
center_z_min=p.keep_z_range[0],
center_z_max=p.keep_z_range[1],
num_neighbors=p.num_neighbors,
max_distance=p.max_distance,
random_seed=p.random_seed if p.random_seed else -1)
centers = py_utils.HasShape(centers, [1, n])[0, :]
center_paddings = py_utils.HasShape(center_paddings, [1, n])[0, :]
indices = py_utils.HasShape(indices, [1, n, m])[0, :]
indices_paddings = py_utils.HasShape(indices_paddings, [1, n, m])[0, :]
features.cell_center_padding = center_paddings
features.cell_center_xyz = py_utils.HasShape(
tf.gather(points, centers), [n, 3])
features.anchor_centers = features.cell_center_xyz
features.cell_points_xyz = py_utils.HasShape(
tf.gather(points, indices), [n, m, 3])
features.cell_feature = tf.gather(points_data.points_feature, indices)
features.cell_points_padding = indices_paddings
return features
def TransformShapes(self, shapes):
p = self.params
n, m, f = p.num_centers, p.num_neighbors, shapes.lasers.points_feature[-1]
shapes.anchor_centers = tf.TensorShape([n, 3])
shapes.cell_center_padding = tf.TensorShape([n])
shapes.cell_center_xyz = tf.TensorShape([n, 3])
shapes.cell_points_xyz = tf.TensorShape([n, m, 3])
shapes.cell_feature = tf.TensorShape([n, m, f])
shapes.cell_points_padding = tf.TensorShape([n, m])
return shapes
def TransformDTypes(self, dtypes):
dtypes.anchor_centers = tf.float32
dtypes.cell_center_padding = tf.float32
dtypes.cell_center_xyz = tf.float32
dtypes.cell_points_xyz = tf.float32
dtypes.cell_feature = tf.float32
dtypes.cell_points_padding = tf.float32
return dtypes
| apache-2.0 | -1,775,499,709,604,169,200 | 37.10177 | 80 | 0.652196 | false | 3.658709 | false | false | false |
ZeekWang/YoudaoDictAlfredWorkflow | alfred.py | 1 | 3295 | # -*- coding: utf-8 -*-
# Copyright 2013 Dr. Jan Müller
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either expressed or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import os
import plistlib
import unicodedata
import sys
from xml.etree.ElementTree import Element, SubElement, tostring
"""
You should run your script via /bin/bash with all escape options ticked.
The command line should be
python yourscript.py "{query}" arg2 arg3 ...
"""
UNESCAPE_CHARACTERS = u""" ;()"""
_MAX_RESULTS_DEFAULT = 9
preferences = plistlib.readPlist('info.plist')
bundleid = preferences['bundleid']
class Item(object):
@classmethod
def unicode(cls, value):
try:
items = iter(value.items())
except AttributeError:
return unicode(value)
else:
return dict(map(unicode, item) for item in items)
def __init__(self, attributes, title, subtitle, icon=None):
self.attributes = attributes
self.title = title
self.subtitle = subtitle
self.icon = icon
def __str__(self):
return tostring(self.xml()).decode('utf-8')
def xml(self):
item = Element(u'item', self.unicode(self.attributes))
for attribute in (u'title', u'subtitle', u'icon'):
value = getattr(self, attribute)
if value is None:
continue
if len(value) == 2 and isinstance(value[1], dict):
(value, attributes) = value
else:
attributes = {}
SubElement(item, attribute, self.unicode(attributes)).text = self.unicode(value)
return item
def args(characters=None):
return tuple(unescape(decode(arg), characters) for arg in sys.argv[1:])
def config():
return _create('config')
def decode(s):
return unicodedata.normalize('NFD', s.decode('utf-8'))
def uid(uid):
return u'-'.join(map(str, (bundleid, uid)))
def unescape(query, characters=None):
for character in (UNESCAPE_CHARACTERS if (characters is None) else characters):
query = query.replace('\\%s' % character, character)
return query
def work(volatile):
path = {
True: '~/Library/Caches/com.runningwithcrayons.Alfred-2/Workflow Data',
False: '~/Library/Application Support/Alfred 2/Workflow Data'
}[bool(volatile)]
return _create(os.path.join(os.path.expanduser(path), bundleid))
def write(text):
sys.stdout.write(text)
def xml(items, maxresults=_MAX_RESULTS_DEFAULT):
root = Element('items')
for item in itertools.islice(items, maxresults):
root.append(item.xml())
return tostring(root, encoding='utf-8')
def _create(path):
if not os.path.isdir(path):
os.mkdir(path)
if not os.access(path, os.W_OK):
raise IOError('No write access: %s' % path)
return path | apache-2.0 | 3,484,946,271,677,520,000 | 29.794393 | 92 | 0.651488 | false | 3.790564 | false | false | false |
gleydsonm/pynet_ex | class4/exercise7.py | 1 | 1048 | #!/usr/bin/env python
'''
Exercise 7 - Class 4
Gleydson Mazioli <gleydsonmazioli@gmail.com>
'''
import netmiko
def remote_connect(host):
'''
Connect to a remote host
'''
return netmiko.ConnectHandler(**host)
def send_command(conn, cmd):
'''
Send a command
'''
return conn.send_command(cmd)
def run_config_command(conn, cmd):
'''
Run a command in config mode
'''
return conn.send_config_set(cmd)
def main():
'''
Main function
'''
pynet2 = {
'device_type': 'cisco_ios',
'ip': '50.76.53.27',
'username': 'pyclass',
'password': '88newclass',
'port': '8022'
}
for rtr in ['pynet2']:
my_rtr = remote_connect(eval(rtr))
config_cmds = ['logging buffered 65535']
print 'Running commands on {}'.format(rtr)
print run_config_command(my_rtr, config_cmds)
print my_rtr.send_command("show run | inc logging buffered")
print '\n'
my_rtr.disconnect()
if __name__ == "__main__":
main()
| apache-2.0 | -5,268,592,781,599,579,000 | 18.773585 | 68 | 0.567748 | false | 3.275 | true | false | false |
rnicoll/dogecoin | test/functional/feature_signet.py | 1 | 3282 | #!/usr/bin/env python3
# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test basic signet functionality"""
from decimal import Decimal
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal
# Dogecoin: Had to replace the version on this as it was invalid under AuxPoW rules.
bad_signet_blksig_block = '03006200a585d01fddeed2b0ed42703e0a048407c05509e3e55d241b3f8bb5a3002c1af2f575c83235984e7dc4afc1f30944c170462e84437ab6f2d52e16878a79e4678bd1914d5f7af7001f5f71000001010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff025151feffffff0200f2052a010000001600149243f727dd5343293eb83174324019ec16c2630f0000000000000000776a24aa21a9ede2f61c3f71d1defd3fa999dfa36953755c690689799962b48bebd836974e8cf94c4fecc7daa2490047304402205e423a8754336ca99dbe16509b877ef1bf98d008836c725005b3c787c41ebe46022047246e4467ad7cc7f1ad98662afcaf14c115e0095a227c7b05c5182591c23e7e01000120000000000000000000000000000000000000000000000000000000000000000000000000'
class SignetBasicTest(BitcoinTestFramework):
def set_test_params(self):
self.chain = "signet"
self.num_nodes = 6
self.setup_clean_chain = True
shared_args1 = ["-signetchallenge=51"] # OP_TRUE
shared_args2 = ["-signetchallenge=50"] # Dogecoin: OP_FALSE, but we don't actually use these nodes
# we use the Bitcoin default challenge except we do it as a 2-of-2, which means it should fail
shared_args3 = ["-signetchallenge=522103ad5e0edad18cb1f0fc0d28a3d4f1f3e445640337489abb10404f2d1e086be430210359ef5021964fe22d6f8e05b2463c9540ce96883fe3b278760f048f5189f2e6c452ae"]
self.extra_args = [
shared_args1, shared_args1,
shared_args2, shared_args2,
shared_args3, shared_args3,
]
def run_test(self):
self.log.info("basic tests using OP_TRUE challenge")
self.log.info('getmininginfo')
mining_info = self.nodes[0].getmininginfo()
assert_equal(mining_info['blocks'], 0)
assert_equal(mining_info['chain'], 'signet')
assert 'currentblocktx' not in mining_info
assert 'currentblockweight' not in mining_info
assert_equal(mining_info['networkhashps'], Decimal('0'))
assert_equal(mining_info['pooledtx'], 0)
self.nodes[0].generate(1)
# Dogecoin: No default Signet network, so pregenerated blocks are not relevant.
# self.log.info("pregenerated signet blocks check")
# height = 0
# for block in signet_blocks:
# assert_equal(self.nodes[2].submitblock(block), None)
# height += 1
# assert_equal(self.nodes[2].getblockcount(), height)
self.log.info("pregenerated signet blocks check (incompatible solution)")
assert_equal(self.nodes[4].submitblock(bad_signet_blksig_block), 'bad-signet-blksig')
self.log.info("test that signet logs the network magic on node start")
with self.nodes[0].assert_debug_log(["Signet derived magic (message start)"]):
self.restart_node(0)
if __name__ == '__main__':
SignetBasicTest().main()
| mit | -3,319,875,437,481,084,000 | 51.095238 | 686 | 0.742535 | false | 3.192607 | true | false | false |
jimpick/jaikuengine | common/test/util.py | 1 | 4946 | # Copyright 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import datetime
import logging
import re
import time as py_time
from django.conf import settings
from common import api
from common import clock
from common import exception
from common.protocol import sms
from common.protocol import xmpp
utcnow = lambda: clock.utcnow()
_re_match_url = re.compile(r'(http://[^/]+(/[^\s]+))', re.M)
def get_url(s):
m = _re_match_url.search(s)
if not m:
return None
return m.group(1)
def get_relative_url(s):
m = _re_match_url.search(s)
if not m:
return None
return m.group(2)
def exhaust_queue(nick):
for i in xrange(1000):
try:
api.task_process_actor(api.ROOT, nick)
except exception.ApiNoTasks:
break
def exhaust_queue_any():
for i in xrange(1000):
try:
api.task_process_any(api.ROOT)
except exception.ApiNoTasks:
break
class TestXmppConnection(xmpp.XmppConnection):
def send_message(self, to_jid_list, message):
logging.debug('XMPP SEND -> %s: %s', to_jid_list, message)
for jid in to_jid_list:
xmpp.outbox.append((jid, message))
class TestSmsConnection(sms.SmsConnection):
def send_message(self, to_list, message):
to_list = self.filter_targets(to_list, message)
logging.debug('SMS SEND -> %s: %s', to_list, message)
for recp in to_list:
sms.outbox.append((recp, message))
class FakeRequest(object):
def __init__(self, **kw):
self.user = kw.get('user', None)
self.POST = kw.get('post', {})
self.GET = kw.get('get', {})
@property
def REQUEST(self):
return dict(list(self.POST.items()) + list(self.GET.items()))
class FakeMemcache(object):
""" a disappointingly full-featured fake memcache :( """
def __init__(self, *args, **kw):
self._data = {}
pass
def _get_valid(self, key):
if key not in self._data:
return None
data = self._data[key]
if data[1]:
now = py_time.mktime(utcnow().timetuple())
if now > data[1]:
#logging.info('invalid key, %s, %s > %s', key, now, data[1])
return None
#logging.info('valid key, %s returning: %s', key, data[0])
return data[0]
def set(self, key, value, time=0):
if time:
if time < 2592000: # approx 1 month
time = py_time.mktime(utcnow().timetuple()) + time
#logging.info('setting key %s to %s', key, (value, time))
self._data[key] = (value, time)
return True
def set_multi(self, mapping, time=0, key_prefix=''):
for k, v in mapping.iteritems():
self.set(key_prefix + k, v, time=time)
return []
def add(self, key, value, time=0):
if self._get_valid(key) is not None:
return False
self.set(key, value, time)
return True
def incr(self, key, delta=1):
data = self._get_valid(key)
if data is None:
return None
data_tup = self._data[key]
try:
count = int(data)
except ValueError:
return None
count += delta
self.set(key, count, time=data_tup[1])
return count
def decr(self, key, delta=1):
return self.incr(key, delta=-delta)
def delete(self, key, seconds=0):
# NOTE: doesn't support seconds
try:
del self._data[key]
return 2
except KeyError:
return 1
def get(self, key):
return self._get_valid(key)
def get_multi(self, keys, key_prefix=''):
out = {}
for k in keys:
v = self._get_valid(key_prefix + k)
out[k] = v
return out
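# Minimal usage sketch (mirrors the subset of the memcache API faked here):
#
#   cache = FakeMemcache()
#   cache.set('hits', 1, time=60)
#   cache.incr('hits')    # -> 2
#   cache.get('hits')     # -> 2
#   cache.delete('hits')  # -> 2 (memcache's DELETE_SUCCESSFUL)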
class ClockOverride(object):
old = None
kw = None
def __init__(self, module, **kw):
self.kw = kw
self.old = {}
self.module = module
def override(self):
self.old = getattr(self.module, 'utcnow')
new_utcnow = lambda: (datetime.datetime.utcnow() +
datetime.timedelta(**self.kw))
setattr(self.module, 'utcnow', new_utcnow)
def reset(self):
setattr(self.module, 'utcnow', self.old)
def override_clock(module, **kw):
o = ClockOverride(module, **kw)
o.override()
return o
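# Usage sketch: shift a module's notion of "now" one hour ahead for a test,
# then restore it (the module must expose a `utcnow` callable):
#
#   o = override_clock(some_module, hours=1)
#   ... run assertions that depend on some_module.utcnow() ...
#   o.reset()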
class SettingsOverride(object):
old = None
kw = None
def __init__(self, **kw):
self.kw = kw
self.old = {}
def override(self):
for k, v in self.kw.iteritems():
self.old[k] = getattr(settings, k, None)
setattr(settings, k, v)
def reset(self):
for k, v in self.old.iteritems():
setattr(settings, k, v)
def override(**kw):
o = SettingsOverride(**kw)
o.override()
return o
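# Usage sketch (setting name is illustrative): temporarily override a Django
# setting inside a test.
#
#   o = override(SMS_ENABLED=False)
#   ... assertions that read settings.SMS_ENABLED ...
#   o.reset()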
| apache-2.0 | -2,671,414,803,026,199,000 | 24.106599 | 74 | 0.6296 | false | 3.310576 | false | false | false |
tfadgreef/Automated-DNA-Design | src/randomizer.py | 1 | 13561 | # randomizer class
# This program is part of the TUPack software package and makes use of NuPack 3.0. NuPack can be found on <www.nupack.org>
# This class creates an object performing random sequence mutations to outline the spread of the free energy predictions.
# For more info about the program, please read the user manual.
#
# written by Sander Rodenburg
# Eindhoven University of Technology
# 2012
# importing the NUPACK object, for running complexes and for reading the NuPack output files
import nupack
# importing the Strand library, containing the objects Strand and StrandRelation for implementing the DNA network
import Strand
# importing other libraries
import random
import math
import time
import os
import sys
import Tkinter
import tkFileDialog
import copy
class randomizer:
def __init__(self, nRuns, maxCxSize=1, seqFile=None, bindingFile=None, saveRuns=[]):
# defining seed for random
random.seed(time.time())
#random.seed(1)
# lists holding the Strands, duplexes and Inhibitors
self.Strands = []
self.Duplexes = []
self.Inhibitors = []
# list for keeping the order in which the sequences should be adjusted
self.adjustOrder = []
# NuPack maximum complex size
self.maxCxSize = maxCxSize
# runs in which the program should make a distinct save
self.saveRuns = saveRuns
# if the files are not defined in the class parameters, a filechooser pops up
try:
self.seqFile = open(seqFile, "r")
except (TypeError, IOError):
self.seqFile = self._chooseFile("Select sequence file")
try:
self.bindingFile = open(bindingFile, "r")
except (TypeError, IOError):
self.bindingFile = self._chooseFile("Select binding file")
# if there are still no files selected, raise an error
if self.seqFile == None or self.bindingFile == None:
sys.exit("One or more input files are missing.")
#defining file path
wd = os.path.dirname(self.seqFile.name)
# changing directory to input file path
ioDir = wd+"/NuPackIO"
if not os.path.exists(ioDir):
os.mkdir(ioDir)
os.chdir(wd+"/NuPackIO")
# initializing the NUPACK object in silent mode
self.nupack = nupack.NUPACK(prefix=os.getcwd()+"/cxFile",
paramFile=os.path.abspath(os.path.join(os.getcwd(), os.path.pardir))+"/parameters.txt",
silent=True)
# reading input files for initial sequence generations
self._readSeqFile(self.seqFile)
self._readBindingFile(self.bindingFile)
# close the files
self.seqFile.close()
self.bindingFile.close()
# adjust initial sequences
self._adjustSequenceSet()
self._adjustInhibitors()
# run the mainloop for simulated annealing and sequence mutation
self._mainloop(nRuns, False)
print "Done."
print "Files stored in " +os.getcwd()
def _mainloop(self, nRuns, screenOut=False):
gcFile = open("GC_Content.txt", "w")
feFile = open("Free Energies.txt", "w")
if screenOut:
self.printSequences()
run = 0
# for each run
for Run in range(nRuns):
run += 1
self._processMutation()
NpOut = self._runNuPack()
fe = NpOut
gc = []
for strand in self.Strands:
gc += [strand.getGC()]
if run == 1:
self._writeFeFile(fe, feFile, True)
self._writeGcFile(gc, gcFile, True)
else:
self._writeFeFile(fe, feFile, False)
self._writeGcFile(gc, gcFile, False)
# if the run is in the save list, run an extra time on another file name to prevent overwriting the files
if run in self.saveRuns:
self._saveRun("run"+str(run))
if run % 1000 == 0:
print "Done "+str(run)+" of "+str(nRuns)
if screenOut:
self.printSequences(True)
#self.printSequences(True)
gcFile.close()
feFile.close()
# copy the output files, to prevent overwriting
def _saveRun(self, title):
os.system("cp cxFile.in " +title+".in")
os.system("cp cxFile.cx " +title+ ".cx")
os.system("cp cxFile.ocx-mfe " +title+".ocx-mfe")
# print the sequences, reversed or normal
def printSequences(self, comp=False):
for seq in self.Strands:
if seq.Type == "binding" or seq.Type == "inhibitor":
if comp:
seq.Sequence.reverse()
print seq.Sequence + " (REVERSED)"
seq.Sequence.reverse()
else:
print seq.Sequence
else:
print seq.Sequence
print "\n=================\n"
# write the GC file
def _writeGcFile(self, gcList, gcFile, printHeader=True):
# if the header should be written
if printHeader:
header = ""
for strand in self.Strands:
header += strand.Name + "\t"
gcFile.write(header.rstrip("\t") + "\n")
gcData = ""
for strandGC in gcList:
gcData += str(strandGC) + "\t"
gcFile.write(gcData.rstrip("\t") + "\n")
# function for writing the output file containing the errors
def _writeErrFile(self, errList, errFile, printHeader=True):
if printHeader:
errFile.write("Error:\tdError:\n")
errFile.write(str(errList[0])+"\t"+str(errList[1])+"\n")
# function for reading and writing NuPack complexes
def _runNuPack(self):
# write the nupack input files '.in' and '.list'
self._writeNpInput("cxFile", self.maxCxSize)
# run the nupack binary complexes
self.nupack.runComplexes()
# add the output to the free energy list
NpOut = self.nupack.readLastOutput()
return NpOut
# makes a random mutation in a sequence, and adjusts this in dependant sequences
def _processMutation(self):
# make a random mutation
mutated = self._makeRandomMutation()
seqList = []
for strand in self.Strands:
seqList.append(strand.Name)
# make the order in which the sequences should be made complementary
self.adjustOrder = []
self._makeAdjustOrder(mutated, seqList)
# make the sequences and the inhibitors complementary
self._adjustSequenceSet()
self._adjustInhibitors()
# makes the strands and inhibitors complementary in the right order
def _adjustInhibitors(self):
for duplex in self.Inhibitors:
if duplex.RelationType == "inhibiting":
duplex.adjustSequences(duplex.Strand1.Name)
for duplex in self.Inhibitors:
if duplex.RelationType == "binding":
duplex.adjustSequences(duplex.Strand2.Name)
# makes all strands complementary
def _adjustSequenceSet(self):
# if there is a specified order
if self.adjustOrder != []:
            for order in self.adjustOrder:
                for dup in self.Duplexes:
if (dup.Strand1.Name == order[0] and dup.Strand2.Name == order[1]):
dup.adjustSequences(dup.Strand1.Name)
if (dup.Strand1.Name == order[1] and dup.Strand2.Name == order[0]):
dup.adjustSequences(dup.Strand2.Name)
# if the order is not important
else:
for duplex in self.Duplexes:
if duplex.Strand1.Mutated and duplex.Strand2.Mutated:
pass
if duplex.Strand1.Mutated and not duplex.Strand2.Mutated:
duplex.adjustSequences(duplex.Strand1.Name)
if not duplex.Strand1.Mutated and duplex.Strand2.Mutated:
duplex.adjustSequences(duplex.Strand2.Name)
if not duplex.Strand1.Mutated and not duplex.Strand2.Mutated:
duplex.adjustSequences(duplex.Strand1.Name)
for strand in self.Strands:
strand.Mutated = False
# make a random mutation
def _makeRandomMutation(self):
        templates = []
for strand in self.Strands:
# only templates can be mutated
if strand.Type == "template":
templates += [strand]
# pick a random template strand
template = random.choice(templates)
# randomize the whole sequence
template.randomize()
return template
# makes the order in which the sequences should be adjusted
def _makeAdjustOrder(self, baseStrand, seqList):
if baseStrand.Name in seqList:
seqList.remove(baseStrand.Name)
for dup in self.Duplexes:
Continue = False
if dup.Strand1.Name == baseStrand.Name:
compStrand = dup.Strand2
Continue = True
if dup.Strand2.Name == baseStrand.Name:
compStrand = dup.Strand1
Continue = True
if Continue:
if compStrand.Name in seqList:
self.adjustOrder += [[baseStrand.Name, compStrand.Name]]
self._makeAdjustOrder(compStrand, seqList)
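    # Worked example added for clarity (illustrative, not in the original
    # source): with duplexes A-B and B-C, mutating strand A gives
    # adjustOrder = [[A, B], [B, C]], so B is rebuilt as the complement of A
    # before C is rebuilt as the complement of B.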
# function for reading the sequence input
def _readSeqFile(self, seqFile):
try:
# reading the sequences file
ID = 1
for line in seqFile:
line = line.strip("\n").strip("\r").split("\t")
newStrand = Strand.Strand(line[1].strip("\r"), line[0], ID)
self.Strands += [newStrand]
newStrand.defineN()
if line[2] == "T":
newStrand.Type = "template"
elif line[2] == "B":
newStrand.Type = "binding"
elif line[2] == "I":
newStrand.Type = "inhibitor"
ID += 1
#print newStrand.Name, newStrand.Type
except IndexError:
sys.exit("The sequence file has a wrong format. Check the user manual for the right file formats.")
# reads the binding file
def _readBindingFile(self, bindingFile):
try:
# for each line in the binding file
for line in bindingFile:
line = line.strip("\n").strip("\r").split("\t")
strandFound = False
for strand in self.Strands:
if strand.Name == line[0]:
Strand_1 = strand
strandFound = True
if strand.Name == line[1]:
Strand_2 = strand
strandFound = True
if strandFound == False:
sys.exit(line[0]+" or "+line[1]+" is not defined in sequence file.")
# if type is inhibitor, define in the duplex object whether it is a normal binding or an inhibiting duplex
if Strand_2.Type == "inhibitor":
if line[4] == "B":
duplex = Strand.StrandRelation(Strand_1, Strand_2, "binding")
duplex.setImmutable()
elif line[4] == "I":
duplex = Strand.StrandRelation(Strand_1, Strand_2, "inhibiting")
else:
sys.exit("Inhibitor binding types must be specified.")
duplex.TargetEnergy = float(line[3])
duplex.defineBindingStructure(line[2], ["(", ")", "#"])
self.Inhibitors += [duplex]
else:
duplex = Strand.StrandRelation(Strand_1, Strand_2)
duplex.TargetEnergy = float(line[3])
duplex.defineBindingStructure(line[2], ["(", ")"])
self.Duplexes += [duplex]
except IndexError:
sys.exit("The binding file has a wrong format. Check the user manual for the right file formats.")
# function for writing the free energies to a file.
def _writeFeFile(self, feList, feFile, printHeader=True):
header = ""
freeE = ""
for j in range(len(feList)):
# if the permutation is done with one strand
if len(feList[j]) == 3:
for strand in self.Strands:
if strand.ID == feList[j][0]:
if printHeader:
header += strand.Name+"\t"
# add free energies to list
freeE += str(feList[j][1]) +"\t"
# if the permutation is done with two strands
if len(feList[j]) == 4:
for duplex in self.Duplexes + self.Inhibitors:
if (duplex.Strand1.ID == feList[j][0] and duplex.Strand2.ID == feList[j][1]) or (duplex.Strand1.ID == feList[j][1] and duplex.Strand2.ID == feList[j][0]):
if printHeader:
header += duplex.Name+"\t"
freeE += str(feList[j][2]) +"\t"
if printHeader:
feFile.write(header.rstrip("\t") + "\n")
feFile.write(freeE.rstrip("\t") + "\n")
# function for writing the NuPack input files
def _writeNpInput(self, fileName, maxCxSize):
# open the input files for NuPack input
cxFile = open(fileName+".in", "w")
if maxCxSize == 1:
listFile = open(fileName+".list", "w")
# define the number of sequences and the maximum complex size
nSeqs = len(self.Strands)
# write the '.in' file
cxFile.write(str(nSeqs)+"\n")
for strand in self.Strands:
cxFile.write(str(strand.Sequence)+"\n")
cxFile.write(str(maxCxSize))
# close the '.in' file
cxFile.close()
if maxCxSize == 1:
output = ""
# write the '.list' file
# write normal duplexes
for duplex in self.Duplexes:
id1 = duplex.Strand1.ID
id2 = duplex.Strand2.ID
output += str(id1)+" "+str(id2)+"\n"
if self.Inhibitors != []:
for duplex in self.Inhibitors:
id1 = duplex.Strand1.ID
id2 = duplex.Strand2.ID
output += str(id1)+" "+str(id2)+"\n"
output = output.rstrip("\n")
listFile.write(output)
# close the '.list' file
listFile.close()
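    # Illustrative sketch of the files written above for two strands and
    # maxCxSize == 1 (sequences are made up, not from the original source):
    #
    #   cxFile.in          cxFile.list
    #     2                  1 2
    #     ATGCATGC
    #     GCATGCAT
    #     1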
# function for popping up a file chooser
def _chooseFile(self, Title):
# initialize the Tk object
root = Tkinter.Tk()
# withdraw the main window
root.withdraw()
# open file chooser on the current directory
File = tkFileDialog.askopenfile(parent=root, mode='r', title=Title, initialdir=os.getcwd())
# exit the windows
root.quit()
# return files
return File
| gpl-3.0 | -4,341,163,803,710,181,400 | 29.068736 | 164 | 0.622594 | false | 3.582827 | false | false | false |
kevin-intel/scikit-learn | sklearn/impute/_base.py | 2 | 32946 | # Authors: Nicolas Tresegnie <nicolas.tresegnie@gmail.com>
# Sergey Feldman <sergeyfeldman@gmail.com>
# License: BSD 3 clause
import numbers
import warnings
from collections import Counter
import numpy as np
import numpy.ma as ma
from scipy import sparse as sp
from scipy import stats
from ..base import BaseEstimator, TransformerMixin
from ..utils.sparsefuncs import _get_median
from ..utils.validation import check_is_fitted
from ..utils.validation import FLOAT_DTYPES
from ..utils._mask import _get_mask
from ..utils import is_scalar_nan
def _check_inputs_dtype(X, missing_values):
if (X.dtype.kind in ("f", "i", "u") and
not isinstance(missing_values, numbers.Real)):
raise ValueError("'X' and 'missing_values' types are expected to be"
" both numerical. Got X.dtype={} and "
" type(missing_values)={}."
.format(X.dtype, type(missing_values)))
def _most_frequent(array, extra_value, n_repeat):
"""Compute the most frequent value in a 1d array extended with
[extra_value] * n_repeat, where extra_value is assumed to be not part
of the array."""
# Compute the most frequent value in array only
if array.size > 0:
if array.dtype == object:
# scipy.stats.mode is slow with object dtype array.
# Python Counter is more efficient
counter = Counter(array)
most_frequent_count = counter.most_common(1)[0][1]
# tie breaking similarly to scipy.stats.mode
most_frequent_value = min(
value for value, count in counter.items()
if count == most_frequent_count
)
else:
mode = stats.mode(array)
most_frequent_value = mode[0][0]
most_frequent_count = mode[1][0]
else:
most_frequent_value = 0
most_frequent_count = 0
# Compare to array + [extra_value] * n_repeat
if most_frequent_count == 0 and n_repeat == 0:
return np.nan
elif most_frequent_count < n_repeat:
return extra_value
elif most_frequent_count > n_repeat:
return most_frequent_value
elif most_frequent_count == n_repeat:
# tie breaking similarly to scipy.stats.mode
return min(most_frequent_value, extra_value)
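# Illustrative examples of the tie-breaking above (not part of the original
# source): _most_frequent(np.array([1, 1, 2]), extra_value=0, n_repeat=1)
# returns 1 (the in-array mode wins), while
# _most_frequent(np.array([2, 2]), extra_value=0, n_repeat=2) returns 0,
# because ties are broken by the smaller value, mirroring scipy.stats.mode.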
class _BaseImputer(TransformerMixin, BaseEstimator):
"""Base class for all imputers.
    It automatically adds support for `add_indicator`.
"""
def __init__(self, *, missing_values=np.nan, add_indicator=False):
self.missing_values = missing_values
self.add_indicator = add_indicator
def _fit_indicator(self, X):
"""Fit a MissingIndicator."""
if self.add_indicator:
self.indicator_ = MissingIndicator(
missing_values=self.missing_values, error_on_new=False)
self.indicator_._fit(X, precomputed=True)
else:
self.indicator_ = None
def _transform_indicator(self, X):
"""Compute the indicator mask.'
Note that X must be the original data as passed to the imputer before
any imputation, since imputation may be done inplace in some cases.
"""
if self.add_indicator:
if not hasattr(self, 'indicator_'):
raise ValueError(
"Make sure to call _fit_indicator before "
"_transform_indicator"
)
return self.indicator_.transform(X)
def _concatenate_indicator(self, X_imputed, X_indicator):
"""Concatenate indicator mask with the imputed data."""
if not self.add_indicator:
return X_imputed
hstack = sp.hstack if sp.issparse(X_imputed) else np.hstack
if X_indicator is None:
raise ValueError(
"Data from the missing indicator are not provided. Call "
"_fit_indicator and _transform_indicator in the imputer "
"implementation."
)
return hstack((X_imputed, X_indicator))
def _more_tags(self):
return {'allow_nan': is_scalar_nan(self.missing_values)}
class SimpleImputer(_BaseImputer):
"""Imputation transformer for completing missing values.
Read more in the :ref:`User Guide <impute>`.
.. versionadded:: 0.20
`SimpleImputer` replaces the previous `sklearn.preprocessing.Imputer`
estimator which is now removed.
Parameters
----------
missing_values : int, float, str, np.nan or None, default=np.nan
The placeholder for the missing values. All occurrences of
`missing_values` will be imputed. For pandas' dataframes with
nullable integer dtypes with missing values, `missing_values`
should be set to `np.nan`, since `pd.NA` will be converted to `np.nan`.
strategy : string, default='mean'
The imputation strategy.
- If "mean", then replace missing values using the mean along
each column. Can only be used with numeric data.
- If "median", then replace missing values using the median along
each column. Can only be used with numeric data.
- If "most_frequent", then replace missing using the most frequent
value along each column. Can be used with strings or numeric data.
If there is more than one such value, only the smallest is returned.
- If "constant", then replace missing values with fill_value. Can be
used with strings or numeric data.
.. versionadded:: 0.20
strategy="constant" for fixed value imputation.
fill_value : string or numerical value, default=None
When strategy == "constant", fill_value is used to replace all
occurrences of missing_values.
If left to the default, fill_value will be 0 when imputing numerical
data and "missing_value" for strings or object data types.
verbose : integer, default=0
Controls the verbosity of the imputer.
copy : boolean, default=True
If True, a copy of X will be created. If False, imputation will
be done in-place whenever possible. Note that, in the following cases,
a new copy will always be made, even if `copy=False`:
- If X is not an array of floating values;
- If X is encoded as a CSR matrix;
- If add_indicator=True.
add_indicator : boolean, default=False
If True, a :class:`MissingIndicator` transform will stack onto output
of the imputer's transform. This allows a predictive estimator
to account for missingness despite imputation. If a feature has no
missing values at fit/train time, the feature won't appear on
the missing indicator even if there are missing values at
transform/test time.
Attributes
----------
statistics_ : array of shape (n_features,)
The imputation fill value for each feature.
Computing statistics can result in `np.nan` values.
During :meth:`transform`, features corresponding to `np.nan`
statistics will be discarded.
indicator_ : :class:`~sklearn.impute.MissingIndicator`
Indicator used to add binary indicators for missing values.
``None`` if add_indicator is False.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
See Also
--------
IterativeImputer : Multivariate imputation of missing values.
Examples
--------
>>> import numpy as np
>>> from sklearn.impute import SimpleImputer
>>> imp_mean = SimpleImputer(missing_values=np.nan, strategy='mean')
>>> imp_mean.fit([[7, 2, 3], [4, np.nan, 6], [10, 5, 9]])
SimpleImputer()
>>> X = [[np.nan, 2, 3], [4, np.nan, 6], [10, np.nan, 9]]
>>> print(imp_mean.transform(X))
[[ 7. 2. 3. ]
[ 4. 3.5 6. ]
[10. 3.5 9. ]]
Notes
-----
Columns which only contained missing values at :meth:`fit` are discarded
upon :meth:`transform` if strategy is not "constant".
"""
def __init__(self, *, missing_values=np.nan, strategy="mean",
fill_value=None, verbose=0, copy=True, add_indicator=False):
super().__init__(
missing_values=missing_values,
add_indicator=add_indicator
)
self.strategy = strategy
self.fill_value = fill_value
self.verbose = verbose
self.copy = copy
def _validate_input(self, X, in_fit):
allowed_strategies = ["mean", "median", "most_frequent", "constant"]
if self.strategy not in allowed_strategies:
raise ValueError("Can only use these strategies: {0} "
" got strategy={1}".format(allowed_strategies,
self.strategy))
if self.strategy in ("most_frequent", "constant"):
# If input is a list of strings, dtype = object.
# Otherwise ValueError is raised in SimpleImputer
# with strategy='most_frequent' or 'constant'
# because the list is converted to Unicode numpy array
if isinstance(X, list) and \
any(isinstance(elem, str) for row in X for elem in row):
dtype = object
else:
dtype = None
else:
dtype = FLOAT_DTYPES
if not is_scalar_nan(self.missing_values):
force_all_finite = True
else:
force_all_finite = "allow-nan"
try:
X = self._validate_data(X, reset=in_fit,
accept_sparse='csc', dtype=dtype,
force_all_finite=force_all_finite,
copy=self.copy)
except ValueError as ve:
if "could not convert" in str(ve):
new_ve = ValueError("Cannot use {} strategy with non-numeric "
"data:\n{}".format(self.strategy, ve))
raise new_ve from None
else:
raise ve
_check_inputs_dtype(X, self.missing_values)
if X.dtype.kind not in ("i", "u", "f", "O"):
raise ValueError("SimpleImputer does not support data with dtype "
"{0}. Please provide either a numeric array (with"
" a floating point or integer dtype) or "
"categorical data represented either as an array "
"with integer dtype or an array of string values "
"with an object dtype.".format(X.dtype))
return X
def fit(self, X, y=None):
"""Fit the imputer on X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Input data, where ``n_samples`` is the number of samples and
``n_features`` is the number of features.
Returns
-------
self : SimpleImputer
"""
X = self._validate_input(X, in_fit=True)
# default fill_value is 0 for numerical input and "missing_value"
# otherwise
if self.fill_value is None:
if X.dtype.kind in ("i", "u", "f"):
fill_value = 0
else:
fill_value = "missing_value"
else:
fill_value = self.fill_value
# fill_value should be numerical in case of numerical input
if (self.strategy == "constant" and
X.dtype.kind in ("i", "u", "f") and
not isinstance(fill_value, numbers.Real)):
raise ValueError("'fill_value'={0} is invalid. Expected a "
"numerical value when imputing numerical "
"data".format(fill_value))
if sp.issparse(X):
# missing_values = 0 not allowed with sparse data as it would
# force densification
if self.missing_values == 0:
raise ValueError("Imputation not possible when missing_values "
"== 0 and input is sparse. Provide a dense "
"array instead.")
else:
self.statistics_ = self._sparse_fit(X,
self.strategy,
self.missing_values,
fill_value)
else:
self.statistics_ = self._dense_fit(X,
self.strategy,
self.missing_values,
fill_value)
return self
def _sparse_fit(self, X, strategy, missing_values, fill_value):
"""Fit the transformer on sparse data."""
missing_mask = _get_mask(X, missing_values)
mask_data = missing_mask.data
n_implicit_zeros = X.shape[0] - np.diff(X.indptr)
statistics = np.empty(X.shape[1])
if strategy == "constant":
            # for constant strategy, self.statistics_ is used to store
# fill_value in each column
statistics.fill(fill_value)
else:
for i in range(X.shape[1]):
column = X.data[X.indptr[i]:X.indptr[i + 1]]
mask_column = mask_data[X.indptr[i]:X.indptr[i + 1]]
column = column[~mask_column]
# combine explicit and implicit zeros
mask_zeros = _get_mask(column, 0)
column = column[~mask_zeros]
n_explicit_zeros = mask_zeros.sum()
n_zeros = n_implicit_zeros[i] + n_explicit_zeros
if strategy == "mean":
s = column.size + n_zeros
statistics[i] = np.nan if s == 0 else column.sum() / s
elif strategy == "median":
statistics[i] = _get_median(column,
n_zeros)
elif strategy == "most_frequent":
statistics[i] = _most_frequent(column,
0,
n_zeros)
super()._fit_indicator(missing_mask)
return statistics
def _dense_fit(self, X, strategy, missing_values, fill_value):
"""Fit the transformer on dense data."""
missing_mask = _get_mask(X, missing_values)
masked_X = ma.masked_array(X, mask=missing_mask)
super()._fit_indicator(missing_mask)
# Mean
if strategy == "mean":
mean_masked = np.ma.mean(masked_X, axis=0)
# Avoid the warning "Warning: converting a masked element to nan."
mean = np.ma.getdata(mean_masked)
mean[np.ma.getmask(mean_masked)] = np.nan
return mean
# Median
elif strategy == "median":
median_masked = np.ma.median(masked_X, axis=0)
# Avoid the warning "Warning: converting a masked element to nan."
median = np.ma.getdata(median_masked)
median[np.ma.getmaskarray(median_masked)] = np.nan
return median
# Most frequent
elif strategy == "most_frequent":
# Avoid use of scipy.stats.mstats.mode due to the required
# additional overhead and slow benchmarking performance.
# See Issue 14325 and PR 14399 for full discussion.
# To be able access the elements by columns
X = X.transpose()
mask = missing_mask.transpose()
if X.dtype.kind == "O":
most_frequent = np.empty(X.shape[0], dtype=object)
else:
most_frequent = np.empty(X.shape[0])
for i, (row, row_mask) in enumerate(zip(X[:], mask[:])):
row_mask = np.logical_not(row_mask).astype(bool)
row = row[row_mask]
most_frequent[i] = _most_frequent(row, np.nan, 0)
return most_frequent
# Constant
elif strategy == "constant":
            # for constant strategy, self.statistics_ is used to store
# fill_value in each column
return np.full(X.shape[1], fill_value, dtype=X.dtype)
def transform(self, X):
"""Impute all missing values in X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data to complete.
Returns
-------
X_imputed : {ndarray, sparse matrix} of shape \
(n_samples, n_features_out)
`X` with imputed values.
"""
check_is_fitted(self)
X = self._validate_input(X, in_fit=False)
statistics = self.statistics_
if X.shape[1] != statistics.shape[0]:
raise ValueError("X has %d features per sample, expected %d"
% (X.shape[1], self.statistics_.shape[0]))
# compute mask before eliminating invalid features
missing_mask = _get_mask(X, self.missing_values)
# Delete the invalid columns if strategy is not constant
if self.strategy == "constant":
valid_statistics = statistics
valid_statistics_indexes = None
else:
# same as np.isnan but also works for object dtypes
invalid_mask = _get_mask(statistics, np.nan)
valid_mask = np.logical_not(invalid_mask)
valid_statistics = statistics[valid_mask]
valid_statistics_indexes = np.flatnonzero(valid_mask)
if invalid_mask.any():
missing = np.arange(X.shape[1])[invalid_mask]
if self.verbose:
warnings.warn("Deleting features without "
"observed values: %s" % missing)
X = X[:, valid_statistics_indexes]
# Do actual imputation
if sp.issparse(X):
if self.missing_values == 0:
raise ValueError("Imputation not possible when missing_values "
"== 0 and input is sparse. Provide a dense "
"array instead.")
else:
# if no invalid statistics are found, use the mask computed
# before, else recompute mask
if valid_statistics_indexes is None:
mask = missing_mask.data
else:
mask = _get_mask(X.data, self.missing_values)
indexes = np.repeat(
np.arange(len(X.indptr) - 1, dtype=int),
np.diff(X.indptr))[mask]
X.data[mask] = valid_statistics[indexes].astype(X.dtype,
copy=False)
else:
# use mask computed before eliminating invalid mask
if valid_statistics_indexes is None:
mask_valid_features = missing_mask
else:
mask_valid_features = missing_mask[:, valid_statistics_indexes]
n_missing = np.sum(mask_valid_features, axis=0)
values = np.repeat(valid_statistics, n_missing)
coordinates = np.where(mask_valid_features.transpose())[::-1]
X[coordinates] = values
X_indicator = super()._transform_indicator(missing_mask)
return super()._concatenate_indicator(X, X_indicator)
def inverse_transform(self, X):
"""Convert the data back to the original representation.
Inverts the `transform` operation performed on an array.
This operation can only be performed after :class:`SimpleImputer` is
instantiated with `add_indicator=True`.
Note that ``inverse_transform`` can only invert the transform in
features that have binary indicators for missing values. If a feature
has no missing values at ``fit`` time, the feature won't have a binary
indicator, and the imputation done at ``transform`` time won't be
inverted.
.. versionadded:: 0.24
Parameters
----------
X : array-like of shape \
(n_samples, n_features + n_features_missing_indicator)
The imputed data to be reverted to original data. It has to be
an augmented array of imputed data and the missing indicator mask.
Returns
-------
X_original : ndarray of shape (n_samples, n_features)
The original X with missing values as it was prior
to imputation.
"""
check_is_fitted(self)
if not self.add_indicator:
raise ValueError("'inverse_transform' works only when "
"'SimpleImputer' is instantiated with "
"'add_indicator=True'. "
f"Got 'add_indicator={self.add_indicator}' "
"instead.")
n_features_missing = len(self.indicator_.features_)
non_empty_feature_count = X.shape[1] - n_features_missing
array_imputed = X[:, :non_empty_feature_count].copy()
missing_mask = X[:, non_empty_feature_count:].astype(bool)
n_features_original = len(self.statistics_)
shape_original = (X.shape[0], n_features_original)
X_original = np.zeros(shape_original)
X_original[:, self.indicator_.features_] = missing_mask
full_mask = X_original.astype(bool)
imputed_idx, original_idx = 0, 0
while imputed_idx < len(array_imputed.T):
if not np.all(X_original[:, original_idx]):
X_original[:, original_idx] = array_imputed.T[imputed_idx]
imputed_idx += 1
original_idx += 1
else:
original_idx += 1
X_original[full_mask] = self.missing_values
return X_original
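    # Illustrative round trip (not part of the original source):
    #   imp = SimpleImputer(add_indicator=True)
    #   Xt = imp.fit_transform([[1, np.nan], [2, 3]])  # -> [[1., 3., 1.], [2., 3., 0.]]
    #   imp.inverse_transform(Xt)                      # -> [[1., nan], [2., 3.]]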
class MissingIndicator(TransformerMixin, BaseEstimator):
"""Binary indicators for missing values.
Note that this component typically should not be used in a vanilla
:class:`Pipeline` consisting of transformers and a classifier, but rather
could be added using a :class:`FeatureUnion` or :class:`ColumnTransformer`.
Read more in the :ref:`User Guide <impute>`.
.. versionadded:: 0.20
Parameters
----------
missing_values : int, float, string, np.nan or None, default=np.nan
The placeholder for the missing values. All occurrences of
`missing_values` will be imputed. For pandas' dataframes with
nullable integer dtypes with missing values, `missing_values`
should be set to `np.nan`, since `pd.NA` will be converted to `np.nan`.
features : {'missing-only', 'all'}, default='missing-only'
Whether the imputer mask should represent all or a subset of
features.
- If 'missing-only' (default), the imputer mask will only represent
features containing missing values during fit time.
- If 'all', the imputer mask will represent all features.
sparse : bool or 'auto', default='auto'
Whether the imputer mask format should be sparse or dense.
- If 'auto' (default), the imputer mask will be of same type as
input.
- If True, the imputer mask will be a sparse matrix.
- If False, the imputer mask will be a numpy array.
error_on_new : bool, default=True
If True, transform will raise an error when there are features with
missing values in transform that have no missing values in fit. This is
applicable only when `features='missing-only'`.
Attributes
----------
features_ : ndarray, shape (n_missing_features,) or (n_features,)
The features indices which will be returned when calling ``transform``.
        They are computed during ``fit``. For ``features='all'``, it is
        equal to ``range(n_features)``.
n_features_in_ : int
Number of features seen during :term:`fit`.
.. versionadded:: 0.24
Examples
--------
>>> import numpy as np
>>> from sklearn.impute import MissingIndicator
>>> X1 = np.array([[np.nan, 1, 3],
... [4, 0, np.nan],
... [8, 1, 0]])
>>> X2 = np.array([[5, 1, np.nan],
... [np.nan, 2, 3],
... [2, 4, 0]])
>>> indicator = MissingIndicator()
>>> indicator.fit(X1)
MissingIndicator()
>>> X2_tr = indicator.transform(X2)
>>> X2_tr
array([[False, True],
[ True, False],
[False, False]])
"""
def __init__(self, *, missing_values=np.nan, features="missing-only",
sparse="auto", error_on_new=True):
self.missing_values = missing_values
self.features = features
self.sparse = sparse
self.error_on_new = error_on_new
def _get_missing_features_info(self, X):
"""Compute the imputer mask and the indices of the features
containing missing values.
Parameters
----------
X : {ndarray or sparse matrix}, shape (n_samples, n_features)
The input data with missing values. Note that ``X`` has been
checked in ``fit`` and ``transform`` before to call this function.
Returns
-------
imputer_mask : {ndarray or sparse matrix}, shape \
(n_samples, n_features)
The imputer mask of the original data.
features_with_missing : ndarray, shape (n_features_with_missing)
The features containing missing values.
"""
if not self._precomputed:
imputer_mask = _get_mask(X, self.missing_values)
else:
imputer_mask = X
if sp.issparse(X):
imputer_mask.eliminate_zeros()
if self.features == 'missing-only':
n_missing = imputer_mask.getnnz(axis=0)
if self.sparse is False:
imputer_mask = imputer_mask.toarray()
elif imputer_mask.format == 'csr':
imputer_mask = imputer_mask.tocsc()
else:
if not self._precomputed:
imputer_mask = _get_mask(X, self.missing_values)
else:
imputer_mask = X
if self.features == 'missing-only':
n_missing = imputer_mask.sum(axis=0)
if self.sparse is True:
imputer_mask = sp.csc_matrix(imputer_mask)
if self.features == 'all':
features_indices = np.arange(X.shape[1])
else:
features_indices = np.flatnonzero(n_missing)
return imputer_mask, features_indices
def _validate_input(self, X, in_fit):
if not is_scalar_nan(self.missing_values):
force_all_finite = True
else:
force_all_finite = "allow-nan"
X = self._validate_data(X, reset=in_fit,
accept_sparse=('csc', 'csr'), dtype=None,
force_all_finite=force_all_finite)
_check_inputs_dtype(X, self.missing_values)
if X.dtype.kind not in ("i", "u", "f", "O"):
raise ValueError("MissingIndicator does not support data with "
"dtype {0}. Please provide either a numeric array"
" (with a floating point or integer dtype) or "
"categorical data represented either as an array "
"with integer dtype or an array of string values "
"with an object dtype.".format(X.dtype))
if sp.issparse(X) and self.missing_values == 0:
# missing_values = 0 not allowed with sparse data as it would
# force densification
raise ValueError("Sparse input with missing_values=0 is "
"not supported. Provide a dense "
"array instead.")
return X
def _fit(self, X, y=None, precomputed=False):
"""Fit the transformer on X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Input data, where ``n_samples`` is the number of samples and
``n_features`` is the number of features.
If `precomputed` is True, then `X` is a mask of the
input data.
precomputed : bool
Whether the input data is a mask.
Returns
-------
imputer_mask : {ndarray or sparse matrix}, shape (n_samples, \
n_features)
The imputer mask of the original data.
"""
if precomputed:
if not (hasattr(X, 'dtype') and X.dtype.kind == 'b'):
raise ValueError("precomputed is True but the input data is "
"not a mask")
self._precomputed = True
else:
self._precomputed = False
# Need not validate X again as it would have already been validated
# in the Imputer calling MissingIndicator
if not self._precomputed:
X = self._validate_input(X, in_fit=True)
self._n_features = X.shape[1]
if self.features not in ('missing-only', 'all'):
raise ValueError("'features' has to be either 'missing-only' or "
"'all'. Got {} instead.".format(self.features))
if not ((isinstance(self.sparse, str) and
self.sparse == "auto") or isinstance(self.sparse, bool)):
raise ValueError("'sparse' has to be a boolean or 'auto'. "
"Got {!r} instead.".format(self.sparse))
missing_features_info = self._get_missing_features_info(X)
self.features_ = missing_features_info[1]
return missing_features_info[0]
def fit(self, X, y=None):
"""Fit the transformer on X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Input data, where ``n_samples`` is the number of samples and
``n_features`` is the number of features.
Returns
-------
self : object
Returns self.
"""
self._fit(X, y)
return self
def transform(self, X):
"""Generate missing values indicator for X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data to complete.
Returns
-------
Xt : {ndarray or sparse matrix}, shape (n_samples, n_features) \
or (n_samples, n_features_with_missing)
The missing indicator for input data. The data type of ``Xt``
will be boolean.
"""
check_is_fitted(self)
# Need not validate X again as it would have already been validated
# in the Imputer calling MissingIndicator
if not self._precomputed:
X = self._validate_input(X, in_fit=False)
else:
if not (hasattr(X, 'dtype') and X.dtype.kind == 'b'):
raise ValueError("precomputed is True but the input data is "
"not a mask")
imputer_mask, features = self._get_missing_features_info(X)
if self.features == "missing-only":
features_diff_fit_trans = np.setdiff1d(features, self.features_)
if (self.error_on_new and features_diff_fit_trans.size > 0):
raise ValueError("The features {} have missing values "
"in transform but have no missing values "
"in fit.".format(features_diff_fit_trans))
if self.features_.size < self._n_features:
imputer_mask = imputer_mask[:, self.features_]
return imputer_mask
def fit_transform(self, X, y=None):
"""Generate missing values indicator for X.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
The input data to complete.
Returns
-------
Xt : {ndarray or sparse matrix}, shape (n_samples, n_features) \
or (n_samples, n_features_with_missing)
The missing indicator for input data. The data type of ``Xt``
will be boolean.
"""
imputer_mask = self._fit(X, y)
if self.features_.size < self._n_features:
imputer_mask = imputer_mask[:, self.features_]
return imputer_mask
def _more_tags(self):
return {
"allow_nan": True,
"X_types": ["2darray", "string"],
"preserves_dtype": [],
}
| bsd-3-clause | 1,939,547,547,607,645,400 | 37.264808 | 79 | 0.556031 | false | 4.374718 | false | false | false |
btrent/knave | pychess/ic/__init__.py | 1 | 7376 | from pychess import Variants
from pychess.Utils.const import *
# RatingType
TYPE_BLITZ, TYPE_STANDARD, TYPE_LIGHTNING, TYPE_WILD, \
TYPE_BUGHOUSE, TYPE_CRAZYHOUSE, TYPE_SUICIDE, TYPE_LOSERS, TYPE_ATOMIC, \
TYPE_UNTIMED, TYPE_EXAMINED, TYPE_OTHER = range(12)
class GameType (object):
def __init__ (self, fics_name, short_fics_name, rating_type,
display_text=None, variant_type=NORMALCHESS):
self.fics_name = fics_name
self.short_fics_name = short_fics_name
self.rating_type = rating_type
if display_text:
self.display_text=display_text
self.variant_type = variant_type
@property
def variant (self):
return Variants.variants[self.variant_type]
def __repr__ (self):
s = "<GameType "
s += "fics_name='%s', " % self.fics_name
s += "short_fics_name='%s', " % self.short_fics_name
s += "rating_type=%d, " % self.rating_type
s += "variant_type=%d, " % self.variant_type
s += "display_text='%s'>" % self.display_text
return s
class NormalGameType (GameType):
def __init__ (self, fics_name, short_fics_name, rating_type, display_text):
GameType.__init__(self, fics_name, short_fics_name, rating_type,
display_text=display_text)
class VariantGameType (GameType):
def __init__ (self, fics_name, short_fics_name, rating_type, variant_type):
GameType.__init__(self, fics_name, short_fics_name, rating_type,
variant_type=variant_type)
@property
def display_text (self):
        assert self.variant_type is not None
return Variants.variants[self.variant_type].name
@property
def seek_text (self):
return self.fics_name.replace("/", " ")
class WildGameType (VariantGameType):
_instances = []
def __init__ (self, fics_name, variant_type):
VariantGameType.__init__(self, fics_name, "w", TYPE_WILD,
variant_type=variant_type)
WildGameType._instances.append(self)
@classmethod
def instances (cls):
return cls._instances
GAME_TYPES = {
"blitz": NormalGameType("blitz", "b", TYPE_BLITZ, _("Blitz")),
"standard": NormalGameType("standard", "s", TYPE_STANDARD, _("Standard")),
"lightning": NormalGameType("lightning", "l", TYPE_LIGHTNING, _("Lightning")),
"untimed": NormalGameType("untimed", "u", TYPE_UNTIMED, _("Untimed")),
"examined": NormalGameType("examined", "e", TYPE_EXAMINED, _("Examined")),
"nonstandard": NormalGameType("nonstandard", "n", TYPE_OTHER, _("Other")),
"atomic": VariantGameType("atomic", "x", TYPE_ATOMIC, ATOMICCHESS),
"bughouse": VariantGameType("bughouse", "B", TYPE_BUGHOUSE, BUGHOUSECHESS),
"crazyhouse": VariantGameType("crazyhouse", "z", TYPE_CRAZYHOUSE, CRAZYHOUSECHESS),
"losers": VariantGameType("losers", "L", TYPE_LOSERS, LOSERSCHESS),
"suicide": VariantGameType("suicide", "S", TYPE_SUICIDE, SUICIDECHESS),
"wild/fr": WildGameType("wild/fr", FISCHERRANDOMCHESS),
"wild/0": WildGameType("wild/0", WILDCASTLECHESS),
"wild/1": WildGameType("wild/1", WILDCASTLESHUFFLECHESS),
"wild/2": WildGameType("wild/2", SHUFFLECHESS),
"wild/3": WildGameType("wild/3", RANDOMCHESS),
"wild/4": WildGameType("wild/4", ASYMMETRICRANDOMCHESS),
"wild/5": WildGameType("wild/5", UPSIDEDOWNCHESS),
"wild/8": WildGameType("wild/8", PAWNSPUSHEDCHESS),
"wild/8a": WildGameType("wild/8a", PAWNSPASSEDCHESS)
}
VARIANT_GAME_TYPES = {}
for key in GAME_TYPES:
if isinstance(GAME_TYPES[key], VariantGameType):
VARIANT_GAME_TYPES[GAME_TYPES[key].variant_type] = GAME_TYPES[key]
# The following 3 GAME_TYPES_* data structures don't have any real entries
# for the WildGameType's in GAME_TYPES above, and instead use
# a dummy type for the all-encompassing "Wild" FICS rating for wild/* games
GAME_TYPES_BY_SHORT_FICS_NAME = {
"w": GameType("wild", "w", TYPE_WILD, display_text=_("Wild"))
}
for key in GAME_TYPES:
if not isinstance(GAME_TYPES[key], WildGameType):
GAME_TYPES_BY_SHORT_FICS_NAME[GAME_TYPES[key].short_fics_name] = \
GAME_TYPES[key]
GAME_TYPES_BY_RATING_TYPE = {}
for key in GAME_TYPES_BY_SHORT_FICS_NAME:
GAME_TYPES_BY_RATING_TYPE[GAME_TYPES_BY_SHORT_FICS_NAME[key].rating_type] = \
GAME_TYPES_BY_SHORT_FICS_NAME[key]
GAME_TYPES_BY_FICS_NAME = {}
for key in GAME_TYPES_BY_SHORT_FICS_NAME:
GAME_TYPES_BY_FICS_NAME[GAME_TYPES_BY_SHORT_FICS_NAME[key].fics_name] = \
GAME_TYPES_BY_SHORT_FICS_NAME[key]
def type_to_display_text (typename):
if "loaded from" in typename.lower():
typename = typename.split()[-1]
if typename in GAME_TYPES:
return GAME_TYPES[typename].display_text
# Default solution for eco/A00 and a few others
elif "/" in typename:
a, b = typename.split("/")
a = a[0].upper() + a[1:]
b = b[0].upper() + b[1:]
return a + " " + b
else:
# Otherwise forget about it
return typename[0].upper() + typename[1:]
def time_control_to_gametype (minutes, gain):
assert type(minutes) == int and type(gain) == int
assert minutes >= 0 and gain >= 0
    gainminutes = (gain * 60) - 1 if gain > 0 else 0
    if minutes == 0:
return GAME_TYPES["untimed"]
elif (minutes*60) + gainminutes >= (15*60):
return GAME_TYPES["standard"]
elif (minutes*60) + gainminutes >= (3*60):
return GAME_TYPES["blitz"]
else:
return GAME_TYPES["lightning"]
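# Illustrative mappings of the thresholds above (not in the original source):
# time_control_to_gametype(15, 0) -> standard, (3, 0) -> blitz,
# (2, 0) -> lightning, (0, 0) -> untimed; an increment of `gain` seconds
# contributes (gain*60)-1 seconds to the nominal duration used for the cutoffs.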
TYPE_ADMINISTRATOR, TYPE_BLINDFOLD, TYPE_COMPUTER, \
TYPE_TEAM, TYPE_UNREGISTERED, TYPE_CHESS_ADVISOR, \
TYPE_SERVICE_REPRESENTATIVE, TYPE_TOURNAMENT_DIRECTOR, TYPE_MAMER_MANAGER, \
TYPE_GRAND_MASTER, TYPE_INTERNATIONAL_MASTER, TYPE_FIDE_MASTER, \
TYPE_WOMAN_GRAND_MASTER, TYPE_WOMAN_INTERNATIONAL_MASTER, TYPE_WOMAN_FIDE_MASTER,\
TYPE_DUMMY_ACCOUNT = range(16)
TITLE_TYPE_DISPLAY_TEXTS = (
_("Administrator"), _("Blindfold Account"), _("Computer"),
_("Team Account"), _("Unregistered"), _("Chess Advisor"),
_("Service Representative"), _("Tournament Director"), _("Mamer Manager"),
_("Grand Master"), _("International Master"), _("FIDE Master"),
_("Woman Grand Master"), _("Woman International Master"), _("Woman FIDE Master"),
_("Dummy Account"),
)
TITLE_TYPE_DISPLAY_TEXTS_SHORT = (
_("*"), _("B"), _("C"),
_("T"), _("U"), _("CA"),
_("SR"), _("TD"), _("TM"),
_("GM"), _("IM"), _("FM"),
_("WGM"), _("WIM"), _("WFM"), _("D")
)
TITLES = { # From FICS 'help who'
"*": TYPE_ADMINISTRATOR,
"B": TYPE_BLINDFOLD,
"C": TYPE_COMPUTER,
"T": TYPE_TEAM,
"U": TYPE_UNREGISTERED,
"CA": TYPE_CHESS_ADVISOR,
"SR": TYPE_SERVICE_REPRESENTATIVE,
"TD": TYPE_TOURNAMENT_DIRECTOR,
"TM": TYPE_MAMER_MANAGER,
"GM": TYPE_GRAND_MASTER,
"IM": TYPE_INTERNATIONAL_MASTER,
"FM": TYPE_FIDE_MASTER,
"WFM": TYPE_WOMAN_FIDE_MASTER,
"WIM": TYPE_WOMAN_INTERNATIONAL_MASTER,
"WGM": TYPE_WOMAN_GRAND_MASTER,
"D": TYPE_DUMMY_ACCOUNT,
}
HEX_TO_TITLE = {
0x1 : TYPE_UNREGISTERED,
0x2 : TYPE_COMPUTER,
0x4 : TYPE_GRAND_MASTER,
0x8 : TYPE_INTERNATIONAL_MASTER,
0x10 : TYPE_FIDE_MASTER,
0x20 : TYPE_WOMAN_GRAND_MASTER,
0x40 : TYPE_WOMAN_INTERNATIONAL_MASTER,
0x80 : TYPE_WOMAN_FIDE_MASTER,
}
| gpl-3.0 | 7,960,760,588,244,372,000 | 38.655914 | 87 | 0.627847 | false | 2.953945 | false | false | false |
harej/wikiproject_scripts | unported/project_category_audit.py | 2 | 3608 | # -*- coding: utf-8 -*-
"""
Audits WikiProjects for inconsistencies between their project pages and their categories
Copyright (C) 2015 James Hare
Licensed under MIT License: http://mitlicense.org
"""
import pywikibot
from project_index import WikiProjectTools
class ProjectCategoryAudit:
def go(self):
wptools = WikiProjectTools()
# Get list of WikiProjects that also have a self-named category
output = 'This report highlights discrepancies in WikiProject categorization between WikiProjects and their self-named categories.\n\n'
query = 'select page_title from page left join redirect on page.page_id = redirect.rd_from where page_title like "WikiProject\_%" and page_namespace = 4 and page_title in (select page_title from page where page_title like "WikiProject\_%" and page_namespace = 14) and rd_title is null;'
for row in wptools.query('wiki', query, None):
project = row[0].decode('utf-8')
cl_projectspace = [] # read as "category links, Wikipedia namespace"
cl_categoryspace = [] # read as "category links, Category namespace"
for match in wptools.query('wiki', 'select cl_to from categorylinks join page on categorylinks.cl_from=page.page_id where page_namespace = 4 and page_title = "{0}" and cl_to like "%\_WikiProjects" and cl_to not like "Active\_%" and cl_to not like "Semi-active\_%" and cl_to not like "Inactive\_%" and cl_to not like "Defunct\_%";'.format(project), None):
cl_projectspace.append(match[0].decode('utf-8').replace('_', ' '))
for match in wptools.query('wiki', 'select cl_to from categorylinks join page on categorylinks.cl_from=page.page_id where page_namespace = 14 and page_title = "{0}" and cl_to like "%\_WikiProjects" and cl_to not like "Active\_%" and cl_to not like "Semi-active\_%" and cl_to not like "Inactive\_%" and cl_to not like "Defunct\_%";'.format(project), None):
cl_categoryspace.append(match[0].decode('utf-8').replace('_', ' '))
cl_projectspace.sort()
cl_categoryspace.sort()
if cl_projectspace == cl_categoryspace:
continue # Don't bother generating a report if both category lists match perfectly
both = list(set(cl_projectspace).intersection(cl_categoryspace))
project = project.replace('_', ' ')
output += "* '''{0}'''\n".format(project)
output += "** [[Wikipedia:{0}]]: ".format(project)
            for entry in cl_projectspace:
                if entry in both:
                    output += "<span style='color: #999'>{0}</span> – ".format(entry)
                else:
                    output += "<span style='color: #FF0000'>{0}</span> – ".format(entry)
            output = output[:-len(" – ")] + "\n"  # truncate the trailing endash separator and add a line break
            output += "** [[:Category:{0}]]: ".format(project)
            for entry in cl_categoryspace:
                if entry in both:
                    output += "<span style='color: #999'>{0}</span> – ".format(entry)
                else:
                    output += "<span style='color: #FF0000'>{0}</span> – ".format(entry)
            output = output[:-len(" – ")] + "\n"  # truncate the trailing endash separator and add a line break
return output
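    # Illustrative report fragment (made-up project name, not from the source):
    #   * '''WikiProject Example'''
    #   ** [[Wikipedia:WikiProject Example]]: <span style='color: #999'>Culture WikiProjects</span>
    #   ** [[:Category:WikiProject Example]]: <span style='color: #FF0000'>Arts WikiProjects</span>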
if __name__ == "__main__":
audit = ProjectCategoryAudit()
report = audit.go()
bot = pywikibot.Site('en', 'wikipedia')
page = pywikibot.Page(bot, 'User:Reports bot/WikiProject category audit')
page.text = report
page.save('Updating report', minor=False, quiet=True)
| mit | 8,073,987,688,695,981,000 | 49 | 367 | 0.617222 | false | 3.801478 | false | false | false |
ykoga-kyutech/Brain_Hacker | handlers/group_handler.py | 2 | 2801 | import tornado.web
from handlers.base_handler import BaseHandler
from models.group import Group
from models.user import User
from .util import check_group_permission
from forms.forms import GroupForm
class GroupsHandler(BaseHandler):
@tornado.web.authenticated
def get(self):
group_name = self.get_argument('group_name', '')
groups = Group.search_name(group_name)
self.render('group/groups.html', groups=groups)
@tornado.web.authenticated
def post(self):
form = GroupForm(self.request.arguments)
if form.validate():
group = Group(**form.data)
user_id = self.get_current_user_id()
user = User.get(user_id)
user.groups.append(group)
group.save()
self.redirect(self.reverse_url('group', group.id))
else:
            self.redirect(self.reverse_url('groups'))  # TODO: pass an error message to the template
class GroupHandler(BaseHandler):
@check_group_permission
@tornado.web.authenticated
def get(self, group_id):
group = Group.get(group_id)
self.render('group/group.html', group=group)
class GroupEditHandler(BaseHandler):
@check_group_permission
@tornado.web.authenticated
def post(self, group_id):
form = GroupForm(self.request.arguments)
if form.validate():
group = Group.get(group_id)
group.update(**form.data)
group.save()
self.redirect(self.reverse_url('group', group_id))
else:
self.redirect(self.reverse_url('group', group_id))
class GroupDeleteHandler(BaseHandler):
@check_group_permission
@tornado.web.authenticated
def post(self, group_id):
Group.get(group_id).delete()
self.redirect(self.reverse_url('groups'))
class GroupMemberAdditionHandler(BaseHandler):
@check_group_permission
@tornado.web.authenticated
def get(self, group_id):
user_name = self.get_argument('user_name', '')
users = User.search_name(user_name)
users = [user for user in users if not user.belongs_to_group(group_id)]
group = Group.get(group_id)
self.render('group/member_addition.html', users=users, group=group)
@check_group_permission
@tornado.web.authenticated
def post(self, group_id):
user_id = self.get_argument('user_id', '')
user = User.get(user_id)
group = Group.get(group_id)
user.groups.append(group)
user.save()
self.redirect(self.reverse_url('member_addition', group_id))
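# A minimal sketch (hypothetical, not from this file) of the URL spec these
# handlers assume, matching the reverse_url names used above:
#
#   tornado.web.URLSpec(r"/groups", GroupsHandler, name="groups"),
#   tornado.web.URLSpec(r"/groups/(\d+)", GroupHandler, name="group"),
#   tornado.web.URLSpec(r"/groups/(\d+)/member_addition",
#                       GroupMemberAdditionHandler, name="member_addition"),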
class GroupUserHandler(BaseHandler):
@check_group_permission
@tornado.web.authenticated
def get(self, group_id):
group = Group.get(group_id)
self.render('group/group_users.html', group=group) | mit | 6,915,205,663,668,687,000 | 29.217391 | 79 | 0.643397 | false | 3.666227 | false | false | false |
noba3/KoTos | addons/plugin.video.movie25/resources/libs/sports/tsn.py | 1 | 4564 | import urllib,urllib2,re,cookielib,string, urlparse,sys,os
import xbmc, xbmcgui, xbmcaddon, xbmcplugin,urlresolver
from resources.libs import main
#Mash Up - by Mash2k3 2012.
from t0mm0.common.addon import Addon
from resources.universal import playbackengine, watchhistory
addon_id = 'plugin.video.movie25'
selfAddon = xbmcaddon.Addon(id=addon_id)
addon = Addon('plugin.video.movie25', sys.argv)
art = main.art
wh = watchhistory.WatchHistory('plugin.video.movie25')
def TSNDIR():
main.addDir('Featured','http://m.tsn.ca/home?p_p_id=feed_WAR_xlmagic_INSTANCE_C4iW&p_p_lifecycle=2&p_p_state=normal&p_p_mode=view&p_p_resource_id=getPage&p_p_cacheability=cacheLevelPage&p_p_col_id=column-1&p_p_col_pos=6&p_p_col_count=9&_feed_WAR_xlmagic_INSTANCE_C4iW_page=0&_feed_WAR_xlmagic_INSTANCE_C4iW_portrait=false',97,art+'/tsn.png')
main.addDir('NHL','http://m.tsn.ca/nhl?p_p_id=feed_WAR_xlmagic_INSTANCE_75Sw&p_p_lifecycle=2&p_p_state=normal&p_p_mode=view&p_p_resource_id=getPage&p_p_cacheability=cacheLevelPage&p_p_col_id=column-1&p_p_col_pos=2&p_p_col_count=3&_feed_WAR_xlmagic_INSTANCE_75Sw_page=0&_feed_WAR_xlmagic_INSTANCE_75Sw_portrait=false',97,art+'/tsn.png')
main.addDir('NFL','http://m.tsn.ca/nfl?p_p_id=feed_WAR_xlmagic_INSTANCE_u0tU&p_p_lifecycle=2&p_p_state=normal&p_p_mode=view&p_p_resource_id=getPage&p_p_cacheability=cacheLevelPage&p_p_col_id=column-1&p_p_col_pos=2&p_p_col_count=3&_feed_WAR_xlmagic_INSTANCE_u0tU_page=0&_feed_WAR_xlmagic_INSTANCE_u0tU_portrait=false',97,art+'/tsn.png')
#main.addDir('NBA','nba',97,art+'/tsn.png')
main.addDir('CFL','http://m.tsn.ca/cfl?p_p_id=feed_WAR_xlmagic_INSTANCE_8WBz&p_p_lifecycle=2&p_p_state=normal&p_p_mode=view&p_p_resource_id=getPage&p_p_cacheability=cacheLevelPage&p_p_col_id=column-1&p_p_col_pos=2&p_p_col_count=3&_feed_WAR_xlmagic_INSTANCE_8WBz_page=0&_feed_WAR_xlmagic_INSTANCE_8WBz_portrait=false',97,art+'/tsn.png')
main.addDir('MLB','http://m.tsn.ca/mlb?p_p_id=feed_WAR_xlmagic_INSTANCE_5wRo&p_p_lifecycle=2&p_p_state=normal&p_p_mode=view&p_p_resource_id=getPage&p_p_cacheability=cacheLevelPage&p_p_col_id=column-1&p_p_col_pos=2&p_p_col_count=3&_feed_WAR_xlmagic_INSTANCE_5wRo_page=0&_feed_WAR_xlmagic_INSTANCE_5wRo_portrait=false',97,art+'/tsn.png')
main.GA("Sports","TSN")
def TSNLIST(murl):
main.GA("TSN","TSN-list")
link=main.OPENURL(murl)
link=link.replace('\r','').replace('\n','').replace('\t','').replace(' ','').replace(' ','')
match=re.compile('''href="([^"]+)"><span class="tileText"><span class="overlay"><img src="([^"]+)" style=.+?/><img class="videoOverlay" src=".+?" /></span><span class=".+?" style=".+?">([^<]+)</span></span>''').findall(link)
for url,thumb,name in match:
url=main.REDIRECT(url)
main.addPlayMs(name,url,98,thumb,'','','','','')
paginate=re.compile('_page=(\d+)&_',re.DOTALL).findall(murl)
if paginate:
purl=int(paginate[0])+ 1
xurl=re.sub('_page=(\d+)&_','_page='+str(purl)+'&_',murl)
main.addDir('[COLOR blue]Next[/COLOR]',xurl,97,art+'/next2.png')
def TSNLINK(mname,murl,thumb):
#got help from TSN plugin by TEEFER
main.GA("TSN-list","Watched")
ok=True
link=main.OPENURL(murl)
m3u8 = re.compile('"(http[^"]+m3u8)"').findall(link)[0]
link2=main.OPENURL(m3u8)
    streams = re.compile("(http.+?)Adaptive").findall(link2)
    if len(streams) == 0:
        xbmc.executebuiltin("XBMC.Notification(Sorry!,Playable Only in Canada,5000)")
    else:
        stream = streams[0]
if selfAddon.getSetting("tsn-qua") == "0":
stream_url = stream+'Adaptive_08.mp4.m3u8'
elif selfAddon.getSetting("tsn-qua") == "1":
stream_url = stream+'Adaptive_05.mp4.m3u8'
elif selfAddon.getSetting("tsn-qua") == "2":
stream_url = stream+'Adaptive_01.mp4.m3u8'
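        # Note (assumption, not documented in the source): the Adaptive_NN
        # suffixes are different bitrate renditions of the same stream; the
        # tsn-qua setting simply picks Adaptive_08, _05 or _01 against the
        # playlist URL prefix captured above.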
# play with bookmark
player = playbackengine.PlayWithoutQueueSupport(resolved_url=stream_url, addon_id=addon_id, video_type='', title=mname,season='', episode='', year='',img=thumb,infolabels='', watchedCallbackwithParams=main.WatchedCallbackwithParams,imdb_id='')
#WatchHistory
if selfAddon.getSetting("whistory") == "true":
wh.add_item(mname+' '+'[COLOR green]TSN[/COLOR]', sys.argv[0]+sys.argv[2], infolabels='', img=thumb, fanart='', is_folder=False)
player.KeepAlive()
return ok
| gpl-2.0 | -3,551,755,663,308,800,500 | 70.3125 | 349 | 0.643734 | false | 2.734572 | false | false | false |
xinghalo/DMInAction | src/spider/BrandSpider/brand2.py | 1 | 8020 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import urllib
import urllib2
import re
import json
class Spider:
def __init__(self):
self.url = 'https://www.paizi.com/'
self.user_agent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.115 Safari/537.36'
self.headers = { 'User-Agent' : self.user_agent }
def getBrandCategory(self):
content = self.getPageContext(self.url)
indexItems = self.resolveIndexContent(content)
for indexItem in indexItems:
firstCategory = indexItem[1]
firstCategoryContent = self.getPageContext('https:'+str(indexItem[0]))
firstCategoryItems = self.resolveFirstCategoryContent(firstCategoryContent)
for firstCategroyItem in firstCategoryItems:
sencondCategory = firstCategroyItem[1]
secondCategoryContent = self.getPageContext('https:'+str(firstCategroyItem[0]))
secondCategoryItems = self.resolveSecondCategoryContent(secondCategoryContent)
for secondCategoryItem in secondCategoryItems:
thirdCategory = secondCategoryItem[1]
thirdCategoryContent = self.getPageContext('https:'+str(secondCategoryItem[0]))
realUrl = self.getRealUrl(thirdCategoryContent)
realThirdCategoryContent = self.getPageContext('https:'+str(realUrl))
index = self.getMaxPage(realThirdCategoryContent)
                    # parse the current page
                    realThirdCategoryItems = self.resolveLastPage(realThirdCategoryContent)
                    for realThirdCategoryItem in realThirdCategoryItems:
                        brandCategory = realThirdCategoryItem[1]  # brand name
aboutContent = self.getPageContext('https:'+str(realThirdCategoryItem[0]))
aboutItems = self.resolveAboutPage(aboutContent)
brandContent = self.getPageContext('https:'+str(aboutItems))
info = self.resolveBrandField(brandContent)
print info[0],',',info[1],',',info[2],',',info[3],',',info[4],',',info[5],',',info[6],',',info[7],',',info[8],',',info[9],',',info[10],',',info[11],',',info[12]
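    # Summary of the crawl above (added for clarity, not in the original
    # source): index page -> first-level category -> second-level category ->
    # real brand-list URL -> brand "about" page -> brand detail page, whose
    # fields are printed as comma-separated output.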
def resolveDan(self,content):
try:
pattern = re.compile('.*?<p><font color="#4993F4">主体规模:</font>(.*?)</p>.*?')
return re.findall(pattern,content)[0]
except:
return 'null'
def resolveZhuTiGuiMo(self,content):
try:
pattern = re.compile('.*?<p><font color="#4993F4">主体规模:</font>(.*?)</p>.*?')
return re.findall(pattern,content)
except:
return 'null'
def resolveBrandField(self,content):
zhutiguimo = 'null'
danweixingzhi = 'null'
zichanleixing = 'null'
chuangjianshijian = 'null'
boss = 'null'
address = 'null'
zizhirongyu = 'null'
score = 0
price = 'null'
rank = 'null'
sales = 'null'
renqi = 'null'
try:
pattern = re.compile('.*?<p style="height: 30px;line-height: 20px;">(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
name = result[0]
pattern = re.compile('.*?<p><font color="#4993F4">主体规模:</font>(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
zhutiguimo = result[0]
pattern = re.compile('.*?<p><font color="#4993F4">单位性质:</font>(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
danweixingzhi = result[0]
pattern = re.compile('.*?<p><font color="#4993F4">资产类型:</font>(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
zichanleixing = result[0]
pattern = re.compile('.*?<p><font color="#4993F4">成立于:</font>(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
chuangjianshijian = result[0]
pattern = re.compile('.*?<p><font color="#4993F4">创办人、主要负责人或法人:</font>(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
boss = result[0]
pattern = re.compile('.*?<p><font color="#4993F4">发源地或总部所在地:</font>(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
address = result[0]
pattern = re.compile('.*?<p class="x"><span>*</span><font color="#4993F4">资质荣誉:</font>(.*?)</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
zizhirongyu = result[0]
# <p class="zf">总分:92分</p>
pattern = re.compile('.*?<p class="zf">总分:(.*?)分</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
score = result[0]
# <p>综合排名:第<b style="color:#F60;" _hover-ignore="1">193</b>位</p>
pattern = re.compile('.*?<p>综合排名:第<b style="color:#F60;">(.*?)</b>位</p>.*?')
result = re.findall(pattern,content)
if len(result) >0 :
rank = result[0]
# <p>品牌价值:<a href="//jiazhi.paizi.com/s?keys=惠普" style="color:#F60; font-weight:bold;" target="_blank">41832</a>百万元
pattern = re.compile('.*?<p>品牌价值:<a href=".*?" style="color:#F60; font-weight:bold;" target="_blank">(.*?)</a>(.*?).*?')
result = re.findall(pattern,content)
            if len(result) > 0 :
                # findall returns (value, unit) tuples; concatenate the two groups
                price = str(result[0][0]) + str(result[0][1])
# <p>估算销量:<b style="color:#F60;">4831</b>件/月</p>
pattern = re.compile('.*?<p>估算销量:<b style="color:#F60;">(.*?)</b>(.*?)</p>.*?')
result = re.findall(pattern,content)
            if len(result) > 0 :
                sales = str(result[0][0]) + str(result[0][1])
# <p>品牌人气:<em id="zs_pprq">222811</em>
pattern = re.compile('.*?<p>品牌人气:<em id="zs_pprq">(.*?)</em>.*?')
result = re.findall(pattern,content)
if len(result) > 0 :
renqi = result[0]
return [name,zhutiguimo,danweixingzhi,zichanleixing,chuangjianshijian,boss,address,zizhirongyu,score,price,rank,sales,renqi]
except:
            print 'error while parsing brand fields'
return []
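    # When parsing succeeds, the returned list is, in order: [name, scale,
    # unit nature, asset type, founding year, founder/legal representative,
    # headquarters, honors, score, brand value, rank, estimated sales,
    # popularity] (English glosses of the Chinese field labels above).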
def resolvePageName(self,content):
try:
pattern = re.compile('.*?<p style="height: 30px;line-height: 20px;">(.*?)</p>.*?')
return re.findall(pattern,content)
except:
            print 'error while parsing brand page'
return []
def getPageContext(self,url):
        # print 'fetching page', url
try:
request = urllib2.Request(url,headers = self.headers)
response = urllib2.urlopen(request)
return response.read()
        except:
            # ignore errors while sending the request
            pass
def run(self):
self.getBrandCategory()
def resolveIndexContent(self,content):
try:
pattern = re.compile('.*?<h3><em></em><a href="(.*?)">(.*?)</a></h3>.*?')
return re.findall(pattern,content)
except:
            # print 'ignoring brand page parse error'
return []
def resolveFirstCategoryContent(self,content):
try:
pattern = re.compile('.*?<div class="con06">([\s\S]*?)<div class="con07">.*?')
div = re.findall(pattern,content)
pattern = re.compile('.*?<strong><a href="(.*?)">(.*?)</a></strong>.*?')
return re.findall(pattern,div[0])
except:
            # print 'ignoring brand page parse error'
return []
def resolveSecondCategoryContent(self,content):
try:
pattern = re.compile('.*?<div class="c-3">([\s\S]*?)</div>.*?')
div = re.findall(pattern,content)
pattern = re.compile('.*?<a href="(.*?)">(.*?)</a>.*?')
return re.findall(pattern,div[0])
except:
            # print 'ignoring brand page parse error'
return []
def getRealUrl(self,content):
try:
pattern = re.compile('.*?<a href="(.*?)">.*?品牌大全></a>.*?')
return re.findall(pattern,content)[0]
except:
print "解析出错"
return []
def getMaxPage(self,content):
try:
pattern = re.compile('.*?\.\.<a href=".*?">(\d)</a>.*?')
index = re.findall(pattern,content)
if len(index) == 0:
return 0
else:
return index[0]
except:
print "获取最大值出错"
return []
def resolveLastPage(self,content):
# <div class="c03"><p>名称:<a href="
try:
pattern = re.compile('.*?<p>名称:<a href="(.*?)">(.*?)</a></p>.*?')
return re.findall(pattern,content)
except:
print "解析出错"
return []
def resolveAboutPage(self,content):
try:
pattern = re.compile('.*?<a href="(.*?)">关于.*?')
return re.findall(pattern,content)[0]
except:
return []
spider = Spider()
spider.run() | apache-2.0 | -4,018,235,135,387,909,600 | 30.118852 | 166 | 0.626581 | false | 2.594668 | false | false | false |
dpaiton/OpenPV | projects/HyPerRetina/tuning/tune_main_gar.py | 2 | 31500 | #!/usr/bin/env python
#For tuning retina on DARPA Heli
#Created by Dylan Paiton
from tune_functions import *
###################
### ###
### Global vars ###
### ###
###################
run_PetaVision = 0 #Will just create params file if set to 0
pipe_output_to_file = 1
mpi_np = '4'
mpi_rows = '2'
mpi_columns = '2'
num_steps_list = ['2000'] #[str(450*33)]#
stochastic_flag = '1' #preActivityNotRate = !stochastic_flag
PA_delay = '2.0'
param_template_name = 'retina_params.template'
run_name = 'biggraywhiteblackspots' #'Heli_Challenge_026'#
#######################################################################################
## PATH PARAMS
#######################################################################################
#Gar
wrkspc_path = '/Users/garkenyon/workspace-sync-anterior'
remote_wrkspc_path = wrkspc_path #'/home/gkenyon/workspace-sync-anterior'#
data_path = wrkspc_path #remote_wrkspc_path #'/nh/compneuro/Data'
#Dylan
#wrkspc_path = '/Users/dpaiton/Documents/Work/LANL/workspace'
#remote_wrkspc_path = wrkspc_path
#data_path = wrkspc_path
#Common
out_filename = run_name
results_path = data_path+'/HyPerRetina/output/'+run_name
remote_input_path = remote_wrkspc_path+'/HyPerRetina/input'
input_path = wrkspc_path+'/HyPerRetina/input'
tuning_path = wrkspc_path+'/HyPerRetina/tuning'
param_in_file = tuning_path+'/'+param_template_name
param_out_file = tuning_path+'/params_files/'+out_filename
run_file = wrkspc_path+'/HyPerRetina/Debug_remote/HyPerRetina'
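# Illustrative sketch (hypothetical command assembly; the actual launch logic
# presumably lives later in this script) of how the vars above combine:
#   cmd = 'mpirun -np ' + mpi_np + ' ' + run_file + ' -rows ' + mpi_rows + \
#         ' -columns ' + mpi_columns + ' -p ' + param_out_file + '.pv'
#   if pipe_output_to_file: cmd += ' > ' + results_path + '/stdout.txt'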
#######################################################################################
## INPUT MOVIE (One must be enabled)
input_file = remote_input_path+'/filenames_graywhiteblackspots_big.txt'#'/filenames_graywhiteblackspots.txt'#'/heli_challenge_026_framenames.txt'#
## Declare layers
#INPUTS
Movie = 1
#ANN INPUT COPY
ImageBuffer = 1
ConstantVrest = 1
#CONE
Cone = 1
ConeSigmoidON = 1
ConeSigmoidOFF = ConeSigmoidON
#BIPOLAR
BipolarON = 1
BipolarSigmoidON = 1
BipolarOFF = BipolarON
BipolarSigmoidOFF = BipolarSigmoidON
#HORIZONTAL
Horizontal = 1
HorizontalGap = 1
HorizontalSigmoidON = 1
HorizontalSigmoidOFF = 1
#WFAMACRINE
WFAmacrineON = 1
WFAmacrineSigmoidON = 1
WFAmacrineOFF = WFAmacrineON
WFAmacrineSigmoidOFF = WFAmacrineSigmoidON
#PAAmacrie
PAAmacrineON = 1
PAAmacrineGapON = 1
PAAmacrineOFF = PAAmacrineON
PAAmacrineGapOFF = PAAmacrineGapON
#SFAmacrine
SFAmacrine = 1
SFAmacrineSigmoid = 1
SFAmacrineGap = 1
#GANGLION
GanglionON = 1
GanglionGapON = 1
GanglionOFF = GanglionON
GanglionGapOFF = GanglionGapON
## Declare conn strength values
## frange is (start, stop, step)
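## Note (assumption): frange comes from tune_functions and is not shown here;
## the single-value lists below rely on it yielding just `start` when stop and
## step are 0, e.g. frange(40,0,0) -> [40.0].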
ImageImageBuffer = ["%g" % x for x in frange(40,0,0)] # Image to ImageBuffer
ConstantVrestImageBuffer = ["%g" % x for x in frange(1,0,0)] # ConstantVrest to ImageBuffer
ImageBufferCone = ["%g" % x for x in frange(1,0,0)] # ImageBuffer to Cone
ConeSigmoidBipolar = ["%g" % x for x in frange(0.5,0,0)] # ConeSigmoid to Bipolar
ConeSigmoidHorizontal = ["%g" % x for x in frange(0.5,0,0)] # ConeSigmoid to Horizontal
HorizontalGapHorizontal = ["%g" % x for x in frange(3,0,0)] # HorizontalGap to Horizontal
HorizontalSigmoidConeON = ["%g" % x for x in frange(1,0,0)] # HorizontalSigmoidON to Cone
HorizontalSigmoidBipolarOFF = ["%g" % x for x in frange(2.5,0,0)] # HorizontalSigmoidOFF to BipolarOFF
BipolarSigmoidSFAmacrine = ["%g" % x for x in frange(1,0,0)] # BipolarSigmoid to SFAmacrine
BipolarSigmoidWFAmacrine = ["%g" % x for x in frange(1,0,0)] # BipolarSigmoid to WFAmacrine
BipolarSigmoidPAAmacrine      = ["%g" % x for x in frange(0.1,0,0)]    # BipolarSigmoid to PAAmacrine
BipolarSigmoidGanglion = ["%g" % x for x in frange(3.0,0,0)] # BipolarSigmoid to Ganglion
SFAmacrineGapSFAmacrine = ["%g" % x for x in frange(1,0,0)] # SFAmacrineGAP to SFAmacrine
SFAmacrineSigmoidPAAmacrine = ["%g" % x for x in frange(2,0,0)] #
WFAmacrineSigmoidBipolarON = ["%g" % x for x in frange(0.10,0,0)] # WFAmacrineSigmoidON to Bipolar
WFAmacrineSigmoidBipolarOFF = ["%g" % x for x in frange(0.10,0,0)] # WFAmacrineSigmoidOFF to Bipolar
WFAmacrineONSFAmacrine = ["%g" % x for x in frange(1,0,0)] # WFAmacrineON to SFAmacrine
WFAmacrineOFFSFAmacrine = ["%g" % x for x in frange(1,0,0)] # WFAmacrineOFF to SFAmacrine
WFAmacrineSigmoidGanglionON = ["%g" % x for x in frange(0.5,0,0)] # WFAmacrineSigmoidON to GanglionON
WFAmacrineSigmoidGanglionOFF = ["%g" % x for x in frange(0.5,0,0)] # WFAmacrineSigmoidOFF to GanglionOFF
PAAmacrineWFAmacrine = ["%g" % x for x in frange(2.0,0,0)] # PAAmacrine to WFAmacrine
PAAmacrineGapPAAmacrine = ["%g" % x for x in frange(1.5,0,0)] # PAAmacrineGap to PAAmacrine
PAAmacrinePAAmacrine = ["%g" % x for x in frange(3.0,0,0)] #
PAAmacrineGapGanglion = ["%g" % x for x in frange(0.5,0,0)] # PAAmacrineGap to Ganglion
PAAmacrineGanglion = ["%g" % x for x in frange(24,0,0)] #
GanglionGapPAAmacrine = ["%g" % x for x in frange(3.0,0,0)] # GanglionGap to PAAmacrine
#List possible connections
conn_list = ["ImageImageBuffer",
"ConstantVrestImageBuffer",
"ImageBufferCone",
"ConeSigmoidBipolar",
"ConeSigmoidHorizontal",
"HorizontalGapHorizontal",
"HorizontalSigmoidConeON",
"HorizontalSigmoidBipolarOFF",
"BipolarSigmoidSFAmacrine",
"BipolarSigmoidWFAmacrine",
"BipolarSigmoidPAAmacrine",
"BipolarSigmoidGanglion",
"SFAmacrineGapSFAmacrine",
"SFAmacrineSigmoidPAAmacrine",
"WFAmacrineSigmoidBipolarON",
"WFAmacrineSigmoidBipolarOFF",
"WFAmacrineONSFAmacrine",
"WFAmacrineOFFSFAmacrine",
"WFAmacrineSigmoidGanglionON",
"WFAmacrineSigmoidGanglionOFF",
"PAAmacrineWFAmacrine",
"PAAmacrineGapPAAmacrine",
"PAAmacrinePAAmacrine",
"PAAmacrineGapGanglion",
"PAAmacrineGanglion",
"GanglionGapPAAmacrine"]
conn_lol = [ImageImageBuffer,
ConstantVrestImageBuffer,
ImageBufferCone,
ConeSigmoidBipolar,
ConeSigmoidHorizontal,
HorizontalGapHorizontal,
HorizontalSigmoidConeON,
HorizontalSigmoidBipolarOFF,
BipolarSigmoidSFAmacrine,
BipolarSigmoidWFAmacrine,
BipolarSigmoidPAAmacrine,
BipolarSigmoidGanglion,
SFAmacrineGapSFAmacrine,
SFAmacrineSigmoidPAAmacrine,
WFAmacrineSigmoidBipolarON,
WFAmacrineSigmoidBipolarOFF,
WFAmacrineONSFAmacrine,
WFAmacrineOFFSFAmacrine,
WFAmacrineSigmoidGanglionON,
WFAmacrineSigmoidGanglionOFF,
PAAmacrineWFAmacrine,
PAAmacrineGapPAAmacrine,
PAAmacrinePAAmacrine,
PAAmacrineGapGanglion,
PAAmacrineGanglion,
GanglionGapPAAmacrine]
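# conn_list holds the placeholder token names that appear in the params
# template; conn_lol holds, in the same order, the candidate strength values
# tried for each token, so entry i of one corresponds to entry i of the other.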
print "tune_params: Verifying parameters."
## Assert that all parameter lists are the same length or of length 1
max_list_len = max([len(x) for x in conn_lol]) # max length of any sub-list in the list-of-lists
if not all(len(i)==max_list_len or len(i)==1 for i in conn_lol):
exit("\ntune_params: ERROR: One of the lists is not the right size!\n")
## Check to see if any of the strengths are set to 0
## nonZeroStrength is true if there is a nonzero strength (false if strength is 0)
nonZeroStrength = [strength not in '0' for connlist in conn_lol for strength in [max(connlist)]] # max val in each list is not 0
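# e.g. a strength list like ['0'] has max '0' -> False (connection stays
# disabled in the template), while ['0.5'] gives '0.5' -> True (the matching
# elif branch below may enable the connection block).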
if len(conn_lol) != len(nonZeroStrength):
exit("\ntune_params: ERROR: nonZeroStrength array is not the appropriate length")
## Open file
if os.path.isfile(param_in_file):
try:
print "tune_params: Opening template param file "+param_in_file+"."
in_fid = open(param_in_file)
param_lines = in_fid.readlines()
in_fid.close()
except IOError as e:
print "tune_params: Failed to open file "+param_in_file+" with error:\n"
exit(e)
else:
exit("\ntune_params: ERROR: Couldn't find file "+param_in_file+"!\n")
## Modify pvp file and run petavision for each parameter
for num_steps in num_steps_list:
for param_idx in range(max_list_len):
out_lines = param_lines[:] # Dereference to make copy of list
idx_out_filename = out_filename+str(param_idx)+'.pv'
full_out_file = param_out_file+'_p'+str(param_idx)+'_ns'+num_steps+'.pv'
full_results_path = results_path+'/p'+str(param_idx)+'/ns'+num_steps
print "tune_params: Modifying template file."
for line_num in range(len(out_lines)):
line = out_lines[line_num]
## Activate layers that have been set in the global vars section
enable = False
if 'Movie "Movie"' in line and Movie==1:
enable = True
elif 'ANNLayer "ImageBuffer"' in line and ImageBuffer==1:
enable = True
elif 'ANNLayer "ConstantVrest"' in line and ConstantVrest==1:
enable = True
elif 'LIFGap "Cone"' in line and Cone==1:
enable = True
elif 'SigmoidLayer "ConeSigmoidON"' in line and ConeSigmoidON==1:
enable = True
elif 'SigmoidLayer "ConeSigmoidOFF"' in line and ConeSigmoidOFF==1:
enable = True
elif 'LIF "BipolarON"' in line and BipolarON==1:
enable = True
elif 'SigmoidLayer "BipolarSigmoidON"' in line and BipolarSigmoidON==1:
enable = True
elif 'LIFGap "Horizontal"' in line and Horizontal==1:
enable = True
elif 'GapLayer "HorizontalGap"' in line and HorizontalGap==1:
enable = True
elif 'SigmoidLayer "HorizontalSigmoidON"' in line and HorizontalSigmoidON==1:
enable = True
elif 'SigmoidLayer "HorizontalSigmoidOFF"' in line and HorizontalSigmoidOFF==1:
enable = True
elif 'LIF "WFAmacrineON"' in line and WFAmacrineON==1:
enable = True
elif 'GapLayer "SFAmacrineGap"' in line and SFAmacrineGap==1:
enable = True
elif 'SigmoidLayer "WFAmacrineSigmoidON"' in line and WFAmacrineSigmoidON==1:
enable = True
elif 'LIFGap "GanglionON"' in line and GanglionON==1:
enable = True
elif 'GapLayer "GanglionGapON"' in line and GanglionGapON==1:
enable = True
elif 'LIFGap "PAAmacrineON"' in line and PAAmacrineON==1:
enable = True
elif 'GapLayer "PAAmacrineGapON"' in line and PAAmacrineGapON==1:
enable = True
elif 'LIF "BipolarOFF"' in line and BipolarOFF==1:
enable = True
elif 'SigmoidLayer "BipolarSigmoidOFF"' in line and BipolarSigmoidOFF==1:
enable = True
elif 'LIF "WFAmacrineOFF"' in line and WFAmacrineOFF==1:
enable = True
elif 'SigmoidLayer "WFAmacrineSigmoidOFF"' in line and WFAmacrineSigmoidOFF==1:
enable = True
elif 'LIFGap "GanglionOFF"' in line and GanglionOFF==1:
enable = True
elif 'GapLayer "GanglionGapOFF"' in line and GanglionGapOFF==1:
enable = True
elif 'LIFGap "PAAmacrineOFF"' in line and PAAmacrineOFF==1:
enable = True
elif 'GapLayer "PAAmacrineGapOFF"' in line and PAAmacrineGapOFF==1:
enable = True
elif 'LIFGap "SFAmacrine"' in line and SFAmacrine==1:
enable = True
elif 'SigmoidLayer "SFAmacrineSigmoid"' in line and SFAmacrineSigmoid==1:
enable = True
elif 'KernelConn "ImageToImageBuffer"' in line and ImageBuffer==1: #########Connections
zero_index = [idx for idx, enum in enumerate([param in 'ImageImageBuffer' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "ConstantVrestToImageBuffer"' in line and ConstantVrest==1 and ImageBuffer==1:
zero_index = [idx for idx, enum in enumerate([param in 'ConstantVrestImageBuffer' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "ImageBufferToCone"' in line and Cone==1:
zero_index = [idx for idx, enum in enumerate([param in 'ImageBufferCone' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "ConeSigmoidONToBipolarON"' in line and ConeSigmoidON==1 and BipolarON==1:
zero_index = [idx for idx, enum in enumerate([param in 'ConeSigmoidBipolar' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "ConeSigmoidONToHorizontal"' in line and ConeSigmoidON==1 and Horizontal==1:
zero_index = [idx for idx, enum in enumerate([param in 'ConeSigmoidHorizontal' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "HorizontalGapToHorizontal"' in line and HorizontalGap==1 and Horizontal==1:
zero_index = [idx for idx, enum in enumerate([param in 'HorizontalGapHorizontal' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "HorizontalSigmoidToConeON"' in line and HorizontalSigmoidON==1 and Cone==1:
zero_index = [idx for idx, enum in enumerate([param in 'HorizontalSigmoidConeON' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "HorizontalSigmoidToBipolarOFF"' in line and HorizontalSigmoidOFF==1 and Cone==1:
zero_index = [idx for idx, enum in enumerate([param in 'HorizontalSigmoidBipolarOFF' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidONToGanglionON"' in line and BipolarSigmoidON==1 and GanglionON==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidGanglion' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "SFAmacrineGapToSFAmacrine"' in line and SFAmacrineGap==1 and SFAmacrine==1:
zero_index = [idx for idx, enum in enumerate([param in 'SFAmacrineGapSFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "WFAmacrineSigmoidONToBipolarON"' in line and WFAmacrineSigmoidON==1 and BipolarON==1:
zero_index = [idx for idx, enum in enumerate([param in 'WFAmacrineSigmoidBipolarON' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidONToWFAmacrineON"' in line and BipolarSigmoidON==1 and WFAmacrineON==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidWFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidONToPAAmacrineON"' in line and BipolarSigmoidON==1 and PAAmacrineON==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "GanglionGapONToPAAmacrineON"' in line and GanglionGapON==1 and PAAmacrineON==1:
zero_index = [idx for idx, enum in enumerate([param in 'GanglionGapPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "PAAmacrineGapONToGanglionON"' in line and PAAmacrineGapON==1 and GanglionON==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineGapGanglion' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "PAAmacrineGapONToPAAmacrineON"' in line and PAAmacrineGapON==1 and PAAmacrineON==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineGapPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "PAAmacrineONToGanglionON"' in line and PAAmacrineON==1 and GanglionON==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineGanglion' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "PAAmacrineONToPAAmacrineON"' in line and PAAmacrineON==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrinePAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidONToSFAmacrine"' in line and BipolarSigmoidON==1 and SFAmacrine==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidSFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "WFAmacrineSigmoidONToGanglionON"' in line and WFAmacrineSigmoidON==1 and GanglionON==1:
zero_index = [idx for idx, enum in enumerate([param in 'WFAmacrineSigmoidGanglionON' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "SFAmacrineSigmoidToPAAmacrineON"' in line and SFAmacrineSigmoid==1 and PAAmacrineON==1:
zero_index = [idx for idx, enum in enumerate([param in 'SFAmacrineSigmoidPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "PAAmacrineONToWFAmacrineON"' in line and PAAmacrineON==1 and WFAmacrineON==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineWFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "WFAmacrineSigmoidONToSFAmacrine"' in line and WFAmacrineSigmoidON==1 and SFAmacrine==1:
zero_index = [idx for idx, enum in enumerate([param in 'WFAmacrineONSFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "WFAmacrineSigmoidOFFToSFAmacrine"' in line and WFAmacrineSigmoidOFF==1 and SFAmacrine==1:
zero_index = [idx for idx, enum in enumerate([param in 'WFAmacrineOFFSFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "ConeSigmoidOFFToBipolarOFF"' in line and ConeSigmoidOFF==1 and BipolarOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'ConeSigmoidBipolar' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidOFFToWFAmacrineOFF"' in line and BipolarSigmoidOFF==1 and WFAmacrineOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidWFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidOFFToPAAmacrineOFF"' in line and BipolarSigmoidOFF==1 and PAAmacrineOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "WFAmacrineSigmoidOFFToBipolarOFF"' in line and WFAmacrineSigmoidOFF==1 and BipolarOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'WFAmacrineSigmoidBipolarOFF' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidOFFToGanglionOFF"' in line and BipolarSigmoidOFF==1 and GanglionOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidGanglion' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "BipolarSigmoidOFFToSFAmacrine"' in line and BipolarSigmoidOFF==1 and SFAmacrine==1:
zero_index = [idx for idx, enum in enumerate([param in 'BipolarSigmoidSFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "GanglionGapOFFToPAAmacrineOFF"' in line and GanglionGapOFF==1 and PAAmacrineOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'GanglionGapPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "PAAmacrineGapOFFToGanglionOFF"' in line and PAAmacrineGapOFF==1 and GanglionOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineGapGanglion' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'GapConn "PAAmacrineGapOFFToPAAmacrineOFF"' in line and PAAmacrineGapOFF==1 and PAAmacrineOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineGapPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "PAAmacrineOFFToGanglionOFF"' in line and PAAmacrineOFF==1 and GanglionOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineGanglion' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "PAAmacrineOFFToPAAmacrineOFF"' in line and PAAmacrineOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrinePAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "WFAmacrineSigmoidOFFToGanglionOFF"' in line and WFAmacrineSigmoidOFF==1 and GanglionOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'WFAmacrineSigmoidGanglionOFF' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "SFAmacrineSigmoidToPAAmacrineOFF"' in line and SFAmacrineSigmoid==1 and PAAmacrineOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'SFAmacrineSigmoidPAAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "PAAmacrineOFFToWFAmacrineOFF"' in line and PAAmacrineOFF==1 and WFAmacrineOFF==1:
zero_index = [idx for idx, enum in enumerate([param in 'PAAmacrineWFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
elif 'KernelConn "WFAmacrineSigmoidOFFToSFAmacrine"' in line and WFAmacrineSigmoidOFF==1 and SFAmacrine==1:
zero_index = [idx for idx, enum in enumerate([param in 'WFAmacrineSigmoidSFAmacrine' for param in conn_list]) if enum==True]
if len(zero_index)>0:
if nonZeroStrength[zero_index[0]]:
enable = True
if enable == True:
out_lines = enable_block(line_num,out_lines)
enable = False
## Make substitutions for desired param values
indices = [idx for idx, enum in enumerate([param in line for param in conn_list]) if enum == True] #list of indices (locations in line) where word of interest (param) is located
if len(indices) > 0: #if the current line has any of the parameters
for lol_idx in indices:
if len(conn_lol[lol_idx])>1:
new_line = re.sub(conn_list[lol_idx],conn_lol[lol_idx][param_idx],out_lines[line_num],count=1)
else:
new_line = re.sub(conn_list[lol_idx],conn_lol[lol_idx][0],out_lines[line_num],count=1)
out_lines[line_num] = new_line
line = new_line
if 'NUMSTEPS' in line:
new_line = re.sub('NUMSTEPS',num_steps,line,count=0)
out_lines[line_num] = new_line
line = new_line
if 'OUTPATH' in line:
new_line = re.sub('OUTPATH',full_results_path,line,count=0)
out_lines[line_num] = new_line
line = new_line
if 'PARAMSFILE' in line:
new_line = re.sub('PARAMSFILE',idx_out_filename,line,count=0)
out_lines[line_num] = new_line
line = new_line
if 'INIMGPATH' in line:
new_line = re.sub('INIMGPATH',input_file,line,count=0)
out_lines[line_num] = new_line
line = new_line
if 'INMOVPATH' in line:
new_line = re.sub('INMOVPATH',input_file,line,count=0)
out_lines[line_num] = new_line
line = new_line
if 'STOCHASTICRELFLAG' in line:
new_line = re.sub('STOCHASTICRELFLAG',stochastic_flag,line,count=0)
out_lines[line_num] = new_line
line = new_line
if 'PADelay' in line:
new_line = re.sub('PADelay',PA_delay,line,count=0)
out_lines[line_num] = new_line
line = new_line
if 'PREACTNOTRATE' in line:
                if stochastic_flag == '0':
                    new_line = re.sub('PREACTNOTRATE','1',line,count=0)
                elif stochastic_flag == '1':
new_line = re.sub('PREACTNOTRATE','0',line,count=0)
else:
print("\ntune_params: STOCHASTICRELFLAG must be 0 or 1")
exit()
out_lines[line_num] = new_line
line = new_line
#####ENDFOR - line_num
##Write to output file
print "tune_params: Writing new params file:\n "+full_out_file
try:
out_fid = open(full_out_file,'w')
except IOError as e:
print "\ntune_params: Failed to open file "+full_out_file+" with error:\n"
exit(e)
for out_line in out_lines:
out_fid.write("%s" % out_line)
out_fid.close()
## Run petavision for this output file
if run_PetaVision:
print "tune_params: Running PetaVision.\n\n"
os.system('mkdir -p '+full_results_path)
mpi_cmd = '/opt/local/bin/openmpirun -np '+mpi_np
if pipe_output_to_file:
run_cmd = mpi_cmd+' '+run_file+' -rows '+mpi_rows+' -columns '+mpi_columns+' -p '+full_out_file+' > '+full_results_path+'/stdout.txt'
else:
run_cmd = mpi_cmd+' '+run_file+' -rows '+mpi_rows+' -columns '+mpi_columns+' -p '+full_out_file
os.system('time '+run_cmd)
os.system('cp '+full_out_file+' '+full_results_path)
print "\n\ntune_params: Finished running PetaVision."
#####ENDFOR - param_idx
#####ENDFOR - num_steps
#####ENDFUNCTION
| epl-1.0 | -96,675,837,093,836,750 | 54.360281 | 189 | 0.576444 | false | 3.400259 | false | false | false |
haodongchen/BSseeker2-tools | Step1.py | 1 | 2096 | #!/usr/bin/env python
# Copyright (c) 2014 UCLA
# Authors: Haodong Chen and Thomas M. Vondriska
#
# This software is distributable under the terms of the GNU General
# Public License (GPL) v2, the text of which can be found at
# http://www.gnu.org/copyleft/gpl.html. Installing, importing or
# otherwise using this module constitutes acceptance of the terms of
# this License.
#
# Disclaimer
#
# This software is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# Comments and/or additions are welcome (send e-mail to:
# hdchen@ucla.edu).
import os, sys, subprocess
from optparse import OptionParser
from utils import *
def Step1():
# Demultiplex
parser = OptionParser()
parser.add_option("-i", "--input", type="string", dest="folder", \
help="Input file folder", default=os.getcwd() )
parser.add_option("--conf", type="string", dest="CONFPATH")
(options, args) = parser.parse_args()
Params = Conf_read(options.CONFPATH)
if Params['MULTIPLEX'] != "False":
Params['BARCODES'] = ','.join([str(int(x)) for x in Params['BARCODES'].split(",")])
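        # e.g. a config value of "4, 6, 8" is normalised to "4,6,8" before
        # being passed on the MyDemultiplex.py command line below.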
file_list_1 = [x for x in os.listdir(options.folder) if \
(x.endswith("_qseq.txt") or x.endswith("_qseq.txt.gz")) \
and x.split("_")[-3] == "1"]
file_list_1.sort()
f1 = file_list_1[int(os.environ["SGE_TASK_ID"]) - 1]
f1name = f1.split("_")
f1name[-3] = "2"
f2 = "_".join(f1name)
p = '-I %s -s %s -b %s -l %s -m %s'%(options.folder, f1, f2, Params['BARCODES'], Params['BCMISMATCH'])
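    # Example of the resulting call (folder and qseq file names are
    # hypothetical):
    #   ./MyDemultiplex.py -I /data/run1 -s s_1_1_0001_qseq.txt \
    #       -b s_1_2_0001_qseq.txt -l 4,6 -m 1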
cmd = ["./MyDemultiplex.py"]
cmd.extend(p.split(" "))
#print >> sys.stdout, " ".join(cmd)
process = subprocess.Popen(" ".join(cmd), shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = process.communicate()
print >> sys.stdout, out
if __name__ == "__main__":
Step1()
| gpl-2.0 | 7,939,004,548,819,058,000 | 39.307692 | 110 | 0.621183 | false | 3.369775 | false | false | false |
cernbox/smashbox | lib/test_concurrentDirMove.py | 1 | 4328 | __doc__ = """
This test concurrently moves a directory (the 'mover' worker) while
files are added to it (the 'adder' worker). The expected outcome is that
all added files are kept on the server and are found in the final directory.
"""
from smashbox.utilities import *
if platform.system().lower() == "darwin":
do_not_report_as_failure()
nfiles = int(config.get('concurrentMoveDir_nfiles',100))
filesize = int(config.get('concurrentMoveDir_filesize',10))
delaySeconds = int(config.get('concurrentMoveDir_delaySeconds',3)) # if delaySeconds > 0 then remover waits; else the adder waits;
testsets = [
{'concurrentMoveDir_nfiles':100,
'concurrentMoveDir_filesize':10,
'concurrentMoveDir_delaySeconds':10 }, # removing the directory while lots of tiny files are uploaded
{'concurrentMoveDir_nfiles':10,
'concurrentMoveDir_filesize':OWNCLOUD_CHUNK_SIZE(1.1),
'concurrentMoveDir_delaySeconds':5 }, # removing the directory while a large file is chunk-uploaded
{'concurrentMoveDir_nfiles':2,
'concurrentMoveDir_filesize':OWNCLOUD_CHUNK_SIZE(5),
'concurrentMoveDir_delaySeconds':5 }, # removing the directory while a large file is chunk-uploaded
{'concurrentMoveDir_nfiles':20,
'concurrentMoveDir_filesize':OWNCLOUD_CHUNK_SIZE(0.9),
 'concurrentMoveDir_delaySeconds':10 },   # removing the directory while more but smaller files are uploaded
{'concurrentMoveDir_nfiles':5,
'concurrentMoveDir_filesize':OWNCLOUD_CHUNK_SIZE(0.1),
'concurrentMoveDir_delaySeconds':-5 }, # removing the directory before files are uploaded
{'concurrentMoveDir_nfiles':5,
'concurrentMoveDir_filesize':OWNCLOUD_CHUNK_SIZE(2.1),
 'concurrentMoveDir_delaySeconds':-10 }   # removing the directory before large files are chunk-uploaded
]
import time
import tempfile
from smashbox.utilities.hash_files import *
@add_worker
def creator(step):
reset_owncloud_account()
reset_rundir()
step(1,'upload empty subdirectory')
d = make_workdir()
d2 = os.path.join(d,'subdir')
mkdir(d2)
run_ocsync(d)
step(6,'final check')
run_ocsync(d)
final_check(d)
@add_worker
def adder(step):
step(2,'sync the empty directory created by the creator')
d = make_workdir()
run_ocsync(d)
step(3,'locally create content in the subdirectory')
d2 = os.path.join(d,'subdir')
for i in range(nfiles):
create_hashfile(d2, size=filesize) #createfile_zero(os.path.join(d2,"test.%02d"%i),count=filesize, bs=1000)
step(4,'sync the added files in parallel')
if delaySeconds<0:
sleep(-delaySeconds)
run_ocsync(d)
# when directory is renamed while file is uploaded the PUT request finishes with Conflict error code
step(5,'mover has finished synchronizing')
# extra sync run to make sure that changes from mover have been correctly propagated
# first run will not be successful because files with Conflict response are blacklisted
# second run removes the blacklist and updates the files from scratch again
run_ocsync(d,n=2)
step(6,'final check')
run_ocsync(d)
final_check(d)
@add_worker
def mover(step):
step(2,'sync the empty directory created by the creator')
d = make_workdir()
run_ocsync(d)
step(3,'locally rename subdir to subdir2')
s1 = os.path.join(d,'subdir')
s2 = os.path.join(d,'subdir2')
os.rename(s1,s2)
step(4,'sync the subdir2 in parallel')
if delaySeconds>0:
sleep(delaySeconds)
run_ocsync(d)
step(6,'final check')
run_ocsync(d)
final_check(d)
@add_worker
def checker(step):
step(6,'sync the final state of the repository into a fresh local folder')
d = make_workdir()
run_ocsync(d)
final_check(d)
def final_check(d):
list_files(d,recursive=True)
d2 = os.path.join(d,'subdir2')
logger.info('final output: %s',d2)
all_files,analysed_files,bad_files = analyse_hashfiles(d2)
error_check(bad_files == 0,'%s corrupted files in %s'%(bad_files,d2))
error_check(analysed_files == nfiles,"not all files are present (%d/%d)"%(nfiles,analysed_files)) # FIXME: well, there may be other files - we don't check that yet
#runcmd('find %s'%d)
#log('content of /subdir as reported by webdav')
#list_webdav_propfind('subdir')
| agpl-3.0 | 4,133,240,066,960,746,000 | 28.04698 | 167 | 0.690619 | false | 3.45411 | false | false | false |
ganga-devs/ganga | doc/manuals/src/dev_survival/makegraph.py | 1 | 1662 |
# generate job status transition graph
# usage:
# 1) run ganga and this file as a script
# or 2) cd ganga/python, run python interactively and execfile(this_file)
ARC_LABELS = True
STYLED_EDGES = True
DEBUG = False
from Ganga.GPIDev.Lib.Job import Job
import os
g = Job.status_graph
initial_states = Job.initial_states
transient_states = Job.transient_states
import os.path
dot_file = os.path.abspath('ganga_job_stat.dot')
out_type = 'gif'
out_file = dot_file.replace('.dot','.'+out_type)
def debug(s):
if DEBUG:
print 'DEBUG:',s
f = file(dot_file,'w')
print >> f, 'digraph JobStatus {'
for node in g:
debug('src state: %s'%node)
for dest in g[node]:
debug('dest state: %s'%dest)
LAB = []
label = g[node][dest].transition_comment
if ARC_LABELS:
LAB.append('label="%s"'%label)
LAB.append('fontsize=8')
if STYLED_EDGES:
if label.find('force') != -1:
LAB.append('style=dotted bold')
if LAB:
LAB = '['+','.join(LAB)+']'
print >>f, '%s -> %s %s;' % (node,dest,LAB)
print >>f,"__start [shape=point]"
for node in initial_states:
print >>f, '__start -> %s;'%node
for node in transient_states:
print >>f, '%s [style=filled]'%node
print >>f, '}'
f.close()
print 'created', dot_file
#graphviz_top = '/afs/cern.ch/sw/lcg/external/graphviz/1.9/rh73_gcc32/'
#os.system('export LD_LIBRARY_PATH=%s.lib/graphviz:$LD_LIBRARY_PATH; %s/bin/dot -T%s %s -o%s'% (graphviz_top,graphviz_top,out_type, dot_file, out_file))
os.system('dot -T%s %s -o%s'% (out_type, dot_file, out_file))
print 'created', out_file
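# Example of the generated dot source (state names here are hypothetical; the
# real ones come from Job.status_graph):
#   digraph JobStatus {
#   new -> submitting [label="j.submit()",fontsize=8];
#   submitting -> submitted [label="submission ok",fontsize=8];
#   __start [shape=point]
#   __start -> new;
#   submitting [style=filled]
#   }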
| gpl-2.0 | -2,071,245,197,494,378,500 | 25.806452 | 152 | 0.607702 | false | 2.807432 | false | false | false |
rwl/godot | godot/component/text.py | 1 | 5703 | #------------------------------------------------------------------------------
# Copyright (C) 2007 Richard W. Lincoln
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 dated June, 1991.
#
# This software is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANDABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#------------------------------------------------------------------------------
""" Defines a text component.
References:
Jose.R.Fonseca, 'XDot', http://code.google.com/p/jrfonseca/wiki/XDot
"""
#------------------------------------------------------------------------------
# Imports:
#------------------------------------------------------------------------------
from math import sqrt
from enthought.traits.api import \
Instance, Float, Int, String, Trait, on_trait_change
from enthought.traits.ui.api import View, Item, Group
from enthought.enable.api import Component
#from enthought.kiva import Font as KivaFont
#from enthought.kiva import MODERN
from enthought.kiva.fonttools.font import str_to_font
#from enthought.kiva import Font, MODERN
from pen import Pen
#------------------------------------------------------------------------------
# "Text" class:
#------------------------------------------------------------------------------
class Text(Component):
""" Component with text traits """
#--------------------------------------------------------------------------
# "Text" interface:
#--------------------------------------------------------------------------
# The background color of this component.
bgcolor = "transparent"#"fuchsia"
# Pen for drawing text
pen = Instance(Pen, desc="pen instance with which to draw the text")
# X-axis coordinate
text_x = Float(desc="x-axis coordinate")
# Y-axis coordinate
text_y = Float(desc="y-axis coordinate")
# Text justification
justification = Int(-1, desc="(LEFT, CENTER, RIGHT = -1, 0, 1)")
# justification = Trait("Left", {"Left": -1, "Centre": 0, "Right": 1})
# Width of the text
text_w = Float(desc="width of the text as computed by the library")
# Text to be drawn
text = String(desc="text")
#--------------------------------------------------------------------------
# Views:
#--------------------------------------------------------------------------
traits_view = View(
Group(
Item("pen", style="custom", show_label=False),
label="Pen", show_border=True
),
Item("text_x"), Item("text_y"), Item("text_w"),
Item("justification"), Item("text")
)
#--------------------------------------------------------------------------
# Draw component on the graphics context:
#--------------------------------------------------------------------------
def _draw_mainlayer(self, gc, view_bounds=None, mode="default"):
""" Draws the component """
gc.save_state()
try:
# Specify the font
font = str_to_font(str(self.pen.font))
gc.set_font(font)
gc.set_fill_color(self.pen.color_)
x = self.text_x - ( self.text_w / 2 )
y = self.text_y - ( font.size / 2 )
# Show text at the same scale as the graphics context
ctm = gc.get_ctm()
if hasattr(ctm, "__len__") and len(ctm) == 6:
scale = sqrt( (ctm[0] + ctm[1]) * (ctm[0] + ctm[1]) / 2.0 + \
(ctm[2] + ctm[3]) * (ctm[2] + ctm[3]) / 2.0 )
elif hasattr(gc, "get_ctm_scale"):
scale = gc.get_ctm_scale()
else:
raise RuntimeError("Unable to get scale from GC.")
x *= scale
y *= scale
gc.show_text_at_point(self.text, x, y)
finally:
gc.restore_state()
@on_trait_change("pen.+,text_x,text_y,text_w,justification,text")
def _update(self):
if self.pen is None:
return
x = self.text_x - (self.text_w / 2)
x2 = x + self.text_w
font = str_to_font( str(self.pen.font) )
y = self.text_y - (font.size / 2)
y2 = y + font.size
self.position = [x, y]
# If bounds are set to 0, horizontal/vertical lines will not render.
self.bounds = [max(x2 - x, 1), max(y2 - y, 1)]
self.request_redraw()
def normal_left_down(self, event):
print "Text [%s] selected at (%d, %d)" % (self.text, event.x, event.y)
#------------------------------------------------------------------------------
# Stand-alone call:
#------------------------------------------------------------------------------
if __name__ == "__main__":
from godot.component.component_viewer import ComponentViewer
from enthought.enable.api import Container
text = Text(
pen=Pen(), text="Foo",
text_x=50, text_y=50, text_w=30
)
container = Container(
# fit_window=False, auto_size=True,
bounds=[30, 10], position=[50, 50],
bgcolor="green")
container.add( text )
viewer = ComponentViewer( component=text )
viewer.configure_traits()
# EOF -------------------------------------------------------------------------
| mit | 1,190,239,686,013,754,000 | 32.745562 | 79 | 0.473786 | false | 4.171909 | false | false | false |
ybogdanov/history-timeline | scripts/txt_to_json.py | 1 | 1460 | #!/usr/bin/python
import sys
import json
import re
from operator import itemgetter
comment_line = re.compile(r"^\s*(?:#.*)?$")
re_line = re.compile(
r'''
^\s*
(?P<name>[^:]+) # Person name
(?:
\s*:\s*
(?P<properties>.+)? # Properties
)?
''',
re.VERBOSE
)
regex = re.compile(
r'''
    (?P<key>\w+) \s* = \s* # Key is one or more word characters (letters, digits, underscore)
(?P<quote>["']?) # Optional quote character.
(?P<value>.*?) # Value is a non greedy match
(?P=quote) # Closing quote equals the first.
    ($|\s+)             # Entry ends with whitespace or at the end of the string
''',
re.VERBOSE
)
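# Example input line this parser accepts (person data is illustrative only):
#   Ada Lovelace: from=1815 to=1852 rating=9.5 note="first programmer"
# which becomes {"name": "Ada Lovelace", "from": 1815, "to": 1852,
# "rating": 9.5, "note": "first programmer"} in the output JSON.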
input_data = []
for i, line in enumerate(sys.stdin):
if comment_line.match(line): continue
m = re_line.match(line)
if not m:
sys.stderr.write("Cannot parse line #%d: %s" % (i+1, line))
continue
person = {
"name": m.group("name").strip()
}
if m.group("properties"):
props = {match.group('key'): match.group('value') for match in regex.finditer(m.group("properties"))}
if "from" in props: props["from"] = int(props["from"])
if "to" in props: props["to"] = int(props["to"])
if "rating" in props: props["rating"] = float(props["rating"])
person.update(props)
input_data.append(person)
sorted_result = sorted(input_data, key=itemgetter('name'))
print json.dumps(sorted_result, indent=4, separators=(',', ': '))
| gpl-2.0 | 2,532,537,697,041,589,000 | 22.548387 | 105 | 0.555479 | false | 3.258929 | false | false | false |
metabrainz/picard | picard/__init__.py | 2 | 5028 | # -*- coding: utf-8 -*-
#
# Picard, the next-generation MusicBrainz tagger
#
# Copyright (C) 2006-2008, 2011-2014 Lukáš Lalinský
# Copyright (C) 2009, 2018-2021 Philipp Wolfer
# Copyright (C) 2012 Chad Wilson
# Copyright (C) 2012-2013 Michael Wiencek
# Copyright (C) 2013-2020 Laurent Monin
# Copyright (C) 2015 Ohm Patel
# Copyright (C) 2015 Sophist-UK
# Copyright (C) 2016 Suhas
# Copyright (C) 2016-2017 Wieland Hoffmann
# Copyright (C) 2016-2018 Sambhav Kothari
# Copyright (C) 2017 Ville Skyttä
# Copyright (C) 2018, 2021 Bob Swift
# Copyright (C) 2021 Gabriel Ferreira
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
from picard.version import (
Version,
VersionError,
)
PICARD_ORG_NAME = "MusicBrainz"
PICARD_APP_NAME = "Picard"
PICARD_DISPLAY_NAME = "MusicBrainz Picard"
PICARD_APP_ID = "org.musicbrainz.Picard"
PICARD_DESKTOP_NAME = PICARD_APP_ID + ".desktop"
PICARD_VERSION = Version(2, 7, 0, 'dev', 3)
# optional build version
# it should be in the form '<platform>_<YYMMDDHHMMSS>'
# ie. win32_20140415091256
PICARD_BUILD_VERSION_STR = ""
def version_to_string(version, short=False):
"""Deprecated: Use picard.version.Version.to_string instead"""
if len(version) != 5:
raise VersionError("Length != 5")
if not isinstance(version, Version):
version = Version(*version)
return version.to_string(short=short)
def version_from_string(version_str):
"""Deprecated: Use picard.version.Version.from_string instead"""
return Version.from_string(version_str)
PICARD_VERSION_STR = PICARD_VERSION.to_string()
PICARD_VERSION_STR_SHORT = PICARD_VERSION.to_string(short=True)
if PICARD_BUILD_VERSION_STR:
__version__ = "%s+%s" % (PICARD_VERSION_STR, PICARD_BUILD_VERSION_STR)
PICARD_FANCY_VERSION_STR = "%s (%s)" % (PICARD_VERSION_STR_SHORT,
PICARD_BUILD_VERSION_STR)
else:
__version__ = PICARD_VERSION_STR_SHORT
PICARD_FANCY_VERSION_STR = PICARD_VERSION_STR_SHORT
# Keep those ordered
api_versions = [
"2.0",
"2.1",
"2.2",
"2.3",
"2.4",
"2.5",
"2.6",
"2.7",
]
api_versions_tuple = [Version.from_string(v) for v in api_versions]
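# Example compatibility check (assumes Version instances compare by value, as
# a namedtuple-style class would):
#   Version.from_string("2.3") in api_versions_tuple  # -> True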
def crash_handler():
"""Implements minimal handling of an exception crashing the application.
This function tries to log the exception to a log file and display
a minimal crash dialog to the user.
    This function is supposed to be called from inside an except block.
"""
import sys
# Allow disabling the graphical crash handler for debugging and CI purposes.
if set(sys.argv) & {'--no-crash-dialog', '-v', '--version', '-V', '--long-version', '-h', '--help'}:
return
# First try to get traceback information and write it to a log file
# with minimum chance to fail.
from tempfile import NamedTemporaryFile
import traceback
trace = traceback.format_exc()
logfile = None
try:
with NamedTemporaryFile(suffix='.log', prefix='picard-crash-', delete=False) as f:
f.write(trace.encode(errors="replace"))
logfile = f.name
except: # noqa: E722,F722 # pylint: disable=bare-except
print("Failed writing log file {0}".format(logfile), file=sys.stderr)
logfile = None
# Display the crash information to the user as a dialog. This requires
# importing Qt5 and has some potential to fail if things are broken.
from PyQt5.QtCore import QCoreApplication, Qt, QUrl
from PyQt5.QtWidgets import QApplication, QMessageBox
app = QCoreApplication.instance()
if not app:
app = QApplication(sys.argv)
msgbox = QMessageBox()
msgbox.setIcon(QMessageBox.Critical)
msgbox.setWindowTitle("Picard terminated unexpectedly")
msgbox.setTextFormat(Qt.RichText)
msgbox.setText(
'An unexpected error has caused Picard to crash. '
'Please report this issue on the <a href="https://tickets.metabrainz.org/projects/PICARD">MusicBrainz bug tracker</a>.')
if logfile:
logfile_url = QUrl.fromLocalFile(logfile)
msgbox.setInformativeText(
'A logfile has been written to <a href="{0}">{1}</a>.'
.format(logfile_url.url(), logfile))
msgbox.setDetailedText(trace)
msgbox.setStandardButtons(QMessageBox.Close)
msgbox.setDefaultButton(QMessageBox.Close)
msgbox.exec_()
app.quit()
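# Minimal usage sketch (hypothetical entry point, not part of this module):
# crash_handler() is meant to run inside an except block wrapping the
# application's main entry, e.g.
#
#   try:
#       main()                 # hypothetical application entry point
#   except BaseException:
#       crash_handler()
#       raise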
| gpl-2.0 | -5,324,598,364,417,041,000 | 34.380282 | 128 | 0.689291 | false | 3.450549 | false | false | false |
retrogradeorbit/Pigo | pigo/lib/ModulePygame.py | 1 | 3307 | # -*- coding: utf-8 -*-
import pygame
from PigoFont import PigoFont
class Font(pygame.font.Font, PigoFont):
def __init__(self, filename, size):
pygame.font.Font.__init__(self,filename,size)
def GetHeight(self):
return pygame.font.Font.get_height(self)
def GetAscent(self):
return pygame.font.Font.get_ascent(self)
def GetDescent(self):
return pygame.font.Font.get_descent(self)
def GlyphMetrics(self, st):
return pygame.font.Font.metrics(self, st)
    def Render(self, text, colour=(255, 255, 255)):
        # The original body called self.render(SDL_Color(...)) with no text
        # argument, which does not match pygame's Font.render(); the version
        # below is a pygame-based reconstruction (note the added `text`
        # parameter, an assumption) that keeps the cFont recolouring step.
        surface = self.render(text, True, (255, 255, 255))
        extents = surface.get_size()
        from PIL import Image
        import cFont
        import numpy
        raw = pygame.image.tostring(surface, "RGBA")
        buff = numpy.frombuffer(raw, numpy.uint8)
        copy = buff.reshape((surface.get_height(), surface.get_width(), 4)).copy()
        colour = numpy.array(colour, dtype=numpy.ubyte)
        cFont.create_font_image_alpha(copy, colour)
        dupe = Image.fromarray(copy, "RGBA")
        return dupe, extents
class TestCase(object):
def setUp(self):
# initialise opengl
success, fail = pygame.init()
if fail:
print "Unable to init pygame: %s\n", pygame.get_error()
sys.exit(1)
pygame.display.init()
pygame.display.set_mode( (320,200), pygame.OPENGL, 24 )
pygame.mouse.set_visible( False )
pygame.display.set_caption(str(self.__class__),str(self.__class__))
def tearDown(self):
pygame.quit()
# base library functions
def Init():
pygame.init()
pygame.font.init()
def Quit():
pygame.quit()
def Flip():
pygame.display.flip()
def Poll():
event = pygame.event.poll()
return None if event.type == pygame.NOEVENT else event
def iskey(event,key):
return event.key == key
def isquit(event):
return event.type == pygame.QUIT
KEY_KP_PLUS = pygame.K_KP_PLUS
KEY_PLUS = pygame.K_KP_PLUS
KEY_KP_MINUS = pygame.K_KP_MINUS
KEY_MINUS = pygame.K_MINUS
KEY_ESCAPE = pygame.K_ESCAPE
KEY_EQUALS = pygame.K_EQUALS
KEY_F11 = pygame.K_F11
KEY_a = pygame.K_a
KEY_b = pygame.K_b
KEY_c = pygame.K_c
KEY_d = pygame.K_d
KEY_e = pygame.K_e
KEY_f = pygame.K_f
KEY_g = pygame.K_g
KEY_h = pygame.K_h
KEY_i = pygame.K_i
KEY_j = pygame.K_j
KEY_k = pygame.K_k
KEY_l = pygame.K_l
KEY_m = pygame.K_m
KEY_n = pygame.K_n
KEY_o = pygame.K_o
KEY_p = pygame.K_p
KEY_q = pygame.K_q
KEY_r = pygame.K_r
KEY_s = pygame.K_s
KEY_t = pygame.K_t
KEY_u = pygame.K_u
KEY_v = pygame.K_v
KEY_w = pygame.K_w
KEY_x = pygame.K_x
KEY_y = pygame.K_y
KEY_z = pygame.K_z
KEYTYPE = pygame.KEYDOWN
def ShowCursor(boolean=True):
pygame.mouse.set_visible(boolean)
def SetAppIcon(filename):
surf = pygame.image.load(filename)
pygame.display.set_icon(surf)
def SetWindowTitle(title, short=None):
pygame.display.set_caption(title, short or title)
def ListModes(depth=0):
return pygame.display.list_modes(depth,pygame.FULLSCREEN|pygame.OPENGL|pygame.HWSURFACE|pygame.DOUBLEBUF)
def SetVideoMode(w,h,depth=24,fullscreen=False):
return pygame.display.set_mode( (w,h), pygame.FULLSCREEN|pygame.OPENGL|pygame.HWSURFACE|pygame.DOUBLEBUF if fullscreen else pygame.OPENGL|pygame.HWSURFACE|pygame.DOUBLEBUF, depth)
| gpl-3.0 | 2,711,493,912,700,501,500 | 24.060606 | 183 | 0.640157 | false | 3.017336 | false | false | false |
tmsbrg/clump | clump_test.py | 1 | 2581 | #!/usr/bin/python3
# clumping factor test. Creates a forest with certain size and terrain regularity based on settings
# Copyright Thomas van der Berg 2016, released under GNU GPLv3(see LICENSE)
import random
# settings
w = 25 # width of world
h = 21 # height of world
trees = 75 # number of trees in forest. If more than num of tiles in world, rest is ignored
clumping_factor = 1.5 # higher: more round and regular terrain, lower: more irregular terrain. should be > 0.0
four_border = False # use four border tiles instead of 8. Makes things square
# data
_map = [] # map to print on terminal
forest = set() # coordinates of trees already selected
forest_border = dict() # keys: coordinates of empty spaces next to trees. values: weights(used to put more trees here)
max_weight = 0 # sum of all weights
def out_of_bounds(x, y):
return (x < 0 or x >= w or y < 0 or y >= h)
def create_forest(x, y):
global max_weight
if out_of_bounds(x, y):
print("ERROR!", x, "and", y, "out of bounds!")
return False
_map[y][x] = '#'
if (x, y) in forest_border:
max_weight -= forest_border[(x, y)]
del forest_border[(x, y)]
forest.add((x, y))
try_add_forest_border(x - 1, y)
try_add_forest_border(x + 1, y)
try_add_forest_border(x, y - 1)
try_add_forest_border(x, y + 1)
if not four_border:
try_add_forest_border(x - 1, y - 1)
try_add_forest_border(x + 1, y - 1)
try_add_forest_border(x - 1, y + 1)
try_add_forest_border(x + 1, y + 1)
return True
def try_add_forest_border(x, y):
global max_weight
if not out_of_bounds(x, y) and (x, y) not in forest:
if (x, y) not in forest_border:
forest_border[(x, y)] = 1
max_weight += 1
else:
weight = forest_border[(x, y)]
max_weight -= weight
weight *= clumping_factor
max_weight += weight
forest_border[(x, y)] = weight
# initialize map
for y in range(h):
_map.append(['.'] * w)
# initial tree
create_forest(w // 2, h // 2)
# create every tree
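# The remaining trees are placed by roulette-wheel selection over
# forest_border: a tile's weight is multiplied by clumping_factor each time
# another neighbouring tree appears, so clumping_factor > 1 favours tiles
# already surrounded by forest (rounder terrain) and < 1 favours lone edges.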
for tree in range(1, trees):
if len(forest_border) == 0:
break
random_factor = random.uniform(0, max_weight)
found = False
for place in forest_border.items():
random_factor -= place[1]
if random_factor < 0:
tile = place[0]
found = True
break
if found:
create_forest(tile[0], tile[1])
else:
print("Error placing tree")
# print map
for y in range(h):
print(" ".join(_map[y]))
| gpl-3.0 | 64,574,869,053,653,050 | 29.72619 | 118 | 0.596668 | false | 3.279543 | false | false | false |
person142/scipy | scipy/spatial/_spherical_voronoi.py | 1 | 13865 | """
Spherical Voronoi Code
.. versionadded:: 0.18.0
"""
#
# Copyright (C) Tyler Reddy, Ross Hemsley, Edd Edmondson,
# Nikolai Nowaczyk, Joe Pitt-Francis, 2015.
#
# Distributed under the same BSD license as SciPy.
#
import warnings
import numpy as np
import scipy
from . import _voronoi
from scipy.spatial import cKDTree
__all__ = ['SphericalVoronoi']
def calculate_solid_angles(R):
"""Calculates the solid angles of plane triangles. Implements the method of
Van Oosterom and Strackee [VanOosterom]_ with some modifications. Assumes
that input points have unit norm."""
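    # For unit vectors R1, R2, R3 the (unsigned) solid angle computed here is
    #   Omega = 2 * atan2(det([R1 R2 R3]), 1 + R1.R2 + R2.R3 + R3.R1)
    # evaluated for all triangles at once.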
# Original method uses a triple product `R1 . (R2 x R3)` for the numerator.
# This is equal to the determinant of the matrix [R1 R2 R3], which can be
# computed with better stability.
numerator = np.linalg.det(R)
denominator = 1 + (np.einsum('ij,ij->i', R[:, 0], R[:, 1]) +
np.einsum('ij,ij->i', R[:, 1], R[:, 2]) +
np.einsum('ij,ij->i', R[:, 2], R[:, 0]))
return np.abs(2 * np.arctan2(numerator, denominator))
class SphericalVoronoi:
""" Voronoi diagrams on the surface of a sphere.
.. versionadded:: 0.18.0
Parameters
----------
points : ndarray of floats, shape (npoints, ndim)
Coordinates of points from which to construct a spherical
Voronoi diagram.
radius : float, optional
Radius of the sphere (Default: 1)
center : ndarray of floats, shape (ndim,)
Center of sphere (Default: origin)
threshold : float
Threshold for detecting duplicate points and
mismatches between points and sphere parameters.
(Default: 1e-06)
Attributes
----------
points : double array of shape (npoints, ndim)
the points in `ndim` dimensions to generate the Voronoi diagram from
radius : double
radius of the sphere
center : double array of shape (ndim,)
center of the sphere
vertices : double array of shape (nvertices, ndim)
Voronoi vertices corresponding to points
regions : list of list of integers of shape (npoints, _ )
the n-th entry is a list consisting of the indices
of the vertices belonging to the n-th point in points
Methods
----------
calculate_areas
Calculates the areas of the Voronoi regions. The regions are
spherical polygons (not planar). The sum of the areas is
`4 * pi * radius**2`.
Raises
------
ValueError
If there are duplicates in `points`.
If the provided `radius` is not consistent with `points`.
Notes
-----
The spherical Voronoi diagram algorithm proceeds as follows. The Convex
Hull of the input points (generators) is calculated, and is equivalent to
their Delaunay triangulation on the surface of the sphere [Caroli]_.
The Convex Hull neighbour information is then used to
order the Voronoi region vertices around each generator. The latter
approach is substantially less sensitive to floating point issues than
angle-based methods of Voronoi region vertex sorting.
Empirical assessment of spherical Voronoi algorithm performance suggests
quadratic time complexity (loglinear is optimal, but algorithms are more
challenging to implement).
References
----------
.. [Caroli] Caroli et al. Robust and Efficient Delaunay triangulations of
points on or close to a sphere. Research Report RR-7004, 2009.
.. [VanOosterom] Van Oosterom and Strackee. The solid angle of a plane
triangle. IEEE Transactions on Biomedical Engineering,
2, 1983, pp 125--126.
See Also
--------
Voronoi : Conventional Voronoi diagrams in N dimensions.
Examples
--------
Do some imports and take some points on a cube:
>>> import matplotlib.pyplot as plt
>>> from scipy.spatial import SphericalVoronoi, geometric_slerp
>>> from mpl_toolkits.mplot3d import proj3d
>>> # set input data
>>> points = np.array([[0, 0, 1], [0, 0, -1], [1, 0, 0],
... [0, 1, 0], [0, -1, 0], [-1, 0, 0], ])
Calculate the spherical Voronoi diagram:
>>> radius = 1
>>> center = np.array([0, 0, 0])
>>> sv = SphericalVoronoi(points, radius, center)
Generate plot:
>>> # sort vertices (optional, helpful for plotting)
>>> sv.sort_vertices_of_regions()
>>> t_vals = np.linspace(0, 1, 2000)
>>> fig = plt.figure()
>>> ax = fig.add_subplot(111, projection='3d')
>>> # plot the unit sphere for reference (optional)
>>> u = np.linspace(0, 2 * np.pi, 100)
>>> v = np.linspace(0, np.pi, 100)
>>> x = np.outer(np.cos(u), np.sin(v))
>>> y = np.outer(np.sin(u), np.sin(v))
>>> z = np.outer(np.ones(np.size(u)), np.cos(v))
>>> ax.plot_surface(x, y, z, color='y', alpha=0.1)
>>> # plot generator points
>>> ax.scatter(points[:, 0], points[:, 1], points[:, 2], c='b')
>>> # plot Voronoi vertices
>>> ax.scatter(sv.vertices[:, 0], sv.vertices[:, 1], sv.vertices[:, 2],
... c='g')
>>> # indicate Voronoi regions (as Euclidean polygons)
>>> for region in sv.regions:
... n = len(region)
... for i in range(n):
... start = sv.vertices[region][i]
... end = sv.vertices[region][(i + 1) % n]
... result = geometric_slerp(start, end, t_vals)
... ax.plot(result[..., 0],
... result[..., 1],
... result[..., 2],
... c='k')
>>> ax.azim = 10
>>> ax.elev = 40
>>> _ = ax.set_xticks([])
>>> _ = ax.set_yticks([])
>>> _ = ax.set_zticks([])
>>> fig.set_size_inches(4, 4)
>>> plt.show()
"""
def __init__(self, points, radius=1, center=None, threshold=1e-06):
if radius is None:
radius = 1.
warnings.warn('`radius` is `None`. '
'This will raise an error in a future version. '
'Please provide a floating point number '
'(i.e. `radius=1`).',
DeprecationWarning)
self.points = np.array(points).astype(np.double)
self.radius = radius
self._dim = len(points[0])
if center is None:
self.center = np.zeros(self._dim)
else:
self.center = np.array(center)
# test degenerate input
self._rank = np.linalg.matrix_rank(self.points - self.points[0],
tol=threshold * self.radius)
if self._rank <= 1:
raise ValueError("Rank of input points must be at least 2")
if cKDTree(self.points).query_pairs(threshold * self.radius):
raise ValueError("Duplicate generators present.")
radii = np.linalg.norm(self.points - self.center, axis=1)
max_discrepancy = np.abs(radii - self.radius).max()
if max_discrepancy >= threshold * self.radius:
raise ValueError("Radius inconsistent with generators.")
self._calc_vertices_regions()
def _handle_geodesic_input(self):
# center the points
centered = self.points - self.center
# calculate an orthogonal transformation which puts circle on x-y axis
_, _, vh = np.linalg.svd(centered - np.roll(centered, 1, axis=0))
# apply transformation
circle = centered @ vh.T
h = np.mean(circle[:, 2])
if h < 0:
h, vh, circle = -h, -vh, -circle
circle_radius = np.sqrt(np.maximum(0, self.radius**2 - h**2))
# calculate the north and south poles in this basis
poles = [[0, 0, self.radius], [0, 0, -self.radius]]
# calculate spherical voronoi diagram on the circle
lower_dimensional = SphericalVoronoi(circle[:, :2],
radius=circle_radius)
n = len(lower_dimensional.vertices)
vertices = h * np.ones((n, 3))
vertices[:, :2] = lower_dimensional.vertices
# north and south poles are also Voronoi vertices
self.vertices = np.concatenate((vertices, poles)) @ vh + self.center
# each region contains two vertices from the plane and the north and
# south poles
self.regions = [[a, n, b, n + 1] for a, b in lower_dimensional.regions]
def _calc_vertices_regions(self):
"""
Calculates the Voronoi vertices and regions of the generators stored
in self.points. The vertices will be stored in self.vertices and the
regions in self.regions.
This algorithm was discussed at PyData London 2015 by
Tyler Reddy, Ross Hemsley and Nikolai Nowaczyk
"""
if self._dim == 3 and self._rank == 2:
self._handle_geodesic_input()
return
# get Convex Hull
conv = scipy.spatial.ConvexHull(self.points)
# get circumcenters of Convex Hull triangles from facet equations
# for 3D input circumcenters will have shape: (2N-4, 3)
self.vertices = self.radius * conv.equations[:, :-1] + self.center
self._simplices = conv.simplices
# calculate regions from triangulation
# for 3D input simplex_indices will have shape: (2N-4,)
simplex_indices = np.arange(len(self._simplices))
# for 3D input tri_indices will have shape: (6N-12,)
tri_indices = np.column_stack([simplex_indices] * self._dim).ravel()
# for 3D input point_indices will have shape: (6N-12,)
point_indices = self._simplices.ravel()
# for 3D input indices will have shape: (6N-12,)
indices = np.argsort(point_indices, kind='mergesort')
# for 3D input flattened_groups will have shape: (6N-12,)
flattened_groups = tri_indices[indices].astype(np.intp)
# intervals will have shape: (N+1,)
intervals = np.cumsum(np.bincount(point_indices + 1))
# split flattened groups to get nested list of unsorted regions
groups = [list(flattened_groups[intervals[i]:intervals[i + 1]])
for i in range(len(intervals) - 1)]
self.regions = groups
def sort_vertices_of_regions(self):
"""Sort indices of the vertices to be (counter-)clockwise ordered.
Raises
------
TypeError
If the points are not three-dimensional.
Notes
-----
For each region in regions, it sorts the indices of the Voronoi
vertices such that the resulting points are in a clockwise or
counterclockwise order around the generator point.
This is done as follows: Recall that the n-th region in regions
surrounds the n-th generator in points and that the k-th
Voronoi vertex in vertices is the circumcenter of the k-th triangle
in self._simplices. For each region n, we choose the first triangle
(=Voronoi vertex) in self._simplices and a vertex of that triangle
not equal to the center n. These determine a unique neighbor of that
triangle, which is then chosen as the second triangle. The second
triangle will have a unique vertex not equal to the current vertex or
the center. This determines a unique neighbor of the second triangle,
which is then chosen as the third triangle and so forth. We proceed
through all the triangles (=Voronoi vertices) belonging to the
generator in points and obtain a sorted version of the vertices
of its surrounding region.
"""
if self._dim != 3:
raise TypeError("Only supported for three-dimensional point sets")
if self._rank == 2:
return # regions are sorted by construction
_voronoi.sort_vertices_of_regions(self._simplices, self.regions)
def calculate_areas(self):
"""Calculates the areas of the Voronoi regions. The regions are
spherical polygons (not planar). The sum of the areas is
`4 * pi * radius**2`.
.. versionadded:: 1.5.0
Returns
-------
areas : double array of shape (npoints,)
The areas of the Voronoi regions.
"""
self.sort_vertices_of_regions()
sizes = [len(region) for region in self.regions]
csizes = np.cumsum(sizes)
num_regions = csizes[-1]
# We create a set of triangles consisting of one point and two Voronoi
# vertices. The vertices of each triangle are adjacent in the sorted
# regions list.
point_indices = [i for i, size in enumerate(sizes)
for j in range(size)]
nbrs1 = np.array([r for region in self.regions for r in region])
# The calculation of nbrs2 is a vectorized version of:
# np.array([r for region in self.regions for r in np.roll(region, 1)])
nbrs2 = np.roll(nbrs1, 1)
indices = np.roll(csizes, 1)
indices[0] = 0
nbrs2[indices] = nbrs1[csizes - 1]
# Normalize points and vertices.
pnormalized = (self.points - self.center) / self.radius
vnormalized = (self.vertices - self.center) / self.radius
# Create the complete set of triangles and calculate their solid angles
triangles = np.hstack([pnormalized[point_indices],
vnormalized[nbrs1],
vnormalized[nbrs2]
]).reshape((num_regions, 3, 3))
triangle_solid_angles = calculate_solid_angles(triangles)
# Sum the solid angles of the triangles in each region
solid_angles = np.cumsum(triangle_solid_angles)[csizes - 1]
solid_angles[1:] -= solid_angles[:-1]
# Get polygon areas using A = omega * r**2
return solid_angles * self.radius**2
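# Minimal usage sketch (not part of the original module; assumes the class is
# exported as scipy.spatial.SphericalVoronoi): the region areas computed above
# tile the sphere, so they should sum to 4*pi*r**2.
#
#   import numpy as np
#   from scipy.spatial import SphericalVoronoi
#   pts = np.random.randn(20, 3)
#   pts /= np.linalg.norm(pts, axis=1, keepdims=True)  # project onto unit sphere
#   sv = SphericalVoronoi(pts)
#   assert np.isclose(sv.calculate_areas().sum(), 4 * np.pi)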
| bsd-3-clause | -8,462,916,385,199,716,000 | 38.501425 | 79 | 0.601442 | false | 3.891384 | false | false | false |
orcmkit/ORCmKit | Python27/ORCSim/LineSet.py | 1 | 3486 | from __future__ import division
from CoolProp.CoolProp import PropsSI
from Correlations import f_h_1phase_Tube,TrhoPhase_ph,Tsat
from math import log,pi,exp
class LineSetClass():
def __init__(self,**kwargs):
#Load the parameters passed in
# using the dictionary
self.__dict__.update(kwargs)
def Update(self,**kwargs):
#Load the parameters passed in
# using the dictionary
self.__dict__.update(kwargs)
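    # Minimal usage sketch (not from the original file; attribute names follow
    # what Calculate() below expects, the values are illustrative assumptions):
    #
    #   line = LineSetClass(Ref='R245fa', L=5.0, OD=0.012, ID=0.010,
    #                       k_tube=14.0, t_insul=0.02, k_insul=0.04,
    #                       h_air=10.0, T_air=300.0, mdot=0.05,
    #                       pin=250.0, hin=250000.0)
    #   line.Calculate()
    #   print(line.Q, line.DP, line.Charge)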
def OutputList(self):
"""
Return a list of parameters for this component for further output
It is a list of tuples, and each tuple is formed of items:
[0] Description of value
[1] Units of value
[2] The value itself
"""
return [
('Length of tube','m',self.L),
('Supply line OD','m',self.OD),
('Supply line ID','m',self.ID),
('Tube Conductivity','W/m-K',self.k_tube),
('Insulation thickness','m',self.t_insul),
('Insulation conductivity','W/m-K',self.k_insul),
('Air overall HTC','W/m^2-K',self.h_air),
('Air Temperature','K',self.T_air),
('Q Total','W',self.Q),
('Pressure drop ','Pa',self.DP),
('Reynolds # Fluid','-',self.Re_fluid),
('Mean HTC Fluid','W/m^2-K',self.h_fluid),
('Charge','kg',self.Charge),
('Inlet Temperature','K',self.Tin),
('Outlet Temperature','K',self.Tout)
]
def Calculate(self):
#Figure out the inlet state
self.Tbubble=Tsat(self.Ref,self.pin,0,0)
self.Tdew=Tsat(self.Ref,self.pin,1,0)
self.Tin,self.rhoin,self.Phasein=TrhoPhase_ph(self.Ref,self.pin,self.hin,self.Tbubble,self.Tdew)
self.f_fluid, self.h_fluid, self.Re_fluid=f_h_1phase_Tube(self.mdot, self.ID, self.Tin, self.pin, self.Ref)
# Specific heat capacity [J/kg-K]
cp=PropsSI('C','T',self.Tin,'P',self.pin*1000+100,self.Ref) #J/kg-K
# Density [kg/m^3]
rho=PropsSI('D','T',self.Tin, 'P', self.pin*1000+100, self.Ref)
#Thermal resistance of tube
R_tube=log(self.OD/self.ID)/(2*pi*self.L*self.k_tube)
#Thermal resistance of insulation
R_insul=log((self.OD+2.0*self.t_insul)/self.OD)/(2*pi*self.L*self.k_insul);
#Convective UA for inside the tube
UA_i=pi*self.ID*self.L*self.h_fluid;
#Convective UA for the air-side
UA_o=pi*(self.OD+2*self.t_insul)*self.L*self.h_air;
#Avoid the possibility of division by zero if h_air is zero
if UA_o<1e-12:
UA_o=1e-12
#Overall UA value
UA=1/(1/UA_i+R_tube+R_insul+1/UA_o)
#Outlet fluid temperature [K]
# self.Tout=self.T_air-exp(-UA/(self.mdot*cp))*(self.T_air-self.Tin)
        #first, assume no temperature drop/rise in the lines
self.Tout = self.Tin
#Overall heat transfer rate [W]
self.Q=self.mdot*cp*(self.Tout-self.Tin)
self.hout=self.hin+self.Q/self.mdot
#Pressure drop calculations for superheated refrigerant
v=1./rho
G=self.mdot/(pi*self.ID**2/4.0)
#Pressure gradient using Darcy friction factor
dpdz=-self.f_fluid*v*G**2/(2.*self.ID) #Pressure gradient
self.DP=dpdz*self.L
#Charge in Line set [kg]
self.Charge=pi*self.ID**2/4.0*self.L*rho | mit | -4,930,696,320,973,330,000 | 39.546512 | 115 | 0.564831 | false | 3.063269 | false | false | false |
erwin00776/copy_ml_implements | decision_tree/decision_tree_id3.py | 1 | 5458 | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
import math
import operator
def calculateShannonEntropy(dataset):
# [[0, 0, 0, 0, 'N'], [0, 0, 1, 1, 'Y']]
instance_number = len(dataset)
# {'Y': 1, 'N': 1}
label_number_map = {}
for instance in dataset:
label = instance[-1]
if label not in label_number_map.keys():
label_number_map[label] = 0
label_number_map[label] += 1
total_shannon_entropy = 0.0
for label in label_number_map:
probability = float(label_number_map[label]) / instance_number
shannon_entropy = probability * math.log(probability, 2) * -1
total_shannon_entropy += shannon_entropy
return total_shannon_entropy
def testCalculateShannonEntropy():
# Should be 1.0
dataset = [[0, 0, 0, 0, 'N'], [0, 0, 1, 1, 'Y']]
print("The shannon entropy is: {}".format(calculateShannonEntropy(dataset)))
# Should be 0.0
dataset = [[0, 0, 0, 0, 'N'], [0, 0, 1, 1, 'N']]
print("The shannon entropy is: {}".format(calculateShannonEntropy(dataset)))
def split_dataset(dataset, feature, value):
""" Get the dataset when "feature" is equal to "value"
"""
reture_data_set = []
# TODO: Example
for instance in dataset:
if instance[feature] == value:
new_instance = instance[:feature]
new_instance.extend(instance[feature + 1:])
reture_data_set.append(new_instance)
return reture_data_set
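# Quick illustration (not in the original file): keep the instances whose
# feature 0 equals 0 and drop that column.
#
#   >>> split_dataset([[0, 1, 'N'], [1, 0, 'Y']], 0, 0)
#   [[1, 'N']]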
def choose_best_feature_to_split(dataset):
# Example: 4
feature_number = len(dataset[0]) - 1
base_entropy = calculateShannonEntropy(dataset)
best_info_gain_ratio = 0.0
best_feature = -1
    best_after_split_entropy = float('inf')  # ID3 keeps the split with the lowest post-split entropy
# Example: [0, 0, 0, 0]
for i in range(feature_number):
# Example:
instance_with_one_feature = [instance[i] for instance in dataset]
feature_value_set = set(instance_with_one_feature)
after_split_entropy = 0.0
instrinsic_value = 0.0
# Example: [0, 1]
for value in feature_value_set:
sub_dataset = split_dataset(dataset, i, value)
probability = len(sub_dataset) / float(len(dataset))
after_split_entropy += probability * calculateShannonEntropy(sub_dataset)
instrinsic_value += -probability * math.log(probability, 2)
'''
info_gain = base_entropy - after_split_entropy
# Check if it is zero
if (instrinsic_value == 0):
continue
info_gain_ratio = info_gain / instrinsic_value
if (info_gain_ratio > best_info_gain_ratio):
best_info_gain_ratio = info_gain_ratio
best_feature = i
'''
        # a lower entropy after the split means a higher information gain
        if after_split_entropy < best_after_split_entropy:
            best_after_split_entropy = after_split_entropy
            best_feature = i
return best_feature
def create_decision_tree(dataset, header_names):
# Example: [[0, 0, 0, 0, 'N'], [0, 0, 0, 1, 'N'], [1, 0, 0, 0, 'Y']]
# Example: ['N', 'N', 'Y']
labels = [instance[-1] for instance in dataset]
if labels.count(labels[0]) == len(labels):
# Return if all the values are the same
return labels[0]
# Example: ['N']
if len(dataset[0]) == 1:
label_count_map = {}
for label in labels:
if label not in label_count_map.keys():
label_count_map[label] = 0
label_count_map[label] += 1
        sorted_label_count_map = sorted(
            label_count_map.iteritems(), key=operator.itemgetter(1), reverse=True)
        return sorted_label_count_map[0][0]
best_feature_id = choose_best_feature_to_split(dataset)
header_name = header_names[best_feature_id]
decision_tree = {header_name: {}}
# TODO: don't modify the input parameter
del (header_names[best_feature_id])
all_feature_values = [instance[best_feature_id] for instance in dataset]
unique_feature_values = set(all_feature_values)
for value in unique_feature_values:
sub_header_names = header_names[:]
sub_dataset = split_dataset(dataset, best_feature_id, value)
decision_tree[header_name][value] = create_decision_tree(
sub_dataset, sub_header_names)
return decision_tree
def predict(decision_tree, header_names, test_dataset):
# Example: {'outlook': {0: 'N', 1: 'Y', 2: {'windy': {0: 'Y', 1: 'N'}}}}
# print("Current tree: {}".format(decision_tree))
# Example: "outlook"
root_key = list(decision_tree.keys())[0]
# Example: {0: 'N', 1: 'Y', 2: {'windy': {0: 'Y', 1: 'N'}}}
sub_decision_tree = decision_tree[root_key]
# Example: 0
feature_index = header_names.index(root_key)
for key in sub_decision_tree.keys():
if test_dataset[feature_index] == key:
if type(sub_decision_tree[key]).__name__ == 'dict':
predict_label = predict(sub_decision_tree[key], header_names,
test_dataset)
else:
predict_label = sub_decision_tree[key]
return predict_label
def main():
# Train
dataset = [[0, 0, 0, 0, 'N'], [0, 0, 0, 1, 'N'], [1, 0, 0, 0, 'Y'],
[2, 1, 0, 0, 'Y'], [2, 2, 1, 0, 'Y'], [2, 2, 1, 1,
'N'], [1, 2, 1, 1, 'Y']]
header_names = ['outlook', 'temperature', 'humidity', 'windy']
decision_tree = create_decision_tree(dataset, header_names)
print("Train and get decision tree: {}".format(decision_tree))
# Test
header_names = ['outlook', 'temperature', 'humidity', 'windy']
    # test_dataset = [2, 1, 0, 0]
    test_dataset = [2, 1, 1, 1]
result = predict(decision_tree, header_names, test_dataset)
print("Predict decision tree and get result: {}".format(result))
if __name__ == "__main__":
main()
| mit | -430,658,863,282,517,300 | 28.031915 | 79 | 0.621107 | false | 3.17695 | true | false | false |
jonmsawyer/maio | maio/forms/LoginForm.py | 1 | 2601 | '''File: JurorForm.py
Module: ``portal.forms.JurorForm``
Contains the JurorForm class used on the front page of the OQ portal.
'''
from datetime import datetime
import re
from django import forms
from django.utils.safestring import mark_safe
from django.core.exceptions import ValidationError
class LoginForm(forms.Form):
'''The main form as displayed on the ``portal:login`` screen. The user is asked
for their last name, date of birth, jury summons ID, and the last four
digits of their SSN.
:init:
.. code-block:: python
form = LoginForm(request.POST or None)
'''
#: ``base_fields`` is an automatically generated attribute that is
#: constructed based off of the form fields in this model.
base_fields = None
#: ``declared_fields`` is an automatically generated attribute that is
#: constructed based off of the form fields in this model.
declared_fields = None
#: ``media`` is a meta class.
media = None
#: CharField form field for the username of the user.
username = forms.CharField(widget=forms.TextInput(),
max_length=50,
required=True,
label='Username')
#: CharField form field for the password of the user.
password = forms.CharField(widget=forms.PasswordInput(render_value=True),
max_length=50,
required=True,
label=mark_safe('Password'))
# Method: clean
# See: DocString
def clean(self):
'''
Main cleaning method. :func:`clean()` is performed after all
:func:`clean_FIELD()` methods returned successfully.
        Validations:
            username - must not be an empty string
            password - must not be an empty string
        Errors are attached to the form fields via ``add_error()``
        rather than raised.
'''
# Grab the entire dictionary of cleaned data. Subsequent values may be
# NULL.
data = super(LoginForm, self).clean()
username = data.get('username')
password = data.get('password')
if not username or len(username) == 0:
self.add_error('username', 'Empty username')
data.pop('username', None)
if not password or len(password) == 0:
self.add_error('password', 'Empty password')
data.pop('password', None)
# Always return cleaned data. Return type is a dictionary.
return data
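    # Minimal usage sketch (illustrative, not part of the original module):
    #
    #   form = LoginForm(request.POST or None)
    #   if form.is_valid():
    #       user = auth.authenticate(**form.cleaned_data)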
| mit | -5,810,386,756,428,455,000 | 31.111111 | 83 | 0.579777 | false | 4.772477 | false | false | false |
vi/enki | enki/plugins/workspace_commands.py | 1 | 9151 | """
workspace_commands --- Open, SaveAs, GotoLine commands
======================================================
"""
import os.path
import glob
from enki.core.core import core
from enki.lib.pathcompleter import makeSuitableCompleter, PathCompleter
from enki.core.locator import AbstractCommand
class CommandGotoLine(AbstractCommand):
"""Go to line command implementation
"""
@staticmethod
def signature():
"""Command signature. For Help
"""
return '[l] [LINE]'
@staticmethod
def description():
"""Command description. For Help
"""
return 'Go to line'
@staticmethod
def pattern():
"""Pyparsing pattern
"""
from pyparsing import Literal, Optional, Suppress, White, Word, nums # delayed import, performance optimization
line = Word(nums)("line")
pat = (Literal('l ') + Suppress(Optional(White())) + Optional(line)) ^ line
pat.leaveWhitespace()
pat.setParseAction(CommandGotoLine.create)
return pat
@staticmethod
def create(str, loc, tocs):
"""Callback for pyparsing. Creates an instance of command
"""
if tocs.line:
line = int(tocs.line)
else:
line = None
return [CommandGotoLine(line)]
@staticmethod
def isAvailable():
"""Check if command is currently available
"""
return core.workspace().currentDocument() is not None
def __init__(self, line):
self._line = line
def isReadyToExecute(self):
"""Check if command is complete and ready to execute
"""
return self._line is not None
def execute(self):
"""Execute the command
"""
core.workspace().currentDocument().qutepart.cursorPosition = self._line - 1, None
class CommandOpen(AbstractCommand):
@staticmethod
def signature():
"""Command signature. For Help
"""
return '[f] PATH [LINE]'
@staticmethod
def description():
"""Command description. For Help
"""
return 'Open file. Globs are supported'
@staticmethod
def pattern():
"""pyparsing pattern
"""
def attachLocation(s, loc, tocs):
"""pyparsing callback. Saves path position in the original string
"""
return [(loc, tocs[0])]
from pyparsing import CharsNotIn, Combine, Literal, Optional, White, Word, nums # delayed import, performance optimization
path = CharsNotIn(" \t")("path")
path.setParseAction(attachLocation)
longPath = CharsNotIn(" \t", min=2)("path")
longPath.setParseAction(attachLocation)
slashPath = Combine(Literal('/') + Optional(CharsNotIn(" \t")))("path")
slashPath.setParseAction(attachLocation)
pat = ((Literal('f ') + Optional(White()) + Optional(path)) ^ longPath ^ slashPath) + \
Optional(White() + Word(nums)("line"))
pat.leaveWhitespace()
pat.setParseAction(CommandOpen.create)
return pat
@staticmethod
def create(str, loc, tocs):
"""pyparsing callback. Creates an instance of command
"""
if tocs.path:
pathLocation, path = tocs.path
else:
pathLocation, path = 0, ''
if tocs.line:
line = int(tocs.line)
else:
line = None
return [CommandOpen(pathLocation, path, line)]
def __init__(self, pathLocation, path, line):
self._path = path
self._pathLocation = pathLocation
self._line = line
def completer(self, text, pos):
"""Command completer.
If cursor is after path, returns PathCompleter or GlobCompleter
"""
if pos == self._pathLocation + len(self._path) or \
(not self._path and pos == len(text)):
return makeSuitableCompleter(self._path, pos - self._pathLocation)
else:
return None
def constructCommand(self, completableText):
"""Construct command by path
"""
command = 'f ' + completableText
if self._line is not None:
command += ' %d' % self._line
return command
@staticmethod
def _isGlob(text):
return '*' in text or \
'?' in text or \
'[' in text
def isReadyToExecute(self):
"""Check if command is complete and ready to execute
"""
if self._isGlob(self._path):
files = glob.glob(os.path.expanduser(self._path))
return len(files) > 0 and \
all([os.path.isfile(p) for p in files])
else:
if not self._path:
return False
if os.path.exists(self._path) and \
not os.path.isfile(self._path): # a directory
return False
if self._path.endswith('/'): # going to create a directory
return False
return True
def execute(self):
"""Execute the command
"""
if self._isGlob(self._path):
expandedPathes = []
for path in glob.iglob(os.path.expanduser(self._path)):
try:
path = os.path.abspath(path)
except OSError:
pass
expandedPathes.append(path)
            # 2 loops, because we should open absolute paths. When opening files, enki changes its current directory
for path in expandedPathes:
if self._line is None:
core.workspace().goTo(path)
else:
core.workspace().goTo(path, line = self._line - 1)
else: # file may be not existing
path = os.path.expanduser(self._path)
if os.path.isfile(path):
try:
path = os.path.abspath(path)
except OSError: # current dir deleted
return
if self._line is None:
core.workspace().goTo(path)
else:
core.workspace().goTo(path, line = self._line - 1)
else:
core.workspace().createEmptyNotSavedDocument(path)
class CommandSaveAs(AbstractCommand):
"""Save As Locator command
"""
@staticmethod
def signature():
"""Command signature. For Help
"""
return 's PATH'
@staticmethod
def description():
"""Command description. For Help
"""
return 'Save file As'
@staticmethod
def pattern():
"""pyparsing pattern of the command
"""
def attachLocation(s, loc, tocs):
return [(loc, tocs[0])]
from pyparsing import CharsNotIn, Literal, Optional, White # delayed import, performance optimization
path = CharsNotIn(" \t")("path")
path.setParseAction(attachLocation)
pat = (Literal('s ') + Optional(White()) + Optional(path))
pat.leaveWhitespace()
pat.setParseAction(CommandSaveAs.create)
return pat
@staticmethod
def create(str, loc, tocs):
"""Callback for pyparsing. Creates an instance
"""
if tocs.path:
pathLocation, path = tocs.path
else:
pathLocation, path = 0, ''
return [CommandSaveAs(pathLocation, path)]
@staticmethod
def isAvailable():
"""Check if command is available.
It is available, if at least one document is opened
"""
return core.workspace().currentDocument() is not None
def __init__(self, pathLocation, path):
self._path = path
self._pathLocation = pathLocation
def completer(self, text, pos):
"""Command Completer.
Returns PathCompleter, if cursor stays after path
"""
if pos == self._pathLocation + len(self._path) or \
(not self._path and pos == len(text)):
return PathCompleter(self._path, pos - self._pathLocation)
else:
return None
def constructCommand(self, completableText):
"""Construct command by path
"""
        return 's ' + completableText  # 's ' for save-as; the pattern above only matches 's '
def isReadyToExecute(self):
"""Check if command is complete and ready to execute
"""
return len(self._path) > 0 and not os.path.isdir(self._path)
def execute(self):
"""Execute command
"""
try:
path = os.path.abspath(os.path.expanduser(self._path))
except OSError: # directory deleted
return
core.workspace().currentDocument().setFilePath(path)
core.workspace().currentDocument().saveFile()
class Plugin:
"""Plugin interface
"""
def __init__(self):
for comClass in (CommandGotoLine, CommandOpen, CommandSaveAs):
core.locator().addCommandClass(comClass)
def del_(self):
"""Explicitly called destructor
"""
for comClass in (CommandGotoLine, CommandOpen, CommandSaveAs):
core.locator().removeCommandClass(comClass)
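# Rough illustration (an assumption, not from the original plugin) of how the
# pyparsing pattern for CommandOpen consumes a locator query:
#
#   pat = CommandOpen.pattern()
#   [cmd] = pat.parseString('f /tmp/foo.txt 12')
#   # cmd is a CommandOpen with _path == '/tmp/foo.txt' and _line == 12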
| gpl-2.0 | -8,741,756,029,742,064,000 | 28.614887 | 131 | 0.561359 | false | 4.536936 | false | false | false |
zzvv/shadowsocks_admin | master_node/master_node/views.py | 2 | 8577 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# utf-8 encoding
import random
import string
from datetime import datetime,timedelta
from django import http
from django import forms
from django.db.models import Q
from django.conf import settings
from master_node.models import *
from django.template import RequestContext
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.shortcuts import get_object_or_404
from django.shortcuts import render_to_response
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator,InvalidPage,EmptyPage
import json as simplejson  # django.utils.simplejson was removed in newer Django; tree_json() below needs it
from django.contrib.auth.decorators import login_required
from master_node.models import *
from django.contrib import auth
from django.contrib import messages
from django.utils.translation import ugettext, ugettext_lazy as _
from django.shortcuts import render
from django import http
# Create your views here.
def index(request):
return render_to_response('index.html',
{
# 'user':request.user,
},
context_instance=RequestContext(request,)
)
class EditPassForm(forms.Form):
    username = forms.CharField(max_length=100,label=u'Username')
    oldpass = forms.CharField(max_length=100,label=u'Old password')
    newpass = forms.CharField(max_length=100,label=u'New password')
    newpass2 = forms.CharField(max_length=100,label=u'Repeat new password')
def clean_oldpass(self):
username = self.cleaned_data['username']
oldpass = self.cleaned_data['oldpass']
if auth.authenticate(username=username, password=oldpass) == None:
raise forms.ValidationError(u"原始密码错误!")
return oldpass
def clean_newpass(self):
newpass = self.cleaned_data['newpass']
if len(newpass)<5:
raise forms.ValidationError(u"密码太短了,请大于5位!")
return newpass
def clean_newpass2(self):
newpass = self.cleaned_data.get('newpass','')
newpass2 = self.cleaned_data['newpass2']
if newpass =='':
return newpass2
if newpass !=newpass2:
raise forms.ValidationError(u"两次密码不一致!")
return newpass2
def logout(request):
auth.logout(request)
# Redirect to a success page.
    messages.success(request,message='Logged out successfully.')
return HttpResponseRedirect("/")
@login_required
def profile(request):
if request.method == 'POST':
form = EditPassForm(request.POST)
if form.is_valid() :
cd = form.cleaned_data
            # The form took a shortcut during validation and authenticated
            # with the username submitted in the form.
            # For safety, authenticate again here and build the user object.
            # A forged username is not handled specially; the code will
            # simply error out in that case.
passuser = auth.authenticate(username=request.user.username, password=cd['oldpass'])
passuser.set_password(cd['newpass'])
passuser.save()
            messages.success(request,message='Password changed successfully')
return render_to_response('registration/profile.html',
{
'profile':get_profile(request.user),
'form':form
},
context_instance=RequestContext(request,)
)
return render_to_response('registration/profile.html',
{
'profile':get_profile(request.user),
},
context_instance=RequestContext(request,)
)
class UserCreationForm(forms.ModelForm):
"""
A form that creates a user, with no privileges, from the given username and
password.
"""
    username = forms.RegexField(label=_("Username"), max_length=30,widget=forms.TextInput(attrs={'class':"form-control",'placeholder':"30 characters or fewer."}),
regex=r'^[\w.@+-]+$',
help_text="必选. 30 个英文字符或更少.",
error_messages={
'invalid': "This value may contain only letters, numbers and "
"@/./+/-/_ characters."})
email = forms.RegexField(label="Email", max_length=30,widget=forms.TextInput(attrs={'class':"form-control",'placeholder':"Email"}),
regex=r'^[^@]+@[^@]+\.[^@]+$',
help_text="必选.",
error_messages={
'invalid': "格式错误,请重新输入."})
password1 = forms.CharField(label=_("Password"),widget=forms.PasswordInput(attrs={'class':"form-control",'placeholder':"Password"}))
password2 = forms.CharField(label=_("Password confirmation"),
widget=forms.PasswordInput(attrs={'class':"form-control",'placeholder':"Password"}),
help_text=_("Enter the same password as above, for verification."))
class Meta:
model = User
fields = ("username","email",)
def clean_username(self):
# Since User.username is unique, this check is redundant,
# but it sets a nicer error message than the ORM. See #13147.
username = self.cleaned_data["username"]
try:
User._default_manager.get(username=username)
except User.DoesNotExist:
return username
raise forms.ValidationError(
            u'This username is already taken, please choose another.',
code='duplicate_username',
)
def clean_password2(self):
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError(
                u'The two passwords do not match, please try again.',
code='password_mismatch',
)
return password2
def save(self, commit=True):
user = super(UserCreationForm, self).save(commit=False)
#user.email = self.email
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
def register(request):
if request.method == 'POST':
form = UserCreationForm(request.POST)
if form.is_valid():
new_user = form.save()
profile = Profile(user=new_user,
sport=8000+new_user.id,
spass=GenPassword(10),
start_date=datetime.now(),
now_date=datetime.now(),
end_date=datetime.now()+timedelta(days=30))
profile.save()
            up_user() # sync the ss servers after creating a new user
            messages.success(request,message=u'Registration successful, please log in.')
return HttpResponseRedirect("/")
else:
form = UserCreationForm()
return render_to_response("registration/register.html", {
'form': form,
},
context_instance=RequestContext(request,))
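# Rough URLconf sketch (an assumption, not part of this file) showing how
# these views could be wired up:
#
#   from django.conf.urls import url
#   from master_node import views
#
#   urlpatterns = [
#       url(r'^$', views.index),
#       url(r'^register/$', views.register),
#       url(r'^nodes/$', views.nodes),
#   ]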
def nodes(request):
nodes = Node.objects.all()
return render_to_response("nodes.html", {
'nodes': nodes,
},
context_instance=RequestContext(request,))
def up(request):
return render_to_response("index.html", {
'up': up_user(),
},
context_instance=RequestContext(request,))
def tree(request,id):
try:
iid = int(id)
except:
raise http.Http404
p = get_object_or_404(people,id=iid)
return render_to_response('tree.html',
{
'p':p,
},
#context_instance=RequestContext(request,)
)
def tree_json(request,id,recursion):
try:
iid = int(id)
recursion = int(recursion)
except:
raise http.Http404
p = get_object_or_404(people,id=iid)
res = p.recursion_to_dict(recursion)
return HttpResponse(simplejson.dumps(res,ensure_ascii = False))
| gpl-2.0 | -636,366,153,785,334,000 | 34.340517 | 151 | 0.555434 | false | 4.0995 | false | false | false |
Delosari/dazer | bin/user_conf/Helium_EmissivityComparison.py | 1 | 3138 | from numpy import array, linspace, zeros, pi
from CodeTools.PlottingManager import myPickle
import pyneb as pn
S4 = pn.RecAtom('He',1)
He1_Emis_pyneb = S4.getEmissivity(tem=10000.0, den=100.0, label = '3889.0')
He_Emis_article = 1.4005 * 1e-25 #Units: 4 Pi j / n_e / n_He+ (erg cm^3 s^-1).
#Emissivity coefficient
print 'Emissivity ratio', He1_Emis_pyneb / He_Emis_article
#Load script support classes
pv = myPickle()
#Declare Figure format
pv.FigFormat_One(ColorConf='Night1')
#Load pyneb atom
H1 = pn.RecAtom('H',1)
S4 = pn.RecAtom('He',1)
Helium_Labels = ['3889.0', '4026.0', '4471.0', '5876.0', '6678.0', '7065.0']
#Declare initial conditions
Te_vector = linspace(8000, 25000, 100) #K
ne_vector = linspace(10, 1000, 100) #cm^-3
Te_0 = 10000
ne_0 = 100
Hbeta_0_Emis = H1.getEmissivity(tem=Te_0, den=ne_0, label='4_2')
Normalizing_Constant = 1e-25 #erg cm^3 s^-1
#----Pyneb-----------------------------------------------
#Emissivity vector
HeEm_vector = zeros((len(Helium_Labels),len(ne_vector)))
#Calculate emissivities
for i in range(len(Helium_Labels)):
for j in range(len(Te_vector)):
HeEm_vector[i][j] = S4.getEmissivity(tem=Te_vector[j], den=ne_0, label = Helium_Labels[i])
#Plot the data
for k in range(len(Helium_Labels)):
Label = 'HeI ' + Helium_Labels[k] + r'$\AA$' + ' emissivity'
pv.DataPloter_One(Te_vector, HeEm_vector[k]/1e-25, Label, pv.Color_Vector[2][k])
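# Note: the 1e-25 divisor above only rescales the pyneb emissivities
# (erg cm^3 s^-1, per the article units quoted at the top) for plotting.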
#-----PFSFD Emissivities from 2012 paper-----------------------------------------------
TableAddress = '/home/vital/git/Dazer_Local/Dazer/Astro_Libraries/PFSD_HeEmissivities_ne100'
Temps, He3889, He40226, He4471, He5876, He6678, He7065 = pv.get_TableColumn((0,2,3,4,5,6,7), TableAddress, HeaderSize=1, StringIndexes=False, unpack_check=True)
Emissivities_Table = [He3889, He40226, He4471, He5876, He6678, He7065]
print 'Helium emissivity'
#Plot the data
Conversionparameter = 1
#In the table the data is in units = 4 * pi * j / (ne * nHe+)
for k in range(len(Emissivities_Table)):
Label = 'PFSD emissivity'
Emissivity = Emissivities_Table[k] * Conversionparameter
pv.DataPloter_One(Temps, Emissivity, Label, pv.Color_Vector[2][k], LineStyle=None)
# for k in range(len(Emissivities_Table)):
# Label = 'PFSD emissivity'
# pyneb_Counterpart = zeros(len(Temps))
# for i in range(len(Temps)):
# pyneb_Counterpart[i] = S4.getEmissivity(tem=Temps[i], den=ne_0, label = Helium_Labels[k])
# print 'Linea', Helium_Labels[k]
# print Emissivities_Table[k]* 10e-25/ pyneb_Counterpart
# Emissivity = Emissivities_Table[k]* 10e-25/ pyneb_Counterpart
# pv.DataPloter_One(Temps, Emissivity, Label, pv.Color_Vector[2][k], LineStyle=None)
PlotTitle = 'Helium lines emissivity evolution with Temperature'
x_label = 'Temperature ' + r'$(K)$'
y_label = r'$E_{He}\left(\lambda,\,T_{e},\,n_{e}=100\,cm^{-3}\right)_{He}\,/ 10^-25 ()$'
pv.Labels_Legends_One(PlotTitle, x_label, y_label)
pv.DisplayFigure()
print 'Treatment completed' | mit | -3,833,703,524,354,702,300 | 35.08046 | 160 | 0.633206 | false | 2.540891 | false | false | false |
RedFantom/GSF-Parser | frames/file.py | 1 | 14998 | """
Author: RedFantom
Contributors: Daethyra (Naiii) and Sprigellania (Zarainia)
License: GNU GPLv3 as in LICENSE.md
Copyright (C) 2016-2018 RedFantom
"""
# UI Libraries
import tkinter as tk
from tkinter import ttk
from tkinter import messagebox
from tkinter import filedialog
# Standard Library
from datetime import datetime
import operator
import os
from typing import Dict, List
# Project Modules
import variables
from parsing import matchstats, spawnstats
from data import abilities
from parsing.parser import Parser
from toplevels.splash import SplashScreen
from parsing.filehandler import FileHandler
from parsing.screen import ScreenParser
from widgets.general import Calendar, DateKeyDict
# Class for the _frame in the fileTab of the parser
class FileFrame(ttk.Frame):
"""Frame containing widgets for file, match and spawn selection"""
# __init__ creates all widgets
def __init__(self, root_frame, main_window):
"""Create all widgets and make the links between them"""
ttk.Frame.__init__(self, root_frame, width=200, height=420)
self.main_window = main_window
self._splash: SplashScreen = None
self._calendar = Calendar(self, callback=self._select_date, highlight="#4286f4")
self._tree = ttk.Treeview(self, show=("tree", "headings"), height=6)
self._scroll = ttk.Scrollbar(self, orient=tk.VERTICAL, command=self._tree.yview)
self._refresh_button = ttk.Button(self, text="Refresh", command=self.update_files)
self._files: List[str] = list()
self._dates: Dict[datetime: List[str]] = DateKeyDict()
self.setup_tree()
def setup_tree(self):
"""Configure the Treeview options"""
self._tree.configure(yscrollcommand=self._scroll.set)
self._tree.column("#0", width=150)
self._tree.heading("#0", text="Matches")
self._tree.bind("<Double-1>", self._parse_item)
def grid_widgets(self):
"""Configure widgets in grid geometry manager"""
self._calendar.grid(row=0, column=0, sticky="nswe")
self._tree.grid(row=1, column=0, sticky="nswe", padx=5, pady=(0, 5))
self._scroll.grid(row=1, column=1, sticky="nswe", pady=(0, 5))
self._refresh_button.grid(row=2, column=0, sticky="nswe", padx=5, pady=(0, 5))
def update_files(self):
"""Update the Calendar with the new files"""
self.clear_data_widgets()
self._dates.clear()
folder = variables.settings["parsing"]["path"]
if not os.path.exists(folder):
messagebox.showerror("Error",
"The specified CombatLogs folder does not exist. Please "
"choose a different folder.")
folder = filedialog.askdirectory()
variables.settings.write_settings({"parsing": {"path": folder}})
return self.update_files()
files = [f for f in os.listdir(folder) if Parser.get_gsf_in_file(f)]
self.create_splash(len(files))
match_count: Dict[datetime: int] = DateKeyDict()
for file in files:
date = Parser.parse_filename(file)
if date is None: # Failed to parse
continue
if date not in match_count:
match_count[date] = 0
match_count[date] += Parser.count_matches(file)
if date not in self._dates:
self._dates[date] = list()
self._dates[date].append(file)
self._splash.increment()
self._calendar.update_heatmap(match_count)
self.destroy_splash()
def _select_date(self, date: datetime):
"""Callback for Calendar widget selection command"""
self.clear_data_widgets()
self._tree.delete(*self._tree.get_children(""))
if date not in self._dates:
return
self._files: List[str] = self._dates[date]
for f, file in enumerate(sorted(self._files)):
name = Parser.get_player_name_raw(file)
cube, matches, spawns = Parser.split_combatlog_file(file)
for m, match in enumerate(sorted(matches[::2])):
match = datetime.strftime(match, "%H:%M, {}".format(name))
match_iid = "{},{}".format(f, m)
self._tree.insert("", tk.END, text=match, iid=match_iid)
for s, spawn in enumerate(sorted(spawns[m])):
spawn = datetime.strftime(spawn, "%H:%M:%S")
player_list: List[str] = Parser.get_player_id_list(cube[m][s])
abs_dict: Dict[str: int] = Parser.get_abilities_dict(cube[m][s], player_list)
ships: List[str] = Parser.get_ship_for_dict(abs_dict)
ship = self.format_ships_list(ships)
spawn = "{}{}".format(spawn, ship)
spawn_iid = "{},{},{}".format(f, m, s)
self._tree.insert(match_iid, tk.END, text=spawn, iid=spawn_iid)
def _parse_item(self, _: tk.Event):
"""Parse a match/spawn from a file"""
self.clear_data_widgets()
selection = self._tree.selection()
if len(selection) == 0:
return
elements = selection[0].split(",")
if len(elements) == 3: # Spawns
f, m, s = map(int, elements)
self.parse_spawn(self._files[f], m, s)
else: # Matches
f, m = map(int, elements)
self.parse_match(self._files[f], m)
def create_splash(self, maximum: int):
"""Update the maximum value of the splash screen"""
if self.main_window.splash is not None:
self._splash = self.main_window.splash
self._splash.update_max(maximum)
else:
self._splash = SplashScreen(self.main_window, maximum, title="Loading Files")
def destroy_splash(self):
"""Destroy the self-created SplashScreen or keep the BootSplash"""
if self.main_window.splash is None:
self._splash.destroy()
def update_widgets(self, abilities_dict, statistics_string, shipsdict, enemies, enemydamaged,
enemydamaget, uncounted):
"""
        This function can update the data widgets for files, matches and
        folders by updating the widgets of statsframe and shipsframe
        according to the data received from parsing results
:param abilities_dict: abilities dictionary with abilities as
keys and amounts as values
:param statistics_string: string to set in the statistics tab
:param shipsdict: dictionary with ships as keys and amounts as
values
:param enemies: list of enemy ID numbers
:param enemydamaged: dictionary with enemies as keys and
amounts of damage as values
:param enemydamaget: dictionary with enemies as keys and
amounts of damage as values
:param uncounted: amount of uncounted ship occurrences
"""
self.main_window.middle_frame.statistics_numbers_var.set(statistics_string)
for key, value in abilities_dict.items():
self.main_window.middle_frame.abilities_treeview.insert('', tk.END, text=key, values=(value,))
ships_string = "Ships used:\t\tCount:\n"
for ship in abilities.ships_strings:
if variables.settings["gui"]["faction"] == "republic":
name = abilities.rep_strings[ship]
else:
name = ship
try:
ships_string += name + "\t\t" + str(shipsdict[ship.replace("\t", "", 1)]) + "\n"
except KeyError:
ships_string += name + "\t\t0\n"
ships_string += "Uncounted\t\t" + str(uncounted)
self.main_window.ship_frame.ship_label_var.set(ships_string)
for enemy in enemies:
self.insert_enemy_into_treeview(enemy, enemydamaged, enemydamaget)
sequence = shipsdict.items()
most_used_ship = "default" if len(sequence) == 0 else max(sequence, key=operator.itemgetter(1))[0]
self.main_window.ship_frame.update_ship([most_used_ship])
self.main_window.ship_frame.update()
self.main_window.middle_frame.screen_label_var.set("Not available for files and matches")
def update_widgets_spawn(self, name, spawn, abilitiesdict, statistics_string, ships_list, comps, enemies,
enemydamaged, enemydamaget):
"""
        This function sets the data widgets for the spawn parsing
        results
:param name: player name
:param spawn: section of CombatLog
:param abilitiesdict: abilities dictionary with abilities as
keys and amounts as values
:param statistics_string: string to set in the statistics tab
:param ships_list: list of possible ships
:param comps: list of ship components
:param enemies: list of enemy ID numbers
:param enemydamaged: dictionary with enemies as keys and
amounts of damage as values
:param enemydamaget: dictionary with enemies as keys and
amounts of damage as values
"""
for key, value in abilitiesdict.items():
self.main_window.middle_frame.abilities_treeview.insert('', tk.END, text=key, values=(value,))
self.main_window.middle_frame.statistics_numbers_var.set(statistics_string)
ships_string = "Possible ships used:\n"
for ship in ships_list:
faction = variables.settings["gui"]["faction"]
ship_name = ship if faction == "imperial" else abilities.rep_ships[ship]
ships_string += ship_name + "\n"
ships_string += "\t\t\t\t\t\t\nWith the components:\n"
for component in comps:
ships_string += component + "\n"
self.main_window.ship_frame.ship_label_var.set(ships_string)
for enemy in enemies:
self.insert_enemy_into_treeview(enemy, enemydamaged, enemydamaget)
self.main_window.ship_frame.update_ship(ships_list)
print("[FileFrame] Inserting spawn of {} items.".format(len(spawn)))
self.main_window.middle_frame.time_view.insert_spawn(spawn, name)
def parse_match(self, file: str, match_i: int):
"""
        Either sets the match and calls add_spawns to add the spawns
        found in the match, or parses all matches found in the
        specified file and displays the results in the other frames.
"""
print("[FileFrame] Parsing file '{}', match {}".format(file, match_i))
self.main_window.middle_frame.statistics_numbers_var.set("")
self.main_window.ship_frame.ship_label_var.set("No match or spawn selected yet.")
lines = Parser.read_file(file)
player_list = Parser.get_player_id_list(lines)
file_cube, match_timings, _ = Parser.split_combatlog(lines, player_list)
player_name = Parser.get_player_name(lines)
match = file_cube[match_i]
results = matchstats.match_statistics(file, match, match_timings[::2][match_i])
self.update_widgets(*results)
match_list = Parser.build_spawn_from_match(match)
self.main_window.middle_frame.time_view.insert_spawn(match_list, player_name)
match_timing = datetime.combine(Parser.parse_filename(file).date(), match_timings[::2][match_i].time())
self.main_window.middle_frame.scoreboard.update_match(match_timing)
def parse_spawn(self, file: str, match_i: int, spawn_i: int):
"""
        Either parses ALL spawns found in the specified match or just
        one of them and displays the results in the other frames
        accordingly.
"""
print("[FileFrame] Parsing '{}', match {}, spawn {}".format(file, match_i, spawn_i))
self.main_window.middle_frame.statistics_numbers_var.set("")
self.main_window.ship_frame.ship_label_var.set("No match or spawn selected yet.")
lines = Parser.read_file(file)
player_list = Parser.get_player_id_list(lines)
player_name = Parser.get_player_name(lines)
file_cube, match_timings, spawn_timings = Parser.split_combatlog(lines, player_list)
match = file_cube[match_i]
spawn = match[spawn_i]
results = list(spawnstats.spawn_statistics(
file, spawn, spawn_timings[match_i][spawn_i]))
results[1] = Parser.parse_player_reaction_time(spawn, player_name)
orig = len(results[1])
results[1] = ScreenParser.build_spawn_events(
file, match_timings[::2][match_i], spawn_timings[match_i][spawn_i], spawn, player_name)
print("[FileFrame] ScreenParser built {} events. Total: {}".format(len(results[1]) - orig, len(results[1])))
self.update_widgets_spawn(*results)
arguments = (file, match_timings[::2][match_i], spawn_timings[match_i][spawn_i])
string = FileHandler.get_features_string(*arguments)
self.main_window.middle_frame.screen_label_var.set(string)
self.main_window.middle_frame.update_timeline(
file, match_i, spawn_i, match_timings, spawn_timings, file_cube)
match_timing = datetime.combine(Parser.parse_filename(file).date(), match_timings[::2][match_i].time())
self.main_window.middle_frame.scoreboard.update_match(match_timing)
def clear_data_widgets(self):
"""Clear the data widgets for results parsing"""
self.main_window.middle_frame.abilities_treeview.delete(
*self.main_window.middle_frame.abilities_treeview.get_children())
self.main_window.middle_frame.enemies_treeview.delete(
*self.main_window.middle_frame.enemies_treeview.get_children())
self.main_window.ship_frame.ship_label_var.set("")
self.main_window.middle_frame.screen_label_var.set(
"Please select an available spawn for screen results information")
self.main_window.middle_frame.time_view.delete(
*self.main_window.middle_frame.time_view.get_children())
self.main_window.middle_frame.time_line.delete_marker(tk.ALL)
self.main_window.middle_frame.scoreboard.reset()
def insert_enemy_into_treeview(self, enemy, enemydamaged, enemydamaget):
"""
Insert an enemy into the Treeview with the appropriate string
:param enemy: ID number/name
:param enemydamaged: dictionary
:param enemydamaget: dictionary
"""
damage_d = str(enemydamaged[enemy]) if enemy in enemydamaged else 0
damage_t = str(enemydamaget[enemy]) if enemy in enemydamaget else 0
kwargs = {"text": "Enemy" if enemy == "" else enemy, "values": (damage_d, damage_t)}
self.main_window.middle_frame.enemies_treeview.insert("", tk.END, **kwargs)
@staticmethod
def format_ships_list(ships: List[str]):
"""Format a list of ship possibilities"""
if len(ships) == 0:
return ", Unknown"
elif len(ships) == 1:
return ", {}".format(ships[0])
else:
return ""
| gpl-3.0 | 6,754,647,611,967,693,000 | 47.85342 | 116 | 0.627484 | false | 3.717898 | false | false | false |
satnet-project/propagators | predict_sims.py | 1 | 4156 |
################################################################################
# Copyright 2015 Samuel Gongora Garcia (s.gongoragarcia@gmail.com)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
# Author: s.gongoragarcia[at]gmail.com
################################################################################
# argv[1] = family
class Do_list:
def __init__(self):
from sys import argv
from os import getcwd
open_tle = open(getcwd() + '/TLEs/' + argv[1], 'r')
satellite_list = open_tle.readlines()
satellite_list = [item.rstrip('\n') for item in satellite_list]
length_list = len(satellite_list)
y = length_list/3
list_numbers = map(self.return_list, range(y))
self.show_satellite_list = []
self.tle_first_line_list = []
self.tle_second_line_list = []
i = 0
j = 1
k = 2
for i in range(len(list_numbers)):
self.show_satellite_list.append(satellite_list[list_numbers[i]])
self.tle_first_line_list.append(satellite_list[j])
self.tle_second_line_list.append(satellite_list[k])
j = list_numbers[i] + 4
k = list_numbers[i] + 5
        # Function to return the values computed by the class
self.return_values()
def return_list(self, x):
return 3*x
    def return_values(self):
        # In the original, only the first of three consecutive returns ever
        # executed; return all three lists together instead.
        return (self.show_satellite_list,
                self.tle_first_line_list,
                self.tle_second_line_list)
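# For reference (illustrative, not from this file): the TLE input is expected
# in three-line groups -- a name line followed by the two element lines, e.g.
#
#   ISS (ZARYA)
#   1 25544U 98067A   15136.55626829  .00010128  00000-0  15273-3 0  9992
#   2 25544  51.6446 301.5822 0004346  73.3426  88.0455 15.55424647943981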
class Solve_coordinates:
    def __init__(self, lista_elementos, lista_prueba, lista_prueba2):
        self.satellites_number = len(lista_elementos)
        begin_time1 = 5
        begin_time2 = 10
        output_file1 = "ficherosalida1"
        output_file2 = "ficherodesalida2"
        from sys import argv
        # Provide data to pyephem_routine two satellites at a time.
        # Step the index by 2 here; the original "i = i + 2" inside the
        # loop body had no effect on the iteration.
        for i in range(0, len(lista_elementos) - 1, 2):
            # Create new threads (myThread subclasses threading.Thread below)
            thread1 = myThread(argv[1], lista_elementos[i], begin_time1, output_file1)
            thread2 = myThread(argv[1], lista_elementos[i + 1], begin_time2, output_file2)
            thread1.start()
            thread2.start()
import threading


class myThread(threading.Thread):
    def __init__(self, file, satellite, begin_time, output_file):
        threading.Thread.__init__(self)
        # store the arguments; run() takes no extra parameters when the
        # thread is launched via start()
        self.file = file
        self.satellite = satellite
        self.begin_time = begin_time
        self.output_file = output_file
    def run(self):
        print "file name is: %s" % (self.file)
        print "satellite name is: %s" % (self.satellite)
        print "begin time is: %d" % (self.begin_time)
        print "output file is: %s" % (self.output_file)
        # import subprocess
        # args = ("predict", "-t", self.file, "-f", self.satellite, self.begin_time, "-o", self.output_file)
        # compute = subprocess.call(args)
#
# print "Starting " + self.name
# print_time(self.name, self.counter, 5)
# print "Exiting " + self.name
#
# if flag_finish == 0:
# thread.exit()
if __name__ == '__main__':
print ""
print "Predict data -test-"
do_list = Do_list()
solve_coordinates = Solve_coordinates(do_list.show_satellite_list, do_list.tle_first_line_list, do_list.tle_second_line_list) | apache-2.0 | 1,737,409,023,068,610,600 | 30.255639 | 129 | 0.55462 | false | 3.785064 | false | false | false |
City-of-Bloomington/green-rental | scripts/columbia-lookup_electricity_data.py | 2 | 24413 | #!/usr/bin/env python
"""
#
# By: Charles Brandt [code at charlesbrandt dot com]
# On: *2015.05.16 11:38:01
# License: GPLv3
# Requires:
# Description:
This script assumes buildings and units have been created in the database.
It should be possible to use the original CSV *or* the database as the main source for the addresses to look up.
http://www.gocolumbiamo.com/Finance/Utilities/rental-costs.php
http://www.gocolumbiamo.com/cfforms/ub/ubmap.html
http://www.gocolumbiamo.com/cfforms/ub/rental.html
http://www.gocolumbiamo.com/cfforms/ub/ubdata.cfm?LOCID=28010&AppNum=113
http://www.gocolumbiamo.com/cfforms/ub/ubdata.cfm?LOCID=165488&AppNum=5517
requires running
convert-columbia.py
first to make sure all buildings have been geocoded and created in database.
/c/clients/rentrocket/code/rentrocket/scripts/columbia-lookup_electricity_data.py
"""
import os, sys, codecs, re
import csv
#import unicodecsv
#not sure that urllib2 will suffice here...
#lots of javascript processing of requests on the client side..
#import urllib2
from selenium import webdriver
from time import strptime
from datetime import datetime
from django.utils import timezone
import time
from helpers import save_json, load_json, Location, save_results, make_person
#from building.models import make_building, make_unit
from building.models import lookup_building_with_geo, UTILITY_TYPES
#from rentrocket.helpers import address_search
from rentrocket.helpers import SearchResults, handle_place
from utility.models import ServiceProvider, UtilitySummary
#from django.conf import settings
#settings.configure()
sys.path.append(os.path.dirname(os.getcwd()))
#http://stackoverflow.com/questions/8047204/django-script-to-access-model-objects-without-using-manage-py-shell
## from rentrocket import settings
## from django.core.management import setup_environ
## setup_environ(settings)
from city.models import City, to_tag
from source.models import FeedInfo, Source
from person.models import Person
def usage():
print __doc__
#for storing fixes for addresses:
conversions = { '101 HOLLY RIDGE LN': '101 HOLLYRIDGE LN',
'4200 MERCHANT ST': '4200 MERCHANT STREET',
#'3603 BERKSHIRE CT': '',
'2405 FLORIDA CT': '2405 FLORIDA',
#works in google maps, but not here
#'1012 COLBY DR': '1012 Colby Drive',
'1012 COLBY DR': '',
#'3905 ATHENS CT': '',
#'3901 ATHENS CT': '',
#'4000 LAMAR CT': '',
#'3902 CAMERON CT': '',
#'1708 PERKINS DR': '',
#'4802 MONITEAU CT': '',
'8 N KEENE ST BLDG E&F': '8 N KEENE ST',
'7000 N BUCKINGHAM SQ': '7000 N BUCKINGHAM SQUARE',
'8 N KEENE ST BLDG G&H': '8 N KEENE ST',
'5513 HUNLEY CT': '5513 HUNLEY',
'1804 LIGHTVIEW DR': '',
'1704 HIGHRIDGE DR': '',
'2211 LACLEDE DR': '',
'5402 GEMSTONE WAY': '',
}
def unicode_csv_reader(utf8_data, dialect=csv.excel, **kwargs):
csv_reader = csv.reader(utf8_data, dialect=dialect, **kwargs)
for row in csv_reader:
yield [unicode(cell, 'utf-8') for cell in row]
def update_summary(query, date, cost, amount, bldg, unit, provider, utility_type, uom):
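    """Create or update the monthly UtilitySummary row for this
    building/unit that matches start_date, writing only when the cost
    or amount changed."""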
#'electricity'
dt = datetime(*(strptime(date, "%Y-%m-%d")[0:6]))
#date = dt
## print dt
## print timezone.get_current_timezone()
## print timezone.get_default_timezone()
## print dir(timezone)
#date = timezone.make_aware(dt, timezone.get_current_timezone())
#date = timezone.make_aware(dt, timezone.get_default_timezone())
date = timezone.make_aware(dt, timezone.utc)
## print date
## print
matches = query.filter(start_date=date)
if len(matches):
updated = False
summary = matches[0]
#print "found something", summary.cost, cost, date
if summary.cost != float(cost):
print "Different costs:", float(cost)
summary.cost = float(cost)
updated = True
if summary.amount != float(amount):
summary.amount = float(amount)
updated = True
if updated:
print "FOUND EXISTING! (and changes!)"
summary.save()
else:
print date, cost, amount
print "MAKING NEW!"
summary = UtilitySummary()
summary.building = bldg
summary.unit = unit
summary.type = utility_type
summary.provider = provider
summary.start_date = date
summary.cost = float(cost)
summary.amount = float(amount)
summary.unit_of_measurement = uom
summary.save()
#print "Saving new!!"
def read_csv(source_csv, city_name, city_tag, driver):
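    """Walk the source CSV of rental addresses, look up each previously
    geocoded building, then scrape the City of Columbia utility pages
    (via the supplied selenium driver) for monthly electricity and water
    data, storing each month with update_summary()."""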
city_options = City.objects.filter(tag=city_tag)
print "Number of cities available: %s" % len(city_options)
if not len(city_options):
raise ValueError, "CITY NOT FOUND! run make_cities.py first"
## city = City()
## city.name = city_name
## city.tag = to_tag(city.name)
## city.save()
else:
city = city_options[0]
print city
position_file = "position.json"
position = load_json(position_file, create=True)
if not position:
position = 0
cache_file = "%s-20150525.json.bkup" % city.tag
cache_destination = os.path.join(os.path.dirname(source_csv), cache_file)
#keep a local copy of data we've processed...
#this should help with subsequent calls
#to make sure we don't need to duplicate calls to remote geolocation APIs:
local_cache = load_json(cache_destination, create=True)
if not local_cache.has_key('buildings'):
local_cache['buildings'] = {}
search_results = {}
for key, value in local_cache['buildings'].items():
#search_results[key] = Location(value)
sr = SearchResults()
sr.from_dict(value)
#print
#print sr
#print
search_results[key] = sr
#geocoder helper:
#geo = Geo()
provider = ''
provider_options = ServiceProvider.objects.filter(name='City of Columbia')
if len(provider_options):
provider = provider_options[0]
else:
raise ValueError, "error finding utility_provider: %s matches" % len(provider_options)
skips = 0
with open(source_csv) as csvfile:
reader = unicode_csv_reader(csvfile)
#just print the first row:
print '>, <'.join(reader.next())
count = 0
#want to randomize the order... distribute options more evenly
#print len(reader)
#exit()
#in order to randomize, should randomize the order in the csv
for row in reader:
count += 1
print "Looking at row: %s, position: %s" % (count, position)
start = datetime.now()
print "Started: ", start
any_updated = False
#could exit out early here, if needed
if count > 10:
#exit()
pass
#if you want to skip ahead more quickly:
#if count < 0:
if count < position:
pass
else:
#print row
objectid = row[0]
## no_units = row[12]
#can pass this in as bldg_id to make_building
#that gets used for parcel too
parcel_id = row[1]
bldg_id = parcel_id
street_num = row[2]
street_dir = row[3]
street_name = row[4]
street_sfx = row[5]
#eg building number
qualifier_pre = row[6]
#eg "UNIT" or "APT"
qualifier_post = row[7]
apt_num = row[8]
#skip row9 (in/out... whatever that means)
zip_code = row[10]
#skip row11, assessor id
#skip row12, address num
#skip row13, x
#skip row14, y
#xcoord == lng
lng = row[15]
lat = row[16]
#entry floor number: (named 'z' in sheet)
floor = row[17]
#skip row18, strcid... not sure
#skip row19, parent
#skip row20, app_
#skip row21, hteloc
zone = row[22]
bldg_type = row[23]
#number of buildings
bldg_num = row[24]
no_units = row[25]
#skip row[26], inspection type
#skip row27, app number
#skip row28, date received
#skip row29, application type
#skip row30, ownerid
#skip row31, operator id
#skip row32, agent_id
#skip row33, mail to
central_heat = row[34]
if central_heat == 'Y':
central_heat = True
else:
central_heat = False
#heat mechanism? heat mechanic??? not sure
heat_mech = row[35]
#skip row36, agent id (2)
#skip row37, agent last name
#skip row38 agent first name
#skip row39 agent middle initial
#skip row40, agent title
#skip row41, business name
#could be owner, could be agent
## owner_name = row[42]
## owner_address1 = row[43]
## owner_address2 = row[44]
## owner_city = row[45]
## owner_state = row[46]
## owner_zip = row[47]
#address = " ".join([street_num, street_dir, street_name, street_sfx, qualifier_pre, qualifier_post, apt_num])
#this is causing problems with lookups in google
if qualifier_pre == "DUP" or qualifier_pre == "DUPE" or qualifier_pre == "2-Jan" or qualifier_pre == "HM" or qualifier_pre == "DWN":
qualifier_pre = ''
address_main = " ".join([street_num, street_dir, street_name, street_sfx, qualifier_pre])
address_main = address_main.strip()
#get rid of any double spaces
address_main = address_main.replace(" ", " ")
#similar to conversions,
#but there are too many of these to list there
if re.search('HOLLY RIDGE LN', address_main):
address_main = address_main.replace('HOLLY RIDGE LN', 'HOLLYRIDGE LN')
if re.search('BERKSHIRE CT', address_main):
address_main = address_main.replace('BERKSHIRE CT', 'BERKSHIRE')
#address_main = ''
if re.search('CAMERON CT', address_main):
address_main = address_main.replace('CAMERON CT', 'CAMERON')
#address_main = ''
if re.search('ATHENS CT', address_main):
address_main = address_main.replace('ATHENS CT', 'ATHENS')
#address_main = ''
if re.search('LAMAR CT', address_main):
address_main = address_main.replace('LAMAR CT', 'LAMAR')
#address_main = ''
if re.search('MONITEAU CT', address_main):
address_main = address_main.replace('MONITEAU CT', 'MONITEAU')
#address_main = ''
if re.search('IMPERIAL CT', address_main):
address_main = ''
if re.search('PERKINS DR', address_main):
address_main = ''
if re.search('GRANITE OAKS CT', address_main):
address_main = ''
#sometimes the 'BLDG' data is added in the wrong place
#then it gets treated as a unit item
#(but it's not *always* a unit item, so can't generalize it that way)
if qualifier_post == "BLDG" or qualifier_post == "LOT":
address_main = " ".join([address_main, qualifier_post, apt_main])
address_main = address_main.strip()
apt_main = ''
else:
apt_main = " ".join([qualifier_post, apt_num])
apt_main = apt_main.strip()
#check if this is one we want to skip
if conversions.has_key(address_main.upper()):
address_main = conversions[address_main.upper()]
if address_main:
print "APT_MAIN: ", apt_main
address = ", ".join( [address_main, apt_main] )
## if (not status in ['EXPIRED', 'CLOSED']) and (permit_type in ['RENTAL']):
print "Parcel ID:", parcel_id
print address
results = None
#make sure it's not one we're skipping:
if not address:
print "SKIPPING ITEM: %s" % row[1]
skips += 1
## skips = codecs.open("skips.txt", 'a', encoding='utf-8')
## original = " ".join([street_num, street_dir, street_name, street_sfx, qualifier_pre])
## skips.write(original)
## skips.write('\n')
## skips.close()
#check if we've started processing any results for this row
elif not search_results.has_key(address.upper()):
print "No saved search results for address: %s" % address
print "Skipping."
print
#raise ValueError, "No results found for %s" % address
else:
print "Already had building: %s" % address
results = search_results[address.upper()]
assert results
#print results
lookup_building_with_geo(results, make=True, parcel_id=parcel_id)
#print results
#current['results'] = results
#print results
if results.errors:
print results
raise ValueError, results.errors
else:
bldg = results.building
assert bldg
unit = results.unit
#at this point there should be at least one unit
#and we will want to associate results with that unit
#assert unit
# can just pass this up in this case
if not unit:
print "Skipping address... no matching Unit!"
else:
#now that we have a building
#look up energy data on the remote website
#result = urllib2.urlopen("http://example.com/foo/bar")
#print result.read()
## base = "http://www.gocolumbiamo.com/cfforms/ub/rental.html"
## driver.get(base)
## search = driver.find_element_by_css_selector('#address')
## search.send_keys(address)
## button = driver.find_element_by_css_selector('.ui-bar > a:nth-child(2)')
## #button = driver.find_element_by_css_selector('#PrimaryCenterColumn > div > div.ui-bar-b.ui-header > div > a.ui-btn.ui-btn-corner-all.ui-shadow.ui-btn-down-b.ui-btn-up-b')
## #button = driver.find_element_by_css_selector('#PrimaryCenterColumn > div > div.ui-bar-b.ui-header > div > a.ui-btn.ui-btn-corner-all.ui-shadow.ui-btn-down-b.ui-btn-up-b > span > span')
## button.click()
## time.sleep(4)
## #results = driver.find_element_by_css_selector('.dojoxGridMasterView')
## results = driver.find_element_by_css_selector('.dojoxGridContent > div:nth-child(1)')
## print results.get_attribute('innerHTML')
## print parcel_id
## options = results.find_elements_by_tag_name('div')
## #options = results.find_elements_by_link_text(parcel_id)
## print options
## #something didn't work with this:
## #look_for = '<td tabindex="-1" role="gridcell" colspan="1" class="dojoxGridCell" idx="0" style="width:90px;">%s</td>' % parcel_id
## look_for = '>%s<' % parcel_id
## matches = []
## for option in options:
## markup = option.get_attribute('innerHTML')
## #print markup
## if re.search(look_for, markup):
## matches.append(option)
## #print "MATCH!"
## if len(matches) > 1:
## print matches
## raise ValueError, "Too many matches!"
## else:
## matches[0].click()
#just realized that this form uses the property_id
#which we already have...
#can skip the steps above that are trying to make this link:
base = "http://www.gocolumbiamo.com/cfforms/ub/ubdata.cfm?LOCID=%s&AppNum=79" % parcel_id
driver.get(base)
try:
heat_source = driver.find_element_by_css_selector('#PrimaryCenterColumn > table:nth-child(1) > tbody:nth-child(1) > tr:nth-child(3) > td:nth-child(1) > strong:nth-child(1) > font:nth-child(1)')
if heat_source.text.strip() == "Heating Source: Gas Heat":
bldg.heat_source_details = 'gas'
bldg.save()
else:
print heat_source.text
exit()
#TODO:
bldg.heat_source_details = 'electric'
bldg.who_pays_gas = 'not_available'
except:
print "heat source not found... skipping"
try:
selector = driver.find_element_by_css_selector('#el_table_length > label:nth-child(1) > select:nth-child(1) > option:nth-child(3)')
selector.click()
except:
print "No Water data available... skipping"
else:
body = driver.find_element_by_css_selector('#el_table > tbody:nth-child(3)')
rows = body.find_elements_by_tag_name('tr')
#row = rows[0]
query = bldg.utilitysummary_set.filter(type='electricity')
for row in rows:
#print row.get_attribute('innerHTML')
cols = row.find_elements_by_tag_name('td')
date = cols[0].text + '-01'
cost = cols[1].text.replace('$', '').strip()
amount = cols[2].text
amount = amount.replace(' KWH', '')
update_summary(query, date, cost, amount, bldg, unit, provider, 'electricity', 'kwh')
#update_summary(query, date, cost, amount)
#for item in cols:
# print item.text
#print dir(bldg)
#print bldg.utilitysummary_set
#query = bldg.utilitysummary_set.filter(type=utility_type[0])
#could look up type from UTILITY_TYPES...
#but in this case we know what they should be
#query = bldg.utilitysummary_set.filter(type='water')
#if len(query):
try:
water = driver.find_element_by_css_selector('#ext-gen23')
water.click()
selector = driver.find_element_by_css_selector('#wr_table_length > label:nth-child(1) > select:nth-child(1) > option:nth-child(3)')
selector.click()
except:
print "No Water data available... skipping"
else:
body = driver.find_element_by_css_selector('#wr_table > tbody:nth-child(3)')
rows = body.find_elements_by_tag_name('tr')
#row = rows[0]
query = bldg.utilitysummary_set.filter(type='water')
for row in rows:
#print row.get_attribute('innerHTML')
cols = row.find_elements_by_tag_name('td')
date = cols[0].text + '-01'
cost = cols[1].text.replace('$', '').strip()
amount = cols[2].text
amount = amount.replace(' CCF', '')
update_summary(query, date, cost, amount, bldg, unit, provider, 'water', 'ccf')
#update_summary(query, date, cost, amount)
#for item in cols:
# print item.text
unit.update_averages()
#see if we have enough info now to make a score:
unit.update_energy_score()
#now that we've saved the unit,
#update the averages for the whole building:
unit.building.update_utility_averages()
unit.building.update_rent_details()
position += 1
save_json(position_file, position)
if any_updated:
#back it up for later
#enable this when downloading GPS coordinates...
#the rest of the time it slows things down
local_cache['buildings'] = {}
for key, value in search_results.items():
#search_results[key] = SearchResults().from_dict(value)
local_cache['buildings'][key] = value.to_dict()
save_json(cache_destination, local_cache)
position = count
save_json(position_file, position)
exit()
end = datetime.now()
print "finished: ", end
total_time = end - start
print total_time
print
#exit()
#destination = '%s.tsv' % city_tag
#save_results(search_results, destination)
if __name__ == '__main__':
driver = webdriver.Firefox()
#driver = webdriver.Chrome()
read_csv('/home/rentrocket/cities/columbia/rental/Columbia_data_20131016-randomized.csv', "Columbia", "columbia_mo", driver)
| agpl-3.0 | -6,013,896,442,082,118,000 | 39.824415 | 225 | 0.48593 | false | 4.36414 | false | false | false |
czhuang/ChordRipple | app/resource.py | 1 | 56047 |
import os
from copy import deepcopy
import cPickle as pickle
import numpy as np
from music21 import pitch
from socketio.namespace import BaseNamespace
from socketio.mixins import BroadcastMixin
from dynamic_programming_tools import shortest_path, simple_foward_backward_gap_dist
from retrieve_model_tools import retrieve_NGram, retrieve_SkipGramNN
from music21_chord_tools import sym2chord, roman2letter, is_roman_numeral, letter2roman
# from music21_chord_tools import sym2roman
# from diversity_tools import get_diverse_seqs
from config import get_configs
from SuggestionList import SuggestionList, SuggestionItem
from QueryObject import QueryObject
from Logs import Logs
PKL_FOLDER = 'data'
EXPERIMENT_TYPE_STRS = ['Single-T',
'Single-A',
'Ripple']
TYPICAL, SIM_BUT_LESS_TYPICAL, RIPPLE = range(len(EXPERIMENT_TYPE_STRS))
EXPERIMENT_TYPE = RIPPLE
from music21_chord_tools import ROMAN2LETTER_RELABELS
from music21_chord_tools import ROMAN_PARTIAL_RELABELS
from music21_chord_tools import MAKE_NOTE_EXCEPTIONS
from music21_chord_tools import LETTER2ROMAN_RELABELS
from music21_chord_tools import LETTER_PARTIAL_RELABELS_FOR_USER
from latin_squares_experiment_tools import get_condition_ordering
MIDI_START_SLACK = 0.15
from Database import CHORD_LEN
from Database import Database, DatabasePlacebo
ALL_SIMS = True
DISABLE_NEXT = True
class Resource(BaseNamespace, BroadcastMixin):
def initialize(self):
# Called after socketio has initialized the namespace.
self.history = []
self.parsed_seqs_notes = {}
self.unit_dur = 60/92.0
self.ngram = retrieve_NGram()
self.nn = retrieve_SkipGramNN()
assert self.ngram.syms == self.nn.syms
# self.db = Database('test')
# self.db = Database('study-iui')
self.db = DatabasePlacebo()
self.db.index_model(self.ngram, 'ngram')
self.db.index_model(self.nn, 'nn')
# get randomized ordering for current experiment
condition_orderings = get_condition_ordering()
fpath = os.path.join('pkls', 'participant_count.txt')
with open(fpath, 'r') as p:
print 'reading lines from:', fpath
participant_count = p.readline()
        try:
            self.participant_count = int(participant_count)
        except ValueError:
            print 'WARNING: could not parse participant count:', participant_count
            self.participant_count = 0
        print 'participant_count:', self.participant_count
self.participant_count = self.participant_count % condition_orderings.shape[0]
print 'participant_count modulo:', self.participant_count
self.ordering = condition_orderings[self.participant_count, :]
print 'ordering:', self.ordering
self.emit('ordering', list(self.ordering))
# self.experiment_type = EXPERIMENT_TYPE
# for tutorial, always start with ripple
self.experiment_type = RIPPLE # int(self.ordering[0])
query = QueryObject(dict(data=list(self.ordering), actionKind="ordering"))
self.index_user_action(query)
query = QueryObject(dict(data=self.participant_count, actionKind="participant_count"))
self.index_user_action(query)
# update participant count
with open(fpath, 'w+') as p:
p.truncate()
p.writelines(str(self.participant_count + 1))
# self.previous_sym = None
# self.previous_sym_ind = None
self._previous_sym = None
self._previous_sym_ind = None
self.n_suggestions = 5
self.n_similar = 2
self.suggestions = SuggestionList(self, 'below')
self.suggestions_above = SuggestionList(self, 'above')
self.config = get_configs()
self.corpus = self.config['corpus']
print '...corpus', self.corpus
if self.config['use_letternames']:
self.symbol_type = 'letter'
else:
self.symbol_type = 'roman'
# need to correct some roman numerals
print '# of syms: %d' % len(self.ngram.syms)
self.syms = []
for sym in self.ngram.syms:
formatted_sym, valid = self.format_sym(sym)
self.syms.append(formatted_sym)
# print 'F#m in syms?', 'F#m' in self.syms
# need to update the "spelling" of roman numerals in nn and ngram
self.nn.syms = self.syms
self.ngram.syms = self.syms
self._rn2letter, self._letter2rn = self.load_rn2letter_dict()
self.logs = Logs(EXPERIMENT_TYPE, EXPERIMENT_TYPE_STRS)
self.loop_len = None
# for first chord
if self.experiment_type == TYPICAL:
if self.symbol_type == 'roman':
self.start_syms = [['I'], ['i'], ['V'], ['IV'], ['I7']]
else:
self.start_syms = [['C'], ['Cm'], ['G'], ['F'], ['C7']]
else:
# want diverse sequences
if self.symbol_type == 'roman':
# self.start_syms = [[u'I', u'vi'], ['V', 'vi'], ['IV', 'ii'], [u'V7/IV', u'IV'], [u'i', u'i'], [u'i', u'V6']]
self.start_syms = [[u'i'], [u'I'], ['V'], ['iv'], ['IV'], ['V7/IV']]
else:
# self.start_syms = [['a', 'C64'], ['G', 'C'], ['A', 'gb'], ['C', 'F'], ['G', 'e'], ['Bb', 'Ab'], ['E', 'G']]
# self.start_syms = [['C'], ['F'], ['G'], ['Am'], ['Cm'], ['B-'], ['A-']]
self.start_syms = [['Cm'], ['Am'], ['B-'], ['F/C'], ['G'], ['Dm7'], ['Cmaj7'], ['F#dim']]
# set initial sequence
# self.init_seqs = [['C', 'F', 'Dm', 'G', 'C', 'C', 'F'],
# ['C', 'F', 'C', 'F', 'G7', 'C', 'G'],
# ['C', 'Am', 'G', 'C', 'F', 'C', 'G'],
# ['C', 'F', 'G', 'C', 'F', 'C', 'F']]
self.init_seqs = [['C', 'F', 'Dm', 'G', 'C', 'C', 'F', 'C'],
['C', 'F', 'C', 'F', 'G7', 'C', 'G', 'C'],
['C', 'Am', 'G', 'C', 'F', 'C', 'G', 'C'],
['C', 'F', 'G', 'C', 'F', 'C', 'F', 'C']]
self.on_generate_complete_seq(self.init_seqs[0])
# ======================
# == database helpers ==
# ======================
def index_user_action(self, query, suggestions=None,
suggestions_above=None, attrs=None):
experiment_type_label = EXPERIMENT_TYPE_STRS[self.experiment_type]
print '...index_user_action:', query.seqStr, query.actionKind, \
query.author, query.panelId, query.itemIdx
actionKindThatNeedsItem = 'use' == query.actionKind or 'play' == query.actionKind
itemInAttrs = attrs is not None and 'item' in attrs
assert (actionKindThatNeedsItem and itemInAttrs) or not itemInAttrs
if itemInAttrs:
print '\t\t\t', attrs['item'].inds, attrs['item'].seq
suggestions_list = []
if suggestions is not None:
suggestions_list.append(suggestions)
if suggestions_above is not None:
suggestions_list.append(suggestions_above)
if len(suggestions_list) == 0:
suggestions_list = None
self.db.index_user_action(query, experiment_type_label,
suggestions_list, attrs)
@property
def model(self):
return self.ngram
def load_rn2letter_dict(self):
fname = 'rn2letter-rock.pkl'
fpath = os.path.join(PKL_FOLDER, fname)
with open(fpath, 'rb') as p:
rn2letter = pickle.load(p)
        letter2rn = {}
        # invert the mapping so letter names key to their roman numerals
        for key, val in rn2letter.iteritems():
            letter2rn[val] = key
return rn2letter, letter2rn
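    # Hedged usage sketch (hypothetical entries; the real keys come from the
    # pickle): if rn2letter.get('IV') returns 'F', the inverted map gives
    # letter2rn.get('F') == 'IV'.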
# def on_loopLen(self, loop_len):
# self.loop_len = loop_len
def on_inputSave(self, text):
# log add automatically saves upon seeing type "save"
# self.logs.add("save", text)
query = QueryObject(dict(text=text, actionKind="save"))
self.index_user_action(query)
def on_comments(self, text):
print '...on_comments', text
# log add automatically saves upon seeing type "save"
# self.logs.add("save", text)
query = QueryObject(dict(text=text, actionAuthor="user",
actionKind="comments"))
self.index_user_action(query)
def on_rating(self, lineText, id, value, caption):
print '...on_rating', lineText, value, id, caption
# log add automatically saves upon seeing type "save"
# self.logs.add("save", text)
query = QueryObject(dict(text=lineText, actionAuthor="user",
actionKind="rating"))
attrs = {'rating': value,
'ratingCaption': caption,
'saveIdx': id,
'ratingQuestion': lineText}
query.add_attributes(attrs)
self.index_user_action(query)
def on_clear(self, text):
# self.logs.add("clear", text)
query = QueryObject(dict(text=text, actionKind="clear"))
self.index_user_action(query)
self.clear_suggestions()
# def on_rating(self, value, ind, text):
# try:
# ind = int(ind)
# except ValueError:
# print 'WARNING: index to save entry wanted, but received', ind
#
# if len(text) > 0:
# self.logs.add_rating(value, 'save', text, ind)
# logs = self.logs.save
# for log in logs:
# print log
def on_setPlaybackSpeed(self, speed):
print '...playbackspeed',
self.unit_dur = 60.0 / speed
print self.unit_dur
def disconnect(self, *a, **kw):
super(Resource, self).disconnect(*a, **kw)
def on_ping(self, param, loop_len):
print '---on_ping', param, loop_len
self.loop_len = loop_len
self.emit('pong', param)
def on_requestData(self):
# data = [{'x':10, 'y':15, 'label':'I'},
# {'x':10, 'y':15, 'label':'IV'}]
fname = 'w1-RN.pkl'
with open(fname, 'rb') as p:
vertices = pickle.load(p)
labels = pickle.load(p)
# vertices = [[100, 150], [200, 150]]
# labels = ['I', 'IV']
max_x = np.max(vertices[:, 0])
min_x = np.min(vertices[:, 0])
x_range = max_x - min_x
max_y = np.max(vertices[:, 1])
min_y = np.min(vertices[:, 1])
y_range = max_y - min_y
width = 750.0
margin = 30
scale = (width-2*margin)/y_range
if x_range > y_range:
scale = (width-2*margin)/x_range
vertices[:, 0] -= min_x
vertices[:, 1] -= min_y
vertices *= scale
vertices[:, 0] += margin
vertices[:, 1] += margin
vertices = map(list, vertices)
self.emit('dataLabels', vertices, labels)
# def parse_seq_as_notes(self, text):
# text = text.strip()
# if text in self.parsed_seqs_notes.keys():
# chords = self.parsed_seqs_notes[text]
# return chords
# parts, raw_parts = self.parse_seq_as_syms(text)
# chords = self.make_note_seqs(parts)
# self.parsed_seqs_notes[text] = chords
# return chords
#
def seqToNotes(self, seq, text):
        # chords is a list of lists of MIDI note numbers
chords = self.make_note_seqs(seq)
self.parsed_seqs_notes[text] = chords
return chords
def parse_seq_as_syms(self, text):
text = text.strip()
parts = text.split(' ')
parts = [part for part in parts if len(part) > 0]
parts, raw_parts = self.format_seq(parts)
return parts, raw_parts
def make_note_seqs(self, seq):
print 'make_note_seqs', seq
note_seq = []
for sym in seq:
notes = self.make_notes(sym)
if len(note_seq) == 0:
note_seq.append(notes)
continue
if len(notes) == 0:
note_seq.append([])
continue
if len(note_seq[-1]) == 0:
diff = 0
else:
diff = np.min(notes) - np.min(note_seq[-1])
# print '...make_note_seqs', sym, notes, diff, diff % 12
if np.abs(diff) > 6:
# i.e. C4 to B4 => C4 to B3
# i.e. C4 to B5 => C4 to B3
shift_int = (np.abs(diff) / 12) * 12
# prefer going down
# TODO: temp disable
if np.abs(diff) % 12 > 6 and diff > 0:
shift_int += 12
# shift_int -= 12
direction = np.sign(diff) * -1
notes = np.asarray(notes) + shift_int * direction
print 'notes[0]', notes[0]
if notes[0] <= 55:
notes += 12
note_seq.append(list(notes))
else:
note_seq.append(notes)
# print 'shifted', notes
print 'playSeq, chords'
for i, ch in enumerate(note_seq):
print seq[i],
for p in ch:
print pitch.Pitch(p).nameWithOctave,
print ch
print
return note_seq
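    # Worked sketch of the octave folding above (hypothetical MIDI numbers): if
    # the previous chord's lowest note is 64 and the next chord starts at 72,
    # diff is 8 (> 6), so the next chord is shifted down an octave to start at
    # 60 -- unless the shifted bass lands at or below 55, in which case it is
    # bumped back up an octave.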
def make_notes(self, sym):
sym = sym.strip()
if sym in MAKE_NOTE_EXCEPTIONS:
# look up in dictionary
letter = self.rn2letter(sym)
print 'due to roman not giving right pitches', sym, letter
sym = letter
chord_sym = sym2chord(sym)
midi = []
if chord_sym is not None:
midi = [pch.midi for pch in chord_sym.pitches]
# take away duplicates
midi = list(set(midi))
midi = sorted(midi)
# double the tonic on top
if sym in ['I', 'I7', 'i']:
doubled_note = midi[0]
midi.append(doubled_note+12)
print 'doubled tonic on top', midi
elif len(midi) > 4 and '11' in sym:
            # the 11th creates a half step with the 3rd if major (not checking if major here)
            # shift the 11th up an octave to avoid the clash
print midi
# 1,3,11,5,7,9
reduced_midi = midi[:2] + [midi[3]] + [midi[2]+12]
midi = reduced_midi
elif len(midi) > 4:
reduced_midi = midi[:3] + [midi[-1]]
midi = reduced_midi
return midi
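    # Hedged example (the actual octave depends on music21's parsing):
    # make_notes('C') would give the C-major triad as sorted MIDI numbers,
    # e.g. [48, 52, 55], and make_notes('I') appends the tonic an octave up,
    # e.g. [48, 52, 55, 60].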
# def make_log_tags_for_playback(self, query):
# tags = {}
# print 'make_log_tags_for_playback', query.author
# if query.author == 'machine':
# item = self.retrieve_suggestion_item_at(query)
# if item is None:
# print 'WARNNG: can not retrieve suggestion item', query.activeIdx, \
# query.seqStr
# assert False
# tags['suggestionItem'] = item
# # tags['author'] = author
# return tags
# def on_playSeq(self, text, pos, author, activeIdx,
# suggestPanel=None, loop=False):
def preprocess_query(self, query):
return query
def on_playSeq(self, original_query):
print "on_playSeq:" #, original_query
query = QueryObject(original_query)
# print '...on_playSeq, loop', query.loop
# logging
# TODO: re-enable logging later, has some problems
# need to make sure the chords played are actually the chords written
if query.author == 'dont_log':
pass
else:
if query.loop:
assert query.actionKind == 'loop'
else:
assert query.actionKind == 'play' or query.actionKind == 'play both'
attrs = self.retrieve_suggestion_item_as_attrs(query)
self.index_user_action(query, self.suggestions,
self.suggestions_above, attrs)
# self.logs.add(query.actionKind, query.seqStr, tags)
midi_notes = self.make_proxy_midi_notes_from_query(query)
if query.actionKind == "artificalPlay":
self.emit('playSeq', midi_notes, False, original_query)
else:
self.emit('playSeq', midi_notes, True, original_query)#, query.play)
def make_proxy_midi_notes_from_query(self, query):
print '---on_parseSeq:', query.seqStr
chords = self.seqToNotes(query.seq, query.seqStr)
print 'durations', query.durations
if query.durations is not None:
durs = [1.0*dur for dur in query.durations]
print 'len(chords)', len(chords)
print 'scaled durs', len(durs), durs
#assert len(durs) == len(chords)
else:
durs = [self.unit_dur] * len(chords)
print '...cumulative durations'
start_time_acc = [0]
for dur in durs:
start_time_acc.append(start_time_acc[-1]+dur)
print start_time_acc
if query.loop:
chords = chords[:] + chords[:]
durs = durs[:-1] + [durs[-1]+self.unit_dur*0.2] + durs[:]
midi_notes = self.make_proxy_midi_notes(chords, durs)
return midi_notes
def make_proxy_midi_notes(self, chords, durs):
if not len(chords):
return []
if not isinstance(chords[0], list):
chords = [chords]
midi_notes = []
running_time = 0.0
for i, notes in enumerate(chords):
onset = running_time
if i == 0:
onset += MIDI_START_SLACK
if i >= len(durs):
dur = durs[-1]
else:
dur = durs[i]
for note in notes:
midi_note = {'pitch': note, 'onset': onset,
'offset': running_time+dur}
midi_notes.append(midi_note)
running_time += dur
return midi_notes
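    # Concrete sketch: make_proxy_midi_notes([[60, 64, 67]], [0.5]) returns
    # [{'pitch': 60, 'onset': 0.15, 'offset': 0.5}, ...] for each of the three
    # pitches; only the first chord's onset is delayed by MIDI_START_SLACK.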
# def on_(self, chord_changes, author, activeIdx,
# suggestPanel, original_query, log=True):
def on_playSubseq(self, original_query, log=True, playContext=False):
print "...on_playSubseq, playContext", playContext #, original_query
# print '...on_playSubseq', chord_changes, author, activeIdx, log, suggestPanel
# print chord_changes
syms = []
inds = []
# TODO: assumes continuous but side ripples are not
durs = []
# if not isinstance(original_query, QueryObject):
query = QueryObject(original_query)
query.playContext = playContext
query_durations = query.durations
chord_changes = query.chordSeqsAndFormat
print chord_changes
if not playContext:
for i, chord_change in enumerate(chord_changes[:CHORD_LEN]):
if chord_change[1]:
syms.append(chord_change[0])
inds.append(i)
if query_durations is not None:
durs.append(query_durations[i])
else:
start_ind = None
for i, chord_change in enumerate(chord_changes[:CHORD_LEN]):
if chord_change[1]:
start_ind = i
break
end_ind = None
for i in range(CHORD_LEN-1, -1, -1):
if chord_changes[i][1]:
end_ind = i
break
print "start_ind, end_ind", start_ind, end_ind
print "boundary"
if start_ind - 1 >= 0:
start_ind -= 1
if end_ind + 1 < CHORD_LEN:
end_ind += 1
print start_ind, end_ind
syms = [ chord_changes[i][0] for i in range(start_ind, end_ind+1) ]
durs = [ query_durations[i] for i in range(start_ind, end_ind+1) ]
inds = range(start_ind, end_ind+1)
print syms, durs
print len(inds), inds
if len(inds) > 1:
p_ind = inds[0]
fixed_inds = inds[:]
for ind in inds[1:]:
if ind - p_ind != 1:
print 'WARNING: Changes not continuous'
for i in range(p_ind+1, ind):
fixed_inds.append(i)
p_ind = ind
inds = fixed_inds
# not necessary to sort
inds.sort()
# update durations, syms
# to fulfill the later check of
# len(durs) != len(syms)
durs = [query_durations[i] for i in inds]
syms = [chord_changes[i][0] for i in inds]
if query.log and log:
attrs = self.retrieve_suggestion_item_as_attrs(query)
if query.author == 'machine':
self.index_user_action(query, self.suggestions,
self.suggestions_above, attrs)
else:
                self.index_user_action(query)
# self.logs.add("play bold", text, tags)
# original subseq notes
# need context to determine octave
all_notes = self.make_note_seqs(query.seq)
print 'on_playSubseq', len(all_notes), query.seqStr, inds
notes = [all_notes[ind] for ind in inds]
# TODO: this should not happen, should check for this, instead of letting it slide
if len(durs) != len(syms):
durs = [self.unit_dur] * len(syms)
midi_notes = self.make_proxy_midi_notes(notes, durs)
# for entire sequence
# midi_notes = self.make_proxy_midi_notes_from_query(query)
# sending the original query back so that have context
# TODO: make all communication to be query objects
# with more fields
self.emit('playSubseq', midi_notes, original_query)
return notes
def get_similar_chords(self, sym, topn):
similars = self.nn.most_similar(sym, topn=topn)
if similars is None:
return
sims = [s[0] for s in similars]
return sims
def on_startSeqs(self):
print '---on_startSeqs'
# TODO: fixed start seqs
self.clear_suggestions()
for s in self.start_syms:
self.suggestions.add(SuggestionItem(s, range(len(s)), 'start'))
seqs, inds = self.suggestions.get_seqs_inds()
# print seqs
# print inds
print '----- updateChordSuggestions ----, # of items', self.suggestions.num_items
self.emit('updateChordSuggestions', seqs, inds)
def rn2letter(self, sym):
if sym in ROMAN2LETTER_RELABELS:
formatted_sym = ROMAN2LETTER_RELABELS[sym]
elif sym in self._rn2letter:
formatted_sym = self._rn2letter[sym]
print 'rn2letter retrieved', sym, formatted_sym
else:
formatted_sym = roman2letter(sym)
return formatted_sym
def letter(self, sym):
if is_roman_numeral(sym):
return self.rn2letter(sym)
else:
return sym
def roman(self, sym):
if sym is None:
return None
if is_roman_numeral(sym):
return sym
else:
return self.letter2rn(sym)
def letter2rn(self, sym):
if sym in LETTER2ROMAN_RELABELS:
formatted_sym = LETTER2ROMAN_RELABELS[sym]
elif sym in self._letter2rn:
formatted_sym = self._letter2rn[sym]
print 'letter2rn retrieved', formatted_sym
else:
formatted_sym = letter2roman(sym)
if formatted_sym is None:
return ''
# print 'created', formatted_sym
return formatted_sym
def back_to_roman(self, sym):
is_roman = is_roman_numeral(sym)
if not is_roman:
sym = self.letter2rn(sym)
return sym
def format_seq(self, seq):
local_seq = []
local_original_seq = []
for sym in seq:
formatted_sym, valid = self.format_sym(sym)
if valid:
local_seq.append(formatted_sym)
else:
local_seq.append('')
local_original_seq.append(formatted_sym)
return local_seq, local_original_seq
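    # Hedged sketch: with symbol_type == 'roman', a letter chord that cannot be
    # converted comes back as '' in the first list (marking it invalid), while
    # the second list keeps whatever text the user typed.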
def format_sym(self, sym):
is_roman = is_roman_numeral(sym)
formatted_sym = sym
# print 'format_sym', sym, is_roman
if is_roman and self.symbol_type != 'roman':
formatted_sym = self.rn2letter(sym)
elif not is_roman and self.symbol_type == 'roman':
formatted_sym = self.letter2rn(sym)
if formatted_sym is not None:
if self.symbol_type == 'roman':
for k, v in ROMAN_PARTIAL_RELABELS.iteritems():
if k in formatted_sym:
formatted_sym = formatted_sym.replace(k, v)
else:
for k, v in LETTER_PARTIAL_RELABELS_FOR_USER.iteritems():
if k in formatted_sym:
formatted_sym = formatted_sym.replace(k, v)
# check = sym2chord(sym)
# if check is None:
# return None
if formatted_sym == '':
return sym, False
return formatted_sym, True
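    # Hedged example (assuming a C-based key mapping): with symbol_type ==
    # 'letter', format_sym('IV') would return something like ('F', True),
    # while a symbol that converts to the empty string returns (sym, False).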
def generate_subs_from_context(self, sym_ind, original_seq, factor=1):
print '...generate_subs_from_context', sym_ind, original_seq
# factor: factor*n_similar # of suggestions
original_sym = original_seq[sym_ind]
subs = []
# sorted_syms = None
# if 0 < sym_ind < len(original_seq):
if sym_ind - 1 < 0:
before_sym = None
else:
before_sym = original_seq[sym_ind - 1]
if before_sym not in self.syms:
before_sym = None
if sym_ind + 1 >= len(original_seq):
after_sym = None
else:
after_sym = original_seq[sym_ind + 1]
if after_sym not in self.syms:
after_sym = None
sorted_probs, sorted_syms = \
simple_foward_backward_gap_dist(self.model, before_sym, after_sym,
self.experiment_type)
        if sorted_syms is not None and len(sorted_syms) > 10:
            for i in range(10):
                print sorted_syms[i], np.exp(sorted_probs[i])
            if 'C' in sorted_syms:
                ind = sorted_syms.index('C')
                print "what is prob for C?", np.exp(sorted_probs[ind]), ind
if sorted_syms is None and sym_ind == 0 and self.experiment_type == TYPICAL:
sorted_syms = [s[0] for s in self.start_syms]
n_subs = factor*self.n_similar
if sorted_syms is not None:
subs = sorted_syms[:n_subs]
if original_sym in subs:
subs.remove(original_sym)
subs.append(sorted_syms[n_subs])
print '...subs', subs
return sorted_syms, subs
def make_single_sub_suggestion_items(self, sym_ind, original_seq,
subs, sorted_syms, return_tags_only=False):
# original_sym = original_seq[sym_ind]
suggestion_items = []
tags_list = []
for i, ss in enumerate(subs):
sub_seq = original_seq[:sym_ind] + [ss] + original_seq[sym_ind + 1:]
# print 'subseq', sub_seq
tags = {}
if i < self.n_similar:
tags['source'] = 'subs'
else:
tags['source'] = 'sim'
if sorted_syms is not None:
# print len(sorted_syms), ss
# print sorted_syms
tags['context_rank'] = sorted_syms.index(ss)
tags_list.append(tags)
item = SuggestionItem(sub_seq, [sym_ind], 'sub_ripple', tags)
suggestion_items.append(item)
# original_sym, valid = self.format_sym(original_sym)
# if valid:
# subs.insert(0, original_sym)
if return_tags_only:
return tags_list
return suggestion_items
def generate_singleton_subs(self, sym_ind, original_seq, factor=1):
original_sym = original_seq[sym_ind]
print '...generate substitutions based on similarity'
# generate substitutions based on similarity
if self.experiment_type != TYPICAL: # and sym_ind == 0:
sims = self.get_similar_chords(original_sym, self.n_similar*2)
elif self.experiment_type != TYPICAL:
sims = self.get_similar_chords(original_sym, self.n_similar)
else:
sims = None
print "sims", sims
# generate substitutions based on context
if self.experiment_type == TYPICAL:
factor = 2
        # sorted_syms is kept in case more candidates are needed
        # if not typical, then for the first chord, only use similarity
if self.experiment_type != TYPICAL: # and sym_ind == 0:
sorted_syms = None
subs = None
else:
sorted_syms, subs = self.generate_subs_from_context(sym_ind, original_seq,
factor=factor)
print "subs by context", subs
# collect all the single changes
if subs is None:
subs = sims
elif sims is not None:
subs.extend(sims)
# subs first, sims next
print "all collected singletons", subs
        assert subs is None or len(subs) == 4 or len(subs) == 0
        # sorted_syms is kept in case more candidates are needed;
        # not currently used in the calling function
return sorted_syms, subs
@property
def previous_sym(self):
return self._previous_sym
@previous_sym.setter
def previous_sym(self, sym):
print '...setting previous sym to:', sym
self._previous_sym = sym
@property
def previous_sym_ind(self):
return self._previous_sym_ind
@previous_sym_ind.setter
def previous_sym_ind(self, ind):
print '...setting previous ind to:', ind
self._previous_sym_ind = ind
def on_generateAlternatives(self, query, log=True):
# if experiment type is MANUAL then don't give any alternatives
# if self.experiment_type == TYPICAL:
# return
print '\n\n--- --- generate_alternative --- ---',
# print self.symbol_type
# print 'on_generateAlternatives, query', query
if not isinstance(query, QueryObject):
query = QueryObject(query)
        # if the sequence is empty, clear the recommendations, add start-seq recommendations, and return
if len(query.seq) == 0:
self.previous_sym = None
self.previous_sym_ind = None
self.clear_suggestions()
self.emit('updateChordSuggestions', [], [], [])
self.on_startSeqs()
return
# check previously active chord, could be None
print 'previous', self.previous_sym, self.previous_sym_ind
print 'query', query.sym
# if previuos symbol is the same as currently actively symbol
# then don't need to generate new alternatives
if self.previous_sym is not None and self.previous_sym_ind == query.activeIdx \
and self.previous_sym == query.sym:
return
# if current symbol is empty and next symbol is also empty
# don't do anything
if len(query.sym) == 0 and query.activeIdx is not None \
and len(query.seq[query.activeIdx]) == 0:
return
# index here?
self.index_user_action(query, self.suggestions,
self.suggestions_above)
self.clear_suggestions()
self.previous_sym = query.sym
self.previous_sym_ind = query.activeIdx
original_sym = query.sym
raw_original_seq = query.seq
original_seq = query.seq
sym_ind = query.activeIdx
# generate new alternatives
if original_sym is None or not len(original_sym):
print 'WARNING: no symbol at this position'
# if sym in middle, then use context to ripple in to generate suggestions
sorted_syms, subs = self.generate_subs_from_context(sym_ind,
raw_original_seq, 4)
if sorted_syms is not None:
suggestion_items = self.make_single_sub_suggestion_items(sym_ind, raw_original_seq,
subs, sorted_syms)
self.clear_suggestions()
self.suggestions_above.add(suggestion_items)
seqs, inds = self.suggestions_above.get_seqs_inds()
print '...generateAlternatives, # of items', self.suggestions_above.num_items
self.emit('updateChordSuggestionsAbove', seqs, inds)#, self.suggestions_above.types)
return
# bottom, next ripples
print '...generate_nexts', raw_original_seq
print original_seq, original_sym
if not DISABLE_NEXT:
lastPos = query.activeIdx + 1 == len(original_seq)
nextSymIsEmpty = not lastPos and \
len(original_seq[query.activeIdx+1]) == 0
# if sym_ind == len(original_seq) - 1 or nextSymIsEmpty:
# print 'sym_ind', sym_ind, CHORD_LEN, nextSymIsEmpty
if sym_ind < CHORD_LEN - 1 or nextSymIsEmpty:
ss, sinds = self.generate_next(original_sym, sym_ind, raw_original_seq)
if ss is None:
print 'WARNING: no next chords for ', original_seq[sym_ind]
return
for i, s in enumerate(ss):
self.suggestions.add(SuggestionItem(s, sinds[i], 'next'))
print s, sinds[i]
# bottom, side ripples
if self.experiment_type == RIPPLE:
suggestion_items = self.generate_side_ripples(sym_ind, original_seq)
self.suggestions.add(suggestion_items)
# above, single sims and subs by context
sorted_syms, subs = self.generate_singleton_subs(sym_ind, raw_original_seq)
suggestion_items = self.make_single_sub_suggestion_items(sym_ind, raw_original_seq, subs, sorted_syms)
if subs is None:
seqs, inds = self.suggestions_above.get_seqs_inds()
self.emit('updateChordSuggestionsAbove', seqs, inds)
return
# generate ripples for the single changes
print '...subs', subs
if self.experiment_type == RIPPLE:
seq_subs, seq_inds = self.generate_ripples(raw_original_seq, sym_ind,
subs, all_sims=ALL_SIMS)
else:
seq_subs = None
if seq_subs is None:
# add what we have so far
self.suggestions_above.add(suggestion_items)
else:
# the first one is for the current text
# check if it is the same as current text
# same_as_original_seq = True
# ss = seq_subs.pop(0)
# inds = seq_inds.pop(0)
# for s, ind in zip(ss, inds):
# if len(original_seq) < ind and original_seq[ind] != s:
# same_as_original_seq = False
# rippled_plus_original_items = []
# if not same_as_original_seq:
# tags = {'source': 'user'}
# rippled_plus_original_items.append(SuggestionItem(ss, inds, 'sub_ripple', tags))
rippled_plus_original_items = []
print "len(suggestion_items) == len(seq_subs)?"
print len(suggestion_items), len(seq_subs)
assert len(suggestion_items) == len(seq_subs)
# interleave the two
print '...interleaving subs and their ripples'
for i, item in enumerate(suggestion_items):
rippled_plus_original_items.append(item)
ripple_item = SuggestionItem(seq_subs[i], seq_inds[i], 'sub_ripple', item.tags)
rippled_plus_original_items.append(ripple_item)
self.suggestions_above.add(rippled_plus_original_items)
# if EXPERIMENT_TYPE != BASELINE_SINGLETON and sym_ind == len(original_seq) - 1:
# ss, sinds = self.generate_continuations(original_sym, sym_ind, original_seq)
# self.suggestions_above.add_seqs_inds(ss, sinds, 'till_end')
seqs, inds = self.suggestions.get_seqs_inds()
print '...generateAlternatives, # of items', self.suggestions.num_items
self.emit('updateChordSuggestions', seqs, inds)#, self.suggestions.types)
seqs, inds = self.suggestions_above.get_seqs_inds()
print '...generateAlternatives, # of above items', self.suggestions_above.num_items
print seqs
print inds
self.emit('updateChordSuggestionsAbove', seqs, inds)#, self.suggestions.types)
def generate_continuations(self, sym, ind, original_seq):
postfix_len = 4
seqs = []
seq_inds = []
for i in range(2, postfix_len):
fixed = {ind:sym}
fixed[ind+i] = 'I'
seq, inds = \
shortest_path(self.model, fixed, ind, original_seq)
seqs.append(seq)
seq_inds.append(inds)
return seqs, seq_inds
def on_generate_complete_seq(self, seq=None):
if seq is None:
seq = self.ngram.gen_seq(CHORD_LEN)
seq, seq_original = self.format_seq(seq)
# seq = [u'C', u'F', u'D', u'G', u'C', u'C', u'F']
seq_str = ' '.join(seq)
print 'seq_str', seq_str
query = QueryObject(dict(text=seq_str, author="machine",
actionAuthor="machine",
actionKind="start_complete_seq"))
self.index_user_action(query)
self.emit('set_seq', seq_str)
def generate_next(self, sym, seq_ind, original_seq):
trans = self.model.trans
syms = self.syms
if sym not in syms:
return None, None
sym_ind = syms.index(sym)
n_conts = self.n_suggestions
inds = np.argsort(-trans[sym_ind, :])[:n_conts]
# unformatted_syms = [syms[ind] for ind in inds]
# print 'unformatted_syms', unformatted_syms
# formatted_subs = []
# for ind in inds:
# formatted_sub = self.format_sym(syms[ind])
# print syms[ind], formatted_sub
# if formatted_sub is not None:
# formatted_subs.append(ind)
# print len(inds), len(formatted_subs)
# subs = [original_seq[:] + [formatted_subs[i]] for i in range(len(inds))]
# subs = [original_seq[:] + [syms[ind]] for ind in inds]
subs = [original_seq[:seq_ind+1] + [syms[ind]] + original_seq[seq_ind+2:] for ind in inds]
# print 'generate_next', subs
return subs, [[seq_ind+1]]*n_conts
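    # Hedged sketch: with sym='F' at seq_ind=1 in ['C', 'F', 'G'], this returns
    # five variants of the sequence whose index 2 is replaced by the most
    # probable successors of 'F' under the n-gram transition matrix.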
def generate_ripples(self, original_seq, sym_ind, sims, win_max=2,
all_sims=False):
print '...generate_ripples', sims
seq_subs = []
seq_inds = []
for win in range(1, win_max):
ub = sym_ind + win
lb = sym_ind - win
# allow one extra seq step
lb_out_bound = lb < -1
ub_out_bound = ub > len(original_seq)
            # window already ran past the sequence bounds; stop widening
if lb_out_bound or ub_out_bound:
break
ub += 1
lb -= 1
if lb < 0:
lb = 0
if ub > len(original_seq):
ub = len(original_seq)
for j, s in enumerate(sims):
fixed = {}
for idx in range(lb, ub+1):
if idx < len(original_seq):
fixed[idx] = original_seq[idx]
# may override lb or ub
fixed[sym_ind] = s
print fixed
# hack for if sequence comes in with empty trailing
# spaces that causes one to think that the sequence is longer
max_ind = np.max(fixed.keys())
# if last two index is empty than take out the max
if len(fixed[max_ind]) == 0 and max_ind-1 in fixed.keys() \
and len(fixed[max_ind-1]) == 0:
del fixed[max_ind]
if not all_sims:
sub_seq, sym_inds = \
shortest_path(self.model, fixed, sym_ind, original_seq)
else:
sub_seq, sym_inds = \
shortest_path(self.model, fixed, sym_ind,
original_seq, self.nn)
seq_subs.append(sub_seq)
seq_inds.append(list(sym_inds))
for seq in seq_subs:
print seq
return seq_subs, seq_inds
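    # Hedged note on the window math above: for win=1 the pinned region spans
    # indices [sym_ind-2, sym_ind+2] (clamped to the sequence), with the
    # substitution overriding position sym_ind before shortest_path re-derives
    # the connecting chords.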
def generate_side_ripples(self, sym_ind, original_seq, factor=1, win_max=2):
print '...generate_side_ripples', sym_ind, 'len(original_seq)', len(original_seq)
# because if factor not equal to one for now will cause the sub, sim attribution to be incorrect
# even though the context rank will probably reveal which one it is, and is more important anyway
assert factor == 1
# original_sym = original_seq[sym_ind]
# left side
left_subs = None
if sym_ind > 0:
left_ind = sym_ind - 1
left_sorted_syms, left_subs = self.generate_singleton_subs(left_ind, original_seq, factor)
# right side
right_subs = None
if sym_ind < len(original_seq) - 1:
right_ind = sym_ind + 1
right_sorted_syms, right_subs = self.generate_singleton_subs(right_ind, original_seq, factor)
if left_subs is None and right_subs is None:
print 'no side ripple yet'
return []
print 'left subs', left_subs
print 'right subs', right_subs
seqs = []
inds = []
n = 0
# choose the smaller non-zero one
if left_subs is None:
left_n = 0
else:
left_n = len(left_subs)
n = left_n
if right_subs is None:
right_n = 0
else:
right_n = len(right_subs)
n = right_n
if right_n > left_n > 0:
n = left_n
print left_n, right_n, n
for i in range(n):
if left_subs is not None and right_subs is not None:
seq = original_seq[:left_ind] + [left_subs[i]] + \
[original_seq[sym_ind]] + [right_subs[i]]
if right_ind + 1 < len(original_seq):
seq += original_seq[right_ind+1:]
inds.append([left_ind, right_ind])
elif left_subs is None:
seq = original_seq[:right_ind] + [right_subs[i]]
if right_ind + 1 < len(original_seq):
seq += original_seq[right_ind+1:]
inds.append([right_ind])
elif right_subs is None:
seq = original_seq[:left_ind] + [left_subs[i]] + \
original_seq[sym_ind:]
inds.append([left_ind])
else:
assert False, 'ERROR: case not considered'
seqs.append(seq)
tags = None
if left_subs is not None:
tags = self.make_single_sub_suggestion_items(left_ind, original_seq,
left_subs, left_sorted_syms,
return_tags_only=True)
right_tags = None
if right_subs is not None:
right_tags = self.make_single_sub_suggestion_items(right_ind, original_seq,
right_subs, right_sorted_syms,
return_tags_only=True)
# merge the tags
if tags is not None and right_tags is not None:
for i in range(n):
key = 'context_rank'
tag = tags[i]
other_tag = right_tags[i]
                if key in tag and key in other_tag:
tag[key] = [tag[key], other_tag[key]]
elif key in other_tag:
tag[key] = [other_tag[key]]
print 'tags'
elif tags is None:
tags = right_tags
suggestion_items = []
for i, seq in enumerate(seqs):
print 'seq:', seq
print 'inds', inds[i]
print tags[i]
item = SuggestionItem(seq, inds[i], 'side_ripple', tags[i])
suggestion_items.append(item)
return suggestion_items
# TODO: more side options
def generate_sides(self, original_seq, sym_ind, sims, win_max=2):
seq_subs = []
seq_inds = []
for win in range(1, win_max):
ub = sym_ind + win
lb = sym_ind - win
# allow one extra seq step
lb_out_bound = lb < -1
ub_out_bound = ub > len(original_seq)
            # window already ran past the sequence bounds; stop widening
if lb_out_bound or ub_out_bound:
break
if lb < 0:
lb = 0
if ub > len(original_seq):
ub = len(original_seq)
for j, s in enumerate(sims):
fixed = {}
if ub < len(original_seq):
fixed[ub] = original_seq[ub]
else:
fixed[ub] = []
fixed[lb] = original_seq[lb]
# may override lb or ub
fixed[sym_ind] = s
print fixed
sub_seq, sym_inds = \
shortest_path(self.model, fixed, sym_ind, original_seq)
seq_subs.append(sub_seq)
seq_inds.append(list(sym_inds))
return seq_subs, seq_inds
# def generate_more_change_alternatives(self, text, pos):
# sym, sym_ind = self.get_sym_at_pos(text, pos, return_ind=True)
# if sym is None:
# return
# original_seq, raw_original_seq = self.parse_seq_as_syms(text)
# win_max = 2
#
# sims = self.get_similar_chords(sym, 3)
# sims.insert(0, sym)
# if sims is None:
# return
# return self.generate_ripples(raw_original_seq, sym_ind, sims, win_max)
def on_next(self, experiment_count):
print '--- on_next', experiment_count
# experiment_count starts at 1
self.clear_suggestions()
# emit rating questions
self.questions = {0: [['1. The tool helped me explore a wider range of chords.']],
1: [['2. Ripples made it easier to adopt difficult chords.']],
2: [["3. I'm happy with the chord progressions I came up with."]]}
self.questions = {0: [], 1:[], 2:[]}
# "How did you use ripples?"
experiment_idx = experiment_count - 1
if experiment_idx < len(self.ordering):
which_experiment = int(self.ordering[experiment_idx])
print "self.ordering", self.ordering
print "experiment_idx", experiment_idx
print "which experiment ind", which_experiment
print 'which_experiment', EXPERIMENT_TYPE_STRS[which_experiment]
self.experiment_type = which_experiment
self.emit('survey', self.questions[which_experiment])
# use the raw experiment count, since one seq for tutorial
self.on_generate_complete_seq(self.init_seqs[experiment_count])
else:
end_msg = ["This is the end of the experiment. Thank you for participating! We really appreciate it and we hope you had some fun too!"]
self.emit('survey', end_msg)
def clear_suggestions(self):
print '... clear_suggestions ...'
self.previous_sym = None
self.previous_sym_ind = None
self.suggestions.clear()
self.suggestions_above.clear()
self.emit('updateChordSuggestions', [], [], [])
self.emit('updateChordSuggestionsAbove', [], [], [])
return
def retrieve_suggestion_item_as_attrs(self, query):
attrs = {}
if query.author == 'machine':
ind = query.itemIdx
print 'suggestions', query.panelId, query.itemIdx, \
self.suggestions_above.num_items, self.suggestions.num_items
if 'above' in query.panelId:
item = self.suggestions_above.retrieve_item_at(ind, query)
else:
item = self.suggestions.retrieve_item_at(ind, query)
# for debugging
if item is None:
print 'Error: can not retrieve suggestion item', \
query.activeIdx, query.seqStr
assert False
attrs = dict(suggestionItem=item)
return attrs
def getLastNonEmptyIdx(self, seq):
pass
# def on_textChange(self, text, pos, kind, author, ind, suggestPanel=None, play=True):
def on_textChange(self, original_query):
print '--- on_textChange ---' #, original_query
print 'suggestion lists lengths:', self.suggestions_above.num_items, \
self.suggestions.num_items
query = QueryObject(original_query)
if len(query.seqStr) == 0:
# TODO: why need to emit playSubseq, to set somethings to empty?
self.emit("playSubseq", [])
self.clear_suggestions()
return
# can't clear suggestions here because need to check use against the suggestion shortly after
# can have two kinds of use
# - use the machine-authored chord recommendation
# - use the user's previous chord sequences
attrs = None
if query.actionKind == 'use' and query.author == 'machine':
assert query.panelId is not None
attrs = self.retrieve_suggestion_item_as_attrs(query)
self.index_user_action(query, self.suggestions,
self.suggestions_above, attrs)
# self.logs.add(query.actionKind, query.seqStr, tags)
        # simple history, which actually may be quite sufficient;
        # it just doesn't distinguish author edits from uses of machine suggestions
self.history.append(query.seqStr)
# if query.seqStr is None or not isinstance(query.seqStr, unicode):
if query.seqStr is None:
print "WARNING: chord symbol not valid", query.seqStr
return
if query.sym is None:
print "WARNING: no chord symbol at position"
return
if query.activeIdx is None:
if query.actionKind == 'use':
# don't log this extra system playback event
log = False
notes = self.on_playSubseq(original_query, log)
query.play = False
else:
return
else:
# play activeIdx
print 'query.seq', query.seq
chords = self.make_note_seqs(query.seq)
print 'chords', chords, query.activeIdx
notes = chords[query.activeIdx]
print 'notes', notes
# for entire sequence
# midi_notes = self.make_proxy_midi_notes_from_query(query)
# TODO: would there be a case where activeIdx is not None and we do not want to play
if query.play:
# if there's a textChange and is by machine,
# then it's the automatic playback when user chooses to use a suggestion
if query.author == 'machine':
assert query.chordSeqsAndFormat != None
# don't log this extra system playback event
log = False
self.on_playSubseq(original_query, log)
else:
# this is the playback single chord case
# i.e. for example if typing in chords
if query.durations is not None:
durs = [query.durations[query.activeIdx]]
else:
durs = [self.unit_dur]
print 'durs', durs
midi_notes = self.make_proxy_midi_notes(notes, durs)
print 'midi_notes', midi_notes
self.emit("playSubseq", midi_notes, original_query)
self.clear_suggestions()
if len(notes) > 0:
text_previous = ''
if len(self.history) > 1:
text_previous = self.remove_extra_spacing(self.history[-2])
self.emit("updateHistory", text_previous, query.seqStr)
self.on_generateAlternatives(query)
@staticmethod
def remove_extra_spacing(text):
parts = text.strip().split(' ')
non_space_parts = []
for part in parts:
non_space_part = part.strip()
if len(non_space_part) > 0:
non_space_parts.append(non_space_part)
return ' '.join(non_space_parts)
# get the symbol index when given the left side pos of the sym
def get_sym_ind_from_left(self, text, left):
count_syms_before = 0
p_is_space = False
for i in range(left+1):
if text[i] == ' ' and i and not p_is_space:
count_syms_before += 1
p_is_space = True
elif text[i] != ' ':
p_is_space = False
if not left:
count_syms_before = 0
return count_syms_before
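    # Small worked example: get_sym_ind_from_left('C F G', 2) counts one
    # space-to-symbol boundary before position 2 and returns 1 -- the index
    # of 'F'.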
# for alternatives, get symbol that's currently being edited
def get_sym_at_pos(self, text, pos, return_ind=False):
# for alternatives, get symbol that's currently being edited
# symbol that is to the left of the cursor
if len(text) == 0:
return None, None
right = pos-1
if right < 0:
right = 0
right_iter_start = right
if len(text)-1 <= right_iter_start:
right_iter_start = len(text)-1
for i in range(right_iter_start, len(text)):
if text[i] == ' ':
right = i
break
if pos > 0:
left = pos-2
if left < 0:
left = 0
else:
left = 0
left_iter_start = pos - 2
if len(text)-1 <= left_iter_start:
left_iter_start = len(text) - 1
for i in range(left_iter_start, -1, -1):
if text[i] == ' ':
left = i + 1
break
# TODO: hacky
if i == 0:
left = 0
sym_ind = self.get_sym_ind_from_left(text, left)
sym = text[left:right+1]
sym = sym.strip()
if len(sym) == 0:
return None, None
if self.symbol_type == 'roman':
sym = self.back_to_roman(sym)
if return_ind is True:
return sym, sym_ind
else:
return sym
# for playback, gets symbol before the space
def get_sym_before_pos(self, text, pos, return_ind=False):
# minus 1 because the caret pos is to right of a space
right = pos - 1
print 'right', right,
right_char_ind = right
for i in range(right, -1, -1):
if text[i] != ' ':
right_char_ind = i
break
right = right_char_ind
print right
# find the left boundary
left = 0
for i in range(right, -1, -1):
if text[i] == ' ':
left = i
break
print 'left', left
sym_ind = self.get_sym_ind_from_left(text, left)
print 'sym_ind', sym_ind
sym = text[left:right+1]
sym = sym.strip()
# sym = self.back_to_roman(sym)
if return_ind is True:
return sym, sym_ind
else:
return sym
| mit | 7,431,477,055,743,939,000 | 35.752131 | 149 | 0.532678 | false | 3.830702 | false | false | false |
hanul93/kicomav | Engine/plugins/ishield.py | 1 | 7201 | # -*- coding:utf-8 -*-
# Author: Kei Choi(hanul93@gmail.com)
import struct
import zlib
import os
import py7zlib
import zipfile
import kernel
import kavutil
# ---------------------------------------------------------------------
# InstallShield class
# ---------------------------------------------------------------------
class InstallShield:
def __init__(self, fname):
self.fname = fname
self.fp = None
self.fsize = 0
self.install_name = []
def __del__(self):
if self.fp:
self.close()
def close(self):
if self.fp:
self.fp.close()
self.fp = None
def parse(self):
try:
self.fp = open(self.fname, 'rb')
self.fsize = os.fstat(self.fp.fileno()).st_size
cur_pos = 0
            # check the magic signature
if self.fp.read(0xe) != 'InstallShield\x00':
raise ValueError
cur_pos += 0xe
            # number of files embedded in the InstallShield archive
data = self.fp.read(0x20)
num_file = kavutil.get_uint32(data, 0)
cur_pos += 0x20
for i in range(num_file):
data = self.fp.read(0x138)
fname = data[:0x10b].replace('\x00', '')
fsize = kavutil.get_uint32(data, 0x10c)
foff = cur_pos + 0x138
self.install_name.append((foff, fsize, fname))
cur_pos += 0x138 + fsize
self.fp.seek(cur_pos)
return True
except (IOError, OSError, ValueError) as e:
pass
return False
def namelist(self):
flist = []
for f in self.install_name:
flist.append(f[2])
return flist
def read(self, fname):
for f in self.install_name:
if f[2] == fname:
foff = f[0]
fsize = f[1]
if self.fp:
self.fp.seek(foff)
data = self.fp.read(fsize)
return data
return None
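# Minimal usage sketch for the container above (hypothetical file name):
#   arc = InstallShield('sample_setup.exe')
#   if arc.parse():
#       for name in arc.namelist():
#           data = arc.read(name)  # raw bytes of the embedded file
#   arc.close()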
# -------------------------------------------------------------------------
# KavMain class
# -------------------------------------------------------------------------
class KavMain:
# ---------------------------------------------------------------------
# init(self, plugins_path)
    # Initializes the plugin engine.
    # Input  : plugins_path - location of the plugin engine
    #          verbose - debug mode (True or False)
    # Return : 0 - success, nonzero - failure
# ---------------------------------------------------------------------
    def init(self, plugins_path, verbose=False):  # initialize the plugin engine
        self.handle = {}  # handles of opened archive files
        return 0  # initialization succeeded
# ---------------------------------------------------------------------
# uninit(self)
    # Shuts down the plugin engine.
    # Return : 0 - success, nonzero - failure
# ---------------------------------------------------------------------
    def uninit(self):  # shut down the plugin engine
        return 0  # shutdown succeeded
# ---------------------------------------------------------------------
# getinfo(self)
    # Reports the plugin engine's key information (author, version, ...).
    # Return : plugin engine information
# ---------------------------------------------------------------------
    def getinfo(self):  # key information about the plugin engine
        info = dict()  # declare the info dict
        info['author'] = 'Kei Choi'  # author
        info['version'] = '1.0'  # version
        info['title'] = 'InstallShield Engine'  # engine description
        info['kmd_name'] = 'ishield'  # engine file name
return info
# ---------------------------------------------------------------------
# __get_handle(self, filename)
    # Gets a handle to an archive file.
    # Input  : filename - file name
    # Return : archive file handle
# ---------------------------------------------------------------------
def __get_handle(self, filename):
        if filename in self.handle:  # does a previously opened handle exist?
zfile = self.handle.get(filename, None)
else:
            zfile = InstallShield(filename)  # open the InstallShield file
self.handle[filename] = zfile
return zfile
# ---------------------------------------------------------------------
# format(self, filehandle, filename, filename_ex)
    # Analyzes the file format.
    # Input  : filehandle - file handle
    #          filename - file name
    #          filename_ex - name of the file inside an archive
    # Return : {file format analysis info} or None
# ---------------------------------------------------------------------
def format(self, filehandle, filename, filename_ex):
ret = {}
mm = filehandle
data = mm[0:0xe]
        if data == 'InstallShield\x00':  # header check
ret['ff_installshield'] = 'InstallShield'
return ret
return None
# ---------------------------------------------------------------------
# arclist(self, filename, fileformat)
    # Gets the list of files inside the archive.
    # Input  : filename - file name
    #          fileformat - file format analysis info
    # Return : [[archive engine ID, compressed file name]]
# ---------------------------------------------------------------------
def arclist(self, filename, fileformat):
        file_scan_list = []  # holds all the scan-target info
        # does the pre-analyzed file format include the InstallShield format?
if 'ff_installshield' in fileformat:
zfile = self.__get_handle(filename)
if zfile.parse():
for name in zfile.namelist():
file_scan_list.append(['arc_installshield', name])
return file_scan_list
# ---------------------------------------------------------------------
# unarc(self, arc_engine_id, arc_name, fname_in_arc)
    # Input  : arc_engine_id - archive engine ID
    #          arc_name - archive file
    #          fname_in_arc - name of the file to extract
    # Return : extracted content or None
# ---------------------------------------------------------------------
def unarc(self, arc_engine_id, arc_name, fname_in_arc):
if arc_engine_id == 'arc_installshield':
zfile = self.__get_handle(arc_name)
data = zfile.read(fname_in_arc)
return data
return None
# ---------------------------------------------------------------------
# arcclose(self)
    # Closes the archive file handles.
# ---------------------------------------------------------------------
def arcclose(self):
for fname in self.handle.keys():
zfile = self.handle[fname]
zfile.close()
self.handle.pop(fname)
| gpl-2.0 | 7,359,592,932,144,410,000 | 30.199029 | 75 | 0.391318 | false | 2.751284 | false | false | false |
proneetv/catalyst | setup.py | 1 | 3989 | #!/usr/bin/python2 -OO
# Copyright (C) 2013 W. Trevor King <wking@tremily.us>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Catalyst is a release building tool used by Gentoo Linux"""
# py2.6 compatibility
from __future__ import print_function
import codecs as _codecs
from distutils.core import setup as _setup, Command as _Command
import os as _os
from catalyst import __version__
from catalyst.version import set_release_version as _set_release_version
from catalyst.version import get_version as _get_version
_this_dir = _os.path.dirname(__file__)
package_name = 'catalyst'
tag = '{0}-{1}'.format(package_name, __version__)
if _os.path.sep != '/':
raise NotImplementedError('Non-POSIX paths are not supported')
def files(root, target):
"""Iterate through all the file paths under `root`
Distutils wants all paths to be written in the Unix convention
(i.e. slash-separated) [1], so that's what we'll do here.
[1]: http://docs.python.org/2/distutils/setupscript.html#writing-the-setup-script
"""
for dirpath, dirnames, filenames in _os.walk(root):
key = _os.path.join(target, dirpath)
filepaths = [_os.path.join(dirpath, filename)
for filename in filenames]
yield (key, filepaths)
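# Illustrative example (hypothetical paths): files('livecd', 'lib/catalyst/')
# yields pairs such as ('lib/catalyst/livecd/files', ['livecd/files/README']),
# exactly the (target_dir, [source_paths]) shape that distutils' data_files
# argument expects.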
_data_files = [('/etc/catalyst', ['etc/catalyst.conf','etc/catalystrc']),
('/usr/share/man/man1', ['files/catalyst.1']),
('/usr/share/man/man5', ['files/catalyst-config.5', 'files/catalyst-spec.5'])
]
_data_files.extend(files('livecd', 'lib/catalyst/'))
_data_files.extend(files('targets', 'lib/catalyst/'))
class set_version(_Command):
'''Saves the specified release version information
'''
description = "hardcode script's version using VERSION from environment"
user_options = [] # [(long_name, short_name, desc),]
def initialize_options (self):
pass
def finalize_options (self):
pass
    def run(self):
        # declare the global inside the method so the assignment below
        # actually updates the module-level version
        global __version__
        try:
version = _os.environ['VERSION']
except KeyError:
print("Try setting 'VERSION=x.y.z' on the command line... Aborting")
return
_set_release_version(version)
__version__ = _get_version()
print("Version set to:\n", __version__)
_setup(
name=package_name,
version=__version__,
maintainer='Gentoo Release Engineering',
maintainer_email='releng@gentoo.org',
url='http://www.gentoo.org/proj/en/releng/{0}/'.format(package_name),
download_url='http://git.overlays.gentoo.org/gitweb/?p=proj/{0}.git;a=snapshot;h={1};sf=tgz'.format(package_name, tag),
license='GNU General Public License (GPL)',
platforms=['all'],
description=__doc__,
long_description=_codecs.open(
_os.path.join(_this_dir, 'README'), 'r', 'utf-8').read(),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+)',
'Intended Audience :: System Administrators',
'Operating System :: POSIX',
'Topic :: System :: Archiving :: Packaging',
'Topic :: System :: Installation/Setup',
'Topic :: System :: Software Distribution',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
scripts=['bin/{0}'.format(package_name)],
packages=[
package_name,
'{0}.arch'.format(package_name),
'{0}.base'.format(package_name),
'{0}.targets'.format(package_name),
],
data_files=_data_files,
provides=[package_name],
cmdclass={
'set_version': set_version
},
)
| gpl-2.0 | -4,040,213,861,178,809,300 | 31.430894 | 120 | 0.69817 | false | 3.227346 | false | false | false |
glemaitre/UnbalancedDataset | imblearn/combine/smote_tomek.py | 2 | 6650 | """Class to perform over-sampling using SMOTE and cleaning using Tomek
links."""
# Authors: Guillaume Lemaitre <g.lemaitre58@gmail.com>
# Christos Aridas
# License: MIT
from __future__ import division
import logging
import warnings
from sklearn.utils import check_X_y
from ..base import SamplerMixin
from ..over_sampling import SMOTE
from ..under_sampling import TomekLinks
from ..utils import check_target_type, hash_X_y
class SMOTETomek(SamplerMixin):
"""Class to perform over-sampling using SMOTE and cleaning using
Tomek links.
Combine over- and under-sampling using SMOTE and Tomek links.
Read more in the :ref:`User Guide <combine>`.
Parameters
----------
ratio : str, dict, or callable, optional (default='auto')
Ratio to use for resampling the data set.
- If ``str``, has to be one of: (i) ``'minority'``: resample the
minority class; (ii) ``'majority'``: resample the majority class,
(iii) ``'not minority'``: resample all classes apart of the minority
class, (iv) ``'all'``: resample all classes, and (v) ``'auto'``:
correspond to ``'all'`` with for over-sampling methods and ``'not
minority'`` for under-sampling methods. The classes targeted will be
over-sampled or under-sampled to achieve an equal number of sample
with the majority or minority class.
- If ``dict``, the keys correspond to the targeted classes. The values
correspond to the desired number of samples.
- If callable, function taking ``y`` and returns a ``dict``. The keys
correspond to the targeted classes. The values correspond to the
desired number of samples.
random_state : int, RandomState instance or None, optional (default=None)
If int, ``random_state`` is the seed used by the random number
generator; If ``RandomState`` instance, random_state is the random
number generator; If ``None``, the random number generator is the
``RandomState`` instance used by ``np.random``.
smote : object, optional (default=SMOTE())
The :class:`imblearn.over_sampling.SMOTE` object to use. If not given,
a :class:`imblearn.over_sampling.SMOTE` object with default parameters
will be given.
    tomek : object, optional (default=TomekLinks())
        The :class:`imblearn.under_sampling.TomekLinks` object to use. If not
        given, a :class:`imblearn.under_sampling.TomekLinks` object with
        default parameters will be given.
Notes
-----
    The method is presented in [1]_.
    Supports multi-class resampling. Refer to SMOTE and TomekLinks regarding
    the scheme which is used.
See :ref:`sphx_glr_auto_examples_combine_plot_smote_tomek.py` and
:ref:`sphx_glr_auto_examples_combine_plot_comparison_combine.py`.
See also
--------
SMOTEENN : Over-sample using SMOTE followed by under-sampling using Edited
Nearest Neighbours.
References
----------
.. [1] G. Batista, B. Bazzan, M. Monard, "Balancing Training Data for
Automated Annotation of Keywords: a Case Study," In WOB, 10-18, 2003.
Examples
--------
>>> from collections import Counter
>>> from sklearn.datasets import make_classification
>>> from imblearn.combine import \
SMOTETomek # doctest: +NORMALIZE_WHITESPACE
>>> X, y = make_classification(n_classes=2, class_sep=2,
... weights=[0.1, 0.9], n_informative=3, n_redundant=1, flip_y=0,
... n_features=20, n_clusters_per_class=1, n_samples=1000, random_state=10)
>>> print('Original dataset shape {}'.format(Counter(y)))
Original dataset shape Counter({1: 900, 0: 100})
>>> smt = SMOTETomek(random_state=42)
>>> X_res, y_res = smt.fit_sample(X, y)
>>> print('Resampled dataset shape {}'.format(Counter(y_res)))
Resampled dataset shape Counter({0: 900, 1: 900})
"""
def __init__(self,
ratio='auto',
random_state=None,
smote=None,
tomek=None):
super(SMOTETomek, self).__init__()
self.ratio = ratio
self.random_state = random_state
self.smote = smote
self.tomek = tomek
self.logger = logging.getLogger(__name__)
def _validate_estimator(self):
"Private function to validate SMOTE and ENN objects"
if self.smote is not None:
if isinstance(self.smote, SMOTE):
self.smote_ = self.smote
else:
                raise ValueError('smote needs to be a SMOTE object. '
                                 'Got {} instead.'.format(type(self.smote)))
# Otherwise create a default SMOTE
else:
self.smote_ = SMOTE(
ratio=self.ratio, random_state=self.random_state)
if self.tomek is not None:
if isinstance(self.tomek, TomekLinks):
self.tomek_ = self.tomek
else:
                raise ValueError('tomek needs to be a TomekLinks object. '
                                 'Got {} instead.'.format(type(self.tomek)))
# Otherwise create a default TomekLinks
else:
self.tomek_ = TomekLinks(ratio='all')
def fit(self, X, y):
"""Find the classes statistics before to perform sampling.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Matrix containing the data which have to be sampled.
y : array-like, shape (n_samples,)
Corresponding label for each sample in X.
Returns
-------
self : object,
Return self.
"""
X, y = check_X_y(X, y, accept_sparse=['csr', 'csc'])
y = check_target_type(y)
self.ratio_ = self.ratio
self.X_hash_, self.y_hash_ = hash_X_y(X, y)
return self
def _sample(self, X, y):
"""Resample the dataset.
Parameters
----------
X : {array-like, sparse matrix}, shape (n_samples, n_features)
Matrix containing the data which have to be sampled.
y : array-like, shape (n_samples,)
Corresponding label for each sample in X.
Returns
-------
X_resampled : {ndarray, sparse matrix}, shape \
(n_samples_new, n_features)
The array containing the resampled data.
y_resampled : ndarray, shape (n_samples_new,)
The corresponding label of `X_resampled`
"""
self._validate_estimator()
X_res, y_res = self.smote_.fit_sample(X, y)
return self.tomek_.fit_sample(X_res, y_res)
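# Illustrative sketch (not part of the original module): pre-configured
# sub-estimators can be passed instead of the defaults built by
# _validate_estimator, e.g.
#   from imblearn.over_sampling import SMOTE
#   from imblearn.under_sampling import TomekLinks
#   smt = SMOTETomek(smote=SMOTE(ratio='minority', k_neighbors=3),
#                    tomek=TomekLinks(ratio='all'))
#   X_res, y_res = smt.fit_sample(X, y)
# The parameter values above are example settings only.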
| mit | -1,936,412,790,816,934,100 | 34.561497 | 79 | 0.606015 | false | 3.859547 | false | false | false |
rpiotti/glances | glances/plugins/glances_plugin.py | 11 | 24382 | # -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2015 Nicolargo <nicolas@nicolargo.com>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
I am your father...
...for all Glances plugins.
"""
# Import system libs
import json
from datetime import datetime
from operator import itemgetter
# Import Glances lib
from glances.core.glances_actions import GlancesActions
from glances.core.glances_globals import is_py3
from glances.core.glances_logging import logger
from glances.core.glances_logs import glances_logs
class GlancesPlugin(object):
"""Main class for Glances plugin."""
def __init__(self, args=None, items_history_list=None):
"""Init the plugin of plugins class."""
# Plugin name (= module name without glances_)
self.plugin_name = self.__class__.__module__[len('glances_'):]
# logger.debug("Init plugin %s" % self.plugin_name)
# Init the args
self.args = args
# Init the default alignement (for curses)
self._align = 'left'
# Init the input method
self._input_method = 'local'
self._short_system_name = None
# Init the stats list
self.stats = None
# Init the history list
self.items_history_list = items_history_list
self.stats_history = self.init_stats_history()
        # Init the limits dictionary
self._limits = dict()
# Init the actions
self.actions = GlancesActions()
# Init the views
self.views = dict()
def __repr__(self):
"""Return the raw stats."""
return self.stats
def __str__(self):
"""Return the human-readable stats."""
return str(self.stats)
def get_key(self):
"""Return the key of the list."""
return None
def add_item_history(self, key, value):
"""Add an new item (key, value) to the current history."""
try:
self.stats_history[key].append(value)
except KeyError:
self.stats_history[key] = [value]
def init_stats_history(self):
"""Init the stats history (dict of list)."""
ret = None
if self.args is not None and self.args.enable_history and self.get_items_history_list() is not None:
init_list = [i['name'] for i in self.get_items_history_list()]
logger.debug("Stats history activated for plugin {0} (items: {0})".format(
self.plugin_name, init_list))
ret = {}
return ret
def reset_stats_history(self):
"""Reset the stats history (dict of list)."""
if self.args is not None and self.args.enable_history and self.get_items_history_list() is not None:
reset_list = [i['name'] for i in self.get_items_history_list()]
logger.debug("Reset history for plugin {0} (items: {0})".format(
self.plugin_name, reset_list))
self.stats_history = {}
def update_stats_history(self, item_name=''):
"""Update stats history."""
if (self.stats and self.args is not None and
self.args.enable_history and
self.get_items_history_list() is not None):
self.add_item_history('date', datetime.now())
for i in self.get_items_history_list():
if isinstance(self.stats, list):
# Stats is a list of data
                    # Iterate through it (for example, iterate through network
                    # interfaces)
for l in self.stats:
self.add_item_history(
l[item_name] + '_' + i['name'], l[i['name']])
else:
# Stats is not a list
# Add the item to the history directly
self.add_item_history(i['name'], self.stats[i['name']])
def get_stats_history(self):
"""Return the stats history."""
return self.stats_history
def get_items_history_list(self):
"""Return the items history list."""
return self.items_history_list
@property
def input_method(self):
"""Get the input method."""
return self._input_method
@input_method.setter
def input_method(self, input_method):
"""Set the input method.
* local: system local grab (psutil or direct access)
* snmp: Client server mode via SNMP
* glances: Client server mode via Glances API
"""
self._input_method = input_method
@property
def short_system_name(self):
"""Get the short detected OS name (SNMP)."""
return self._short_system_name
@short_system_name.setter
def short_system_name(self, short_name):
"""Set the short detected OS name (SNMP)."""
self._short_system_name = short_name
def set_stats(self, input_stats):
"""Set the stats to input_stats."""
self.stats = input_stats
def get_stats_snmp(self, bulk=False, snmp_oid=None):
"""Update stats using SNMP.
If bulk=True, use a bulk request instead of a get request.
"""
snmp_oid = snmp_oid or {}
from glances.core.glances_snmp import GlancesSNMPClient
# Init the SNMP request
clientsnmp = GlancesSNMPClient(host=self.args.client,
port=self.args.snmp_port,
version=self.args.snmp_version,
community=self.args.snmp_community)
# Process the SNMP request
ret = {}
if bulk:
# Bulk request
snmpresult = clientsnmp.getbulk_by_oid(0, 10, *snmp_oid.values())
if len(snmp_oid) == 1:
# Bulk command for only one OID
# Note: key is the item indexed but the OID result
for item in snmpresult:
if item.keys()[0].startswith(snmp_oid.values()[0]):
ret[snmp_oid.keys()[0] + item.keys()
[0].split(snmp_oid.values()[0])[1]] = item.values()[0]
else:
# Build the internal dict with the SNMP result
# Note: key is the first item in the snmp_oid
index = 1
for item in snmpresult:
item_stats = {}
item_key = None
for key in list(snmp_oid.keys()):
oid = snmp_oid[key] + '.' + str(index)
if oid in item:
if item_key is None:
item_key = item[oid]
else:
item_stats[key] = item[oid]
if item_stats:
ret[item_key] = item_stats
index += 1
else:
# Simple get request
snmpresult = clientsnmp.get_by_oid(*snmp_oid.values())
# Build the internal dict with the SNMP result
for key in list(snmp_oid.keys()):
ret[key] = snmpresult[snmp_oid[key]]
return ret
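    # Illustrative sketch (assumed OIDs, not part of the original module):
    # a plugin calls the method above with a name -> OID mapping, e.g.
    #   stats = self.get_stats_snmp(
    #       snmp_oid={'total': '1.3.6.1.4.1.2021.4.5.0',
    #                 'free': '1.3.6.1.4.1.2021.4.6.0'})
    # and a simple get request returns {'total': ..., 'free': ...}.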
def get_raw(self):
"""Return the stats object."""
return self.stats
def get_stats(self):
"""Return the stats object in JSON format."""
return json.dumps(self.stats)
def get_stats_item(self, item):
"""Return the stats object for a specific item in JSON format.
Stats should be a list of dict (processlist, network...)
"""
if isinstance(self.stats, dict):
try:
return json.dumps({item: self.stats[item]})
except KeyError as e:
logger.error("Cannot get item {0} ({1})".format(item, e))
return None
elif isinstance(self.stats, list):
try:
# Source:
# http://stackoverflow.com/questions/4573875/python-get-index-of-dictionary-item-in-list
return json.dumps({item: map(itemgetter(item), self.stats)})
except (KeyError, ValueError) as e:
logger.error("Cannot get item {0} ({1})".format(item, e))
return None
else:
return None
def get_stats_value(self, item, value):
"""Return the stats object for a specific item=value in JSON format.
Stats should be a list of dict (processlist, network...)
"""
if not isinstance(self.stats, list):
return None
else:
if value.isdigit():
value = int(value)
try:
return json.dumps({value: [i for i in self.stats if i[item] == value]})
except (KeyError, ValueError) as e:
logger.error(
"Cannot get item({0})=value({1}) ({2})".format(item, value, e))
return None
def update_views(self):
"""Default builder fo the stats views.
The V of MVC
A dict of dict with the needed information to display the stats.
Example for the stat xxx:
'xxx': {'decoration': 'DEFAULT',
'optional': False,
'additional': False,
'splittable': False}
"""
ret = {}
if (isinstance(self.get_raw(), list) and
self.get_raw() is not None and
self.get_key() is not None):
# Stats are stored in a list of dict (ex: NETWORK, FS...)
for i in self.get_raw():
ret[i[self.get_key()]] = {}
for key in i.keys():
value = {'decoration': 'DEFAULT',
'optional': False,
'additional': False,
'splittable': False}
ret[i[self.get_key()]][key] = value
elif isinstance(self.get_raw(), dict) and self.get_raw() is not None:
# Stats are stored in a dict (ex: CPU, LOAD...)
for key in self.get_raw().keys():
value = {'decoration': 'DEFAULT',
'optional': False,
'additional': False,
'splittable': False}
ret[key] = value
self.views = ret
return self.views
def set_views(self, input_views):
"""Set the views to input_views."""
self.views = input_views
def get_views(self, item=None, key=None, option=None):
"""Return the views object.
        If key is None, return all the views for the current plugin
        else if option is None return the views for the specific key (all options)
        else return the view for the specific key/option
Specify item if the stats are stored in a dict of dict (ex: NETWORK, FS...)
"""
if item is None:
item_views = self.views
else:
item_views = self.views[item]
if key is None:
return item_views
else:
if option is None:
return item_views[key]
else:
return item_views[key][option]
def load_limits(self, config):
"""Load limits from the configuration file, if it exists."""
if (hasattr(config, 'has_section') and
config.has_section(self.plugin_name)):
for level, _ in config.items(self.plugin_name):
# Read limits
limit = '_'.join([self.plugin_name, level])
try:
self._limits[limit] = config.get_float_value(self.plugin_name, level)
except ValueError:
self._limits[limit] = config.get_value(self.plugin_name, level).split(",")
logger.debug("Load limit: {0} = {1}".format(limit, self._limits[limit]))
@property
def limits(self):
"""Return the limits object."""
return self._limits
@limits.setter
def limits(self, input_limits):
"""Set the limits to input_limits."""
self._limits = input_limits
def get_alert(self, current=0, minimum=0, maximum=100, header="", log=False):
"""Return the alert status relative to a current value.
Use this function for minor stats.
If current < CAREFUL of max then alert = OK
If current > CAREFUL of max then alert = CAREFUL
If current > WARNING of max then alert = WARNING
If current > CRITICAL of max then alert = CRITICAL
        If defined, 'header' is added between the plugin name and the status.
        Only useful for stats with several alert statuses.
        If log=True then add a log entry if necessary
        elif log=False then do not log
        elif log=None then apply the config given in the conf file
"""
# Compute the %
try:
value = (current * 100) / maximum
except ZeroDivisionError:
return 'DEFAULT'
except TypeError:
return 'DEFAULT'
# Build the stat_name = plugin_name + header
if header == "":
stat_name = self.plugin_name
else:
stat_name = self.plugin_name + '_' + header
# Manage limits
ret = 'OK'
try:
if value > self.__get_limit('critical', stat_name=stat_name):
ret = 'CRITICAL'
elif value > self.__get_limit('warning', stat_name=stat_name):
ret = 'WARNING'
elif value > self.__get_limit('careful', stat_name=stat_name):
ret = 'CAREFUL'
elif current < minimum:
ret = 'CAREFUL'
except KeyError:
return 'DEFAULT'
# Manage log
log_str = ""
if self.__get_limit_log(stat_name=stat_name, default_action=log):
# Add _LOG to the return string
            # So stats will be highlighted with a specific color
log_str = "_LOG"
# Add the log to the list
glances_logs.add(ret, stat_name.upper(), value, [])
# Manage action
        # Is there a command line for the current trigger?
try:
command = self.__get_limit_action(ret.lower(), stat_name=stat_name)
except KeyError:
# Reset the trigger
self.actions.set(stat_name, ret.lower())
else:
# A command line is available for the current alert, run it
            # Build the {{mustache}} dictionary
if isinstance(self.stats, list):
                # If the stats are stored in a list of dict (fs plugin for example)
# Return the dict for the current header
mustache_dict = {}
for item in self.stats:
if item[self.get_key()] == header:
mustache_dict = item
break
else:
# Use the stats dict
mustache_dict = self.stats
# Run the action
self.actions.run(
stat_name, ret.lower(), command, mustache_dict=mustache_dict)
# Default is ok
return ret + log_str
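    # Illustrative sketch (assumed limits, not part of the original module):
    # with self._limits = {'cpu_careful': 50, 'cpu_warning': 70,
    # 'cpu_critical': 90}, get_alert(current=80, maximum=100) computes
    # value == 80 and returns 'WARNING' (80 > 70 but not > 90).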
def get_alert_log(self, current=0, minimum=0, maximum=100, header=""):
"""Get the alert log."""
return self.get_alert(current, minimum, maximum, header, log=True)
def __get_limit(self, criticity, stat_name=""):
"""Return the limit value for the alert."""
# Get the limit for stat + header
        # Example: network_wlan0_rx_careful
try:
limit = self._limits[stat_name + '_' + criticity]
except KeyError:
# Try fallback to plugin default limit
            # Example: network_careful
limit = self._limits[self.plugin_name + '_' + criticity]
# Return the limit
return limit
def __get_limit_action(self, criticity, stat_name=""):
"""Return the action for the alert."""
# Get the action for stat + header
        # Example: network_wlan0_rx_careful_action
try:
ret = self._limits[stat_name + '_' + criticity + '_action']
except KeyError:
# Try fallback to plugin default limit
            # Example: network_careful_action
ret = self._limits[self.plugin_name + '_' + criticity + '_action']
# Return the action list
return ret
def __get_limit_log(self, stat_name, default_action=False):
"""Return the log tag for the alert."""
# Get the log tag for stat + header
        # Example: network_wlan0_rx_log
try:
log_tag = self._limits[stat_name + '_log']
except KeyError:
# Try fallback to plugin default log
            # Example: network_log
try:
log_tag = self._limits[self.plugin_name + '_log']
except KeyError:
                # By default, logs are disabled
return default_action
# Return the action list
return log_tag[0].lower() == 'true'
def get_conf_value(self, value, header="", plugin_name=None):
"""Return the configuration (header_) value for the current plugin.
...or the one given by the plugin_name var.
"""
if plugin_name is None:
# If not default use the current plugin name
plugin_name = self.plugin_name
if header != "":
# Add the header
plugin_name = plugin_name + '_' + header
try:
return self._limits[plugin_name + '_' + value]
except KeyError:
return []
def is_hide(self, value, header=""):
"""Return True if the value is in the hide configuration list."""
return value in self.get_conf_value('hide', header=header)
def has_alias(self, header):
"""Return the alias name for the relative header or None if nonexist."""
try:
return self._limits[self.plugin_name + '_' + header + '_' + 'alias'][0]
except (KeyError, IndexError):
return None
def msg_curse(self, args=None, max_width=None):
"""Return default string to display in the curse interface."""
return [self.curse_add_line(str(self.stats))]
def get_stats_display(self, args=None, max_width=None):
"""Return a dict with all the information needed to display the stat.
key | description
----------------------------
display | Display the stat (True or False)
msgdict | Message to display (list of dict [{ 'msg': msg, 'decoration': decoration } ... ])
align | Message position
"""
display_curse = False
if hasattr(self, 'display_curse'):
display_curse = self.display_curse
if hasattr(self, 'align'):
align_curse = self._align
if max_width is not None:
ret = {'display': display_curse,
'msgdict': self.msg_curse(args, max_width=max_width),
'align': align_curse}
else:
ret = {'display': display_curse,
'msgdict': self.msg_curse(args),
'align': align_curse}
return ret
def curse_add_line(self, msg, decoration="DEFAULT",
optional=False, additional=False,
splittable=False):
"""Return a dict with.
Where:
msg: string
decoration:
DEFAULT: no decoration
UNDERLINE: underline
BOLD: bold
TITLE: for stat title
PROCESS: for process name
STATUS: for process status
NICE: for process niceness
CPU_TIME: for process cpu time
OK: Value is OK and non logged
OK_LOG: Value is OK and logged
CAREFUL: Value is CAREFUL and non logged
CAREFUL_LOG: Value is CAREFUL and logged
            WARNING: Value is WARNING and non logged
            WARNING_LOG: Value is WARNING and logged
CRITICAL: Value is CRITICAL and non logged
CRITICAL_LOG: Value is CRITICAL and logged
optional: True if the stat is optional (display only if space is available)
additional: True if the stat is additional (display only if space is available after optional)
        splittable: Line can be split to fit on the screen (default is not)
"""
return {'msg': msg, 'decoration': decoration, 'optional': optional, 'additional': additional, 'splittable': splittable}
def curse_new_line(self):
"""Go to a new line."""
return self.curse_add_line('\n')
@property
def align(self):
"""Get the curse align."""
return self._align
@align.setter
def align(self, value):
"""Set the curse align.
value: left, right, bottom.
"""
self._align = value
def auto_unit(self, number, low_precision=False):
"""Make a nice human-readable string out of number.
Number of decimal places increases as quantity approaches 1.
examples:
CASE: 613421788 RESULT: 585M low_precision: 585M
CASE: 5307033647 RESULT: 4.94G low_precision: 4.9G
CASE: 44968414685 RESULT: 41.9G low_precision: 41.9G
CASE: 838471403472 RESULT: 781G low_precision: 781G
CASE: 9683209690677 RESULT: 8.81T low_precision: 8.8T
CASE: 1073741824 RESULT: 1024M low_precision: 1024M
CASE: 1181116006 RESULT: 1.10G low_precision: 1.1G
'low_precision=True' returns less decimal places potentially
sacrificing precision for more readability.
"""
symbols = ('K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
prefix = {
'Y': 1208925819614629174706176,
'Z': 1180591620717411303424,
'E': 1152921504606846976,
'P': 1125899906842624,
'T': 1099511627776,
'G': 1073741824,
'M': 1048576,
'K': 1024
}
for symbol in reversed(symbols):
value = float(number) / prefix[symbol]
if value > 1:
decimal_precision = 0
if value < 10:
decimal_precision = 2
elif value < 100:
decimal_precision = 1
if low_precision:
if symbol in 'MK':
decimal_precision = 0
else:
decimal_precision = min(1, decimal_precision)
elif symbol in 'K':
decimal_precision = 0
return '{0:.{decimal}f}{symbol}'.format(
value, decimal=decimal_precision, symbol=symbol)
return '{0!s}'.format(number)
def _log_result_decorator(fct):
"""Log (DEBUG) the result of the function fct."""
def wrapper(*args, **kw):
ret = fct(*args, **kw)
if is_py3:
logger.debug("%s %s %s return %s" % (
args[0].__class__.__name__,
args[0].__class__.__module__[len('glances_'):],
fct.__name__, ret))
else:
logger.debug("%s %s %s return %s" % (
args[0].__class__.__name__,
args[0].__class__.__module__[len('glances_'):],
fct.func_name, ret))
return ret
return wrapper
# Mandatory to call the decorator in childs' classes
_log_result_decorator = staticmethod(_log_result_decorator)
| lgpl-3.0 | 6,693,510,422,843,767,000 | 35.886536 | 127 | 0.536748 | false | 4.272297 | false | false | false |
Noah1989/FlowSID | wire.py | 1 | 3951 | import pygame, graphics, palette
segment_width = 8
class Wire():
def __init__(self):
segment = Segment()
self.segments = [segment]
def set_start(self, (x, y)):
old_stop = self.segments[-1].rect.topright
self.segments[0].rect.left = x
self.segments[0].y = y
for segment in self.segments[1:]:
segment.rect.left = segment.left.rect.right
self.set_stop(old_stop)
def set_stop(self, (x, y)):
for segment in self.segments[1:]:
if segment.rect.right > x:
segment.left.right = None
segment.kill()
self.segments.remove(segment)
while self.segments[-1].rect.right < x:
segment = Segment()
segment.y = y
segment.rect.topleft = self.segments[-1].rect.right, y
segment.left = self.segments[-1]
segment.update_size()
self.segments[-1].right = segment
self.segments.append(segment)
segment.left.rect.width = segment_width
self.segments[-1].y = y
self.segments[-1].nexty = None
self.segments[-1].rect.top = y
self.segments[-1].rect.width = max(x - self.segments[-1].rect.left, 1)
self.segments[-1].rect.right = x
self.segments[-1].update_size()
graphics.wirelayer.add(self.segments)
class Segment(pygame.sprite.Sprite):
images = {}
thickness = 4
outline_color = palette.color(0x2a)
fill_color = palette.color(0x3f)
gravity = 0.1
spring_constant = 0.3
    friction = 0.5
mass = 0.8
def __init__(self):
pygame.sprite.Sprite.__init__(self)
self.y = 0
self.speed = 0
self.left = None
self.right = None
self.nexty = None
self.rect = pygame.Rect(0, 0, segment_width, 1)
self.update_size()
def update(self, frame):
if self.nexty:
self.y = self.nexty
self.update_size()
self.left.update_size()
if self.left and self.right:
force = self.gravity
force += self.spring_constant*(self.left.y + self.right.y - 2*self.y)
force -= self.friction*self.speed
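            # e.g. with neighbours at y=10 and y=14 and self.y=10, the spring
            # term is 0.3*(10 + 14 - 2*10) = 1.2, pulling this segment toward
            # the midpoint of its neighbours before damping is applied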
self.speed += force/self.mass
self.nexty = self.y + self.speed
if not self.right:
self.rect.top = self.y
if self.left:
self.left.update_size()
def update_size(self):
if self.right:
size = int(self.right.y - self.y)
else:
size = 0
height = abs(size) + self.thickness
width = self.rect.width
key = (width, size)
if not key in self.images:
image = pygame.Surface((width, height))
image.set_colorkey(graphics.transparent)
image.fill(graphics.transparent)
points = (0, 0), (width - 1, height - self.thickness)
for shift in range(self.thickness):
points1 = tuple((x, y + shift) for x, y in points)
if size < 0:
points1 = tuple((x, height - 1 - y) for x, y in points1)
if shift in (0, self.thickness-1):
color = self.outline_color
else:
color = self.fill_color
pygame.draw.lines(image, color, False, points1)
self.images[key] = image
self.image = self.images[key]
self.rect.height = height
if self.right:
if self.y < self.right.y:
self.rect.top = self.y
else:
self.rect.top = self.right.y
| bsd-2-clause | 808,133,483,931,288,800 | 32.201681 | 81 | 0.489749 | false | 4.007099 | false | false | false |
ulif/pulp | server/test/unit/server/managers/content/test_upload.py | 4 | 12095 | import errno
import os
import shutil
import unittest
import mock
from .... import base
from pulp.common import dateutils
from pulp.devel import mock_plugins
from pulp.plugins.conduits.upload import UploadConduit
from pulp.server.controllers import importer as importer_controller
from pulp.server.db import model
from pulp.server.exceptions import (MissingResource, PulpDataException, PulpExecutionException,
InvalidValue, PulpCodedException)
from pulp.server.managers.content.upload import ContentUploadManager
import pulp.server.managers.factory as manager_factory
class ContentUploadManagerTests(base.PulpServerTests):
def setUp(self):
base.PulpServerTests.setUp(self)
mock_plugins.install()
self.upload_manager = manager_factory.content_upload_manager()
def tearDown(self):
base.PulpServerTests.tearDown(self)
mock_plugins.reset()
upload_storage_dir = self.upload_manager._upload_storage_dir()
shutil.rmtree(upload_storage_dir)
def clean(self):
base.PulpServerTests.clean(self)
model.Repository.objects.delete()
model.Importer.objects.delete()
def test_save_data_string(self):
# Test
upload_id = self.upload_manager.initialize_upload()
write_us = ['abc', 'de', 'fghi', 'jkl']
offset = 0
for w in write_us:
self.upload_manager.save_data(upload_id, offset, w)
offset += len(w)
# Verify
uploaded_filename = self.upload_manager._upload_file_path(upload_id)
self.assertTrue(os.path.exists(uploaded_filename))
written = self.upload_manager.read_upload(upload_id)
self.assertEqual(written, ''.join(write_us))
def test_save_data_rpm(self):
# Setup
test_rpm_filename = os.path.abspath(os.path.dirname(__file__)) + \
'/../../../../data/pulp-test-package-0.3.1-1.fc11.x86_64.rpm'
self.assertTrue(os.path.exists(test_rpm_filename))
# Test
upload_id = self.upload_manager.initialize_upload()
f = open(test_rpm_filename)
offset = 0
chunk_size = 256
while True:
f.seek(offset)
data = f.read(chunk_size)
if data:
self.upload_manager.save_data(upload_id, offset, data)
else:
break
offset += chunk_size
f.close()
# Verify
uploaded_filename = self.upload_manager._upload_file_path(upload_id)
self.assertTrue(os.path.exists(uploaded_filename))
expected_size = os.path.getsize(test_rpm_filename)
found_size = os.path.getsize(uploaded_filename)
self.assertEqual(expected_size, found_size)
def test_save_no_init(self):
# Test
try:
self.upload_manager.save_data('foo', 0, 'bar')
self.fail('Expected exception')
except MissingResource, e:
self.assertEqual(e.resources['upload_request'], 'foo')
def test_delete_upload(self):
# Setup
upload_id = self.upload_manager.initialize_upload()
self.upload_manager.save_data(upload_id, 0, 'fus ro dah')
uploaded_filename = self.upload_manager._upload_file_path(upload_id)
self.assertTrue(os.path.exists(uploaded_filename))
# Test
self.upload_manager.delete_upload(upload_id)
# Verify
self.assertTrue(not os.path.exists(uploaded_filename))
def test_delete_non_existent_upload(self):
# Setup
upload_id = '1234'
uploaded_filename = self.upload_manager._upload_file_path(upload_id)
self.assertFalse(os.path.exists(uploaded_filename))
# Test
try:
self.upload_manager.delete_upload(upload_id)
except Exception:
self.fail('An Exception should not have been raised.')
def test_list_upload_ids(self):
# Test - Empty
ids = self.upload_manager.list_upload_ids()
self.assertEqual(0, len(ids))
# Test - Non-empty
id1 = self.upload_manager.initialize_upload()
id2 = self.upload_manager.initialize_upload()
ids = self.upload_manager.list_upload_ids()
self.assertEqual(2, len(ids))
self.assertTrue(id1 in ids)
self.assertTrue(id2 in ids)
# -- import functionality -------------------------------------------------
@mock.patch('pulp.server.controllers.importer.model.Repository.objects')
def test_is_valid_upload(self, mock_repo_qs):
importer_controller.set_importer('repo-u', 'mock-importer', {})
valid = self.upload_manager.is_valid_upload('repo-u', 'mock-type')
self.assertTrue(valid)
@mock.patch('pulp.server.controllers.importer.model.Repository.objects')
def test_is_valid_upload_missing_or_bad_repo(self, mock_repo_qs):
self.assertRaises(MissingResource, self.upload_manager.is_valid_upload, 'empty',
'mock-type')
self.assertRaises(MissingResource, self.upload_manager.is_valid_upload, 'fake', 'mock-type')
@mock.patch('pulp.server.controllers.importer.model.Repository.objects')
def test_is_valid_upload_unsupported_type(self, mock_repo_qs):
importer_controller.set_importer('repo-u', 'mock-importer', {})
# Test
self.assertRaises(PulpDataException, self.upload_manager.is_valid_upload, 'repo-u',
'fake-type')
@mock.patch('pulp.server.controllers.repository.rebuild_content_unit_counts')
@mock.patch('pulp.server.controllers.importer.model.Repository.objects')
def test_import_uploaded_unit(self, mock_repo_qs, mock_rebuild):
importer_controller.set_importer('repo-u', 'mock-importer', {})
key = {'key': 'value'}
metadata = {'k1': 'v1'}
timestamp_pre_upload = dateutils.now_utc_datetime_with_tzinfo()
mock_repo = mock_repo_qs.get_repo_or_missing_resource.return_value
importer_return_report = {'success_flag': True, 'summary': '', 'details': {}}
mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
upload_id = self.upload_manager.initialize_upload()
file_path = self.upload_manager._upload_file_path(upload_id)
fake_user = model.User('import-user', '')
manager_factory.principal_manager().set_principal(principal=fake_user)
response = self.upload_manager.import_uploaded_unit('repo-u', 'mock-type', key, metadata,
upload_id)
# import_uploaded_unit() should have returned our importer_return_report
self.assertTrue(response is importer_return_report)
call_args = mock_plugins.MOCK_IMPORTER.upload_unit.call_args[0]
self.assertTrue(call_args[0] is mock_repo.to_transfer_repo())
self.assertEqual(call_args[1], 'mock-type')
self.assertEqual(call_args[2], key)
self.assertEqual(call_args[3], metadata)
self.assertEqual(call_args[4], file_path)
conduit = call_args[5]
self.assertTrue(isinstance(conduit, UploadConduit))
self.assertEqual(call_args[5].repo_id, 'repo-u')
# It is now platform's responsibility to update plugin content unit counts
self.assertTrue(mock_rebuild.called, "rebuild_content_unit_counts must be called")
# Make sure that the last_unit_added timestamp was updated
self.assertTrue(mock_repo.last_unit_added > timestamp_pre_upload)
# Clean up
mock_plugins.MOCK_IMPORTER.upload_unit.return_value = None
manager_factory.principal_manager().set_principal(principal=None)
def test_import_uploaded_unit_missing_repo(self):
# Test
self.assertRaises(MissingResource, self.upload_manager.import_uploaded_unit, 'fake',
'mock-type', {}, {}, 'irrelevant')
@mock.patch('pulp.server.controllers.importer.model.Repository.objects')
def test_import_uploaded_unit_importer_error(self, mock_repo_qs):
importer_controller.set_importer('repo-u', 'mock-importer', {})
mock_plugins.MOCK_IMPORTER.upload_unit.side_effect = Exception()
upload_id = self.upload_manager.initialize_upload()
self.assertRaises(PulpExecutionException, self.upload_manager.import_uploaded_unit,
'repo-u', 'mock-type', {}, {}, upload_id)
@mock.patch('pulp.server.controllers.importer.model.Repository.objects')
def test_import_uploaded_unit_importer_error_reraise_pulp_exception(self, mock_repo_qs):
importer_controller.set_importer('repo-u', 'mock-importer', {})
mock_plugins.MOCK_IMPORTER.upload_unit.side_effect = InvalidValue(['filename'])
upload_id = self.upload_manager.initialize_upload()
self.assertRaises(InvalidValue, self.upload_manager.import_uploaded_unit, 'repo-u',
'mock-type', {}, {}, upload_id)
@mock.patch('pulp.server.controllers.importer.model.Repository.objects')
def test_import_uploaded_unit_success_flag_false(self, mock_repo_qs):
"""Test that exception is raised if upload report indicates failure."""
importer_controller.set_importer('repo-u', 'mock-importer', {})
importer_return_report = {'success_flag': False, 'summary': '', 'details': {}}
mock_plugins.MOCK_IMPORTER.upload_unit.side_effect = None
mock_plugins.MOCK_IMPORTER.upload_unit.return_value = importer_return_report
upload_id = self.upload_manager.initialize_upload()
with self.assertRaises(PulpCodedException) as cm:
self.upload_manager.import_uploaded_unit('repo-u', 'mock-type', {}, {}, upload_id)
self.assertEqual('PLP0047', cm.exception.error_code.code)
def test_upload_dir_auto_created(self):
# Setup
# Make sure it definitely doesn't exist before calling this
upload_storage_dir = self.upload_manager._upload_storage_dir()
shutil.rmtree(upload_storage_dir)
# Test
upload_storage_dir = self.upload_manager._upload_storage_dir()
# Verify
self.assertTrue(os.path.exists(upload_storage_dir))
class TestContentUploadManager(unittest.TestCase):
@mock.patch.object(ContentUploadManager, '_upload_file_path')
@mock.patch('pulp.server.managers.content.upload.os')
def test_delete_upload_removes_file(self, mock_os, mock__upload_file_path):
my_upload_id = 'asdf'
ContentUploadManager().delete_upload(my_upload_id)
mock__upload_file_path.assert_called_once_with(my_upload_id)
mock_os.remove.assert_called_once_with(mock__upload_file_path.return_value)
@mock.patch.object(ContentUploadManager, '_upload_file_path')
@mock.patch('pulp.server.managers.content.upload.os')
def test_delete_upload_silences_ENOENT_error(self, mock_os, mock__upload_file_path):
my_upload_id = 'asdf'
mock_os.remove.side_effect = OSError(errno.ENOENT, os.strerror(errno.ENOENT))
try:
ContentUploadManager().delete_upload(my_upload_id)
except Exception:
self.fail('An Exception should not have been raised.')
@mock.patch.object(ContentUploadManager, '_upload_file_path')
@mock.patch('pulp.server.managers.content.upload.os')
def test_delete_upload_allows_non_ENOENT_OSErrors_to_raise(self, mock_os,
mock__upload_file_path):
my_upload_id = 'asdf'
mock_os.remove.side_effect = OSError(errno.EISDIR, os.strerror(errno.EISDIR))
self.assertRaises(OSError, ContentUploadManager().delete_upload, my_upload_id)
@mock.patch.object(ContentUploadManager, '_upload_file_path')
@mock.patch('pulp.server.managers.content.upload.os')
def test_delete_upload_allows_non_OSErrors_to_raise(self, mock_os, mock__upload_file_path):
my_upload_id = 'asdf'
mock_os.remove.side_effect = ValueError()
self.assertRaises(ValueError, ContentUploadManager().delete_upload, my_upload_id)
| gpl-2.0 | -4,129,315,307,559,645,700 | 40.706897 | 100 | 0.651013 | false | 3.75388 | true | false | false |
threemeninaboat3247/kuchinawa | kuchinawa/File.py | 1 | 2267 | # -*- coding: utf-8 -*-
""" --- Description ---
Module:
File.py
Abstract:
A module for file output.
Modified:
threemeninaboat3247 2018/04/30
--- End ---
"""
# Standard library imports
import sys
# Third party library imports
import pandas as pd
class File():
'''
Implements a csv type file instance.
'''
def __init__(self,path,sep='\t',comment='#'):
super().__init__()
self.path=path
self.file=open(path,'a+')
self.sep=sep
self.comment=comment
self._before_writing=True
def close(self):
self.file.close()
def save(self):
self.close()
self.file=open(self.path,'a+')
def write_comment(self,string):
'''
Add a comment to the head of the file
Parameters
string: string
            A string to be added; a trailing newline is appended automatically.
Returns
None
'''
if self._before_writing:
self.file.write(self.comment+string+'\n')
self.save()
else:
raise Exception('Comments must be written before starting to write data')
def write_header(self,mylist):
'''
Set headers to the file.
Parameters
mylist: list
The header names.
Returns
None
'''
if self._before_writing:
self.columns=pd.DataFrame(columns=mylist)
self.columns.to_csv(self.file,sep=self.sep,index=False)
self._before_writing=False
self.save()
else:
raise Exception('Headers are already written')
def write_data(self,mydict):
'''
Write data to the file
Parameters
mydict: dictionary
The keys corresponds to the headers.
Returns
None
'''
if self._before_writing:
raise Exception('Headers has not been set yet.')
else:
row=self.columns.append(mydict,ignore_index=True)
row.to_csv(self.file,sep=self.sep,index=False,header=False)
self.save() | mit | 6,346,658,029,452,516,000 | 24.2 | 85 | 0.505514 | false | 4.561368 | false | false | false |
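# Illustrative usage sketch (hypothetical file name and columns, not part of
# the original module):
#   f = File('sweep.dat')
#   f.write_comment('temperature sweep at 1 mA')  # comments must come first
#   f.write_header(['T', 'R'])                    # then headers, exactly once
#   f.write_data({'T': 300.0, 'R': 1.23})         # then rows keyed by header
#   f.close()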
pdroalves/mongodb-secure | src/mongodb-secure/cipher/modules/orelewi/crypto_bkp/test.py | 1 | 2199 | #!/usr/bin/env python
##########################################################################
##########################################################################
#
# mongodb-secure
# Copyright (C) 2016, Pedro Alves and Diego Aranha
# {pedro.alves, dfaranha}@ic.unicamp.br
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
##########################################################################
##########################################################################
# import sys
# sys.setrecursionlimit(100000)
from index.binarytree import BinaryTree
from index.simplenode import SimpleNode
from index.encryptednode import EncryptedNode
from random import randint,shuffle
from crypto.ore import ORESMALL as ORE
N = 1000
elements = range(1,N)
shuffle(elements)
root = BinaryTree(elements[0])
print "Insertion..."
for i,e in enumerate(elements[1:]):
# %timeit root.insert(e)
root = root.insert(e)
print "The tree has %d elements and is %s" % (root.count_nodes(), "balanced" if root.is_balanced() else "not balanced")
print "Searching..."
for i in elements[1:]:
# print i
assert root.find(i)
print "It passed!"
# %timeit root.find(30)  # IPython magic; only valid in an IPython session
print "Time to test encryption..."
elements = range(1,N)
# shuffle(elements)
ore = ORE()
ore.keygen("oi",N)
print "keygen ok"
root.encrypt(ore)
print "The tree is encrypted"
print "Searching..."
root.find(ore.encrypt(99))
print "Done"
see = lambda x: "%s => %s %s" % (x.me.value, x.left.me.value if x.left else None, x.right.me.value if x.right else None)
# (self.me.value, self.left.me.value if self.left else None, self.right.me.value if self.right else None) | gpl-3.0 | 6,545,534,527,663,450,000 | 34.483871 | 120 | 0.637563 | false | 3.671119 | false | false | false |
datawire/quark | quarkc/test/ffi/expected/py/signatures/generics/__init__.py | 1 | 3475 | from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from builtins import str as unicode
from quark_runtime import *
_lazyImport.plug("generics")
import quark.reflect
import generics.constructors
import generics.pkg
import generics.ccc
class Box(_QObject):
def _init(self):
self.contents = None
def __init__(self): self._init()
def set(self, contents):
pass
def get(self):
return _cast(None, lambda: T)
def _getClass(self):
return u"generics.Box<quark.Object>"
def _getField(self, name):
if ((name) == (u"contents")):
return (self).contents
return None
def _setField(self, name, value):
if ((name) == (u"contents")):
(self).contents = _cast(value, lambda: T)
class Crate(_QObject):
def _init(self):
self.box = None
self.ibox = None
def __init__(self): self._init()
def set(self, stuff):
pass
def get(self):
return _cast(None, lambda: T)
def _getClass(self):
return u"generics.Crate<quark.Object>"
def _getField(self, name):
if ((name) == (u"box")):
return (self).box
if ((name) == (u"ibox")):
return (self).ibox
return None
def _setField(self, name, value):
if ((name) == (u"box")):
(self).box = _cast(value, lambda: Box)
if ((name) == (u"ibox")):
(self).ibox = _cast(value, lambda: Box)
Crate.generics_Box_quark_Object__ref = None
Crate.generics_Box_quark_int__ref = None
Crate.generics_Crate_quark_Object__ref = None
class Sack(_QObject):
def _init(self):
self.ints = None
def __init__(self): self._init()
def _getClass(self):
return u"generics.Sack"
def _getField(self, name):
if ((name) == (u"ints")):
return (self).ints
return None
def _setField(self, name, value):
if ((name) == (u"ints")):
(self).ints = _cast(value, lambda: Box)
Sack.generics_Sack_ref = None
class Matrix(_QObject):
def _init(self):
self.width = None
self.height = None
self.columns = None
def __init__(self, width, height):
self._init()
def _q__get__(self, i, j):
return _cast(None, lambda: T)
def _q__set__(self, i, j, value):
pass
def _getClass(self):
return u"generics.Matrix<quark.Object>"
def _getField(self, name):
if ((name) == (u"width")):
return (self).width
if ((name) == (u"height")):
return (self).height
if ((name) == (u"columns")):
return (self).columns
return None
def _setField(self, name, value):
if ((name) == (u"width")):
(self).width = _cast(value, lambda: int)
if ((name) == (u"height")):
(self).height = _cast(value, lambda: int)
if ((name) == (u"columns")):
(self).columns = _cast(value, lambda: _List)
Matrix.generics_Matrix_quark_Object__ref = None
Matrix.quark_List_quark_List_quark_Object___ref = None
Matrix.quark_List_quark_Object__ref = None
def _lazy_import_quark_ffi_signatures_md():
import quark_ffi_signatures_md
globals().update(locals())
_lazyImport("import quark_ffi_signatures_md", _lazy_import_quark_ffi_signatures_md)
_lazyImport.pump("generics")
| apache-2.0 | -2,453,293,335,003,595,000 | 22.013245 | 83 | 0.566906 | false | 3.427022 | false | false | false |
Mozzo1000/InstPakg | source/menu.py | 1 | 1678 | import os, sys, curses, atexit, time
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
class cmenu(object):
datum = {}
ordered = []
pos = 0
def __init__(self, options, title="Menu"):
curses.initscr()
curses.start_color()
curses.use_default_colors()
curses.init_pair(1, curses.COLOR_RED, curses.COLOR_WHITE)
curses.curs_set(0)
self.screen = curses.initscr()
self.screen.keypad(1)
self.h = curses.color_pair(1)
self.n = curses.A_NORMAL
for item in options:
k, v = item.items()[0]
self.datum[k] = v
self.ordered.append(k)
self.title = title
atexit.register(self.cleanup)
def cleanup(self):
curses.doupdate()
curses.endwin()
def upKey(self):
if self.pos == (len(self.ordered) - 1):
self.pos = 0
else:
self.pos += 1
def downKey(self):
if self.pos <= 0:
self.pos = len(self.ordered) - 1
else:
self.pos -= 1
def display(self):
screen = self.screen
while True:
screen.clear()
screen.addstr(2, 2, self.title, curses.A_STANDOUT|curses.A_BOLD)
screen.addstr(4, 2, "Please select an option...", curses.A_BOLD)
ckey = None
func = None
while ckey != ord('\n'):
for n in range(0, len(self.ordered)):
optn = self.ordered[n]
if n != self.pos:
screen.addstr(6 + n, 4, "%d. %s" % (n, optn), self.n)
else:
screen.addstr(6 + n, 4, "%d. %s" % (n, optn), self.h)
screen.refresh()
ckey = screen.getch()
if ckey == 258:
self.upKey()
if ckey == 259:
self.downKey()
ckey = 0
self.cleanup()
if self.pos >= 0 and self.pos < len(self.ordered):
self.datum[self.ordered[self.pos]]()
self.pos = -1
else:
curses.flash()
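# Illustrative usage sketch (hypothetical callbacks, not part of the original
# module): cmenu expects a list of single-entry dicts mapping a menu label to
# a zero-argument callable, e.g.
#   def install(): pass
#   def leave(): sys.exit(0)
#   cmenu([{'Install': install}, {'Quit': leave}], title='InstPakg').display()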
| mit | 4,464,027,325,138,235,400 | 18.97619 | 67 | 0.606079 | false | 2.60559 | false | false | false |
eset/malware-research | glupteba/decrypt_str.py | 1 | 5383 | # -*- encoding: utf-8 -*-
#
# This IDA script decrypts the strings inside *unpacked* samples of
# Glupteba.AY samples For details about Glupteba.AY, see:
# https://www.welivesecurity.com/2018/03/22/glupteba-no-longer-windigo/
#
# For feedback or questions contact us at: github@eset.com
# https://github.com/eset/malware-research/
#
# Author:
# Frédéric Vachon <frederic.vachon@eset.com>
#
# This code is provided to the community under the two-clause BSD license as
# follows:
#
# Copyright (C) 2016 ESET
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
import struct
import idautils
import ida_idp
import ida_search
import mersenne
def find_crypt_info():
info = {
'addr': [],
'xor_key': ''
}
# Find the decryption routines
addr = 0
while True:
# Search for magic value in the decryption functions
addr, _ = ida_search.find_imm(addr + 1, SEARCH_DOWN, 0x3C6EF35F)
if addr == 0xffffffff:
break
info['addr'].append(ida_funcs.get_func(addr).startEA)
inst_addr = addr
# Find xor key
info['xor_key'] = find_xor_key(inst_addr)
return info
def find_xor_key(magic_byte_addr):
xor_key = ''
i = 0
curr_addr = magic_byte_addr
while xor_key == '' and i < 20:
inst, _ = idautils.DecodePrecedingInstruction(curr_addr)
if inst.get_canon_mnem() == 'mov' and inst.Op1.type == o_displ:
if inst.Op2.type == o_imm:
xor_key = ida_bytes.get_bytes(inst.Op2.value, 0x10)
curr_addr = inst.ea
i += 1
return xor_key
def decrypt_strings(info):
xor_key = info['xor_key']
for i, crypt_func_addr in enumerate(info['addr']):
for xref in idautils.XrefsTo(crypt_func_addr):
str_addr, str_len = find_params(xref)
if str_addr == 0 or str_len == 0:
print "ERROR: Can't find parameters for func" \
"at 0x{:08X}".format(xref)
cipher = ida_bytes.get_bytes(str_addr, str_len)
s = decrypt_str(cipher, xor_key)
# Strings from the first decryption routine are UTF-16 encoded
if i == 0:
s = s.decode('utf-16').encode('utf-8')
print "Str at 0x{:08X}: u'{}'".format(xref.frm, s)
ida_bytes.set_cmt(xref.frm, "u'{}'".format(s), False)
f_addr = ida_funcs.get_func(xref.frm)
for xref_ in idautils.XrefsTo(f_addr.startEA):
ida_bytes.set_cmt(xref_.frm, "u'{}'".format(s), False)
else:
print "Str at 0x{:08X} : {}".format(xref.frm, repr(s))
ida_bytes.set_cmt(xref.frm, repr(s), False)
f_addr = ida_funcs.get_func(xref.frm)
for xref_ in idautils.XrefsTo(f_addr.startEA):
ida_bytes.set_cmt(xref_.frm, repr(s), False)
def find_params(xref):
str_len = 0
str_addr = 0
curr_addr = xref.frm
i = 0
while (str_len == 0 or str_addr == 0) and i < 10:
inst, _ = idautils.DecodePrecedingInstruction(curr_addr)
if inst.get_canon_mnem() == 'mov' and inst.Op1.type == o_reg and \
inst.Op1.reg == ida_idp.str2reg('edx') and inst.Op2.type == o_imm:
str_addr = inst.Op2.value
elif inst.get_canon_mnem() == 'push':
str_len = inst.Op1.value
i += 1
curr_addr = inst.ea
return str_addr, str_len
def decrypt_str(cipher, key):
trunc_key = struct.unpack('<I', key[:4])[0]
mersenne.initialize_generator(trunc_key)
out = []
for b in cipher:
rand_no = mersenne.extract_number() % 0x100
out.append(ord(b) ^ rand_no)
for i, b in enumerate(out):
out[i] ^= ord(key[i % 0x10])
bl = 0x62
for i, b in enumerate(out):
bl ^= (b - (i % 0x100))
bl ^= 0x67
out[i] = bl & 0xff
return ''.join(map(chr, out))
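# Illustrative note (not part of the original script): decrypt_str() undoes
# three layers in order -- a Mersenne Twister byte stream seeded from the
# first 4 key bytes, the repeating 16-byte XOR key, and the running-byte
# chain (bl ^= b - i; bl ^= 0x67) -- so a call looks like
#   s = decrypt_str(cipher_bytes, key_16_bytes)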
info = find_crypt_info()
if len(info.get('addr')) == 0:
print "ERROR: Can't find decryption routines"
elif info.get('xor_key') == '':
print "ERROR: Can't find the decryption key"
else:
decrypt_strings(info)
| bsd-2-clause | 5,605,664,482,483,673,000 | 33.273885 | 78 | 0.623676 | false | 3.346393 | false | false | false |
anomaly/vishnu | tests/middleware/__init__.py | 1 | 2199 | import falcon
import pytest
import vishnu
class BaseHandler(object):
@property
def session(self):
"""
:return: the current vishnu session
:rtype: vishnu.session.Session
"""
return vishnu.get_session()
class PrivateHandler(BaseHandler):
def on_get(self, req, resp):
if self.session.get("user") == "james":
resp.status = falcon.HTTP_200
else:
resp.status = falcon.HTTP_401
class PublicHandler(BaseHandler):
def on_get(self, req, resp):
resp.status = falcon.HTTP_200
class LoginSaveHandler(BaseHandler):
def on_post(self, req, resp):
self.session["user"] = "james"
self.session.save()
resp.status = falcon.HTTP_200
class LoginNoSaveHandler(BaseHandler):
def on_post(self, req, resp):
self.session["user"] = "james"
resp.status = falcon.HTTP_200
class LogoutHandler(BaseHandler):
def on_get(self, req, resp):
self.session.terminate()
resp.status = falcon.HTTP_200
def test_app(cookie_name=None, encrypt=False, auto_save=False,
secure=True, domain=None, path=None,
use_https=False, backend=None):
from webtest import TestApp
from vishnu.middleware import SessionMiddleware
from vishnu.session import Config
api = falcon.API()
api.add_route("/private", PrivateHandler())
api.add_route("/public", PublicHandler())
api.add_route("/login/save", LoginSaveHandler())
api.add_route("/login/no/save", LoginNoSaveHandler())
api.add_route("/logout", LogoutHandler())
encrypt_key = None
if encrypt:
encrypt_key = "YTWRsQIU4lYj4HyP33Uh24nrraDFv0R9"
config = Config(
secret="OVc1Mbt79AK5Pmi6sWnJnXZvEPNO3BnI",
cookie_name=cookie_name,
encrypt_key=encrypt_key,
auto_save=auto_save,
domain=domain,
path=path,
secure=secure,
http_only=True,
backend=backend,
)
session = SessionMiddleware(api, config)
extra_environ = {}
if use_https:
extra_environ['wsgi.url_scheme'] = 'https'
return TestApp(app=session, extra_environ=extra_environ)
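# Illustrative usage sketch (not part of the original module): a test can
# drive the wrapped falcon app through webtest, e.g.
#   app = test_app(auto_save=True, secure=False)
#   app.post('/login/save', status=200)
#   app.get('/private', status=200)  # the session cookie persists in TestApp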
| apache-2.0 | 8,724,134,717,482,691,000 | 21.90625 | 62 | 0.630741 | false | 3.446708 | false | false | false |
envhyf/wrftools | wrftools/extract.py | 2 | 6064 | """extract.py provides a thin wrapper around an ncl script to
extract WRF variables and diagnostics at specified locations and
heights above ground level
Usage:
extract.py <file>...
[--config=<file>]
[--out-dir=<dir>]
[--ncl-script=<file>]
[--height=<list>]
[--loc=<file>]
[--opt=<file>]
[--mode=<mode>]
[--dry-run]
Options:
<file> input files to work on
--config=<file> configuration file
--out-dir=<dir> output directory
--ncl-script=<file> location of the ncl script to call, default is ../../extract_time_series.ncl
--height=<list> list of heights to extract to
--loc=<file> file specifying locations to extract to
--opt=<file> ncl file specifying which variables to extract and various ncl options
--mode=<mode> loop over input files, or lump input files together
--dry-run print commands, don't execute"""
import shared
import confighelper as conf
import os
import sys
import docopt
import subprocess
import time
NCL_SCRIPT = '%s/../ncl/extract_time_series.ncl' % os.path.split(sys.argv[0])[0]
SUPPORTED_MODES = ["loop","lump"]
class ConfigError(Exception):
pass
def main():
""" Pass command line arguments to NCL script"""
config = conf.config(__doc__, sys.argv[1:])
t0 = time.time()
    if config['out-dir'] is None:
        out_dir = '.'
    else:
        out_dir = config['out-dir']
# if ncl-code-dir not specified, expect it in ../ncl relative to
# the path of this file
    if config['ncl-script'] is None:
        ncl_script = NCL_SCRIPT
    else:
        ncl_script = config['ncl-script']
cmd_files = config['<file>']
# Add nc extension if needed
nc_files = [ f if f.endswith('.nc') else f+'.nc' for f in cmd_files]
# Create height arrays
hgts = config['height']
hgts = '(/%s/)' % ','.join(map(str,hgts))
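    # e.g. heights [60, 80, 100] become the NCL array literal "(/60,80,100/)"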
mode = config['mode']
dry_run = config['dry-run']
loc = config['loc']
opt = config['opt']
print '\n*****************************************************'
print 'extract.py'
if mode not in SUPPORTED_MODES:
raise ConfigError("specified mode not supported")
if mode=='loop':
# This will loop over each file seperately
for f in sorted(nc_files):
path,name = os.path.split(f)
out_file = out_dir+'/'+name.replace('wrfout', 'tseries')
if os.path.exists(out_file):
os.remove(out_file)
# Create NCL file array
in_file = f
#cmd = """FCST_FILE=%s NCL_OUT_FILE=%s LOCATIONS_FILE=%s NCL_OPT_FILE=%s ncl %s/%s 'extract_heights=%s' """ %(in_file, out_file, loc,opt, ncl_code_dir, NCL_SCRIPT, hgts)
cmd = """NCL_OPT_FILE=%s ncl 'in_file="%s"' 'out_file="%s"' 'extract_heights=%s' 'loc_file="%s"' %s""" % (opt,in_file,out_file, hgts, loc, ncl_script)
print cmd
# We could either aggregate all files together or loop over files
if not dry_run:
subprocess.call(cmd, shell=True)
elif mode=='lump':
f = nc_files[0]
path,name = os.path.split(f)
out_file = out_dir+'/'+name.replace('wrfout', 'tseries')
if os.path.exists(out_file):
            os.remove(out_file)
# Create NCL file array
files = '","'.join(sorted(nc_files))
in_file = '(/"%s"/)' % files
cmd = """NCL_OPT_FILE=%s ncl 'in_file=%s' 'out_file="%s"' 'extract_heights=%s' 'loc_file="%s"' %s""" % (opt,in_file,out_file, hgts, loc, ncl_script)
print cmd
if not dry_run:
subprocess.call(cmd, shell=True)
te = time.time() - t0
print 'elapsed time: %0.1f ' % te
def extract_tseries(config):
logger = shared.get_logger()
logger.info('\n*** EXTRACTING TIME SERIES ***')
wrfout_dir = config['wrfout_dir']
tseries_dir = config['tseries_dir']
json_dir = config['json_dir']
init_time = config['init_time']
dom = config['dom']
fcst_file = '%s/wrfout_d%02d_%s:00:00.nc' %(wrfout_dir, dom, init_time.strftime("%Y-%m-%d_%H")) # note we add on the nc extension here
ncl_loc_file = config['locations_file']
ncl_code = config['tseries_code']
extract_hgts = config['extract_hgts']
#tseries_fmt = config['tseries_fmt']
ncl_log = config['ncl_log']
ncl_opt_template = config['ncl_opt_template']
ncl_opt_file = config['ncl_opt_file']
if not os.path.exists(tseries_dir):
os.makedirs(tseries_dir)
# Always go via the netcdf file
tseries_file = '%s/tseries_d%02d_%s.nc' % (tseries_dir, dom,init_time.strftime("%Y-%m-%d_%H"))
ncl_hgts = '(/%s/)' % ','.join(map(str,extract_hgts))
replacements = {'<ncl_in_file>' : fcst_file,
'<ncl_out_file>' : tseries_file,
'<ncl_out_dir>' : tseries_dir,
'<ncl_out_type>' : "nc",
'<ncl_loc_file>' : ncl_loc_file,
'<extract_heights>': ncl_hgts}
shared.fill_template(ncl_opt_template, ncl_opt_file, replacements)
logger.debug('ncl_opt_template: %s' % ncl_opt_template)
logger.debug(' ncl_in_file ----> %s' % fcst_file)
logger.debug(' ncl_out_dir ----> %s' % tseries_dir)
logger.debug(' ncl_out_type ----> %s' % "nc")
logger.debug(' ncl_loc_file ----> %s' % ncl_loc_file)
logger.debug('ncl_opt_file: %s' % ncl_opt_file)
for script in ncl_code:
cmd = "NCL_OPT_FILE=%s ncl %s >> %s 2>&1" % (ncl_opt_file,script, ncl_log)
shared.run_cmd(cmd, config)
logger.info("*** DONE EXTRACTING TIME SERIES ***\n")
if __name__ == '__main__':
main()
| gpl-3.0 | 2,212,893,001,227,477,800 | 32.541899 | 182 | 0.531992 | false | 3.319102 | true | false | false |
jaumebonet/pynion | pynion/__main__.py | 2 | 1819 | import argparse
import datetime
import re
from . import Manager
m = Manager()
# User Options
def options(*args, **kwds):
'''
User options.
@return: {Namespace}
'''
parser = argparse.ArgumentParser(prog = 'pynion',
formatter_class = argparse.ArgumentDefaultsHelpFormatter,
description = 'A library building minion')
parser.add_argument('-p', '--project', dest = "project",
action = "store", required = True,
help = "Project name")
parser.add_argument('-c', '--config', dest = "configfile",
action = "store", required = False,
default = None,
help = "Add a configuration file " +
"for external software")
parser.add_argument('-d', '--date', dest = "date",
action = "store", required = False,
default = datetime.date.today(),
help = "Date of the project, as YYY-MM-DD")
options = parser.parse_args()
m.set_stdout()
m.set_verbose()
return options
if __name__ == "__main__":
options = options()
m.info('Creating project: {0}'.format(options.project))
if isinstance(options.date, datetime.date):
m.project.set_date()
else:
date_regex = re.compile('(\d{4})\-(\d{2})\-(\d{2})')
d = date_regex.search(options.date)
if not d:
m.exception('Experiment date wrongly formated')
m.project.set_date(d[1], d[2], d[3])
m.project.set_configuration_file(options.configfile)
m.project.create(options.project, m.experiment.user)
| mit | 1,292,990,639,282,312,700 | 30.362069 | 94 | 0.503573 | false | 4.25 | false | false | false |
jna29/SymGP | symgp/superexpressions/supermatmul.py | 1 | 5884 | from sympy import MatMul, MatAdd, ZeroMatrix, MatrixBase, Identity, ShapeError, MatrixExpr, S, Number
from sympy.core.decorators import call_highest_priority
from sympy.strategies import (rm_id, unpack, typed, flatten, sort, condition, exhaust,
do_one, new, glom)
from .supermatbase import SuperMatBase
class SuperMatMul(SuperMatBase, MatMul):
"""
Redefines some methods of MatMul so as to make them amenable to our application
"""
_op_priority = 10000
def __new__(cls, *args, **kwargs):
return MatMul.__new__(cls, *args, **kwargs)
def as_coeff_mmul(self):
coeff, matrices = self.as_coeff_matrices()
return coeff, SuperMatMul(*matrices)
def _eval_transpose(self):
return SuperMatMul(*[arg.T if isinstance(arg, MatrixExpr) else arg
for arg in self.args[::-1]]).doit()
def _eval_inverse(self):
try:
return SuperMatMul(*[
arg.inverse() if isinstance(arg, MatrixExpr) else arg**-1
for arg in self.args[::-1]]).doit()
except ShapeError:
from .supermatexpr import SuperMatInverse
return SuperMatInverse(self)
#def transpose(self):
# from .supermatexpr import SuperMatTranspose
# return SuperMatTranspose(self).doit()
def doit(self, **kwargs):
deep = kwargs.get('deep', True)
if deep:
args = [arg.doit(**kwargs) for arg in self.args]
else:
args = self.args
return canonicalize(SuperMatMul(*args))
def newmul(*args):
if args[0] == 1:
args = args[1:]
return new(SuperMatMul, *args)
def any_zeros(mul):
if any([arg.is_zero or (arg.is_Matrix and arg.is_ZeroMatrix)
for arg in mul.args]):
matrices = [arg for arg in mul.args if arg.is_Matrix]
return ZeroMatrix(matrices[0].rows, matrices[-1].cols)
return mul
def merge_explicit(matmul):
""" Merge explicit MatrixBase arguments
>>> from sympy import MatrixSymbol, eye, Matrix, MatMul, pprint
>>> from sympy.matrices.expressions.matmul import merge_explicit
>>> A = MatrixSymbol('A', 2, 2)
>>> B = Matrix([[1, 1], [1, 1]])
>>> C = Matrix([[1, 2], [3, 4]])
>>> X = MatMul(A, B, C)
>>> pprint(X)
[1 1] [1 2]
A*[ ]*[ ]
[1 1] [3 4]
>>> pprint(merge_explicit(X))
[4 6]
A*[ ]
[4 6]
>>> X = MatMul(B, A, C)
>>> pprint(X)
[1 1] [1 2]
[ ]*A*[ ]
[1 1] [3 4]
>>> pprint(merge_explicit(X))
[1 1] [1 2]
[ ]*A*[ ]
[1 1] [3 4]
"""
if not any(isinstance(arg, MatrixBase) for arg in matmul.args):
return matmul
newargs = []
last = matmul.args[0]
for arg in matmul.args[1:]:
if isinstance(arg, (MatrixBase, Number)) and isinstance(last, (MatrixBase, Number)):
last = last * arg
else:
newargs.append(last)
last = arg
newargs.append(last)
return SuperMatMul(*newargs)
def xxinv(mul):
""" Y * X * X.I -> Y """
factor, matrices = mul.as_coeff_matrices()
for i, (X, Y) in enumerate(zip(matrices[:-1], matrices[1:])):
try:
if X.is_square and Y.is_square and X == Y.inverse():
I = Identity(X.rows)
return newmul(factor, *(matrices[:i] + [I] + matrices[i+2:]))
except ValueError: # Y might not be invertible
pass
return mul
def remove_ids(mul):
""" Remove Identities from a MatMul
This is a modified version of sympy.strategies.rm_id.
This is necesssary because MatMul may contain both MatrixExprs and Exprs
as args.
See Also
--------
sympy.strategies.rm_id
"""
# Separate Exprs from MatrixExprs in args
factor, mmul = mul.as_coeff_mmul()
# Apply standard rm_id for MatMuls
result = rm_id(lambda x: x.is_Identity is True)(mmul)
if result != mmul:
return newmul(factor, *result.args) # Recombine and return
else:
return mul
def factor_in_front(mul):
factor, matrices = mul.as_coeff_matrices()
if factor != 1:
return newmul(factor, *matrices)
return mul
rules = (any_zeros, remove_ids, xxinv, unpack, rm_id(lambda x: x == 1),
merge_explicit, factor_in_front, flatten)
canonicalize = exhaust(typed({SuperMatMul: do_one(*rules)}))
def only_squares(*matrices):
""" factor matrices only if they are square """
if matrices[0].rows != matrices[-1].cols:
raise RuntimeError("Invalid matrices being multiplied")
out = []
start = 0
for i, M in enumerate(matrices):
if M.cols == matrices[start].rows:
out.append(SuperMatMul(*matrices[start:i+1]).doit())
start = i+1
return out
from sympy.assumptions.ask import ask, Q
from sympy.assumptions.refine import handlers_dict
def refine_SuperMatMul(expr, assumptions):
"""
>>> from sympy import MatrixSymbol, Q, assuming, refine
>>> X = MatrixSymbol('X', 2, 2)
>>> expr = X * X.T
>>> print(expr)
X*X.T
>>> with assuming(Q.orthogonal(X)):
... print(refine(expr))
I
"""
newargs = []
exprargs = []
for args in expr.args:
if args.is_Matrix:
exprargs.append(args)
else:
newargs.append(args)
last = exprargs[0]
for arg in exprargs[1:]:
if arg == last.T and ask(Q.orthogonal(arg), assumptions):
last = Identity(arg.shape[0])
elif arg == last.conjugate() and ask(Q.unitary(arg), assumptions):
last = Identity(arg.shape[0])
else:
newargs.append(last)
last = arg
newargs.append(last)
return SuperMatMul(*newargs)
handlers_dict['SuperMatMul'] = refine_SuperMatMul
from .supermatadd import SuperMatAdd | mit | 5,642,404,034,762,698,000 | 29.335052 | 101 | 0.579538 | false | 3.498216 | false | false | false |
Vodak/SINS | src/Game.py | 1 | 15067 | """
Classe Game qui gère le jeu
"""
from Map import *
from Animation import *
class Game:
# Constructeur:
def __init__(self):
self.Map = Map()
self.clock = sf.Clock()
self.tour = 0
self.window = sf.RenderWindow(sf.VideoMode(960, 640), "SINS - An amazing not simulator by Vodak")
self.key = {"UP": False, "DOWN": False, "RIGHT": False, "LEFT": False}
# Chargement des textures :
# Ecran de chargement :
self.textureVodak = sf.Texture.from_file("../files/vodak.png")
# Blocs :
self.textureEau = sf.Texture.from_file("../files/textures/eau.png")
self.textureSable = sf.Texture.from_file("../files/textures/sable.png")
self.textureHerbe = sf.Texture.from_file("../files/textures/herbe.png")
self.textureHerbe2 = sf.Texture.from_file("../files/textures/testHerbe.png")
self.texturePlancher = sf.Texture.from_file("../files/textures/plancher.png")
self.textureRoute = sf.Texture.from_file("../files/textures/route.png")
self.textureMur = [sf.Texture.from_file("../files/textures/mur/normal.png"),\
sf.Texture.from_file("../files/textures/mur/gauche.png"),\
sf.Texture.from_file("../files/textures/mur/droite.png"),\
sf.Texture.from_file("../files/textures/mur/angle_gauche.png"),\
sf.Texture.from_file("../files/textures/mur/angle_droit.png")]
# IA :
self.textureIA = sf.Texture.from_file("../files/textures/IA.png")
# Objets :
self.textureBanc = sf.Texture.from_file("../files/textures/banc.png")
self.textureLit = sf.Texture.from_file("../files/textures/lit.png")
self.textureLitMedecin = sf.Texture.from_file("../files/textures/litMedecin.png")
self.textureLitPsy = sf.Texture.from_file("../files/textures/lit.png")
self.textureChaise = sf.Texture.from_file("../files/textures/chaise.png")
self.textureChaiseEcole = sf.Texture.from_file("../files/textures/chaise.png")
self.textureChaiseMedecin = sf.Texture.from_file("../files/textures/chaise.png")
self.textureChaisePsy = sf.Texture.from_file("../files/textures/chaisePsy.png")
self.textureTable = sf.Texture.from_file("../files/textures/table.png")
self.textureTableEcole = sf.Texture.from_file("../files/textures/table.png")
self.textureFour = sf.Texture.from_file("../files/textures/four.png")
self.textureTableau = sf.Texture.from_file("../files/textures/tableau.png")
self.textureBancPeche = sf.Texture.from_file("../files/textures/banc.png")
# Chargement des sprites :
# Ecran de chargement :
self.spriteVodak = sf.Sprite(self.textureVodak)
# Blocs :
self.spriteEau = sf.Sprite(self.textureEau)
self.spriteSable = sf.Sprite(self.textureSable)
self.spriteHerbe = sf.Sprite(self.textureHerbe)
self.spriteHerbe2 = sf.Sprite(self.textureHerbe2)
self.spritePlancher = sf.Sprite(self.texturePlancher)
self.spriteRoute = sf.Sprite(self.textureRoute)
self.spriteMur = [sf.Sprite(self.textureMur[0]), sf.Sprite(self.textureMur[1]), sf.Sprite(self.textureMur[2]), sf.Sprite(self.textureMur[3]), sf.Sprite(self.textureMur[4])]
# IA :
self.spriteIA = sf.Sprite(self.textureIA)
# Objets :
self.spriteBanc = sf.Sprite(self.textureBanc)
self.spriteLit = sf.Sprite(self.textureLit)
self.spriteLitMedecin = sf.Sprite(self.textureLitMedecin)
self.spriteLitPsy = sf.Sprite(self.textureLit)
self.spriteChaise = sf.Sprite(self.textureChaise)
self.spriteChaiseEcole = sf.Sprite(self.textureChaiseEcole)
self.spriteChaiseMedecin = sf.Sprite(self.textureChaiseMedecin)
self.spriteChaisePsy = sf.Sprite(self.textureChaisePsy)
self.spriteTable = sf.Sprite(self.textureTable)
self.spriteTableEcole = sf.Sprite(self.textureTableEcole)
self.spriteFour = sf.Sprite(self.textureFour)
self.spriteTableau = sf.Sprite(self.textureTableau)
self.spriteBancPeche = sf.Sprite(self.textureBancPeche)
# Scrolling :
self.xMin = 0
self.yMin = 0
# Mise en place du jeu :
def play(self):
# Ecran d'acceuil :
self.window.draw(self.spriteVodak)
self.window.display()
sf.sleep(sf.seconds(2))
self.window.clear()
# Génération de la map :
self.Map.generate()
# Disposition des maisons :
for i in range(20):
self.Map.maison()
# Invocation des IA sur la map
for i in range(3):
x = randint(0, 95)
y = randint(0, 71)
while self.Map.map[x][y].Bloc != Bloc.Herbe and self.Map.map[x][y].Bloc != Bloc.Plancher and self.Map.map[x][y].Bloc != Bloc.Sable:
x = randint(0, 95)
y = randint(0, 71)
self.Map.map[x][y].IA = IA(x, y, 21 * [randint(0, 100)])
# Boucle principale :
while self.window.is_open:
# gestion des evenements
for event in self.window.events:
if type(event) is sf.CloseEvent:
self.window.close()
elif type(event) is sf.KeyEvent:
if event.code is sf.Keyboard.ESCAPE:
self.window.close()
elif event.code is sf.Keyboard.LEFT:
self.key["LEFT"] = event.pressed
elif event.code is sf.Keyboard.UP:
self.key["UP"] = event.pressed
elif event.code is sf.Keyboard.RIGHT:
self.key["RIGHT"] = event.pressed
elif event.code is sf.Keyboard.DOWN:
self.key["DOWN"] = event.pressed
if self.key["LEFT"]:
self.xMin = self.xMin - 1 if self.xMin > 0 else self.xMin
if self.key["RIGHT"]:
self.xMin = self.xMin + 1 if self.xMin < 66 else self.xMin
if self.key["UP"]:
self.yMin = self.yMin - 1 if self.yMin > 0 else self.yMin
if self.key["DOWN"]:
self.yMin = self.yMin + 1 if self.yMin < 52 else self.yMin
# gestion des IA
x = list()
y = list()
for i in range(96):
for j in range(72):
if self.Map.map[i][j].isIA():
x.append(i)
y.append(j)
if(self.clock.elapsed_time.milliseconds > 1000):
self.clock.restart()
for i in range(len(x)):
self.tour += 1
# update des valeurs de l'ia
if self.tour % 10 == 0:
self.Map.map[x[i]][y[i]].IA.age += 1
self.Map.map[x[i]][y[i]].IA.fatigue += randint(0, 1)
self.Map.map[x[i]][y[i]].IA.faim += randint(0, 1)
if self.Map.map[x[i]][y[i]].IA.maladie >= 10: # si l'ia est malade
self.Map.map[x[i]][y[i]].IA.maladie += randint(0, 2)
if self.Map.map[x[i]][y[i]].IA.fatigue >= 85: # si l'ia est fatiguée
self.Map.map[x[i]][y[i]].IA.vie -= 1
if self.Map.map[x[i]][y[i]].IA.faim >= 85: # si l'ia est affamée
self.Map.map[x[i]][y[i]].IA.vie -= 1
if self.Map.map[x[i]][y[i]].IA.bonheur < 20: # si l'ia est très triste
self.Map.map[x[i]][y[i]].IA.vie -= 1
if self.Map.map[x[i]][y[i]].IA.bonheur == 0: # si l'ia a atteint son seuil de tristesse
self.Map.map[x[i]][y[i]].IA.vie = 0
if self.Map.map[x[i]][y[i]].IA.vie < 40: # si l'ia est mal en point
self.Map.map[x[i]][y[i]].IA.vie -= randint(0, 1)
if self.Map.map[x[i]][y[i]].IA.age > 80: # si l'ia est vieille
self.Map.map[x[i]][y[i]].IA.vie -= randint(3, 6)
# déplacement de l'ia
direction, interraction = self.Map.map[x[i]][y[i]].IA.getAction(self.Map.map)
if direction == Direction.Bas and not self.Map.map[x[i]][y[i]+1].isIA():
self.Map.map[x[i]][y[i]].IA.y += 1
self.Map.map[x[i]][y[i]+1].IA = self.Map.map[x[i]][y[i]].IA
self.Map.map[x[i]][y[i]].delIA()
y[i] += 1
elif direction == Direction.Haut and not self.Map.map[x[i]][y[i]-1].isIA():
self.Map.map[x[i]][y[i]].IA.y -= 1
self.Map.map[x[i]][y[i]-1].IA = self.Map.map[x[i]][y[i]].IA
self.Map.map[x[i]][y[i]].delIA()
y[i] -= 1
elif direction == Direction.Gauche and not self.Map.map[x[i]-1][y[i]].isIA():
self.Map.map[x[i]][y[i]].IA.x -= 1
self.Map.map[x[i]-1][y[i]].IA = self.Map.map[x[i]][y[i]].IA
self.Map.map[x[i]][y[i]].delIA()
x[i] -= 1
elif direction == Direction.Droite and not self.Map.map[x[i]+1][y[i]].isIA():
self.Map.map[x[i]][y[i]].IA.x += 1
self.Map.map[x[i]+1][y[i]].IA = self.Map.map[x[i]][y[i]].IA
self.Map.map[x[i]][y[i]].delIA()
x[i] += 1
# interraction de l'ia
if interraction:
if self.Map.map[x[i]][y[i]].Objet == Objet.Lit:
self.Map.map[x[i]][y[i]].IA.fatigue = 0
elif self.Map.map[x[i]][y[i]].Objet == Objet.EntreeFour:
self.Map.map[x[i]][y[i]].IA.faim = 0
elif self.Map.map[x[i]][y[i]].Objet == Objet.LitMedecin:
self.Map.map[x[i]][y[i]].IA.vie = 100
elif self.Map.map[x[i]][y[i]].Objet == Objet.LitPsychiatre:
self.Map.map[x[i]][y[i]].IA.bonheur = 100
elif self.Map.map[x[i]][y[i]].Objet == Objet.BancPeche:
pass
elif self.Map.map[x[i]][y[i]].Objet == Objet.ChaiseEcole:
pass
elif self.Map.map[x[i]][y[i]].Objet == Objet.PlaceProf:
pass
elif self.Map.map[x[i]][y[i]].Objet == Objet.ChaiseMedecin:
pass
elif self.Map.map[x[i]][y[i]].Objet == Objet.ChaisePsychiatre:
pass
elif self.Map.map[x[i]][y[i]].Objet == Objet.Banc:
pass
elif self.Map.map[x[i]][y[i]].Objet == Objet.Chaise:
pass
elif self.Map.map[x[i]][y[i]].Objet == Objet.Checkpoint:
pass
# mort de l'ia si sa vie est nulle
if self.Map.map[x[i]][y[i]].isIA() and self.Map.map[x[i]][y[i]].IA.vie == 0:
self.Map.map[x[i]][y[i]].delIA()
# affichage de la carte
self.window.clear()
for i in range(self.xMin, self.xMin + 30):
for j in range(self.yMin, self.yMin + 20):
# affichage des blocs
if self.Map.map[i][j].Bloc == Bloc.Eau:
self.spriteEau.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteEau)
elif self.Map.map[i][j].Bloc == Bloc.Sable:
self.spriteSable.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteSable)
elif self.Map.map[i][j].Bloc == Bloc.Herbe:
self.spriteHerbe2.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteHerbe2)
elif self.Map.map[i][j].Bloc == Bloc.Plancher:
if self.Map.map[i][j-1].Bloc == Bloc.Herbe:
self.spriteHerbe2.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteHerbe2)
else:
self.spritePlancher.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spritePlancher)
elif self.Map.map[i][j].Bloc == Bloc.Route:
self.spriteRoute.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteRoute)
elif self.Map.map[i][j].Bloc == Bloc.Mur:
if self.Map.map[i-1][j].Bloc == Bloc.Herbe:
if self.Map.map[i][j+1].Bloc == Bloc.Herbe:
self.spriteMur[0].position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteMur[0])
elif self.Map.map[i][j-1].Bloc == Bloc.Mur or self.Map.map[i][j-1].Bloc == Bloc.Plancher:
self.spriteMur[1].position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteMur[1])
else:
self.spriteMur[3].position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteMur[3])
elif self.Map.map[i+1][j].Bloc == Bloc.Herbe:
if self.Map.map[i][j+1].Bloc == Bloc.Herbe:
self.spriteMur[0].position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteMur[0])
elif self.Map.map[i][j-1].Bloc == Bloc.Mur or self.Map.map[i][j-1].Bloc == Bloc.Plancher:
self.spriteMur[2].position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteMur[2])
else:
self.spriteMur[4].position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteMur[4])
else:
self.spriteMur[0].position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteMur[0])
# affichage des objets
if self.Map.map[i][j].Objet == Objet.Banc:
self.spriteBanc.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteBanc)
elif self.Map.map[i][j].Objet == Objet.Lit:
self.spriteLit.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteLit)
elif self.Map.map[i][j].Objet == Objet.ChaiseEcole:
self.spriteChaiseEcole.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteChaiseEcole)
elif self.Map.map[i][j].Objet == Objet.TableEcole:
self.spriteTableEcole.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteTableEcole)
elif self.Map.map[i][j].Objet == Objet.Four:
self.spriteFour.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteFour)
elif self.Map.map[i][j].Objet == Objet.Chaise:
self.spriteChaise.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteChaise)
elif self.Map.map[i][j].Objet == Objet.Table:
self.spriteTable.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteTable)
elif self.Map.map[i][j].Objet == Objet.Tableau:
self.spriteTableau.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin - 1))
self.window.draw(self.spriteTableau)
elif self.Map.map[i][j].Objet == Objet.LitMedecin:
self.spriteLitMedecin.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteLitMedecin)
elif self.Map.map[i][j].Objet == Objet.ChaiseMedecin:
self.spriteChaiseMedecin.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteChaiseMedecin)
elif self.Map.map[i][j].Objet == Objet.LitPsychiatre:
self.spriteLitPsy.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteLitPsy)
elif self.Map.map[i][j].Objet == Objet.ChaisePsychiatre:
self.spriteChaisePsy.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteChaisePsy)
elif self.Map.map[i][j].Objet == Objet.BancPeche:
self.spriteBancPeche.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteBancPeche)
# affichage des IA
if self.Map.map[i][j].isIA():
self.spriteIA.position = sf.Vector2(32 * (i - self.xMin), 32 * (j - self.yMin))
self.window.draw(self.spriteIA)
self.window.display() | gpl-3.0 | 4,866,727,451,411,234,000 | 37.129114 | 174 | 0.607171 | false | 2.543919 | false | false | false |
atumanov/ray | examples/streaming/streaming.py | 1 | 3685 | import argparse
from collections import Counter, defaultdict
import heapq
import numpy as np
import os
import ray
import wikipedia
parser = argparse.ArgumentParser()
parser.add_argument("--num-mappers",
help="number of mapper actors used", default=3)
parser.add_argument("--num-reducers",
help="number of reducer actors used", default=4)
@ray.remote
class Mapper(object):
def __init__(self, title_stream):
self.title_stream = title_stream
self.num_articles_processed = 0
self.articles = []
self.word_counts = []
def get_new_article(self):
# Get the next wikipedia article.
article = wikipedia.page(self.title_stream.next()).content
# Count the words and store the result.
self.word_counts.append(Counter(article.split(" ")))
self.num_articles_processed += 1
def get_range(self, article_index, keys):
# Process more articles if this Mapper hasn't processed enough yet.
while self.num_articles_processed < article_index + 1:
self.get_new_article()
# Return the word counts from within a given character range.
return [(k, v) for k, v in self.word_counts[article_index].items()
if len(k) >= 1 and k[0] >= keys[0] and k[0] <= keys[1]]
@ray.remote
class Reducer(object):
def __init__(self, keys, *mappers):
self.mappers = mappers
self.keys = keys
def next_reduce_result(self, article_index):
word_count_sum = defaultdict(lambda: 0)
# Get the word counts for this Reducer's keys from all of the Mappers
# and aggregate the results.
count_ids = [mapper.get_range.remote(article_index, self.keys)
for mapper in self.mappers]
# TODO(rkn): We should process these out of order using ray.wait.
for count_id in count_ids:
for k, v in ray.get(count_id):
word_count_sum[k] += v
return word_count_sum
class Stream(object):
def __init__(self, elements):
self.elements = elements
def next(self):
i = np.random.randint(0, len(self.elements))
return self.elements[i]
if __name__ == "__main__":
args = parser.parse_args()
ray.init()
# Create one streaming source of articles per mapper.
directory = os.path.dirname(os.path.realpath(__file__))
streams = []
for _ in range(args.num_mappers):
with open(os.path.join(directory, "articles.txt")) as f:
streams.append(Stream([line.strip() for line in f.readlines()]))
# Partition the keys among the reducers.
chunks = np.array_split([chr(i) for i in range(ord("a"), ord("z") + 1)],
args.num_reducers)
keys = [[chunk[0], chunk[-1]] for chunk in chunks]
# Create a number of mappers.
mappers = [Mapper.remote(stream) for stream in streams]
# Create a number of reduces, each responsible for a different range of
# keys. This gives each Reducer actor a handle to each Mapper actor.
reducers = [Reducer.remote(key, *mappers) for key in keys]
article_index = 0
while True:
print("article index = {}".format(article_index))
wordcounts = {}
counts = ray.get([reducer.next_reduce_result.remote(article_index)
for reducer in reducers])
for count in counts:
wordcounts.update(count)
most_frequent_words = heapq.nlargest(10, wordcounts,
key=wordcounts.get)
for word in most_frequent_words:
print(" ", word, wordcounts[word])
article_index += 1
| apache-2.0 | 2,850,757,399,885,752,300 | 34.432692 | 77 | 0.609769 | false | 3.830561 | false | false | false |
alexlee-gk/visual_dynamics | visual_dynamics/spaces/concatenation_space.py | 1 | 1401 | from __future__ import division, print_function
import numpy as np
from visual_dynamics.spaces import Space
class ConcatenationSpace(Space):
def __init__(self, spaces):
self.spaces = spaces
sizes = []
for space in self.spaces:
size, = space.shape
sizes.append(size)
cumsum = np.cumsum(sizes)
self.slices = [slice(start, stop) for start, stop in zip((0,) + tuple(cumsum[:-1]), cumsum)]
def sample(self):
return np.concatenate([space.sample() for space in self.spaces])
def contains(self, x):
assert x.shape == self.shape
return all(space.contains(x[s]) for space, s in zip(self.spaces, self.slices))
def clip(self, x, out=None):
assert x.shape == self.shape
if out is not None:
assert out.shape == self.shape
return np.concatenate([space.clip(x[s], out=(out[s] if out is not None else None))
for space, s in zip(self.spaces, self.slices)])
@property
def shape(self):
return (self.slices[-1].stop,)
def _get_config(self):
config = super(ConcatenationSpace, self)._get_config()
config.update({'spaces': self.spaces})
return config
@staticmethod
def create(other):
spaces = [Space.create(space) for space in other.spaces]
return ConcatenationSpace(spaces)
| mit | -2,616,647,832,349,943,000 | 30.840909 | 100 | 0.604568 | false | 3.870166 | false | false | false |
franchenstein/master_project | probabilisticstate.py | 1 | 2966 | import state
import random as rnd
'''
Probabilistic version of state class. The outedges include a third parameter,
which reflects the probability of that outgoing edge being taken. Methods
regarding this new parameter are added.
'''
class ProbabilisticState(state.State):
def __init__(self, name, outedges):
state.State.__init__(self, name, outedges)
'''
Name: prob_to_next_state
Input:
*state_name: The destination state's label.
Output:
*match: probability of reaching this state from the current state.
'''
def prob_to_next_state(self, state_name):
match = [x[2] for x in self.outedges if x[1] == state_name][0]
return float(match)
'''
Name: prob_to_next_letter
Input:
*letter: letter from the graph's alphabet.
Output:
*match: probability of outputing the input letter.
'''
def prob_to_next_letter(self, letter):
match = [x[2] for x in self.outedges if x[0] == letter][0]
return float(match)
'''
Name: morph
Input:
Output:
*m: The state's morph, i.e. the probability distribution of its outputs.
Description: Statistically comparing state's morphs will be used many times.
The morph is a 2-tuple with only the output letter and the probability that
it occurs, disregarding the destination state.
'''
def morph(self):
m = [(x[0], x[2]) for x in self.outedges]
return m
def extended_morph(self, n):
if n == 1:
return self.morph()
else:
m = []
for edge in self.outedges:
if edge[1]:
branch = edge[1].extended_morph(n-1)
else:
branch = [('',0.0)]
newedge = [(edge[0] + b[0],float(edge[2])*float(b[1])) for b in branch]
m.extend(newedge)
return m
'''
Name: randomstep
Input:
Output:
*A 2-tuple of the randomly chosen output symbol and the destination
state.
Description:
Takes on step in a walk through the graph. It randomly generates a
real number in [0,1] and compares it to the state's morph and chooses
an output symbol and destination accordingly. If, by some error, the
randomly generated number does not fall into the distribution, an error
2-tuple is returned.
'''
def random_step(self):
dest = [x[:2] for x in self.outedges if float(x[2]) == 1.0]
if dest:
return dest[0]
else:
r = rnd.random()
acc = 0
i = 0
for e in self.outedges:
i += 1
if i == len(self.outedges):
return e[:2]
if acc <= r < (acc + float(e[2])):
return e[:2]
else:
acc += float(e[2])
return '', None #Error situation
| mit | 7,243,784,253,227,526,000 | 31.23913 | 87 | 0.556305 | false | 3.965241 | false | false | false |
DrigerG/IIITB-ML | project/ChatBot/seq2seq_wrapper.py | 1 | 7968 | import sys
import numpy as np
import tensorflow as tf
class Seq2Seq(object):
def __init__(self, xseq_len, yseq_len,
xvocab_size, yvocab_size,
emb_dim, num_layers, ckpt_path,
lr=0.0001,
epochs=100000, model_name='seq2seq_model'):
# attach these arguments to self
self.xseq_len = xseq_len
self.yseq_len = yseq_len
self.ckpt_path = ckpt_path
self.epochs = epochs
self.model_name = model_name
# build the graph
# attach any part of the graph that needs to be exposed, to the self
def __graph__():
# placeholders
tf.reset_default_graph()
# encoder inputs : list of indices of length xseq_len
self.enc_ip = [tf.placeholder(shape=[None, ],
dtype=tf.int64,
name='ei_{}'.format(t)) for t in
range(xseq_len)]
# labels that represent the real outputs
self.labels = [tf.placeholder(shape=[None, ],
dtype=tf.int64,
name='ei_{}'.format(t)) for t in
range(yseq_len)]
# decoder inputs : 'GO' + [ y1, y2, ... y_t-1 ]
self.dec_ip = [tf.zeros_like(self.enc_ip[0], dtype=tf.int64,
name='GO')] + self.labels[:-1]
# Basic LSTM cell wrapped in Dropout Wrapper
self.keep_prob = tf.placeholder(tf.float32)
# define the basic cell
basic_cell = tf.contrib.rnn.core_rnn_cell.DropoutWrapper(
tf.contrib.rnn.core_rnn_cell.BasicLSTMCell(
emb_dim, state_is_tuple=True),
output_keep_prob=self.keep_prob)
# stack cells together : n layered model
stacked_lstm = tf.contrib.rnn.core_rnn_cell.MultiRNNCell(
[basic_cell] * num_layers, state_is_tuple=True)
# for parameter sharing between training model
# and testing model
with tf.variable_scope('decoder') as scope:
# build the seq2seq model
# inputs : encoder, decoder inputs, LSTM cell type,
# vocabulary sizes, embedding dimensions
self.decode_outputs, self.decode_states = \
tf.contrib.legacy_seq2seq.embedding_rnn_seq2seq(
self.enc_ip, self.dec_ip, stacked_lstm,
xvocab_size, yvocab_size, emb_dim)
# share parameters
scope.reuse_variables()
# testing model, where output of previous timestep is fed as
# input to the next timestep
self.decode_outputs_test, self.decode_states_test = \
tf.contrib.legacy_seq2seq.embedding_rnn_seq2seq(
self.enc_ip, self.dec_ip, stacked_lstm, xvocab_size,
yvocab_size, emb_dim, feed_previous=True)
# now, for training,
# build loss function
# weighted loss
# TODO : add parameter hint
loss_weights = [tf.ones_like(label, dtype=tf.float32) for label in
self.labels]
self.loss = tf.contrib.legacy_seq2seq.sequence_loss(
self.decode_outputs, self.labels, loss_weights, yvocab_size)
# train op to minimize the loss
self.train_op = tf.train.AdamOptimizer(learning_rate=lr).minimize(
self.loss)
sys.stdout.write('<log> Building Graph ')
# build comput graph
__graph__()
sys.stdout.write('</log>')
'''
Training and Evaluation
'''
# get the feed dictionary
def get_feed(self, X, Y, keep_prob):
feed_dict = {self.enc_ip[t]: X[t] for t in range(self.xseq_len)}
feed_dict.update({self.labels[t]: Y[t] for t in range(self.yseq_len)})
feed_dict[self.keep_prob] = keep_prob # dropout prob
return feed_dict
# run one batch for training
def train_batch(self, sess, train_batch_gen):
# get batches
batchX, batchY = train_batch_gen.__next__()
# build feed
feed_dict = self.get_feed(batchX, batchY, keep_prob=0.5)
_, loss_v = sess.run([self.train_op, self.loss], feed_dict)
return loss_v
def eval_step(self, sess, eval_batch_gen):
# get batches
batchX, batchY = eval_batch_gen.__next__()
# build feed
feed_dict = self.get_feed(batchX, batchY, keep_prob=1.)
loss_v, dec_op_v = sess.run([self.loss, self.decode_outputs_test],
feed_dict)
# dec_op_v is a list; also need to transpose 0,1 indices
# (interchange batch_size and timesteps dimensions
dec_op_v = np.array(dec_op_v).transpose([1, 0, 2])
return loss_v, dec_op_v, batchX, batchY
# evaluate 'num_batches' batches
def eval_batches(self, sess, eval_batch_gen, num_batches):
losses = []
for i in range(num_batches):
loss_v, dec_op_v, batchX, batchY = self.eval_step(sess,
eval_batch_gen)
losses.append(loss_v)
return np.mean(losses)
# finally the train function that
# runs the train_op in a session
# evaluates on valid set periodically
# prints statistics
def train(self, train_set, valid_set, sess=None):
# we need to save the model periodically
saver = tf.train.Saver()
# if no session is given
if not sess:
# create a session
sess = tf.Session()
# init all variables
sess.run(tf.global_variables_initializer())
sys.stdout.write('\n<log> Training started </log>\n')
# run M epochs
for i in range(self.epochs):
try:
self.train_batch(sess, train_set)
if i and i % (self.epochs - 1) == 0:
# TODO : make this tunable by the user
# save model to disk
saver.save(sess,
self.ckpt_path + self.model_name + '.ckpt',
global_step=i)
# evaluate to get validation loss
val_loss = self.eval_batches(sess, valid_set,
16) # TODO : and this
# print stats
print('\nModel saved to disk at iteration #{}'.format(i))
print('val loss : {0:.6f}'.format(val_loss))
sys.stdout.flush()
except KeyboardInterrupt:
# this will most definitely happen, so handle it
print('Interrupted by user at iteration {}'.format(i))
self.session = sess
return sess
def restore_last_session(self):
saver = tf.train.Saver()
# create a session
sess = tf.Session()
# get checkpoint state
ckpt = tf.train.get_checkpoint_state(self.ckpt_path)
# restore session
if ckpt and ckpt.model_checkpoint_path:
saver.restore(sess, ckpt.model_checkpoint_path)
# return to user
return sess
# prediction
def predict(self, sess, X):
feed_dict = {self.enc_ip[t]: X[t] for t in range(self.xseq_len)}
feed_dict[self.keep_prob] = 1.
dec_op_v = sess.run(self.decode_outputs_test, feed_dict)
# dec_op_v is a list; also need to transpose 0,1 indices
# (interchange batch_size and timesteps dimensions
dec_op_v = np.array(dec_op_v).transpose([1, 0, 2])
# return the index of item with highest probability
return np.argmax(dec_op_v, axis=2)
| apache-2.0 | 4,645,855,633,055,719,000 | 40.5 | 78 | 0.527359 | false | 4.03443 | true | false | false |
# -*- coding: utf-8 -*-
"""
This is a Python implementation of the fast algorithm developed by
Vincent Mazet and Nicolas Chopin
(see http://miv.u-strasbg.fr/mazet/rtnorm/).
The version this code is based on is the Matlab implementation from 2012.
Created on Mon Aug 12 13:48:22 2013
Update on 11/27/2014: Added `erf` fallback implementation for missing
scipy. Thanks to Dr. Cliff Kerr (University of Sydney) for submitting
his patch!
@author: Christoph Lassner
"""
from numpy.random import uniform as rand, normal as randn, randint as randi
from numpy import sqrt, pi, exp, log, floor, array
try:
from scipy.special import erf
except ImportError:
# In some situations scipy might not be available or might take too long
# to compile (e.g. for Amazon Application deployment).
# Use a fallback implementation just relying on `math.erf` and
# `numpy.nditer`.
from numpy import nditer # Loop over N-dimensional arrays
import math # For erf function in math
def erf(arr):
r"""
Replicating SciPy erf function using math erf function to remove
SciPy dependency.
"""
output = array(arr) # Copy input array
        for x in nditer(output, op_flags=['readwrite']):  # Loop over each element
            x[...] = math.erf(x)  # Write the erf of this value back into the array
return output
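# A quick sanity check for the fallback (values agree with scipy.special.erf):
#   erf(array([0., 1.])) -> array([ 0.        ,  0.84270079])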
def rtnorm(a, b, mu=0., sigma=1., size=1, probabilities=False):
r"""
Pseudorandom numbers from a truncated Gaussian distribution.
X = rtnorm(a, b) returns a pseudorandom variable generated from a normal
distribution with mean zero and variance one (i.e. standard normal
distribution) truncated to the interval [a,b].
X = rtnorm(a,b,mu,sigma) returns a pseudorandom variable generated from
    a normal distribution with mean MU and standard deviation SIGMA
    truncated to the interval [a, b].
    The parameter size specifies how many samples to draw; if probabilities
    is set to True, the function also returns the vector of density values of X.
This implements an extension of Chopin's algorithm detailed in
N. Chopin, "Fast simulation of truncated Gaussian distributions", Stat
Comput (2011) 21:275-288
Copyright (C) 2012 Vincent Mazet (LSIIT, CNRS/Université de Strasbourg),
Version 2012-07-04, vincent.mazet@unistra.fr
08/12/2013:
- created python version.
18/06/2012:
- first launch of rtnorm.m
05/07/2012:
- fix bug concerning the computing of the pdf when (mu,sigma) is
different from (0,1).
- fix bug about some indexes out of bounds when computing yl for some
values of the input arguments.
04/09/2012:
- change condition in line 2628 to fix a bug.
Licence: GNU General Public License Version 2
This program is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2 of the License, or (at your
option) any later version. This program is distributed in the hope that
it will be useful, but WITHOUT ANY WARRANTY; without even the implied
warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details. You should have received a
copy of the GNU General Public License along with this program; if not,
see http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
"""
# Ensure these are floats for proper division values later on.
mu = float(mu)
sigma = float(sigma)
a = float(a)
b = float(b)
# Scaling
if not mu == 0. or not sigma == 1.:
a = (a-mu) / sigma
b = (b-mu) / sigma
# Generate the random variables
    r = array([rtstdnorm(a, b) for _ in range(size)])
# Scaling
if not mu == 0. or not sigma == 1.:
r = r * sigma + mu
# Compute the probabilities
if probabilities:
Z = sqrt(pi/2)*sigma * (erf(b/sqrt(2))-erf(a/sqrt(2)))
Z = max(Z, 1e-15) # Avoid NaN
p = exp(-(r-mu)**2/2/sigma**2) / Z
return r, p
else:
return r
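# Minimal usage sketch (variable names here are illustrative only):
#   samples, dens = rtnorm(1, 3, mu=2., sigma=0.5, size=1000,
#                          probabilities=True)
# draws 1000 values from N(2, 0.5**2) truncated to [1, 3], together with
# their truncated-normal density values.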
def rtstdnorm(a, b):
r"""
    RTSTDNORM Pseudorandom numbers from a truncated standard Gaussian
    distribution (i.e. rtnorm(a, b, 0, 1)).
"""
# Left and right limits
xmin = -2.00443204036
xmax = 3.48672170399
# Check if a < b
if a >= b:
        raise ValueError('For a truncated normal distribution on [a, b], b must be greater than a.')
    # If |a| > |b|, mirror the problem and sample from [-b, -a]
elif abs(a) > abs(b):
r = -rtstdnorm(-b, -a)
    # If a is in the right tail (a > xmax), use a rejection algorithm with
    # a truncated exponential proposal
elif a > xmax:
stop = False
twoasq = 2*a**2
expab = exp(-a*(b-a)) - 1
while not stop:
# The rand-function in Matlab that was used here returns values
# uniformly distributed in (0, 1). The numpy version includes
# the left border of the interval, so the numbers are drawn from
            # [0, 1). Hence use a small positive lower bound.
z = log(1 + rand(low=1E-15)*expab)
e = -log(rand(low=1E-15))
stop = (twoasq*e > z ** 2)
r = a - z/a
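        # Why this works: -z/a is a draw from an exponential proposal with
        # rate a, shifted to start at a and truncated at b; the test
        # 2*a^2*e > z^2 accepts with probability exp(-z^2/(2*a^2)), which is
        # exactly the Gaussian-to-exponential density ratio exp(-(r-a)^2/2).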
    # If a is in the left tail (a < xmin), use a rejection algorithm with
    # a Gaussian proposal
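    # (with a < xmin and |a| <= |b|, the interval [a, b] contains at least
    # [xmin, -xmin], i.e. roughly 95% of the standard-normal mass, so plain
    # rejection from N(0, 1) accepts quickly on average)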
elif a < xmin:
stop = False
while not stop:
r = randn()
stop = (r>=a) and (r<=b)
# In other cases (xmin < a < xmax), use Chopin's algorithm
else:
# Design variables
kmin = 5 # if kb-ka < kmin then use a rejection algorithm
INVH = 1631.73284006 # 1/h, h being the minimal interval range
        I0 = 3271  # = -floor(x[0]/h)
ALPHA = 1.837877066409345 # = log(2*pi)
N = 4000 # Index of the right tail
yl0 = 0.053513975472 # y_l of the leftmost rectangle
ylN = 0.000914116389555 # y_l of the rightmost rectangle
# Compute ka and kb
i = int(I0 + floor(a*INVH))
ka = ncell[i] # not: +1 due to index offset in Matlab ;-)
kb = 0
if b >= xmax:
kb = N
else:
i = int(I0 + floor(b*INVH))
kb = ncell[i] # not: +1 due to index offset in Matlab
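        # ka and kb are the indices of the rectangles containing a and b;
        # the sampler below first picks one of these cells uniformly at random.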
        # If [a, b] spans only a few cells (kb - ka < kmin), use a rejection
        # algorithm with a truncated exponential proposal
if abs(kb-ka) < kmin:
stop = False
twoasq = 2 * a**2
expab = exp(-a*(b-a)) - 1
while not stop:
z = log( 1 + rand()*expab )
e = -log(rand())
stop = (twoasq*e > z**2)
r = a - z/a
return r
while True:
            # Sample an integer between ka and kb.
            # Note that Matlab's randi includes the upper border, while for
            # numpy the high border is exclusive. Hence add one.
k = randi(low=ka, high=(kb+1)) # not: +1 due to index offset in Matlab
if k == N:
# Right tail
lbound = x[-1]
z = -log(rand())
e = -log(rand())
z = z / lbound
if (z**2 <= 2*e) and (z < b-lbound):
# Accept this proposition, otherwise reject
r = lbound + z
return r
elif (k<=ka+2) or (k>=kb and b<xmax):
# Two leftmost and rightmost regions
sim = x[k] + (x[k+1]-x[k]) * rand()
if (sim >= a) and (sim <= b):
# Accept this proposition, otherwise reject
simy = yu[k]*rand()
# Compute y_l from y_k
if k == 0:
ylk = yl0
elif k == N:
ylk = ylN
elif k <= 1954:
ylk = yu[k-1]
else:
ylk = yu[k+1]
if (simy<ylk) or (sim**2 + 2*log(simy) + ALPHA < 0):
r = sim
return r
else:
# All the other boxes
u = rand()
simy = yu[k] * u
d = x[k+1] - x[k]
# Compute y_l from y_k
                if k == 0:
ylk = yl0
elif k == N:
ylk = ylN
elif k <= 1954:
ylk = yu[k-1]
else:
ylk = yu[k+1]
if simy < ylk: # That's what happens most of the time
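                    # Recycle u: given simy < ylk, u is uniform on
                    # [0, ylk/yu[k]), so u*yu[k]/ylk is again uniform on
                    # [0, 1) and no extra draw or pdf evaluation is needed.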
r = x[k] + u*d*yu[k]/ylk
return r
sim = x[k] + d * rand()
# Otherwise, check you're below the pdf curve
if sim**2 + 2*log(simy) + ALPHA < 0:
r = sim
return r
return r
# Tables
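# A sketch of what the tables encode (cf. the Chopin 2011 reference above):
# x holds the left abscissae of the N = 4000 rectangles covering the
# standard-normal density (a Ziggurat-like decomposition), yu holds the
# rectangles' upper ordinates, and ncell maps a discretized abscissa
# (grid step h = 1/INVH) to the index of the rectangle that contains it.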
x = array([
-2.00443204036, -1.99990455547, -1.99541747213, -1.99096998962, \
-1.98656133124, -1.98219074335, -1.97785749442, -1.97356087419, \
-1.96930019287, -1.96507478031, -1.96088398528, -1.95672717477, \
-1.95260373328, -1.9485130622, -1.94445457918, -1.94042771755, \
-1.93643192574, -1.93246666677, -1.92853141772, -1.92462566922, \
-1.92074892503, -1.91690070156, -1.91308052741, -1.90928794302, \
-1.90552250025, -1.90178376197, -1.89807130174, -1.89438470345, \
-1.89072356098, -1.88708747787, -1.88347606705, -1.8798889505, \
-1.87632575899, -1.87278613181, -1.86926971649, -1.86577616858, \
-1.86230515137, -1.85885633567, -1.8554293996, -1.85202402837, \
-1.84863991405, -1.84527675539, -1.84193425762, -1.83861213227, \
-1.83531009698, -1.83202787533, -1.82876519668, -1.825521796, \
-1.82229741372, -1.81909179558, -1.81590469249, -1.81273586036, \
-1.80958506, -1.80645205698, -1.8033366215, -1.80023852827, \
-1.79715755637, -1.79409348917, -1.79104611422, -1.78801522309, \
-1.78500061134, -1.78200207837, -1.77901942732, -1.77605246501, \
-1.77310100183, -1.77016485166, -1.76724383175, -1.7643377627, \
-1.7614464683, -1.75856977555, -1.75570751448, -1.75285951816, \
-1.75002562257, -1.74720566658, -1.74439949184, -1.74160694276, \
-1.73882786639, -1.7360621124, -1.73330953303, -1.73056998298, \
-1.72784331941, -1.72512940185, -1.72242809217, -1.71973925449, \
-1.71706275519, -1.7143984628, -1.71174624799, -1.70910598353, \
-1.70647754419, -1.70386080677, -1.70125565, -1.69866195455, \
-1.69607960292, -1.69350847947, -1.69094847035, -1.68839946345, \
-1.6858613484, -1.68333401649, -1.68081736069, -1.67831127556, \
-1.67581565725, -1.67333040348, -1.67085541345, -1.6683905879, \
-1.665935829, -1.66349104035, -1.66105612696, -1.65863099522, \
-1.65621555288, -1.65380970898, -1.65141337389, -1.64902645924, \
-1.64664887792, -1.64428054402, -1.64192137286, -1.63957128092, \
-1.63723018585, -1.63489800643, -1.63257466256, -1.63026007522, \
-1.62795416649, -1.62565685948, -1.62336807836, -1.62108774828, \
-1.61881579544, -1.61655214696, -1.61429673098, -1.61204947656, \
-1.60981031368, -1.60757917325, -1.60535598708, -1.60314068784, \
-1.60093320909, -1.59873348523, -1.59654145149, -1.59435704393, \
-1.59218019943, -1.59001085565, -1.58784895103, -1.58569442477, \
-1.58354721686, -1.581407268, -1.57927451964, -1.57714891392, \
-1.57503039371, -1.57291890258, -1.57081438477, -1.56871678519, \
-1.56662604942, -1.56454212368, -1.56246495486, -1.56039449044, \
-1.55833067854, -1.55627346789, -1.55422280782, -1.55217864825, \
-1.55014093969, -1.5481096332, -1.54608468043, -1.54406603357, \
-1.54205364536, -1.54004746908, -1.53804745854, -1.53605356807, \
-1.53406575252, -1.53208396723, -1.53010816806, -1.52813831134, \
-1.52617435391, -1.52421625305, -1.52226396655, -1.52031745264, \
-1.51837667, -1.51644157777, -1.51451213554, -1.51258830332, \
-1.51067004156, -1.50875731112, -1.5068500733, -1.50494828979, \
-1.50305192269, -1.50116093452, -1.49927528818, -1.49739494693, \
-1.49551987447, -1.49365003484, -1.49178539245, -1.48992591209, \
-1.48807155892, -1.48622229844, -1.48437809651, -1.48253891934, \
-1.48070473348, -1.47887550581, -1.47705120356, -1.47523179427, \
-1.47341724582, -1.47160752641, -1.46980260454, -1.46800244903, \
-1.46620702902, -1.46441631394, -1.46263027351, -1.46084887778, \
-1.45907209704, -1.45729990192, -1.4555322633, -1.45376915236, \
-1.45201054053, -1.45025639954, -1.44850670139, -1.44676141832, \
-1.44502052286, -1.44328398779, -1.44155178613, -1.43982389118, \
-1.43810027647, -1.43638091579, -1.43466578316, -1.43295485285, \
-1.43124809936, -1.42954549744, -1.42784702206, -1.4261526484, \
-1.42446235191, -1.42277610822, -1.42109389321, -1.41941568296, \
-1.41774145377, -1.41607118216, -1.41440484485, -1.41274241877, \
-1.41108388106, -1.40942920906, -1.40777838029, -1.40613137251, \
-1.40448816364, -1.40284873181, -1.40121305532, -1.39958111269, \
-1.3979528826, -1.39632834393, -1.39470747574, -1.39309025725, \
-1.39147666789, -1.38986668723, -1.38826029505, -1.38665747129, \
-1.38505819603, -1.38346244956, -1.38187021232, -1.3802814649, \
-1.37869618807, -1.37711436276, -1.37553597004, -1.37396099116, \
-1.37238940752, -1.37082120066, -1.36925635228, -1.36769484423, \
-1.36613665852, -1.36458177727, -1.3630301828, -1.36148185752, \
-1.35993678401, -1.35839494499, -1.35685632332, -1.35532090198, \
-1.3537886641, -1.35225959295, -1.35073367192, -1.34921088453, \
-1.34769121444, -1.34617464545, -1.34466116145, -1.34315074649, \
-1.34164338473, -1.34013906045, -1.33863775808, -1.33713946213, \
-1.33564415726, -1.33415182822, -1.33266245992, -1.33117603734, \
-1.3296925456, -1.32821196994, -1.32673429568, -1.32525950829, \
-1.32378759331, -1.32231853644, -1.32085232344, -1.31938894019, \
-1.3179283727, -1.31647060705, -1.31501562944, -1.31356342618, \
-1.31211398366, -1.31066728839, -1.30922332698, -1.30778208612, \
-1.30634355261, -1.30490771336, -1.30347455533, -1.30204406564, \
-1.30061623144, -1.29919104001, -1.29776847873, -1.29634853503, \
-1.29493119647, -1.29351645068, -1.29210428538, -1.29069468838, \
-1.28928764758, -1.28788315096, -1.28648118658, -1.2850817426, \
-1.28368480725, -1.28229036885, -1.2808984158, -1.27950893658, \
-1.27812191975, -1.27673735394, -1.27535522788, -1.27397553036, \
-1.27259825027, -1.27122337654, -1.2698508982, -1.26848080436, \
-1.26711308419, -1.26574772695, -1.26438472195, -1.26302405859, \
-1.26166572634, -1.26030971474, -1.25895601339, -1.25760461198, \
-1.25625550025, -1.25490866802, -1.25356410517, -1.25222180165, \
-1.25088174749, -1.24954393277, -1.24820834764, -1.24687498231, \
-1.24554382707, -1.24421487225, -1.24288810826, -1.24156352558, \
-1.24024111474, -1.23892086632, -1.23760277098, -1.23628681945, \
-1.23497300248, -1.23366131092, -1.23235173565, -1.23104426764, \
-1.22973889789, -1.22843561746, -1.22713441748, -1.22583528914, \
-1.22453822366, -1.22324321234, -1.22195024653, -1.22065931762, \
-1.21937041707, -1.2180835364, -1.21679866716, -1.21551580096, \
-1.21423492948, -1.21295604444, -1.21167913759, -1.21040420078, \
-1.20913122586, -1.20786020475, -1.20659112944, -1.20532399194, \
-1.20405878432, -1.2027954987, -1.20153412724, -1.20027466216, \
-1.19901709572, -1.19776142023, -1.19650762804, -1.19525571156, \
-1.19400566322, -1.19275747553, -1.19151114101, -1.19026665225, \
-1.18902400188, -1.18778318256, -1.18654418701, -1.18530700798, \
-1.18407163828, -1.18283807074, -1.18160629825, -1.18037631374, \
-1.17914811017, -1.17792168055, -1.17669701793, -1.1754741154, \
-1.17425296609, -1.17303356317, -1.17181589985, -1.17059996938, \
-1.16938576505, -1.16817328018, -1.16696250814, -1.16575344233, \
-1.1645460762, -1.16334040321, -1.16213641689, -1.16093411079, \
-1.1597334785, -1.15853451364, -1.15733720988, -1.1561415609, \
-1.15494756045, -1.1537552023, -1.15256448023, -1.1513753881, \
-1.15018791978, -1.14900206916, -1.1478178302, -1.14663519686, \
-1.14545416315, -1.14427472312, -1.14309687083, -1.14192060039, \
-1.14074590595, -1.13957278166, -1.13840122174, -1.13723122041, \
-1.13606277195, -1.13489587064, -1.13373051083, -1.13256668686, \
-1.13140439313, -1.13024362405, -1.12908437408, -1.12792663769, \
-1.1267704094, -1.12561568374, -1.12446245528, -1.12331071862, \
-1.12216046839, -1.12101169923, -1.11986440583, -1.1187185829, \
-1.11757422519, -1.11643132745, -1.11528988448, -1.11414989111, \
-1.11301134218, -1.11187423257, -1.11073855719, -1.10960431095, \
-1.10847148882, -1.10734008578, -1.10621009684, -1.10508151703, \
-1.10395434141, -1.10282856507, -1.10170418311, -1.10058119068, \
-1.09945958293, -1.09833935506, -1.09722050226, -1.09610301977, \
-1.09498690286, -1.09387214681, -1.09275874692, -1.09164669853, \
-1.09053599698, -1.08942663766, -1.08831861598, -1.08721192734, \
-1.08610656721, -1.08500253104, -1.08389981434, -1.08279841262, \
-1.08169832142, -1.0805995363, -1.07950205283, -1.07840586663, \
-1.07731097332, -1.07621736855, -1.07512504799, -1.07403400732, \
-1.07294424226, -1.07185574854, -1.07076852192, -1.06968255816, \
-1.06859785307, -1.06751440246, -1.06643220217, -1.06535124805, \
-1.06427153597, -1.06319306184, -1.06211582157, -1.06103981109, \
-1.05996502636, -1.05889146336, -1.05781911808, -1.05674798653, \
-1.05567806475, -1.05460934878, -1.0535418347, -1.0524755186, \
-1.05141039657, -1.05034646476, -1.04928371929, -1.04822215635, \
-1.04716177209, -1.04610256273, -1.04504452448, -1.04398765357, \
-1.04293194626, -1.04187739881, -1.04082400752, -1.03977176868, \
-1.03872067861, -1.03767073366, -1.03662193018, -1.03557426455, \
-1.03452773314, -1.03348233237, -1.03243805866, -1.03139490845, \
-1.03035287819, -1.02931196435, -1.02827216342, -1.02723347191, \
-1.02619588633, -1.02515940322, -1.02412401912, -1.02308973062, \
-1.02205653428, -1.02102442671, -1.01999340452, -1.01896346433, \
-1.01793460279, -1.01690681657, -1.01588010232, -1.01485445675, \
-1.01382987655, -1.01280635844, -1.01178389916, -1.01076249545, \
-1.00974214407, -1.0087228418, -1.00770458543, -1.00668737176, \
-1.00567119762, -1.00465605983, -1.00364195524, -1.00262888071, \
-1.00161683312, -1.00060580935, -0.999595806306, -0.9985868209, \
-0.997578850062, -0.996571890733, -0.995565939868, -0.994560994436, \
-0.993557051418, -0.992554107808, -0.991552160613, -0.990551206854, \
-0.989551243564, -0.988552267788, -0.987554276585, -0.986557267027, \
-0.985561236196, -0.984566181188, -0.983572099113, -0.982578987091, \
-0.981586842254, -0.980595661749, -0.979605442731, -0.978616182371, \
-0.977627877849, -0.976640526359, -0.975654125105, -0.974668671305, \
-0.973684162186, -0.972700594988, -0.971717966963, -0.970736275374, \
-0.969755517495, -0.968775690612, -0.967796792022, -0.966818819033, \
-0.965841768964, -0.964865639146, -0.963890426921, -0.962916129641, \
-0.961942744669, -0.960970269379, -0.959998701157, -0.959028037398, \
-0.958058275508, -0.957089412906, -0.956121447017, -0.955154375281, \
-0.954188195145, -0.953222904069, -0.952258499521, -0.951294978982, \
-0.95033233994, -0.949370579895, -0.948409696358, -0.947449686847, \
-0.946490548893, -0.945532280036, -0.944574877824, -0.943618339818, \
-0.942662663587, -0.941707846709, -0.940753886774, -0.939800781378, \
-0.93884852813, -0.937897124647, -0.936946568555, -0.935996857491, \
-0.9350479891, -0.934099961035, -0.933152770962, -0.932206416553, \
-0.931260895491, -0.930316205466, -0.929372344179, -0.928429309338, \
-0.927487098664, -0.926545709881, -0.925605140727, -0.924665388946, \
-0.923726452292, -0.922788328527, -0.921851015421, -0.920914510754, \
-0.919978812315, -0.919043917899, -0.918109825313, -0.917176532369, \
-0.916244036888, -0.915312336703, -0.91438142965, -0.913451313577, \
-0.912521986339, -0.911593445799, -0.910665689828, -0.909738716305, \
-0.908812523118, -0.907887108163, -0.906962469342, -0.906038604567, \
-0.905115511758, -0.90419318884, -0.90327163375, -0.902350844428, \
-0.901430818827, -0.900511554903, -0.899593050622, -0.898675303958, \
-0.897758312891, -0.896842075409, -0.895926589508, -0.895011853191, \
-0.894097864469, -0.893184621359, -0.892272121887, -0.891360364086, \
-0.890449345995, -0.889539065661, -0.888629521138, -0.887720710488, \
-0.886812631779, -0.885905283087, -0.884998662493, -0.884092768089, \
-0.883187597969, -0.882283150238, -0.881379423006, -0.88047641439, \
-0.879574122514, -0.878672545509, -0.877771681512, -0.876871528668, \
-0.875972085128, -0.875073349049, -0.874175318595, -0.873277991937, \
-0.872381367254, -0.871485442727, -0.870590216549, -0.869695686916, \
-0.868801852031, -0.867908710104, -0.86701625935, -0.866124497993, \
-0.865233424261, -0.864343036389, -0.863453332618, -0.862564311196, \
-0.861675970376, -0.860788308418, -0.859901323588, -0.859015014157, \
-0.858129378404, -0.857244414613, -0.856360121074, -0.855476496083, \
-0.854593537942, -0.853711244958, -0.852829615446, -0.851948647726, \
-0.851068340122, -0.850188690965, -0.849309698594, -0.84843136135, \
-0.847553677583, -0.846676645646, -0.845800263899, -0.844924530708, \
-0.844049444444, -0.843175003483, -0.842301206208, -0.841428051007, \
-0.840555536273, -0.839683660404, -0.838812421805, -0.837941818885, \
-0.83707185006, -0.83620251375, -0.83533380838, -0.834465732382, \
-0.833598284192, -0.832731462252, -0.831865265009, -0.830999690914, \
-0.830134738426, -0.829270406006, -0.828406692123, -0.827543595248, \
-0.826681113861, -0.825819246443, -0.824957991484, -0.824097347476, \
-0.823237312917, -0.82237788631, -0.821519066163, -0.82066085099, \
-0.819803239307, -0.818946229639, -0.818089820512, -0.817234010459, \
-0.816378798017, -0.815524181729, -0.814670160142, -0.813816731806, \
-0.812963895279, -0.812111649122, -0.8112599919, -0.810408922185, \
-0.80955843855, -0.808708539576, -0.807859223848, -0.807010489955, \
-0.806162336489, -0.805314762049, -0.804467765238, -0.803621344663, \
-0.802775498936, -0.801930226672, -0.801085526493, -0.800241397023, \
-0.799397836891, -0.798554844732, -0.797712419183, -0.796870558888, \
-0.796029262492, -0.795188528648, -0.79434835601, -0.793508743238, \
-0.792669688996, -0.791831191953, -0.790993250781, -0.790155864155, \
-0.789319030758, -0.788482749274, -0.787647018393, -0.786811836806, \
-0.785977203212, -0.785143116312, -0.784309574812, -0.783476577421, \
-0.782644122852, -0.781812209823, -0.780980837056, -0.780150003277, \
-0.779319707213, -0.7784899476, -0.777660723175, -0.776832032678, \
-0.776003874855, -0.775176248455, -0.774349152231, -0.773522584939, \
-0.772696545341, -0.7718710322, -0.771046044284, -0.770221580367, \
-0.769397639223, -0.768574219631, -0.767751320376, -0.766928940243, \
-0.766107078024, -0.765285732513, -0.764464902507, -0.763644586809, \
-0.762824784223, -0.762005493558, -0.761186713627, -0.760368443246, \
-0.759550681234, -0.758733426414, -0.757916677614, -0.757100433662, \
-0.756284693394, -0.755469455646, -0.754654719259, -0.753840483077, \
-0.753026745948, -0.752213506722, -0.751400764255, -0.750588517404, \
-0.74977676503, -0.748965505998, -0.748154739176, -0.747344463435, \
-0.746534677651, -0.7457253807, -0.744916571465, -0.74410824883, \
-0.743300411682, -0.742493058914, -0.741686189419, -0.740879802096, \
-0.740073895844, -0.739268469569, -0.738463522177, -0.737659052579, \
-0.736855059689, -0.736051542423, -0.735248499702, -0.734445930447, \
-0.733643833587, -0.73284220805, -0.732041052768, -0.731240366677, \
-0.730440148716, -0.729640397826, -0.728841112951, -0.728042293041, \
-0.727243937044, -0.726446043916, -0.725648612612, -0.724851642093, \
-0.724055131321, -0.723259079262, -0.722463484884, -0.721668347159, \
-0.720873665062, -0.720079437569, -0.719285663661, -0.718492342322, \
-0.717699472536, -0.716907053294, -0.716115083586, -0.715323562408, \
-0.714532488756, -0.713741861631, -0.712951680037, -0.712161942978, \
-0.711372649463, -0.710583798504, -0.709795389115, -0.709007420313, \
-0.708219891118, -0.707432800551, -0.706646147638, -0.705859931406, \
-0.705074150887, -0.704288805113, -0.70350389312, -0.702719413947, \
-0.701935366634, -0.701151750226, -0.700368563769, -0.699585806312, \
-0.698803476906, -0.698021574607, -0.69724009847, -0.696459047555, \
-0.695678420925, -0.694898217643, -0.694118436777, -0.693339077397, \
-0.692560138575, -0.691781619384, -0.691003518904, -0.690225836212, \
-0.689448570392, -0.688671720529, -0.687895285708, -0.687119265021, \
-0.686343657558, -0.685568462415, -0.684793678689, -0.684019305478, \
-0.683245341885, -0.682471787013, -0.68169863997, -0.680925899864, \
-0.680153565806, -0.679381636911, -0.678610112294, -0.677838991074, \
-0.677068272372, -0.67629795531, -0.675528039013, -0.674758522611, \
-0.673989405232, -0.67322068601, -0.672452364078, -0.671684438573, \
-0.670916908635, -0.670149773405, -0.669383032026, -0.668616683646, \
-0.66785072741, -0.667085162471, -0.666319987981, -0.665555203094, \
-0.664790806967, -0.66402679876, -0.663263177633, -0.662499942752, \
-0.66173709328, -0.660974628386, -0.66021254724, -0.659450849015, \
-0.658689532883, -0.657928598023, -0.657168043612, -0.656407868831, \
-0.655648072862, -0.654888654892, -0.654129614105, -0.653370949693, \
-0.652612660844, -0.651854746754, -0.651097206616, -0.650340039629, \
-0.64958324499, -0.648826821903, -0.648070769569, -0.647315087195, \
-0.646559773988, -0.645804829157, -0.645050251913, -0.64429604147, \
-0.643542197043, -0.642788717849, -0.642035603108, -0.641282852041, \
-0.640530463871, -0.639778437823, -0.639026773124, -0.638275469004, \
-0.637524524692, -0.636773939423, -0.636023712429, -0.63527384295, \
-0.634524330221, -0.633775173485, -0.633026371984, -0.632277924961, \
-0.631529831662, -0.630782091336, -0.630034703232, -0.629287666601, \
-0.628540980698, -0.627794644778, -0.627048658096, -0.626303019913, \
-0.625557729489, -0.624812786087, -0.62406818897, -0.623323937406, \
-0.622580030661, -0.621836468005, -0.621093248711, -0.62035037205, \
-0.619607837299, -0.618865643733, -0.618123790632, -0.617382277275, \
-0.616641102944, -0.615900266923, -0.615159768498, -0.614419606955, \
-0.613679781584, -0.612940291674, -0.612201136518, -0.61146231541, \
-0.610723827646, -0.609985672522, -0.609247849338, -0.608510357395, \
-0.607773195994, -0.607036364439, -0.606299862036, -0.605563688093, \
-0.604827841918, -0.604092322821, -0.603357130115, -0.602622263113, \
-0.601887721131, -0.601153503486, -0.600419609496, -0.599686038481, \
-0.598952789763, -0.598219862666, -0.597487256514, -0.596754970634, \
-0.596023004354, -0.595291357003, -0.594560027913, -0.593829016416, \
-0.593098321847, -0.592367943541, -0.591637880836, -0.590908133071, \
-0.590178699585, -0.589449579722, -0.588720772824, -0.587992278236, \
-0.587264095305, -0.586536223378, -0.585808661806, -0.585081409939, \
-0.584354467129, -0.58362783273, -0.582901506099, -0.58217548659, \
-0.581449773564, -0.580724366379, -0.579999264397, -0.57927446698, \
-0.578549973492, -0.5778257833, -0.577101895769, -0.576378310269, \
-0.575655026169, -0.57493204284, -0.574209359655, -0.573486975988, \
-0.572764891214, -0.57204310471, -0.571321615855, -0.570600424028, \
-0.56987952861, -0.569158928984, -0.568438624533, -0.567718614641, \
-0.566998898697, -0.566279476088, -0.565560346202, -0.56484150843, \
-0.564122962165, -0.563404706799, -0.562686741727, -0.561969066345, \
-0.56125168005, -0.560534582241, -0.559817772317, -0.559101249681, \
-0.558385013733, -0.557669063878, -0.556953399521, -0.556238020069, \
-0.555522924929, -0.55480811351, -0.554093585223, -0.553379339478, \
-0.552665375689, -0.551951693269, -0.551238291635, -0.550525170202, \
-0.549812328389, -0.549099765614, -0.548387481299, -0.547675474863, \
-0.546963745731, -0.546252293327, -0.545541117075, -0.544830216402, \
-0.544119590737, -0.543409239507, -0.542699162143, -0.541989358077, \
-0.541279826741, -0.540570567568, -0.539861579995, -0.539152863456, \
-0.53844441739, -0.537736241235, -0.537028334431, -0.536320696418, \
-0.535613326639, -0.534906224537, -0.534199389557, -0.533492821143, \
-0.532786518743, -0.532080481805, -0.531374709778, -0.530669202111, \
-0.529963958257, -0.529258977668, -0.528554259797, -0.5278498041, \
-0.527145610031, -0.526441677048, -0.52573800461, -0.525034592175, \
-0.524331439204, -0.523628545158, -0.5229259095, -0.522223531693, \
-0.521521411203, -0.520819547494, -0.520117940035, -0.519416588293, \
-0.518715491738, -0.518014649839, -0.517314062067, -0.516613727896, \
-0.515913646798, -0.515213818248, -0.514514241722, -0.513814916696, \
-0.513115842648, -0.512417019057, -0.511718445402, -0.511020121164, \
-0.510322045826, -0.509624218869, -0.508926639778, -0.508229308038, \
-0.507532223135, -0.506835384556, -0.506138791788, -0.505442444322, \
-0.504746341647, -0.504050483254, -0.503354868635, -0.502659497283, \
-0.501964368692, -0.501269482359, -0.500574837777, -0.499880434446, \
-0.499186271862, -0.498492349525, -0.497798666935, -0.497105223592, \
-0.496412018999, -0.49571905266, -0.495026324076, -0.494333832755, \
-0.493641578201, -0.492949559921, -0.492257777423, -0.491566230217, \
-0.49087491781, -0.490183839715, -0.489492995442, -0.488802384505, \
-0.488112006416, -0.487421860691, -0.486731946843, -0.48604226439, \
-0.48535281285, -0.484663591739, -0.483974600576, -0.483285838883, \
-0.48259730618, -0.481909001988, -0.48122092583, -0.480533077229, \
-0.479845455711, -0.479158060801, -0.478470892024, -0.477783948909, \
-0.477097230982, -0.476410737773, -0.475724468813, -0.47503842363, \
-0.474352601759, -0.473667002729, -0.472981626076, -0.472296471333, \
-0.471611538035, -0.470926825718, -0.470242333919, -0.469558062177, \
-0.468874010028, -0.468190177013, -0.467506562672, -0.466823166546, \
-0.466139988176, -0.465457027107, -0.46477428288, -0.464091755042, \
-0.463409443136, -0.46272734671, -0.46204546531, -0.461363798483, \
-0.460682345779, -0.460001106748, -0.459320080938, -0.458639267901, \
-0.45795866719, -0.457278278357, -0.456598100954, -0.455918134538, \
-0.455238378662, -0.454558832883, -0.453879496757, -0.453200369842, \
-0.452521451697, -0.45184274188, -0.45116423995, -0.45048594547, \
-0.449807858, -0.449129977103, -0.448452302341, -0.447774833279, \
-0.44709756948, -0.446420510511, -0.445743655937, -0.445067005325, \
-0.444390558244, -0.44371431426, -0.443038272944, -0.442362433866, \
-0.441686796595, -0.441011360704, -0.440336125765, -0.43966109135, \
-0.438986257034, -0.43831162239, -0.437637186994, -0.436962950421, \
-0.436288912249, -0.435615072055, -0.434941429417, -0.434267983913, \
-0.433594735123, -0.432921682628, -0.432248826008, -0.431576164846, \
-0.430903698722, -0.430231427222, -0.429559349928, -0.428887466426, \
-0.4282157763, -0.427544279136, -0.426872974521, -0.426201862043, \
-0.42553094129, -0.42486021185, -0.424189673313, -0.423519325269, \
-0.422849167309, -0.422179199024, -0.421509420007, -0.420839829851, \
-0.420170428149, -0.419501214496, -0.418832188486, -0.418163349716, \
-0.417494697781, -0.416826232279, -0.416157952806, -0.415489858963, \
-0.414821950346, -0.414154226557, -0.413486687196, -0.412819331863, \
-0.41215216016, -0.41148517169, -0.410818366055, -0.410151742859, \
-0.409485301706, -0.408819042201, -0.40815296395, -0.407487066559, \
-0.406821349634, -0.406155812784, -0.405490455615, -0.404825277738, \
-0.404160278761, -0.403495458294, -0.402830815949, -0.402166351335, \
-0.401502064066, -0.400837953753, -0.40017402001, -0.399510262451, \
-0.398846680689, -0.39818327434, -0.397520043019, -0.396856986343, \
-0.396194103929, -0.395531395393, -0.394868860354, -0.394206498431, \
-0.393544309243, -0.392882292409, -0.392220447549, -0.391558774287, \
-0.390897272241, -0.390235941036, -0.389574780293, -0.388913789636, \
-0.38825296869, -0.387592317078, -0.386931834425, -0.386271520359, \
-0.385611374504, -0.384951396488, -0.384291585938, -0.383631942482, \
-0.382972465749, -0.382313155368, -0.381654010969, -0.380995032182, \
-0.380336218639, -0.379677569969, -0.379019085806, -0.378360765783, \
-0.377702609531, -0.377044616686, -0.37638678688, -0.37572911975, \
-0.37507161493, -0.374414272056, -0.373757090765, -0.373100070693, \
-0.372443211479, -0.37178651276, -0.371129974175, -0.370473595364, \
-0.369817375965, -0.369161315619, -0.368505413967, -0.36784967065, \
-0.367194085311, -0.36653865759, -0.365883387132, -0.36522827358, \
-0.364573316578, -0.36391851577, -0.363263870801, -0.362609381316, \
-0.361955046963, -0.361300867386, -0.360646842235, -0.359992971155, \
-0.359339253795, -0.358685689804, -0.358032278831, -0.357379020525, \
-0.356725914537, -0.356072960516, -0.355420158116, -0.354767506986, \
-0.354115006779, -0.353462657149, -0.352810457747, -0.352158408227, \
-0.351506508245, -0.350854757453, -0.350203155508, -0.349551702065, \
-0.34890039678, -0.348249239309, -0.34759822931, -0.346947366441, \
-0.346296650358, -0.345646080722, -0.344995657189, -0.344345379421, \
-0.343695247078, -0.343045259818, -0.342395417304, -0.341745719196, \
-0.341096165157, -0.340446754849, -0.339797487934, -0.339148364076, \
-0.338499382938, -0.337850544184, -0.33720184748, -0.33655329249, \
-0.335904878879, -0.335256606314, -0.334608474462, -0.333960482988, \
-0.33331263156, -0.332664919846, -0.332017347514, -0.331369914234, \
-0.330722619673, -0.330075463502, -0.329428445391, -0.328781565009, \
-0.328134822029, -0.32748821612, -0.326841746956, -0.326195414208, \
-0.325549217549, -0.324903156652, -0.32425723119, -0.323611440838, \
-0.322965785269, -0.322320264159, -0.321674877183, -0.321029624016, \
-0.320384504335, -0.319739517815, -0.319094664135, -0.318449942971, \
-0.317805354, -0.317160896902, -0.316516571355, -0.315872377037, \
-0.315228313629, -0.314584380809, -0.313940578259, -0.313296905659, \
-0.312653362689, -0.312009949032, -0.311366664369, -0.310723508383, \
-0.310080480756, -0.309437581171, -0.308794809313, -0.308152164863, \
-0.307509647508, -0.306867256932, -0.306224992819, -0.305582854855, \
-0.304940842726, -0.304298956119, -0.303657194719, -0.303015558215, \
-0.302374046293, -0.301732658641, -0.301091394947, -0.3004502549, \
-0.29980923819, -0.299168344504, -0.298527573533, -0.297886924968, \
-0.297246398498, -0.296605993814, -0.295965710609, -0.295325548572, \
-0.294685507396, -0.294045586775, -0.293405786399, -0.292766105963, \
-0.292126545159, -0.291487103683, -0.290847781227, -0.290208577487, \
-0.289569492157, -0.288930524932, -0.288291675509, -0.287652943584, \
-0.287014328852, -0.28637583101, -0.285737449756, -0.285099184787, \
-0.2844610358, -0.283823002495, -0.283185084568, -0.28254728172, \
-0.28190959365, -0.281272020056, -0.280634560639, -0.279997215099, \
-0.279359983137, -0.278722864454, -0.278085858751, -0.277448965729, \
-0.27681218509, -0.276175516537, -0.275538959773, -0.2749025145, \
-0.274266180422, -0.273629957242, -0.272993844665, -0.272357842394, \
-0.271721950134, -0.271086167591, -0.270450494469, -0.269814930474, \
-0.269179475313, -0.268544128691, -0.267908890315, -0.267273759892, \
-0.26663873713, -0.266003821735, -0.265369013416, -0.264734311881, \
-0.264099716838, -0.263465227997, -0.262830845067, -0.262196567757, \
-0.261562395776, -0.260928328836, -0.260294366646, -0.259660508917, \
-0.25902675536, -0.258393105687, -0.25775955961, -0.25712611684, \
-0.256492777089, -0.25585954007, -0.255226405497, -0.254593373082, \
-0.253960442538, -0.253327613581, -0.252694885923, -0.252062259279, \
-0.251429733364, -0.250797307892, -0.25016498258, -0.249532757143, \
-0.248900631296, -0.248268604756, -0.24763667724, -0.247004848463, \
-0.246373118143, -0.245741485998, -0.245109951745, -0.244478515102, \
-0.243847175788, -0.24321593352, -0.242584788017, -0.241953738999, \
-0.241322786185, -0.240691929294, -0.240061168047, -0.239430502163, \
-0.238799931363, -0.238169455368, -0.237539073899, -0.236908786677, \
-0.236278593424, -0.235648493861, -0.235018487711, -0.234388574696, \
-0.233758754538, -0.233129026961, -0.232499391688, -0.231869848442, \
-0.231240396947, -0.230611036927, -0.229981768106, -0.229352590209, \
-0.22872350296, -0.228094506085, -0.227465599309, -0.226836782357, \
-0.226208054955, -0.22557941683, -0.224950867708, -0.224322407315, \
-0.223694035378, -0.223065751624, -0.222437555781, -0.221809447576, \
-0.221181426737, -0.220553492993, -0.219925646071, -0.219297885701, \
-0.21867021161, -0.218042623529, -0.217415121186, -0.216787704312, \
-0.216160372636, -0.215533125887, -0.214905963798, -0.214278886097, \
-0.213651892517, -0.213024982787, -0.212398156639, -0.211771413806, \
-0.211144754018, -0.210518177008, -0.209891682507, -0.209265270249, \
-0.208638939966, -0.208012691392, -0.207386524258, -0.206760438299, \
-0.206134433249, -0.20550850884, -0.204882664808, -0.204256900887, \
-0.203631216811, -0.203005612315, -0.202380087133, -0.201754641003, \
-0.201129273658, -0.200503984834, -0.199878774268, -0.199253641695, \
-0.198628586852, -0.198003609476, -0.197378709302, -0.196753886069, \
-0.196129139514, -0.195504469373, -0.194879875385, -0.194255357287, \
-0.193630914818, -0.193006547715, -0.192382255718, -0.191758038565, \
-0.191133895995, -0.190509827747, -0.189885833561, -0.189261913176, \
-0.188638066331, -0.188014292767, -0.187390592225, -0.186766964443, \
-0.186143409164, -0.185519926127, -0.184896515074, -0.184273175746, \
-0.183649907884, -0.18302671123, -0.182403585526, -0.181780530513, \
-0.181157545935, -0.180534631532, -0.179911787049, -0.179289012227, \
-0.17866630681, -0.178043670541, -0.177421103162, -0.176798604419, \
-0.176176174053, -0.175553811811, -0.174931517434, -0.174309290669, \
-0.173687131258, -0.173065038948, -0.172443013482, -0.171821054607, \
-0.171199162066, -0.170577335606, -0.169955574973, -0.169333879911, \
-0.168712250167, -0.168090685487, -0.167469185618, -0.166847750305, \
-0.166226379296, -0.165605072338, -0.164983829177, -0.164362649561, \
-0.163741533237, -0.163120479952, -0.162499489456, -0.161878561494, \
-0.161257695816, -0.16063689217, -0.160016150304, -0.159395469966, \
-0.158774850907, -0.158154292873, -0.157533795616, -0.156913358883, \
-0.156292982424, -0.15567266599, -0.155052409329, -0.154432212191, \
-0.153812074328, -0.153191995488, -0.152571975423, -0.151952013883, \
-0.151332110619, -0.150712265381, -0.150092477921, -0.14947274799, \
-0.148853075339, -0.148233459721, -0.147613900885, -0.146994398586, \
-0.146374952573, -0.1457555626, -0.14513622842, -0.144516949783, \
-0.143897726444, -0.143278558154, -0.142659444667, -0.142040385735, \
-0.141421381113, -0.140802430553, -0.140183533808, -0.139564690633, \
-0.138945900782, -0.138327164007, -0.137708480064, -0.137089848707, \
-0.136471269689, -0.135852742766, -0.135234267692, -0.134615844222, \
-0.13399747211, -0.133379151113, -0.132760880985, -0.132142661481, \
-0.131524492358, -0.13090637337, -0.130288304273, -0.129670284824, \
-0.129052314778, -0.128434393892, -0.127816521921, -0.127198698623, \
-0.126580923754, -0.12596319707, -0.125345518329, -0.124727887287, \
-0.124110303701, -0.12349276733, -0.122875277929, -0.122257835256, \
-0.12164043907, -0.121023089128, -0.120405785187, -0.119788527006, \
-0.119171314342, -0.118554146955, -0.117937024601, -0.117319947041, \
-0.116702914032, -0.116085925333, -0.115468980703, -0.1148520799, \
-0.114235222685, -0.113618408815, -0.113001638051, -0.112384910152, \
-0.111768224876, -0.111151581985, -0.110534981238, -0.109918422394, \
-0.109301905213, -0.108685429456, -0.108068994883, -0.107452601254, \
-0.10683624833, -0.10621993587, -0.105603663637, -0.104987431389, \
-0.10437123889, -0.103755085898, -0.103138972176, -0.102522897484, \
-0.101906861584, -0.101290864237, -0.100674905205, -0.100058984249, \
-0.0994431011311, -0.0988272556129, -0.0982114474563, -0.0975956764232, \
-0.0969799422759, -0.0963642447764, -0.0957485836871, -0.0951329587704, \
-0.0945173697886, -0.0939018165045, -0.0932862986806, -0.0926708160798, \
-0.0920553684649, -0.0914399555988, -0.0908245772446, -0.0902092331655, \
-0.0895939231246, -0.0889786468852, -0.0883634042109, -0.0877481948651, \
-0.0871330186113, -0.0865178752133, -0.0859027644347, -0.0852876860396, \
-0.0846726397917, -0.0840576254552, -0.0834426427941, -0.0828276915726, \
-0.0822127715549, -0.0815978825056, -0.0809830241889, -0.0803681963694, \
-0.0797533988117, -0.0791386312806, -0.0785238935406, -0.0779091853568, \
-0.077294506494, -0.0766798567172, -0.0760652357914, -0.0754506434819, \
-0.0748360795539, -0.0742215437727, -0.0736070359035, -0.072992555712, \
-0.0723781029636, -0.0717636774239, -0.0711492788586, -0.0705349070334, \
-0.0699205617141, -0.0693062426667, -0.068691949657, -0.0680776824512, \
-0.0674634408152, -0.0668492245152, -0.0662350333175, -0.0656208669883, \
-0.065006725294, -0.0643926080011, -0.0637785148759, -0.0631644456851, \
-0.0625504001953, -0.0619363781731, -0.0613223793853, -0.0607084035987, \
-0.0600944505801, -0.0594805200964, -0.0588666119147, -0.058252725802, \
-0.0576388615253, -0.0570250188518, -0.0564111975488, -0.0557973973834, \
-0.0551836181231, -0.0545698595352, -0.0539561213871, -0.0533424034463, \
-0.0527287054803, -0.0521150272568, -0.0515013685434, -0.0508877291078, \
-0.0502741087177, -0.0496605071409, -0.0490469241453, -0.0484333594987, \
-0.0478198129692, -0.0472062843246, -0.0465927733331, -0.0459792797628, \
-0.0453658033816, -0.0447523439579, -0.0441389012599, -0.0435254750557, \
-0.0429120651138, -0.0422986712024, -0.04168529309, -0.041071930545, \
-0.0404585833359, -0.0398452512311, -0.0392319339993, -0.0386186314091, \
-0.0380053432289, -0.0373920692277, -0.0367788091739, -0.0361655628365, \
-0.0355523299841, -0.0349391103856, -0.0343259038099, -0.0337127100257, \
-0.0330995288021, -0.032486359908, -0.0318732031123, -0.0312600581842, \
-0.0306469248925, -0.0300338030065, -0.0294206922952, -0.0288075925277, \
-0.0281945034733, -0.0275814249011, -0.0269683565803, -0.0263552982801, \
-0.0257422497699, -0.025129210819, -0.0245161811967, -0.0239031606722, \
-0.0232901490151, -0.0226771459946, -0.0220641513803, -0.0214511649415, \
-0.0208381864477, -0.0202252156684, -0.019612252373, -0.0189992963312, \
-0.0183863473125, -0.0177734050864, -0.0171604694224, -0.0165475400903, \
-0.0159346168595, -0.0153216994998, -0.0147087877807, -0.0140958814719, \
-0.0134829803432, -0.0128700841641, -0.0122571927044, -0.0116443057337, \
-0.0110314230219, -0.0104185443386, -0.00980566945358, -0.00919279813659, \
-0.00857993015739, -0.00796706528575, -0.00735420329145, -0.00674134394428, \
-0.00612848701402, -0.00551563227049, -0.00490277948347, -0.00428992842278, \
-0.00367707885824, -0.00306423055966, -0.00245138329686, -0.00183853683967, \
-0.00122569095791, -0.000612845421414, 0.0, 0.000612845421414, \
0.00122569095791, 0.00183853683967, 0.00245138329686, 0.00306423055966, \
0.00367707885824, 0.00428992842278, 0.00490277948347, 0.00551563227049, \
0.00612848701402, 0.00674134394428, 0.00735420329145, 0.00796706528575, \
0.00857993015739, 0.00919279813659, 0.00980566945358, 0.0104185443386, \
0.0110314230219, 0.0116443057337, 0.0122571927044, 0.0128700841641, \
0.0134829803432, 0.0140958814719, 0.0147087877807, 0.0153216994998, \
0.0159346168595, 0.0165475400903, 0.0171604694224, 0.0177734050864, \
0.0183863473125, 0.0189992963312, 0.019612252373, 0.0202252156684, \
0.0208381864477, 0.0214511649415, 0.0220641513803, 0.0226771459946, \
0.0232901490151, 0.0239031606722, 0.0245161811967, 0.025129210819, \
0.0257422497699, 0.0263552982801, 0.0269683565803, 0.0275814249011, \
0.0281945034733, 0.0288075925277, 0.0294206922952, 0.0300338030065, \
0.0306469248925, 0.0312600581842, 0.0318732031123, 0.032486359908, \
0.0330995288021, 0.0337127100257, 0.0343259038099, 0.0349391103856, \
0.0355523299841, 0.0361655628365, 0.0367788091739, 0.0373920692277, \
0.0380053432289, 0.0386186314091, 0.0392319339993, 0.0398452512311, \
0.0404585833359, 0.041071930545, 0.04168529309, 0.0422986712024, \
0.0429120651138, 0.0435254750557, 0.0441389012599, 0.0447523439579, \
0.0453658033816, 0.0459792797628, 0.0465927733331, 0.0472062843246, \
0.0478198129692, 0.0484333594987, 0.0490469241453, 0.0496605071409, \
0.0502741087177, 0.0508877291078, 0.0515013685434, 0.0521150272568, \
0.0527287054803, 0.0533424034463, 0.0539561213871, 0.0545698595352, \
0.0551836181231, 0.0557973973834, 0.0564111975488, 0.0570250188518, \
0.0576388615253, 0.058252725802, 0.0588666119147, 0.0594805200964, \
0.0600944505801, 0.0607084035987, 0.0613223793853, 0.0619363781731, \
0.0625504001953, 0.0631644456851, 0.0637785148759, 0.0643926080011, \
0.065006725294, 0.0656208669883, 0.0662350333175, 0.0668492245152, \
0.0674634408152, 0.0680776824512, 0.068691949657, 0.0693062426667, \
0.0699205617141, 0.0705349070334, 0.0711492788586, 0.0717636774239, \
0.0723781029636, 0.072992555712, 0.0736070359035, 0.0742215437727, \
0.0748360795539, 0.0754506434819, 0.0760652357914, 0.0766798567172, \
0.077294506494, 0.0779091853568, 0.0785238935406, 0.0791386312806, \
0.0797533988117, 0.0803681963694, 0.0809830241889, 0.0815978825056, \
0.0822127715549, 0.0828276915726, 0.0834426427941, 0.0840576254552, \
0.0846726397917, 0.0852876860396, 0.0859027644347, 0.0865178752133, \
0.0871330186113, 0.0877481948651, 0.0883634042109, 0.0889786468852, \
0.0895939231246, 0.0902092331655, 0.0908245772446, 0.0914399555988, \
0.0920553684649, 0.0926708160798, 0.0932862986806, 0.0939018165045, \
0.0945173697886, 0.0951329587704, 0.0957485836871, 0.0963642447764, \
0.0969799422759, 0.0975956764232, 0.0982114474563, 0.0988272556129, \
0.0994431011311, 0.100058984249, 0.100674905205, 0.101290864237, \
0.101906861584, 0.102522897484, 0.103138972176, 0.103755085898, \
0.10437123889, 0.104987431389, 0.105603663637, 0.10621993587, \
0.10683624833, 0.107452601254, 0.108068994883, 0.108685429456, \
0.109301905213, 0.109918422394, 0.110534981238, 0.111151581985, \
0.111768224876, 0.112384910152, 0.113001638051, 0.113618408815, \
0.114235222685, 0.1148520799, 0.115468980703, 0.116085925333, \
0.116702914032, 0.117319947041, 0.117937024601, 0.118554146955, \
0.119171314342, 0.119788527006, 0.120405785187, 0.121023089128, \
0.12164043907, 0.122257835256, 0.122875277929, 0.12349276733, \
0.124110303701, 0.124727887287, 0.125345518329, 0.12596319707, \
0.126580923754, 0.127198698623, 0.127816521921, 0.128434393892, \
0.129052314778, 0.129670284824, 0.130288304273, 0.13090637337, \
0.131524492358, 0.132142661481, 0.132760880985, 0.133379151113, \
0.13399747211, 0.134615844222, 0.135234267692, 0.135852742766, \
0.136471269689, 0.137089848707, 0.137708480064, 0.138327164007, \
0.138945900782, 0.139564690633, 0.140183533808, 0.140802430553, \
0.141421381113, 0.142040385735, 0.142659444667, 0.143278558154, \
0.143897726444, 0.144516949783, 0.14513622842, 0.1457555626, \
0.146374952573, 0.146994398586, 0.147613900885, 0.148233459721, \
0.148853075339, 0.14947274799, 0.150092477921, 0.150712265381, \
0.151332110619, 0.151952013883, 0.152571975423, 0.153191995488, \
0.153812074328, 0.154432212191, 0.155052409329, 0.15567266599, \
0.156292982424, 0.156913358883, 0.157533795616, 0.158154292873, \
0.158774850907, 0.159395469966, 0.160016150304, 0.16063689217, \
0.161257695816, 0.161878561494, 0.162499489456, 0.163120479952, \
0.163741533237, 0.164362649561, 0.164983829177, 0.165605072338, \
0.166226379296, 0.166847750305, 0.167469185618, 0.168090685487, \
0.168712250167, 0.169333879911, 0.169955574973, 0.170577335606, \
0.171199162066, 0.171821054607, 0.172443013482, 0.173065038948, \
0.173687131258, 0.174309290669, 0.174931517434, 0.175553811811, \
0.176176174053, 0.176798604419, 0.177421103162, 0.178043670541, \
0.17866630681, 0.179289012227, 0.179911787049, 0.180534631532, \
0.181157545935, 0.181780530513, 0.182403585526, 0.18302671123, \
0.183649907884, 0.184273175746, 0.184896515074, 0.185519926127, \
0.186143409164, 0.186766964443, 0.187390592225, 0.188014292767, \
0.188638066331, 0.189261913176, 0.189885833561, 0.190509827747, \
0.191133895995, 0.191758038565, 0.192382255718, 0.193006547715, \
0.193630914818, 0.194255357287, 0.194879875385, 0.195504469373, \
0.196129139514, 0.196753886069, 0.197378709302, 0.198003609476, \
0.198628586852, 0.199253641695, 0.199878774268, 0.200503984834, \
0.201129273658, 0.201754641003, 0.202380087133, 0.203005612315, \
0.203631216811, 0.204256900887, 0.204882664808, 0.20550850884, \
0.206134433249, 0.206760438299, 0.207386524258, 0.208012691392, \
0.208638939966, 0.209265270249, 0.209891682507, 0.210518177008, \
0.211144754018, 0.211771413806, 0.212398156639, 0.213024982787, \
0.213651892517, 0.214278886097, 0.214905963798, 0.215533125887, \
0.216160372636, 0.216787704312, 0.217415121186, 0.218042623529, \
0.21867021161, 0.219297885701, 0.219925646071, 0.220553492993, \
0.221181426737, 0.221809447576, 0.222437555781, 0.223065751624, \
0.223694035378, 0.224322407315, 0.224950867708, 0.22557941683, \
0.226208054955, 0.226836782357, 0.227465599309, 0.228094506085, \
0.22872350296, 0.229352590209, 0.229981768106, 0.230611036927, \
0.231240396947, 0.231869848442, 0.232499391688, 0.233129026961, \
0.233758754538, 0.234388574696, 0.235018487711, 0.235648493861, \
0.236278593424, 0.236908786677, 0.237539073899, 0.238169455368, \
0.238799931363, 0.239430502163, 0.240061168047, 0.240691929294, \
0.241322786185, 0.241953738999, 0.242584788017, 0.24321593352, \
0.243847175788, 0.244478515102, 0.245109951745, 0.245741485998, \
0.246373118143, 0.247004848463, 0.24763667724, 0.248268604756, \
0.248900631296, 0.249532757143, 0.25016498258, 0.250797307892, \
0.251429733364, 0.252062259279, 0.252694885923, 0.253327613581, \
0.253960442538, 0.254593373082, 0.255226405497, 0.25585954007, \
0.256492777089, 0.25712611684, 0.25775955961, 0.258393105687, \
0.25902675536, 0.259660508917, 0.260294366646, 0.260928328836, \
0.261562395776, 0.262196567757, 0.262830845067, 0.263465227997, \
0.264099716838, 0.264734311881, 0.265369013416, 0.266003821735, \
0.26663873713, 0.267273759892, 0.267908890315, 0.268544128691, \
0.269179475313, 0.269814930474, 0.270450494469, 0.271086167591, \
0.271721950134, 0.272357842394, 0.272993844665, 0.273629957242, \
0.274266180422, 0.2749025145, 0.275538959773, 0.276175516537, \
0.27681218509, 0.277448965729, 0.278085858751, 0.278722864454, \
0.279359983137, 0.279997215099, 0.280634560639, 0.281272020056, \
0.28190959365, 0.28254728172, 0.283185084568, 0.283823002495, \
0.2844610358, 0.285099184787, 0.285737449756, 0.28637583101, \
0.287014328852, 0.287652943584, 0.288291675509, 0.288930524932, \
0.289569492157, 0.290208577487, 0.290847781227, 0.291487103683, \
0.292126545159, 0.292766105963, 0.293405786399, 0.294045586775, \
0.294685507396, 0.295325548572, 0.295965710609, 0.296605993814, \
0.297246398498, 0.297886924968, 0.298527573533, 0.299168344504, \
0.29980923819, 0.3004502549, 0.301091394947, 0.301732658641, \
0.302374046293, 0.303015558215, 0.303657194719, 0.304298956119, \
0.304940842726, 0.305582854855, 0.306224992819, 0.306867256932, \
0.307509647508, 0.308152164863, 0.308794809313, 0.309437581171, \
0.310080480756, 0.310723508383, 0.311366664369, 0.312009949032, \
0.312653362689, 0.313296905659, 0.313940578259, 0.314584380809, \
0.315228313629, 0.315872377037, 0.316516571355, 0.317160896902, \
0.317805354, 0.318449942971, 0.319094664135, 0.319739517815, \
0.320384504335, 0.321029624016, 0.321674877183, 0.322320264159, \
0.322965785269, 0.323611440838, 0.32425723119, 0.324903156652, \
0.325549217549, 0.326195414208, 0.326841746956, 0.32748821612, \
0.328134822029, 0.328781565009, 0.329428445391, 0.330075463502, \
0.330722619673, 0.331369914234, 0.332017347514, 0.332664919846, \
0.33331263156, 0.333960482988, 0.334608474462, 0.335256606314, \
0.335904878879, 0.33655329249, 0.33720184748, 0.337850544184, \
0.338499382938, 0.339148364076, 0.339797487934, 0.340446754849, \
0.341096165157, 0.341745719196, 0.342395417304, 0.343045259818, \
0.343695247078, 0.344345379421, 0.344995657189, 0.345646080722, \
0.346296650358, 0.346947366441, 0.34759822931, 0.348249239309, \
0.34890039678, 0.349551702065, 0.350203155508, 0.350854757453, \
0.351506508245, 0.352158408227, 0.352810457747, 0.353462657149, \
0.354115006779, 0.354767506986, 0.355420158116, 0.356072960516, \
0.356725914537, 0.357379020525, 0.358032278831, 0.358685689804, \
0.359339253795, 0.359992971155, 0.360646842235, 0.361300867386, \
0.361955046963, 0.362609381316, 0.363263870801, 0.36391851577, \
0.364573316578, 0.36522827358, 0.365883387132, 0.36653865759, \
0.367194085311, 0.36784967065, 0.368505413967, 0.369161315619, \
0.369817375965, 0.370473595364, 0.371129974175, 0.37178651276, \
0.372443211479, 0.373100070693, 0.373757090765, 0.374414272056, \
0.37507161493, 0.37572911975, 0.37638678688, 0.377044616686, \
0.377702609531, 0.378360765783, 0.379019085806, 0.379677569969, \
0.380336218639, 0.380995032182, 0.381654010969, 0.382313155368, \
0.382972465749, 0.383631942482, 0.384291585938, 0.384951396488, \
0.385611374504, 0.386271520359, 0.386931834425, 0.387592317078, \
0.38825296869, 0.388913789636, 0.389574780293, 0.390235941036, \
0.390897272241, 0.391558774287, 0.392220447549, 0.392882292409, \
0.393544309243, 0.394206498431, 0.394868860354, 0.395531395393, \
0.396194103929, 0.396856986343, 0.397520043019, 0.39818327434, \
0.398846680689, 0.399510262451, 0.40017402001, 0.400837953753, \
0.401502064066, 0.402166351335, 0.402830815949, 0.403495458294, \
0.404160278761, 0.404825277738, 0.405490455615, 0.406155812784, \
0.406821349634, 0.407487066559, 0.40815296395, 0.408819042201, \
0.409485301706, 0.410151742859, 0.410818366055, 0.41148517169, \
0.41215216016, 0.412819331863, 0.413486687196, 0.414154226557, \
0.414821950346, 0.415489858963, 0.416157952806, 0.416826232279, \
0.417494697781, 0.418163349716, 0.418832188486, 0.419501214496, \
0.420170428149, 0.420839829851, 0.421509420007, 0.422179199024, \
0.422849167309, 0.423519325269, 0.424189673313, 0.42486021185, \
0.42553094129, 0.426201862043, 0.426872974521, 0.427544279136, \
0.4282157763, 0.428887466426, 0.429559349928, 0.430231427222, \
0.430903698722, 0.431576164846, 0.432248826008, 0.432921682628, \
0.433594735123, 0.434267983913, 0.434941429417, 0.435615072055, \
0.436288912249, 0.436962950421, 0.437637186994, 0.43831162239, \
0.438986257034, 0.43966109135, 0.440336125765, 0.441011360704, \
0.441686796595, 0.442362433866, 0.443038272944, 0.44371431426, \
0.444390558244, 0.445067005325, 0.445743655937, 0.446420510511, \
0.44709756948, 0.447774833279, 0.448452302341, 0.449129977103, \
0.449807858, 0.45048594547, 0.45116423995, 0.45184274188, \
0.452521451697, 0.453200369842, 0.453879496757, 0.454558832883, \
0.455238378662, 0.455918134538, 0.456598100954, 0.457278278357, \
0.45795866719, 0.458639267901, 0.459320080938, 0.460001106748, \
0.460682345779, 0.461363798483, 0.46204546531, 0.46272734671, \
0.463409443136, 0.464091755042, 0.46477428288, 0.465457027107, \
0.466139988176, 0.466823166546, 0.467506562672, 0.468190177013, \
0.468874010028, 0.469558062177, 0.470242333919, 0.470926825718, \
0.471611538035, 0.472296471333, 0.472981626076, 0.473667002729, \
0.474352601759, 0.47503842363, 0.475724468813, 0.476410737773, \
0.477097230982, 0.477783948909, 0.478470892024, 0.479158060801, \
0.479845455711, 0.480533077229, 0.48122092583, 0.481909001988, \
0.48259730618, 0.483285838883, 0.483974600576, 0.484663591739, \
0.48535281285, 0.48604226439, 0.486731946843, 0.487421860691, \
0.488112006416, 0.488802384505, 0.489492995442, 0.490183839715, \
0.49087491781, 0.491566230217, 0.492257777423, 0.492949559921, \
0.493641578201, 0.494333832755, 0.495026324076, 0.49571905266, \
0.496412018999, 0.497105223592, 0.497798666935, 0.498492349525, \
0.499186271862, 0.499880434446, 0.500574837777, 0.501269482359, \
0.501964368692, 0.502659497283, 0.503354868635, 0.504050483254, \
0.504746341647, 0.505442444322, 0.506138791788, 0.506835384556, \
0.507532223135, 0.508229308038, 0.508926639778, 0.509624218869, \
0.510322045826, 0.511020121164, 0.511718445402, 0.512417019057, \
0.513115842648, 0.513814916696, 0.514514241722, 0.515213818248, \
0.515913646798, 0.516613727896, 0.517314062067, 0.518014649839, \
0.518715491738, 0.519416588293, 0.520117940035, 0.520819547494, \
0.521521411203, 0.522223531693, 0.5229259095, 0.523628545158, \
0.524331439204, 0.525034592175, 0.52573800461, 0.526441677048, \
0.527145610031, 0.5278498041, 0.528554259797, 0.529258977668, \
0.529963958257, 0.530669202111, 0.531374709778, 0.532080481805, \
0.532786518743, 0.533492821143, 0.534199389557, 0.534906224537, \
0.535613326639, 0.536320696418, 0.537028334431, 0.537736241235, \
0.53844441739, 0.539152863456, 0.539861579995, 0.540570567568, \
0.541279826741, 0.541989358077, 0.542699162143, 0.543409239507, \
0.544119590737, 0.544830216402, 0.545541117075, 0.546252293327, \
0.546963745731, 0.547675474863, 0.548387481299, 0.549099765614, \
0.549812328389, 0.550525170202, 0.551238291635, 0.551951693269, \
0.552665375689, 0.553379339478, 0.554093585223, 0.55480811351, \
0.555522924929, 0.556238020069, 0.556953399521, 0.557669063878, \
0.558385013733, 0.559101249681, 0.559817772317, 0.560534582241, \
0.56125168005, 0.561969066345, 0.562686741727, 0.563404706799, \
0.564122962165, 0.56484150843, 0.565560346202, 0.566279476088, \
0.566998898697, 0.567718614641, 0.568438624533, 0.569158928984, \
0.56987952861, 0.570600424028, 0.571321615855, 0.57204310471, \
0.572764891214, 0.573486975988, 0.574209359655, 0.57493204284, \
0.575655026169, 0.576378310269, 0.577101895769, 0.5778257833, \
0.578549973492, 0.57927446698, 0.579999264397, 0.580724366379, \
0.581449773564, 0.58217548659, 0.582901506099, 0.58362783273, \
0.584354467129, 0.585081409939, 0.585808661806, 0.586536223378, \
0.587264095305, 0.587992278236, 0.588720772824, 0.589449579722, \
0.590178699585, 0.590908133071, 0.591637880836, 0.592367943541, \
0.593098321847, 0.593829016416, 0.594560027913, 0.595291357003, \
0.596023004354, 0.596754970634, 0.597487256514, 0.598219862666, \
0.598952789763, 0.599686038481, 0.600419609496, 0.601153503486, \
0.601887721131, 0.602622263113, 0.603357130115, 0.604092322821, \
0.604827841918, 0.605563688093, 0.606299862036, 0.607036364439, \
0.607773195994, 0.608510357395, 0.609247849338, 0.609985672522, \
0.610723827646, 0.61146231541, 0.612201136518, 0.612940291674, \
0.613679781584, 0.614419606955, 0.615159768498, 0.615900266923, \
0.616641102944, 0.617382277275, 0.618123790632, 0.618865643733, \
0.619607837299, 0.62035037205, 0.621093248711, 0.621836468005, \
0.622580030661, 0.623323937406, 0.62406818897, 0.624812786087, \
0.625557729489, 0.626303019913, 0.627048658096, 0.627794644778, \
0.628540980698, 0.629287666601, 0.630034703232, 0.630782091336, \
0.631529831662, 0.632277924961, 0.633026371984, 0.633775173485, \
0.634524330221, 0.63527384295, 0.636023712429, 0.636773939423, \
0.637524524692, 0.638275469004, 0.639026773124, 0.639778437823, \
0.640530463871, 0.641282852041, 0.642035603108, 0.642788717849, \
0.643542197043, 0.64429604147, 0.645050251913, 0.645804829157, \
0.646559773988, 0.647315087195, 0.648070769569, 0.648826821903, \
0.64958324499, 0.650340039629, 0.651097206616, 0.651854746754, \
0.652612660844, 0.653370949693, 0.654129614105, 0.654888654892, \
0.655648072862, 0.656407868831, 0.657168043612, 0.657928598023, \
0.658689532883, 0.659450849015, 0.66021254724, 0.660974628386, \
0.66173709328, 0.662499942752, 0.663263177633, 0.66402679876, \
0.664790806967, 0.665555203094, 0.666319987981, 0.667085162471, \
0.66785072741, 0.668616683646, 0.669383032026, 0.670149773405, \
0.670916908635, 0.671684438573, 0.672452364078, 0.67322068601, \
0.673989405232, 0.674758522611, 0.675528039013, 0.67629795531, \
0.677068272372, 0.677838991074, 0.678610112294, 0.679381636911, \
0.680153565806, 0.680925899864, 0.68169863997, 0.682471787013, \
0.683245341885, 0.684019305478, 0.684793678689, 0.685568462415, \
0.686343657558, 0.687119265021, 0.687895285708, 0.688671720529, \
0.689448570392, 0.690225836212, 0.691003518904, 0.691781619384, \
0.692560138575, 0.693339077397, 0.694118436777, 0.694898217643, \
0.695678420925, 0.696459047555, 0.69724009847, 0.698021574607, \
0.698803476906, 0.699585806312, 0.700368563769, 0.701151750226, \
0.701935366634, 0.702719413947, 0.70350389312, 0.704288805113, \
0.705074150887, 0.705859931406, 0.706646147638, 0.707432800551, \
0.708219891118, 0.709007420313, 0.709795389115, 0.710583798504, \
0.711372649463, 0.712161942978, 0.712951680037, 0.713741861631, \
0.714532488756, 0.715323562408, 0.716115083586, 0.716907053294, \
0.717699472536, 0.718492342322, 0.719285663661, 0.720079437569, \
0.720873665062, 0.721668347159, 0.722463484884, 0.723259079262, \
0.724055131321, 0.724851642093, 0.725648612612, 0.726446043916, \
0.727243937044, 0.728042293041, 0.728841112951, 0.729640397826, \
0.730440148716, 0.731240366677, 0.732041052768, 0.73284220805, \
0.733643833587, 0.734445930447, 0.735248499702, 0.736051542423, \
0.736855059689, 0.737659052579, 0.738463522177, 0.739268469569, \
0.740073895844, 0.740879802096, 0.741686189419, 0.742493058914, \
0.743300411682, 0.74410824883, 0.744916571465, 0.7457253807, \
0.746534677651, 0.747344463435, 0.748154739176, 0.748965505998, \
0.74977676503, 0.750588517404, 0.751400764255, 0.752213506722, \
0.753026745948, 0.753840483077, 0.754654719259, 0.755469455646, \
0.756284693394, 0.757100433662, 0.757916677614, 0.758733426414, \
0.759550681234, 0.760368443246, 0.761186713627, 0.762005493558, \
0.762824784223, 0.763644586809, 0.764464902507, 0.765285732513, \
0.766107078024, 0.766928940243, 0.767751320376, 0.768574219631, \
0.769397639223, 0.770221580367, 0.771046044284, 0.7718710322, \
0.772696545341, 0.773522584939, 0.774349152231, 0.775176248455, \
0.776003874855, 0.776832032678, 0.777660723175, 0.7784899476, \
0.779319707213, 0.780150003277, 0.780980837056, 0.781812209823, \
0.782644122852, 0.783476577421, 0.784309574812, 0.785143116312, \
0.785977203212, 0.786811836806, 0.787647018393, 0.788482749274, \
0.789319030758, 0.790155864155, 0.790993250781, 0.791831191953, \
0.792669688996, 0.793508743238, 0.79434835601, 0.795188528648, \
0.796029262492, 0.796870558888, 0.797712419183, 0.798554844732, \
0.799397836891, 0.800241397023, 0.801085526493, 0.801930226672, \
0.802775498936, 0.803621344663, 0.804467765238, 0.805314762049, \
0.806162336489, 0.807010489955, 0.807859223848, 0.808708539576, \
0.80955843855, 0.810408922185, 0.8112599919, 0.812111649122, \
0.812963895279, 0.813816731806, 0.814670160142, 0.815524181729, \
0.816378798017, 0.817234010459, 0.818089820512, 0.818946229639, \
0.819803239307, 0.82066085099, 0.821519066163, 0.82237788631, \
0.823237312917, 0.824097347476, 0.824957991484, 0.825819246443, \
0.826681113861, 0.827543595248, 0.828406692123, 0.829270406006, \
0.830134738426, 0.830999690914, 0.831865265009, 0.832731462252, \
0.833598284192, 0.834465732382, 0.83533380838, 0.83620251375, \
0.83707185006, 0.837941818885, 0.838812421805, 0.839683660404, \
0.840555536273, 0.841428051007, 0.842301206208, 0.843175003483, \
0.844049444444, 0.844924530708, 0.845800263899, 0.846676645646, \
0.847553677583, 0.84843136135, 0.849309698594, 0.850188690965, \
0.851068340122, 0.851948647726, 0.852829615446, 0.853711244958, \
0.854593537942, 0.855476496083, 0.856360121074, 0.857244414613, \
0.858129378404, 0.859015014157, 0.859901323588, 0.860788308418, \
0.861675970376, 0.862564311196, 0.863453332618, 0.864343036389, \
0.865233424261, 0.866124497993, 0.86701625935, 0.867908710104, \
0.868801852031, 0.869695686916, 0.870590216549, 0.871485442727, \
0.872381367254, 0.873277991937, 0.874175318595, 0.875073349049, \
0.875972085128, 0.876871528668, 0.877771681512, 0.878672545509, \
0.879574122514, 0.88047641439, 0.881379423006, 0.882283150238, \
0.883187597969, 0.884092768089, 0.884998662493, 0.885905283087, \
0.886812631779, 0.887720710488, 0.888629521138, 0.889539065661, \
0.890449345995, 0.891360364086, 0.892272121887, 0.893184621359, \
0.894097864469, 0.895011853191, 0.895926589508, 0.896842075409, \
0.897758312891, 0.898675303958, 0.899593050622, 0.900511554903, \
0.901430818827, 0.902350844428, 0.90327163375, 0.90419318884, \
0.905115511758, 0.906038604567, 0.906962469342, 0.907887108163, \
0.908812523118, 0.909738716305, 0.910665689828, 0.911593445799, \
0.912521986339, 0.913451313577, 0.91438142965, 0.915312336703, \
0.916244036888, 0.917176532369, 0.918109825313, 0.919043917899, \
0.919978812315, 0.920914510754, 0.921851015421, 0.922788328527, \
0.923726452292, 0.924665388946, 0.925605140727, 0.926545709881, \
0.927487098664, 0.928429309338, 0.929372344179, 0.930316205466, \
0.931260895491, 0.932206416553, 0.933152770962, 0.934099961035, \
0.9350479891, 0.935996857491, 0.936946568555, 0.937897124647, \
0.93884852813, 0.939800781378, 0.940753886774, 0.941707846709, \
0.942662663587, 0.943618339818, 0.944574877824, 0.945532280036, \
0.946490548893, 0.947449686847, 0.948409696358, 0.949370579895, \
0.95033233994, 0.951294978982, 0.952258499521, 0.953222904069, \
0.954188195145, 0.955154375281, 0.956121447017, 0.957089412906, \
0.958058275508, 0.959028037398, 0.959998701157, 0.960970269379, \
0.961942744669, 0.962916129641, 0.963890426921, 0.964865639146, \
0.965841768964, 0.966818819033, 0.967796792022, 0.968775690612, \
0.969755517495, 0.970736275374, 0.971717966963, 0.972700594988, \
0.973684162186, 0.974668671305, 0.975654125105, 0.976640526359, \
0.977627877849, 0.978616182371, 0.979605442731, 0.980595661749, \
0.981586842254, 0.982578987091, 0.983572099113, 0.984566181188, \
0.985561236196, 0.986557267027, 0.987554276585, 0.988552267788, \
0.989551243564, 0.990551206854, 0.991552160613, 0.992554107808, \
0.993557051418, 0.994560994436, 0.995565939868, 0.996571890733, \
0.997578850062, 0.9985868209, 0.999595806306, 1.00060580935, \
1.00161683312, 1.00262888071, 1.00364195524, 1.00465605983, \
1.00567119762, 1.00668737176, 1.00770458543, 1.0087228418, \
1.00974214407, 1.01076249545, 1.01178389916, 1.01280635844, \
1.01382987655, 1.01485445675, 1.01588010232, 1.01690681657, \
1.01793460279, 1.01896346433, 1.01999340452, 1.02102442671, \
1.02205653428, 1.02308973062, 1.02412401912, 1.02515940322, \
1.02619588633, 1.02723347191, 1.02827216342, 1.02931196435, \
1.03035287819, 1.03139490845, 1.03243805866, 1.03348233237, \
1.03452773314, 1.03557426455, 1.03662193018, 1.03767073366, \
1.03872067861, 1.03977176868, 1.04082400752, 1.04187739881, \
1.04293194626, 1.04398765357, 1.04504452448, 1.04610256273, \
1.04716177209, 1.04822215635, 1.04928371929, 1.05034646476, \
1.05141039657, 1.0524755186, 1.0535418347, 1.05460934878, \
1.05567806475, 1.05674798653, 1.05781911808, 1.05889146336, \
1.05996502636, 1.06103981109, 1.06211582157, 1.06319306184, \
1.06427153597, 1.06535124805, 1.06643220217, 1.06751440246, \
1.06859785307, 1.06968255816, 1.07076852192, 1.07185574854, \
1.07294424226, 1.07403400732, 1.07512504799, 1.07621736855, \
1.07731097332, 1.07840586663, 1.07950205283, 1.0805995363, \
1.08169832142, 1.08279841262, 1.08389981434, 1.08500253104, \
1.08610656721, 1.08721192734, 1.08831861598, 1.08942663766, \
1.09053599698, 1.09164669853, 1.09275874692, 1.09387214681, \
1.09498690286, 1.09610301977, 1.09722050226, 1.09833935506, \
1.09945958293, 1.10058119068, 1.10170418311, 1.10282856507, \
1.10395434141, 1.10508151703, 1.10621009684, 1.10734008578, \
1.10847148882, 1.10960431095, 1.11073855719, 1.11187423257, \
1.11301134218, 1.11414989111, 1.11528988448, 1.11643132745, \
1.11757422519, 1.1187185829, 1.11986440583, 1.12101169923, \
1.12216046839, 1.12331071862, 1.12446245528, 1.12561568374, \
1.1267704094, 1.12792663769, 1.12908437408, 1.13024362405, \
1.13140439313, 1.13256668686, 1.13373051083, 1.13489587064, \
1.13606277195, 1.13723122041, 1.13840122174, 1.13957278166, \
1.14074590595, 1.14192060039, 1.14309687083, 1.14427472312, \
1.14545416315, 1.14663519686, 1.1478178302, 1.14900206916, \
1.15018791978, 1.1513753881, 1.15256448023, 1.1537552023, \
1.15494756045, 1.1561415609, 1.15733720988, 1.15853451364, \
1.1597334785, 1.16093411079, 1.16213641689, 1.16334040321, \
1.1645460762, 1.16575344233, 1.16696250814, 1.16817328018, \
1.16938576505, 1.17059996938, 1.17181589985, 1.17303356317, \
1.17425296609, 1.1754741154, 1.17669701793, 1.17792168055, \
1.17914811017, 1.18037631374, 1.18160629825, 1.18283807074, \
1.18407163828, 1.18530700798, 1.18654418701, 1.18778318256, \
1.18902400188, 1.19026665225, 1.19151114101, 1.19275747553, \
1.19400566322, 1.19525571156, 1.19650762804, 1.19776142023, \
1.19901709572, 1.20027466216, 1.20153412724, 1.2027954987, \
1.20405878432, 1.20532399194, 1.20659112944, 1.20786020475, \
1.20913122586, 1.21040420078, 1.21167913759, 1.21295604444, \
1.21423492948, 1.21551580096, 1.21679866716, 1.2180835364, \
1.21937041707, 1.22065931762, 1.22195024653, 1.22324321234, \
1.22453822366, 1.22583528914, 1.22713441748, 1.22843561746, \
1.22973889789, 1.23104426764, 1.23235173565, 1.23366131092, \
1.23497300248, 1.23628681945, 1.23760277098, 1.23892086632, \
1.24024111474, 1.24156352558, 1.24288810826, 1.24421487225, \
1.24554382707, 1.24687498231, 1.24820834764, 1.24954393277, \
1.25088174749, 1.25222180165, 1.25356410517, 1.25490866802, \
1.25625550025, 1.25760461198, 1.25895601339, 1.26030971474, \
1.26166572634, 1.26302405859, 1.26438472195, 1.26574772695, \
1.26711308419, 1.26848080436, 1.2698508982, 1.27122337654, \
1.27259825027, 1.27397553036, 1.27535522788, 1.27673735394, \
1.27812191975, 1.27950893658, 1.2808984158, 1.28229036885, \
1.28368480725, 1.2850817426, 1.28648118658, 1.28788315096, \
1.28928764758, 1.29069468838, 1.29210428538, 1.29351645068, \
1.29493119647, 1.29634853503, 1.29776847873, 1.29919104001, \
1.30061623144, 1.30204406564, 1.30347455533, 1.30490771336, \
1.30634355261, 1.30778208612, 1.30922332698, 1.31066728839, \
1.31211398366, 1.31356342618, 1.31501562944, 1.31647060705, \
1.3179283727, 1.31938894019, 1.32085232344, 1.32231853644, \
1.32378759331, 1.32525950829, 1.32673429568, 1.32821196994, \
1.3296925456, 1.33117603734, 1.33266245992, 1.33415182822, \
1.33564415726, 1.33713946213, 1.33863775808, 1.34013906045, \
1.34164338473, 1.34315074649, 1.34466116145, 1.34617464545, \
1.34769121444, 1.34921088453, 1.35073367192, 1.35225959295, \
1.3537886641, 1.35532090198, 1.35685632332, 1.35839494499, \
1.35993678401, 1.36148185752, 1.3630301828, 1.36458177727, \
1.36613665852, 1.36769484423, 1.36925635228, 1.37082120066, \
1.37238940752, 1.37396099116, 1.37553597004, 1.37711436276, \
1.37869618807, 1.3802814649, 1.38187021232, 1.38346244956, \
1.38505819603, 1.38665747129, 1.38826029505, 1.38986668723, \
1.39147666789, 1.39309025725, 1.39470747574, 1.39632834393, \
1.3979528826, 1.39958111269, 1.40121305532, 1.40284873181, \
1.40448816364, 1.40613137251, 1.40777838029, 1.40942920906, \
1.41108388106, 1.41274241877, 1.41440484485, 1.41607118216, \
1.41774145377, 1.41941568296, 1.42109389321, 1.42277610822, \
1.42446235191, 1.4261526484, 1.42784702206, 1.42954549744, \
1.43124809936, 1.43295485285, 1.43466578316, 1.43638091579, \
1.43810027647, 1.43982389118, 1.44155178613, 1.44328398779, \
1.44502052286, 1.44676141832, 1.44850670139, 1.45025639954, \
1.45201054053, 1.45376915236, 1.4555322633, 1.45729990192, \
1.45907209704, 1.46084887778, 1.46263027351, 1.46441631394, \
1.46620702902, 1.46800244903, 1.46980260454, 1.47160752641, \
1.47341724582, 1.47523179427, 1.47705120356, 1.47887550581, \
1.48070473348, 1.48253891934, 1.48437809651, 1.48622229844, \
1.48807155892, 1.48992591209, 1.49178539245, 1.49365003484, \
1.49551987447, 1.49739494693, 1.49927528818, 1.50116093452, \
1.50305192269, 1.50494828979, 1.5068500733, 1.50875731112, \
1.51067004156, 1.51258830332, 1.51451213554, 1.51644157777, \
1.51837667, 1.52031745264, 1.52226396655, 1.52421625305, \
1.52617435391, 1.52813831134, 1.53010816806, 1.53208396723, \
1.53406575252, 1.53605356807, 1.53804745854, 1.54004746908, \
1.54205364536, 1.54406603357, 1.54608468043, 1.5481096332, \
1.55014093969, 1.55217864825, 1.55422280782, 1.55627346789, \
1.55833067854, 1.56039449044, 1.56246495486, 1.56454212368, \
1.56662604942, 1.56871678519, 1.57081438477, 1.57291890258, \
1.57503039371, 1.57714891392, 1.57927451964, 1.581407268, \
1.58354721686, 1.58569442477, 1.58784895103, 1.59001085565, \
1.59218019943, 1.59435704393, 1.59654145149, 1.59873348523, \
1.60093320909, 1.60314068784, 1.60535598708, 1.60757917325, \
1.60981031368, 1.61204947656, 1.61429673098, 1.61655214696, \
1.61881579544, 1.62108774828, 1.62336807836, 1.62565685948, \
1.62795416649, 1.63026007522, 1.63257466256, 1.63489800643, \
1.63723018585, 1.63957128092, 1.64192137286, 1.64428054402, \
1.64664887792, 1.64902645924, 1.65141337389, 1.65380970898, \
1.65621555288, 1.65863099522, 1.66105612696, 1.66349104035, \
1.665935829, 1.6683905879, 1.67085541345, 1.67333040348, \
1.67581565725, 1.67831127556, 1.68081736069, 1.68333401649, \
1.6858613484, 1.68839946345, 1.69094847035, 1.69350847947, \
1.69607960292, 1.69866195455, 1.70125565, 1.70386080677, \
1.70647754419, 1.70910598353, 1.71174624799, 1.7143984628, \
1.71706275519, 1.71973925449, 1.72242809217, 1.72512940185, \
1.72784331941, 1.73056998298, 1.73330953303, 1.7360621124, \
1.73882786639, 1.74160694276, 1.74439949184, 1.74720566658, \
1.75002562257, 1.75285951816, 1.75570751448, 1.75856977555, \
1.7614464683, 1.7643377627, 1.76724383175, 1.77016485166, \
1.77310100183, 1.77605246501, 1.77901942732, 1.78200207837, \
1.78500061134, 1.78801522309, 1.79104611422, 1.79409348917, \
1.79715755637, 1.80023852827, 1.8033366215, 1.80645205698, \
1.80958506, 1.81273586036, 1.81590469249, 1.81909179558, \
1.82229741372, 1.825521796, 1.82876519668, 1.83202787533, \
1.83531009698, 1.83861213227, 1.84193425762, 1.84527675539, \
1.84863991405, 1.85202402837, 1.8554293996, 1.85885633567, \
1.86230515137, 1.86577616858, 1.86926971649, 1.87278613181, \
1.87632575899, 1.8798889505, 1.88347606705, 1.88708747787, \
1.89072356098, 1.89438470345, 1.89807130174, 1.90178376197, \
1.90552250025, 1.90928794302, 1.91308052741, 1.91690070156, \
1.92074892503, 1.92462566922, 1.92853141772, 1.93246666677, \
1.93643192574, 1.94042771755, 1.94445457918, 1.9485130622, \
1.95260373328, 1.95672717477, 1.96088398528, 1.96507478031, \
1.96930019287, 1.97356087419, 1.97785749442, 1.98219074335, \
1.98656133124, 1.99096998962, 1.99541747213, 1.99990455547, \
2.00443204036, 2.00900075251, 2.01361154371, 2.01826529295, \
2.02296290758, 2.02770532458, 2.03249351185, 2.03732846962, \
2.04221123192, 2.0471428681, 2.05212448451, 2.05715722623, \
2.06224227888, 2.06738087065, 2.07257427429, 2.07782380938, \
2.08313084465, 2.08849680045, 2.09392315145, 2.09941142942, \
2.10496322628, 2.11058019727, 2.11626406445, 2.12201662031, \
2.12783973172, 2.13373534416, 2.13970548619, 2.14575227431, \
2.15187791816, 2.1580847261, 2.16437511125, 2.17075159793, \
2.17721682871, 2.18377357191, 2.19042472984, 2.1971733476, \
2.20402262267, 2.21097591537, 2.21803676019, 2.22520887808, \
2.23249618993, 2.23990283128, 2.24743316835, 2.25509181569, \
2.26288365549, 2.27081385882, 2.27888790909, 2.28711162784, \
2.29549120334, 2.30403322228, 2.31274470495, 2.32163314439, \
2.33070655011, 2.33997349698, 2.34944318006, 2.35912547628, \
2.36903101398, 2.37917125159, 2.3895585669, 2.40020635872, \
2.411129163, 2.422342786, 2.43386445755, 2.44571300825, \
2.45790907518, 2.47047534177, 2.48343681896, 2.49682117646, \
2.51065913523, 2.52498493519, 2.53983689632, 2.55525809634, \
2.57129719532, 2.58800944712, 2.60545795099, 2.62371521493, \
2.64286512889, 2.66300548366, 2.68425122698, 2.7067387311, \
2.73063147325, 2.75612772826, 2.78347119119, 2.81296597401, \
2.84499832124, 2.8800689917, 2.91884323355, 2.9622311235, \
3.0115232357, 3.06863405379, 3.13657337257, 3.22045475765, \
3.32996541598, 3.48672170399 ])
yu = array([
0.0540012735356, 0.0544874991381, 0.054972661389, 0.0554567692269, \
0.0559398314244, 0.0564218565922, 0.0569028531841, 0.0573828295011, \
0.0578617936955, 0.0583397537752, 0.058816717607, 0.059292692921, \
0.0597676873138, 0.060241708252, 0.0607147630756, 0.0611868590013, \
0.0616580031256, 0.0621282024276, 0.0625974637723, 0.0630657939132, \
0.0635331994951, 0.0639996870564, 0.0644652630325, 0.0649299337573, \
0.0653937054663, 0.0658565842984, 0.0663185762986, 0.0667796874201, \
0.0672399235259, 0.0676992903918, 0.0681577937072, 0.0686154390782, \
0.0690722320286, 0.0695281780022, 0.0699832823643, 0.0704375504035, \
0.0708909873334, 0.0713435982942, 0.0717953883543, 0.0722463625114, \
0.0726965256949, 0.0731458827663, 0.0735944385211, 0.0740421976905, \
0.074489164942, 0.0749353448811, 0.0753807420524, 0.0758253609412, \
0.076269205974, 0.0767122815202, 0.0771545918932, 0.077596141351, \
0.0780369340979, 0.0784769742851, 0.078916266012, 0.0793548133268, \
0.0797926202279, 0.0802296906646, 0.080666028538, 0.081101637702, \
0.081536521964, 0.0819706850859, 0.082404130785, 0.0828368627345, \
0.0832688845646, 0.0837001998631, 0.0841308121761, 0.084560725009, \
0.0849899418268, 0.0854184660553, 0.0858463010813, 0.0862734502535, \
0.0866999168832, 0.0871257042449, 0.0875508155769, 0.0879752540816, \
0.0883990229268, 0.0888221252457, 0.0892445641375, 0.0896663426683, \
0.0900874638713, 0.0905079307475, 0.0909277462662, 0.0913469133655, \
0.0917654349529, 0.0921833139053, 0.0926005530704, 0.0930171552661, \
0.0934331232819, 0.0938484598786, 0.0942631677893, 0.0946772497194, \
0.0950907083474, 0.095503546325, 0.0959157662776, 0.0963273708049, \
0.0967383624809, 0.0971487438546, 0.0975585174503, 0.0979676857678, \
0.098376251283, 0.098784216448, 0.0991915836918, 0.0995983554201, \
0.100004534016, 0.100410121841, 0.100815121233, 0.10121953451, \
0.101623363968, 0.102026611881, 0.102429280502, 0.102831372066, \
0.103232888785, 0.103633832853, 0.104034206444, 0.10443401171, \
0.104833250787, 0.105231925791, 0.10563003882, 0.106027591953, \
0.106424587249, 0.106821026753, 0.10721691249, 0.107612246467, \
0.108007030675, 0.108401267088, 0.108794957663, 0.10918810434, \
0.109580709043, 0.109972773679, 0.110364300142, 0.110755290307, \
0.111145746034, 0.111535669171, 0.111925061545, 0.112313924974, \
0.112702261257, 0.113090072181, 0.113477359516, 0.113864125022, \
0.11425037044, 0.1146360975, 0.115021307918, 0.115406003396, \
0.115790185624, 0.116173856276, 0.116557017014, 0.11693966949, \
0.117321815339, 0.117703456185, 0.118084593641, 0.118465229306, \
0.118845364768, 0.1192250016, 0.119604141367, 0.119982785621, \
0.120360935901, 0.120738593735, 0.121115760642, 0.121492438126, \
0.121868627682, 0.122244330795, 0.122619548937, 0.12299428357, \
0.123368536146, 0.123742308106, 0.124115600881, 0.12448841589, \
0.124860754545, 0.125232618245, 0.12560400838, 0.125974926331, \
0.126345373469, 0.126715351154, 0.127084860738, 0.127453903564, \
0.127822480963, 0.128190594259, 0.128558244767, 0.128925433793, \
0.129292162631, 0.129658432571, 0.130024244891, 0.13038960086, \
0.130754501741, 0.131118948787, 0.131482943242, 0.131846486342, \
0.132209579317, 0.132572223385, 0.132934419758, 0.133296169642, \
0.13365747423, 0.134018334713, 0.13437875227, 0.134738728074, \
0.13509826329, 0.135457359075, 0.135816016581, 0.13617423695, \
0.136532021316, 0.13688937081, 0.137246286551, 0.137602769653, \
0.137958821225, 0.138314442365, 0.138669634167, 0.139024397717, \
0.139378734095, 0.139732644373, 0.140086129618, 0.14043919089, \
0.140791829241, 0.141144045718, 0.141495841361, 0.141847217205, \
0.142198174276, 0.142548713597, 0.142898836183, 0.143248543043, \
0.143597835179, 0.14394671359, 0.144295179266, 0.144643233193, \
0.144990876349, 0.14533810971, 0.145684934242, 0.146031350908, \
0.146377360665, 0.146722964463, 0.147068163249, 0.147412957962, \
0.147757349537, 0.148101338903, 0.148444926984, 0.148788114699, \
0.149130902961, 0.149473292678, 0.149815284753, 0.150156880085, \
0.150498079565, 0.150838884082, 0.151179294519, 0.151519311753, \
0.151858936658, 0.152198170101, 0.152537012946, 0.152875466051, \
0.153213530271, 0.153551206454, 0.153888495445, 0.154225398084, \
0.154561915205, 0.15489804764, 0.155233796214, 0.155569161751, \
0.155904145066, 0.156238746972, 0.156572968279, 0.156906809791, \
0.157240272307, 0.157573356623, 0.15790606353, 0.158238393817, \
0.158570348265, 0.158901927654, 0.159233132759, 0.159563964351, \
0.159894423197, 0.160224510058, 0.160554225695, 0.160883570863, \
0.161212546311, 0.161541152788, 0.161869391036, 0.162197261796, \
0.162524765803, 0.162851903789, 0.163178676483, 0.163505084608, \
0.163831128886, 0.164156810034, 0.164482128766, 0.164807085792, \
0.165131681818, 0.165455917548, 0.165779793681, 0.166103310913, \
0.166426469936, 0.16674927144, 0.167071716111, 0.167393804631, \
0.167715537679, 0.16803691593, 0.168357940058, 0.168678610731, \
0.168998928615, 0.169318894373, 0.169638508664, 0.169957772145, \
0.170276685469, 0.170595249286, 0.170913464242, 0.171231330982, \
0.171548850146, 0.171866022373, 0.172182848295, 0.172499328546, \
0.172815463755, 0.173131254545, 0.173446701542, 0.173761805364, \
0.174076566628, 0.174390985949, 0.174705063937, 0.175018801202, \
0.175332198348, 0.175645255979, 0.175957974695, 0.176270355092, \
0.176582397766, 0.176894103309, 0.177205472308, 0.177516505351, \
0.177827203022, 0.178137565902, 0.178447594568, 0.178757289598, \
0.179066651564, 0.179375681037, 0.179684378585, 0.179992744774, \
0.180300780166, 0.180608485323, 0.180915860803, 0.18122290716, \
0.181529624949, 0.18183601472, 0.182142077022, 0.182447812399, \
0.182753221396, 0.183058304554, 0.183363062412, 0.183667495505, \
0.183971604368, 0.184275389534, 0.18457885153, 0.184881990885, \
0.185184808123, 0.185487303768, 0.185789478338, 0.186091332353, \
0.186392866329, 0.186694080779, 0.186994976215, 0.187295553146, \
0.18759581208, 0.187895753521, 0.188195377973, 0.188494685937, \
0.188793677911, 0.189092354391, 0.189390715873, 0.18968876285, \
0.189986495811, 0.190283915244, 0.190581021638, 0.190877815475, \
0.191174297238, 0.191470467408, 0.191766326463, 0.192061874879, \
0.192357113132, 0.192652041693, 0.192946661033, 0.193240971621, \
0.193534973925, 0.193828668408, 0.194122055533, 0.194415135763, \
0.194707909556, 0.19500037737, 0.195292539661, 0.195584396882, \
0.195875949485, 0.196167197921, 0.196458142637, 0.196748784081, \
0.197039122697, 0.197329158929, 0.197618893218, 0.197908326003, \
0.198197457722, 0.198486288812, 0.198774819706, 0.199063050838, \
0.199350982639, 0.199638615537, 0.199925949961, 0.200212986337, \
0.200499725089, 0.20078616664, 0.20107231141, 0.20135815982, \
0.201643712287, 0.201928969228, 0.202213931056, 0.202498598186, \
0.202782971029, 0.203067049994, 0.20335083549, 0.203634327924, \
0.203917527701, 0.204200435225, 0.204483050898, 0.204765375121, \
0.205047408293, 0.205329150811, 0.205610603072, 0.205891765471, \
0.2061726384, 0.206453222252, 0.206733517417, 0.207013524284, \
0.207293243239, 0.20757267467, 0.207851818961, 0.208130676495, \
0.208409247653, 0.208687532816, 0.208965532363, 0.209243246671, \
0.209520676117, 0.209797821075, 0.210074681919, 0.210351259021, \
0.210627552752, 0.21090356348, 0.211179291575, 0.211454737402, \
0.211729901327, 0.212004783714, 0.212279384926, 0.212553705325, \
0.21282774527, 0.213101505121, 0.213374985234, 0.213648185967, \
0.213921107674, 0.214193750709, 0.214466115425, 0.214738202173, \
0.215010011303, 0.215281543164, 0.215552798104, 0.215823776468, \
0.216094478602, 0.21636490485, 0.216635055555, 0.216904931057, \
0.217174531699, 0.217443857818, 0.217712909752, 0.21798168784, \
0.218250192415, 0.218518423813, 0.218786382367, 0.219054068409, \
0.21932148227, 0.21958862428, 0.219855494768, 0.220122094062, \
0.220388422488, 0.220654480371, 0.220920268035, 0.221185785805, \
0.221451034002, 0.221716012947, 0.22198072296, 0.222245164359, \
0.222509337463, 0.222773242589, 0.223036880051, 0.223300250165, \
0.223563353244, 0.2238261896, 0.224088759545, 0.224351063389, \
0.224613101442, 0.224874874012, 0.225136381406, 0.22539762393, \
0.22565860189, 0.22591931559, 0.226179765333, 0.226439951422, \
0.226699874157, 0.22695953384, 0.227218930768, 0.227478065241, \
0.227736937556, 0.227995548009, 0.228253896895, 0.22851198451, \
0.228769811145, 0.229027377095, 0.22928468265, 0.229541728101, \
0.229798513738, 0.23005503985, 0.230311306723, 0.230567314646, \
0.230823063904, 0.231078554782, 0.231333787564, 0.231588762534, \
0.231843479974, 0.232097940164, 0.232352143387, 0.232606089921, \
0.232859780045, 0.233113214036, 0.233366392173, 0.233619314731, \
0.233871981984, 0.234124394209, 0.234376551677, 0.234628454662, \
0.234880103436, 0.235131498268, 0.235382639431, 0.235633527192, \
0.23588416182, 0.236134543582, 0.236384672746, 0.236634549577, \
0.23688417434, 0.2371335473, 0.237382668719, 0.237631538861, \
0.237880157987, 0.238128526359, 0.238376644236, 0.238624511878, \
0.238872129544, 0.23911949749, 0.239366615975, 0.239613485254, \
0.239860105583, 0.240106477217, 0.240352600409, 0.240598475413, \
0.240844102481, 0.241089481863, 0.241334613813, 0.241579498578, \
0.241824136409, 0.242068527555, 0.242312672262, 0.242556570778, \
0.24280022335, 0.243043630222, 0.243286791641, 0.243529707849, \
0.24377237909, 0.244014805607, 0.244256987642, 0.244498925435, \
0.244740619229, 0.244982069261, 0.245223275772, 0.245464238999, \
0.24570495918, 0.245945436553, 0.246185671353, 0.246425663816, \
0.246665414177, 0.24690492267, 0.247144189529, 0.247383214985, \
0.247621999273, 0.247860542621, 0.248098845263, 0.248336907427, \
0.248574729343, 0.24881231124, 0.249049653346, 0.249286755888, \
0.249523619094, 0.249760243188, 0.249996628397, 0.250232774945, \
0.250468683057, 0.250704352956, 0.250939784865, 0.251174979007, \
0.251409935601, 0.251644654871, 0.251879137035, 0.252113382314, \
0.252347390927, 0.252581163092, 0.252814699027, 0.253047998949, \
0.253281063075, 0.253513891621, 0.253746484801, 0.253978842831, \
0.254210965925, 0.254442854297, 0.254674508159, 0.254905927723, \
0.255137113202, 0.255368064807, 0.255598782747, 0.255829267233, \
0.256059518475, 0.256289536681, 0.256519322059, 0.256748874817, \
0.256978195162, 0.2572072833, 0.257436139437, 0.257664763779, \
0.25789315653, 0.258121317895, 0.258349248077, 0.258576947278, \
0.258804415703, 0.259031653551, 0.259258661026, 0.259485438327, \
0.259711985655, 0.259938303209, 0.260164391189, 0.260390249794, \
0.260615879221, 0.260841279668, 0.261066451331, 0.261291394408, \
0.261516109095, 0.261740595585, 0.261964854076, 0.26218888476, \
0.262412687831, 0.262636263484, 0.26285961191, 0.263082733302, \
0.263305627851, 0.263528295749, 0.263750737186, 0.263972952353, \
0.264194941439, 0.264416704633, 0.264638242124, 0.2648595541, \
0.265080640748, 0.265301502256, 0.265522138811, 0.265742550598, \
0.265962737803, 0.266182700611, 0.266402439207, 0.266621953774, \
0.266841244498, 0.26706031156, 0.267279155143, 0.26749777543, \
0.267716172603, 0.267934346842, 0.268152298328, 0.268370027242, \
0.268587533763, 0.268804818071, 0.269021880344, 0.269238720761, \
0.2694553395, 0.269671736739, 0.269887912653, 0.270103867421, \
0.270319601217, 0.270535114218, 0.270750406598, 0.270965478533, \
0.271180330196, 0.271394961762, 0.271609373403, 0.271823565293, \
0.272037537604, 0.272251290507, 0.272464824175, 0.272678138779, \
0.272891234489, 0.273104111476, 0.273316769908, 0.273529209956, \
0.273741431789, 0.273953435575, 0.274165221481, 0.274376789677, \
0.274588140328, 0.274799273601, 0.275010189664, 0.275220888681, \
0.275431370818, 0.275641636241, 0.275851685114, 0.2760615176, \
0.276271133865, 0.276480534071, 0.276689718381, 0.276898686958, \
0.277107439965, 0.277315977561, 0.27752429991, 0.277732407172, \
0.277940299507, 0.278147977076, 0.278355440038, 0.278562688553, \
0.278769722779, 0.278976542875, 0.279183149, 0.27938954131, \
0.279595719963, 0.279801685116, 0.280007436926, 0.280212975549, \
0.280418301139, 0.280623413854, 0.280828313848, 0.281033001275, \
0.281237476289, 0.281441739045, 0.281645789695, 0.281849628394, \
0.282053255293, 0.282256670545, 0.282459874302, 0.282662866715, \
0.282865647935, 0.283068218114, 0.283270577402, 0.283472725948, \
0.283674663903, 0.283876391415, 0.284077908635, 0.284279215709, \
0.284480312788, 0.284681200017, 0.284881877546, 0.285082345521, \
0.285282604088, 0.285482653395, 0.285682493588, 0.285882124811, \
0.286081547211, 0.286280760932, 0.286479766119, 0.286678562916, \
0.286877151468, 0.287075531918, 0.287273704409, 0.287471669084, \
0.287669426085, 0.287866975555, 0.288064317636, 0.28826145247, \
0.288458380196, 0.288655100957, 0.288851614893, 0.289047922144, \
0.289244022849, 0.289439917148, 0.289635605182, 0.289831087087, \
0.290026363003, 0.290221433068, 0.290416297419, 0.290610956195, \
0.290805409533, 0.290999657568, 0.291193700439, 0.29138753828, \
0.291581171228, 0.291774599419, 0.291967822987, 0.292160842068, \
0.292353656796, 0.292546267306, 0.292738673731, 0.292930876204, \
0.29312287486, 0.293314669832, 0.293506261251, 0.293697649251, \
0.293888833963, 0.294079815519, 0.294270594051, 0.29446116969, \
0.294651542566, 0.294841712811, 0.295031680553, 0.295221445924, \
0.295411009053, 0.295600370069, 0.295789529101, 0.295978486277, \
0.296167241727, 0.296355795578, 0.296544147958, 0.296732298995, \
0.296920248815, 0.297107997546, 0.297295545315, 0.297482892246, \
0.297670038468, 0.297856984104, 0.298043729282, 0.298230274125, \
0.298416618759, 0.298602763308, 0.298788707897, 0.298974452649, \
0.299159997689, 0.299345343139, 0.299530489123, 0.299715435764, \
0.299900183184, 0.300084731505, 0.30026908085, 0.300453231339, \
0.300637183096, 0.30082093624, 0.301004490893, 0.301187847175, \
0.301371005207, 0.301553965108, 0.301736726999, 0.301919290999, \
0.302101657227, 0.302283825802, 0.302465796843, 0.302647570468, \
0.302829146795, 0.303010525942, 0.303191708028, 0.303372693168, \
0.303553481481, 0.303734073083, 0.30391446809, 0.304094666619, \
0.304274668786, 0.304454474707, 0.304634084497, 0.304813498271, \
0.304992716144, 0.305171738232, 0.305350564647, 0.305529195506, \
0.305707630921, 0.305885871006, 0.306063915875, 0.306241765641, \
0.306419420416, 0.306596880314, 0.306774145446, 0.306951215926, \
0.307128091864, 0.307304773373, 0.307481260563, 0.307657553547, \
0.307833652434, 0.308009557336, 0.308185268362, 0.308360785624, \
0.308536109231, 0.308711239292, 0.308886175918, 0.309060919216, \
0.309235469297, 0.309409826268, 0.309583990239, 0.309757961317, \
0.309931739611, 0.310105325228, 0.310278718275, 0.31045191886, \
0.31062492709, 0.310797743071, 0.310970366911, 0.311142798715, \
0.31131503859, 0.31148708664, 0.311658942973, 0.311830607693, \
0.312002080905, 0.312173362715, 0.312344453226, 0.312515352544, \
0.312686060772, 0.312856578014, 0.313026904375, 0.313197039958, \
0.313366984865, 0.313536739201, 0.313706303067, 0.313875676567, \
0.314044859803, 0.314213852877, 0.31438265589, 0.314551268945, \
0.314719692144, 0.314887925586, 0.315055969374, 0.315223823609, \
0.31539148839, 0.315558963818, 0.315726249993, 0.315893347016, \
0.316060254985, 0.316226974001, 0.316393504163, 0.31655984557, \
0.31672599832, 0.316891962512, 0.317057738245, 0.317223325617, \
0.317388724726, 0.31755393567, 0.317718958546, 0.317883793451, \
0.318048440483, 0.318212899739, 0.318377171315, 0.318541255307, \
0.318705151813, 0.318868860929, 0.319032382749, 0.31919571737, \
0.319358864888, 0.319521825397, 0.319684598993, 0.31984718577, \
0.320009585823, 0.320171799247, 0.320333826135, 0.320495666583, \
0.320657320683, 0.320818788529, 0.320980070215, 0.321141165834, \
0.321302075479, 0.321462799242, 0.321623337217, 0.321783689496, \
0.321943856171, 0.322103837334, 0.322263633077, 0.322423243491, \
0.322582668669, 0.322741908701, 0.322900963677, 0.323059833691, \
0.323218518831, 0.323377019188, 0.323535334852, 0.323693465915, \
0.323851412464, 0.324009174591, 0.324166752385, 0.324324145934, \
0.324481355329, 0.324638380658, 0.324795222009, 0.324951879472, \
0.325108353134, 0.325264643085, 0.325420749411, 0.3255766722, \
0.325732411541, 0.325887967521, 0.326043340226, 0.326198529745, \
0.326353536163, 0.326508359567, 0.326663000045, 0.326817457682, \
0.326971732564, 0.327125824778, 0.327279734408, 0.327433461542, \
0.327587006263, 0.327740368658, 0.327893548812, 0.328046546808, \
0.328199362732, 0.328351996669, 0.328504448702, 0.328656718917, \
0.328808807396, 0.328960714223, 0.329112439483, 0.329263983259, \
0.329415345633, 0.32956652669, 0.329717526511, 0.32986834518, \
0.330018982779, 0.330169439391, 0.330319715097, 0.33046980998, \
0.330619724122, 0.330769457604, 0.330919010508, 0.331068382916, \
0.331217574907, 0.331366586564, 0.331515417967, 0.331664069197, \
0.331812540334, 0.331960831459, 0.332108942652, 0.332256873993, \
0.332404625561, 0.332552197437, 0.332699589699, 0.332846802427, \
0.332993835701, 0.333140689599, 0.3332873642, 0.333433859582, \
0.333580175825, 0.333726313006, 0.333872271204, 0.334018050496, \
0.334163650961, 0.334309072676, 0.334454315719, 0.334599380166, \
0.334744266096, 0.334888973585, 0.33503350271, 0.335177853547, \
0.335322026174, 0.335466020667, 0.335609837101, 0.335753475553, \
0.335896936099, 0.336040218815, 0.336183323776, 0.336326251057, \
0.336469000734, 0.336611572882, 0.336753967576, 0.336896184891, \
0.3370382249, 0.33718008768, 0.337321773304, 0.337463281846, \
0.33760461338, 0.337745767981, 0.337886745721, 0.338027546675, \
0.338168170916, 0.338308618517, 0.33844888955, 0.338588984091, \
0.33872890221, 0.33886864398, 0.339008209475, 0.339147598766, \
0.339286811925, 0.339425849025, 0.339564710138, 0.339703395335, \
0.339841904688, 0.339980238268, 0.340118396147, 0.340256378395, \
0.340394185084, 0.340531816284, 0.340669272067, 0.340806552503, \
0.340943657662, 0.341080587614, 0.34121734243, 0.341353922179, \
0.341490326932, 0.341626556758, 0.341762611726, 0.341898491907, \
0.342034197368, 0.34216972818, 0.342305084412, 0.342440266131, \
0.342575273407, 0.342710106308, 0.342844764904, 0.342979249261, \
0.343113559448, 0.343247695533, 0.343381657583, 0.343515445668, \
0.343649059853, 0.343782500207, 0.343915766796, 0.344048859689, \
0.344181778951, 0.344314524649, 0.344447096851, 0.344579495623, \
0.344711721031, 0.344843773142, 0.344975652022, 0.345107357736, \
0.345238890351, 0.345370249932, 0.345501436546, 0.345632450257, \
0.34576329113, 0.345893959232, 0.346024454627, 0.346154777379, \
0.346284927555, 0.346414905218, 0.346544710432, 0.346674343264, \
0.346803803775, 0.346933092032, 0.347062208097, 0.347191152035, \
0.347319923909, 0.347448523782, 0.34757695172, 0.347705207784, \
0.347833292037, 0.347961204544, 0.348088945366, 0.348216514568, \
0.34834391221, 0.348471138357, 0.348598193069, 0.348725076411, \
0.348851788443, 0.348978329228, 0.349104698827, 0.349230897303, \
0.349356924717, 0.349482781131, 0.349608466606, 0.349733981203, \
0.349859324983, 0.349984498007, 0.350109500337, 0.350234332033, \
0.350358993156, 0.350483483765, 0.350607803923, 0.350731953687, \
0.35085593312, 0.350979742281, 0.351103381229, 0.351226850025, \
0.351350148728, 0.351473277398, 0.351596236094, 0.351719024875, \
0.3518416438, 0.351964092929, 0.35208637232, 0.352208482032, \
0.352330422124, 0.352452192653, 0.35257379368, 0.352695225262, \
0.352816487456, 0.352937580322, 0.353058503916, 0.353179258297, \
0.353299843523, 0.353420259651, 0.353540506738, 0.353660584842, \
0.35378049402, 0.353900234329, 0.354019805826, 0.354139208567, \
0.35425844261, 0.354377508012, 0.354496404828, 0.354615133114, \
0.354733692928, 0.354852084326, 0.354970307363, 0.355088362094, \
0.355206248577, 0.355323966867, 0.355441517019, 0.355558899089, \
0.355676113131, 0.355793159202, 0.355910037356, 0.356026747648, \
0.356143290133, 0.356259664866, 0.356375871902, 0.356491911294, \
0.356607783098, 0.356723487367, 0.356839024156, 0.356954393519, \
0.35706959551, 0.357184630183, 0.35729949759, 0.357414197787, \
0.357528730826, 0.357643096761, 0.357757295645, 0.357871327531, \
0.357985192472, 0.358098890522, 0.358212421732, 0.358325786157, \
0.358438983847, 0.358552014857, 0.358664879237, 0.358777577041, \
0.358890108321, 0.359002473128, 0.359114671515, 0.359226703533, \
0.359338569235, 0.359450268671, 0.359561801893, 0.359673168954, \
0.359784369903, 0.359895404792, 0.360006273672, 0.360116976594, \
0.36022751361, 0.360337884769, 0.360448090122, 0.360558129721, \
0.360668003615, 0.360777711854, 0.360887254489, 0.360996631571, \
0.361105843148, 0.361214889271, 0.36132376999, 0.361432485354, \
0.361541035413, 0.361649420216, 0.361757639813, 0.361865694253, \
0.361973583586, 0.362081307859, 0.362188867123, 0.362296261425, \
0.362403490816, 0.362510555343, 0.362617455054, 0.36272419, \
0.362830760226, 0.362937165783, 0.363043406718, 0.363149483079, \
0.363255394914, 0.36336114227, 0.363466725196, 0.36357214374, \
0.363677397947, 0.363782487867, 0.363887413546, 0.363992175032, \
0.364096772372, 0.364201205612, 0.364305474799, 0.364409579981, \
0.364513521204, 0.364617298515, 0.364720911959, 0.364824361585, \
0.364927647437, 0.365030769562, 0.365133728007, 0.365236522816, \
0.365339154037, 0.365441621714, 0.365543925894, 0.365646066623, \
0.365748043945, 0.365849857906, 0.365951508551, 0.366052995927, \
0.366154320077, 0.366255481046, 0.366356478881, 0.366457313625, \
0.366557985323, 0.36665849402, 0.366758839761, 0.366859022589, \
0.36695904255, 0.367058899687, 0.367158594044, 0.367258125667, \
0.367357494598, 0.367456700881, 0.367555744561, 0.36765462568, \
0.367753344284, 0.367851900414, 0.367950294114, 0.368048525428, \
0.368146594399, 0.36824450107, 0.368342245484, 0.368439827684, \
0.368537247713, 0.368634505613, 0.368731601427, 0.368828535197, \
0.368925306967, 0.369021916778, 0.369118364673, 0.369214650693, \
0.369310774882, 0.36940673728, 0.36950253793, 0.369598176874, \
0.369693654153, 0.369788969809, 0.369884123883, 0.369979116417, \
0.370073947452, 0.37016861703, 0.370263125191, 0.370357471977, \
0.370451657429, 0.370545681587, 0.370639544492, 0.370733246185, \
0.370826786707, 0.370920166098, 0.371013384399, 0.371106441649, \
0.37119933789, 0.371292073161, 0.371384647502, 0.371477060953, \
0.371569313555, 0.371661405347, 0.371753336368, 0.371845106659, \
0.371936716259, 0.372028165207, 0.372119453543, 0.372210581305, \
0.372301548534, 0.372392355268, 0.372483001547, 0.372573487408, \
0.372663812892, 0.372753978036, 0.372843982879, 0.372933827461, \
0.373023511819, 0.373113035991, 0.373202400017, 0.373291603934, \
0.373380647781, 0.373469531595, 0.373558255414, 0.373646819277, \
0.373735223221, 0.373823467284, 0.373911551503, 0.373999475916, \
0.37408724056, 0.374174845473, 0.374262290692, 0.374349576255, \
0.374436702197, 0.374523668557, 0.374610475371, 0.374697122676, \
0.374783610509, 0.374869938907, 0.374956107906, 0.375042117542, \
0.375127967852, 0.375213658872, 0.37529919064, 0.375384563189, \
0.375469776558, 0.375554830782, 0.375639725896, 0.375724461937, \
0.37580903894, 0.375893456941, 0.375977715976, 0.37606181608, \
0.376145757289, 0.376229539637, 0.37631316316, 0.376396627894, \
0.376479933873, 0.376563081133, 0.376646069707, 0.376728899632, \
0.376811570942, 0.376894083671, 0.376976437854, 0.377058633526, \
0.377140670721, 0.377222549474, 0.377304269818, 0.377385831788, \
0.377467235419, 0.377548480743, 0.377629567795, 0.377710496609, \
0.377791267219, 0.377871879658, 0.37795233396, 0.378032630158, \
0.378112768286, 0.378192748378, 0.378272570466, 0.378352234585, \
0.378431740766, 0.378511089043, 0.378590279449, 0.378669312017, \
0.37874818678, 0.378826903771, 0.378905463021, 0.378983864565, \
0.379062108433, 0.37914019466, 0.379218123276, 0.379295894315, \
0.379373507808, 0.379450963788, 0.379528262286, 0.379605403335, \
0.379682386967, 0.379759213212, 0.379835882104, 0.379912393674, \
0.379988747952, 0.380064944972, 0.380140984763, 0.380216867358, \
0.380292592787, 0.380368161083, 0.380443572275, 0.380518826396, \
0.380593923475, 0.380668863545, 0.380743646634, 0.380818272776, \
0.380892741999, 0.380967054335, 0.381041209813, 0.381115208466, \
0.381189050322, 0.381262735412, 0.381336263766, 0.381409635414, \
0.381482850387, 0.381555908715, 0.381628810426, 0.381701555551, \
0.381774144121, 0.381846576163, 0.381918851709, 0.381990970787, \
0.382062933426, 0.382134739657, 0.382206389509, 0.38227788301, \
0.382349220191, 0.382420401079, 0.382491425704, 0.382562294094, \
0.38263300628, 0.382703562288, 0.382773962149, 0.382844205891, \
0.382914293541, 0.382984225129, 0.383054000684, 0.383123620232, \
0.383193083804, 0.383262391426, 0.383331543127, 0.383400538934, \
0.383469378877, 0.383538062982, 0.383606591278, 0.383674963791, \
0.383743180551, 0.383811241584, 0.383879146918, 0.383946896581, \
0.384014490599, 0.384081929, 0.384149211811, 0.38421633906, \
0.384283310773, 0.384350126978, 0.384416787701, 0.384483292969, \
0.384549642809, 0.384615837248, 0.384681876312, 0.384747760028, \
0.384813488423, 0.384879061522, 0.384944479352, 0.38500974194, \
0.385074849312, 0.385139801493, 0.38520459851, 0.385269240389, \
0.385333727156, 0.385398058837, 0.385462235457, 0.385526257042, \
0.385590123617, 0.385653835209, 0.385717391843, 0.385780793545, \
0.385844040338, 0.38590713225, 0.385970069305, 0.386032851528, \
0.386095478944, 0.386157951579, 0.386220269457, 0.386282432603, \
0.386344441042, 0.386406294798, 0.386467993897, 0.386529538363, \
0.386590928221, 0.386652163494, 0.386713244207, 0.386774170385, \
0.386834942052, 0.386895559231, 0.386956021948, 0.387016330226, \
0.387076484089, 0.387136483561, 0.387196328665, 0.387256019426, \
0.387315555867, 0.387374938013, 0.387434165885, 0.387493239508, \
0.387552158906, 0.387610924101, 0.387669535118, 0.387727991978, \
0.387786294705, 0.387844443323, 0.387902437854, 0.387960278321, \
0.388017964748, 0.388075497156, 0.388132875569, 0.38819010001, \
0.3882471705, 0.388304087062, 0.38836084972, 0.388417458495, \
0.388473913409, 0.388530214485, 0.388586361746, 0.388642355212, \
0.388698194907, 0.388753880852, 0.388809413069, 0.38886479158, \
0.388920016407, 0.388975087572, 0.389030005096, 0.389084769, \
0.389139379307, 0.389193836037, 0.389248139213, 0.389302288855, \
0.389356284985, 0.389410127623, 0.389463816792, 0.389517352512, \
0.389570734804, 0.389623963688, 0.389677039187, 0.38972996132, \
0.389782730109, 0.389835345574, 0.389887807735, 0.389940116614, \
0.38999227223, 0.390044274604, 0.390096123756, 0.390147819707, \
0.390199362477, 0.390250752086, 0.390301988554, 0.390353071901, \
0.390404002147, 0.390454779312, 0.390505403415, 0.390555874477, \
0.390606192517, 0.390656357554, 0.390706369609, 0.390756228701, \
0.390805934848, 0.390855488072, 0.39090488839, 0.390954135822, \
0.391003230387, 0.391052172104, 0.391100960993, 0.391149597072, \
0.391198080361, 0.391246410877, 0.39129458864, 0.391342613669, \
0.391390485981, 0.391438205597, 0.391485772534, 0.39153318681, \
0.391580448445, 0.391627557456, 0.391674513861, 0.39172131768, \
0.39176796893, 0.391814467628, 0.391860813794, 0.391907007445, \
0.391953048599, 0.391998937274, 0.392044673487, 0.392090257256, \
0.392135688599, 0.392180967534, 0.392226094077, 0.392271068247, \
0.392315890061, 0.392360559535, 0.392405076688, 0.392449441536, \
0.392493654097, 0.392537714388, 0.392581622425, 0.392625378225, \
0.392668981806, 0.392712433185, 0.392755732377, 0.3927988794, \
0.392841874271, 0.392884717005, 0.392927407619, 0.392969946131, \
0.393012332555, 0.393054566909, 0.393096649209, 0.39313857947, \
0.39318035771, 0.393221983944, 0.393263458188, 0.393304780457, \
0.393345950769, 0.393386969139, 0.393427835581, 0.393468550113, \
0.39350911275, 0.393549523507, 0.3935897824, 0.393629889444, \
0.393669844655, 0.393709648048, 0.393749299638, 0.393788799441, \
0.393828147471, 0.393867343743, 0.393906388274, 0.393945281077, \
0.393984022167, 0.39402261156, 0.39406104927, 0.394099335311, \
0.394137469699, 0.394175452448, 0.394213283572, 0.394250963087, \
0.394288491006, 0.394325867343, 0.394363092114, 0.394400165331, \
0.39443708701, 0.394473857165, 0.394510475809, 0.394546942956, \
0.394583258621, 0.394619422817, 0.394655435558, 0.394691296858, \
0.39472700673, 0.394762565188, 0.394797972246, 0.394833227916, \
0.394868332214, 0.394903285151, 0.394938086741, 0.394972736997, \
0.395007235934, 0.395041583563, 0.395075779897, 0.395109824951, \
0.395143718736, 0.395177461266, 0.395211052553, 0.39524449261, \
0.39527778145, 0.395310919086, 0.39534390553, 0.395376740795, \
0.395409424892, 0.395441957836, 0.395474339637, 0.395506570308, \
0.395538649862, 0.39557057831, 0.395602355665, 0.395633981938, \
0.395665457143, 0.39569678129, 0.395727954391, 0.395758976459, \
0.395789847504, 0.39582056754, 0.395851136577, 0.395881554626, \
0.395911821701, 0.395941937811, 0.395971902968, 0.396001717184, \
0.39603138047, 0.396060892837, 0.396090254297, 0.396119464859, \
0.396148524537, 0.396177433339, 0.396206191278, 0.396234798364, \
0.396263254608, 0.396291560021, 0.396319714613, 0.396347718395, \
0.396375571378, 0.396403273572, 0.396430824988, 0.396458225635, \
0.396485475525, 0.396512574667, 0.396539523072, 0.39656632075, \
0.396592967711, 0.396619463965, 0.396645809523, 0.396672004393, \
0.396698048586, 0.396723942111, 0.396749684979, 0.396775277199, \
0.396800718781, 0.396826009734, 0.396851150068, 0.396876139792, \
0.396900978915, 0.396925667448, 0.396950205399, 0.396974592777, \
0.396998829592, 0.397022915853, 0.397046851568, 0.397070636747, \
0.397094271399, 0.397117755533, 0.397141089157, 0.39716427228, \
0.39718730491, 0.397210187058, 0.39723291873, 0.397255499936, \
0.397277930684, 0.397300210983, 0.39732234084, 0.397344320264, \
0.397366149264, 0.397387827847, 0.397409356022, 0.397430733797, \
0.397451961179, 0.397473038177, 0.397493964799, 0.397514741051, \
0.397535366943, 0.397555842482, 0.397576167675, 0.397596342531, \
0.397616367056, 0.397636241258, 0.397655965145, 0.397675538724, \
0.397694962001, 0.397714234986, 0.397733357684, 0.397752330103, \
0.39777115225, 0.397789824132, 0.397808345755, 0.397826717128, \
0.397844938256, 0.397863009146, 0.397880929806, 0.397898700242, \
0.39791632046, 0.397933790467, 0.39795111027, 0.397968279875, \
0.397985299288, 0.398002168517, 0.398018887566, 0.398035456442, \
0.398051875152, 0.398068143701, 0.398084262096, 0.398100230343, \
0.398116048447, 0.398131716414, 0.398147234251, 0.398162601962, \
0.398177819554, 0.398192887033, 0.398207804404, 0.398222571672, \
0.398237188843, 0.398251655922, 0.398265972915, 0.398280139828, \
0.398294156664, 0.39830802343, 0.398321740131, 0.398335306771, \
0.398348723357, 0.398361989891, 0.398375106381, 0.39838807283, \
0.398400889243, 0.398413555626, 0.398426071982, 0.398438438316, \
0.398450654634, 0.398462720938, 0.398474637235, 0.398486403528, \
0.398498019821, 0.39850948612, 0.398520802428, 0.398531968749, \
0.398542985087, 0.398553851447, 0.398564567832, 0.398575134247, \
0.398585550695, 0.398595817181, 0.398605933707, 0.398615900278, \
0.398625716898, 0.398635383569, 0.398644900296, 0.398654267082, \
0.39866348393, 0.398672550844, 0.398681467827, 0.398690234883, \
0.398698852014, 0.398707319224, 0.398715636516, 0.398723803893, \
0.398731821357, 0.398739688913, 0.398747406562, 0.398754974308, \
0.398762392153, 0.3987696601, 0.398776778152, 0.398783746311, \
0.398790564579, 0.39879723296, 0.398803751456, 0.398810120068, \
0.3988163388, 0.398822407654, 0.398828326631, 0.398834095735, \
0.398839714966, 0.398845184327, 0.39885050382, 0.398855673448, \
0.39886069321, 0.398865563111, 0.398870283151, 0.398874853332, \
0.398879273655, 0.398883544123, 0.398887664737, 0.398891635497, \
0.398895456407, 0.398899127466, 0.398902648676, 0.398906020039, \
0.398909241556, 0.398912313228, 0.398915235055, 0.398918007039, \
0.398920629181, 0.398923101482, 0.398925423943, 0.398927596563, \
0.398929619345, 0.398931492289, 0.398933215395, 0.398934788664, \
0.398936212097, 0.398937485693, 0.398938609454, 0.398939583379, \
0.39894040747, 0.398941081725, 0.398941606146, 0.398941980732, \
0.398942205484, 0.398942280401, 0.398942280401, 0.398942205484, \
0.398941980732, 0.398941606146, 0.398941081725, 0.39894040747, \
0.398939583379, 0.398938609454, 0.398937485693, 0.398936212097, \
0.398934788664, 0.398933215395, 0.398931492289, 0.398929619345, \
0.398927596563, 0.398925423943, 0.398923101482, 0.398920629181, \
0.398918007039, 0.398915235055, 0.398912313228, 0.398909241556, \
0.398906020039, 0.398902648676, 0.398899127466, 0.398895456407, \
0.398891635497, 0.398887664737, 0.398883544123, 0.398879273655, \
0.398874853332, 0.398870283151, 0.398865563111, 0.39886069321, \
0.398855673448, 0.39885050382, 0.398845184327, 0.398839714966, \
0.398834095735, 0.398828326631, 0.398822407654, 0.3988163388, \
0.398810120068, 0.398803751456, 0.39879723296, 0.398790564579, \
0.398783746311, 0.398776778152, 0.3987696601, 0.398762392153, \
0.398754974308, 0.398747406562, 0.398739688913, 0.398731821357, \
0.398723803893, 0.398715636516, 0.398707319224, 0.398698852014, \
0.398690234883, 0.398681467827, 0.398672550844, 0.39866348393, \
0.398654267082, 0.398644900296, 0.398635383569, 0.398625716898, \
0.398615900278, 0.398605933707, 0.398595817181, 0.398585550695, \
0.398575134247, 0.398564567832, 0.398553851447, 0.398542985087, \
0.398531968749, 0.398520802428, 0.39850948612, 0.398498019821, \
0.398486403528, 0.398474637235, 0.398462720938, 0.398450654634, \
0.398438438316, 0.398426071982, 0.398413555626, 0.398400889243, \
0.39838807283, 0.398375106381, 0.398361989891, 0.398348723357, \
0.398335306771, 0.398321740131, 0.39830802343, 0.398294156664, \
0.398280139828, 0.398265972915, 0.398251655922, 0.398237188843, \
0.398222571672, 0.398207804404, 0.398192887033, 0.398177819554, \
0.398162601962, 0.398147234251, 0.398131716414, 0.398116048447, \
0.398100230343, 0.398084262096, 0.398068143701, 0.398051875152, \
0.398035456442, 0.398018887566, 0.398002168517, 0.397985299288, \
0.397968279875, 0.39795111027, 0.397933790467, 0.39791632046, \
0.397898700242, 0.397880929806, 0.397863009146, 0.397844938256, \
0.397826717128, 0.397808345755, 0.397789824132, 0.39777115225, \
0.397752330103, 0.397733357684, 0.397714234986, 0.397694962001, \
0.397675538724, 0.397655965145, 0.397636241258, 0.397616367056, \
0.397596342531, 0.397576167675, 0.397555842482, 0.397535366943, \
0.397514741051, 0.397493964799, 0.397473038177, 0.397451961179, \
0.397430733797, 0.397409356022, 0.397387827847, 0.397366149264, \
0.397344320264, 0.39732234084, 0.397300210983, 0.397277930684, \
0.397255499936, 0.39723291873, 0.397210187058, 0.39718730491, \
0.39716427228, 0.397141089157, 0.397117755533, 0.397094271399, \
0.397070636747, 0.397046851568, 0.397022915853, 0.396998829592, \
0.396974592777, 0.396950205399, 0.396925667448, 0.396900978915, \
0.396876139792, 0.396851150068, 0.396826009734, 0.396800718781, \
0.396775277199, 0.396749684979, 0.396723942111, 0.396698048586, \
0.396672004393, 0.396645809523, 0.396619463965, 0.396592967711, \
0.39656632075, 0.396539523072, 0.396512574667, 0.396485475525, \
0.396458225635, 0.396430824988, 0.396403273572, 0.396375571378, \
0.396347718395, 0.396319714613, 0.396291560021, 0.396263254608, \
0.396234798364, 0.396206191278, 0.396177433339, 0.396148524537, \
0.396119464859, 0.396090254297, 0.396060892837, 0.39603138047, \
0.396001717184, 0.395971902968, 0.395941937811, 0.395911821701, \
0.395881554626, 0.395851136577, 0.39582056754, 0.395789847504, \
0.395758976459, 0.395727954391, 0.39569678129, 0.395665457143, \
0.395633981938, 0.395602355665, 0.39557057831, 0.395538649862, \
0.395506570308, 0.395474339637, 0.395441957836, 0.395409424892, \
0.395376740795, 0.39534390553, 0.395310919086, 0.39527778145, \
0.39524449261, 0.395211052553, 0.395177461266, 0.395143718736, \
0.395109824951, 0.395075779897, 0.395041583563, 0.395007235934, \
0.394972736997, 0.394938086741, 0.394903285151, 0.394868332214, \
0.394833227916, 0.394797972246, 0.394762565188, 0.39472700673, \
0.394691296858, 0.394655435558, 0.394619422817, 0.394583258621, \
0.394546942956, 0.394510475809, 0.394473857165, 0.39443708701, \
0.394400165331, 0.394363092114, 0.394325867343, 0.394288491006, \
0.394250963087, 0.394213283572, 0.394175452448, 0.394137469699, \
0.394099335311, 0.39406104927, 0.39402261156, 0.393984022167, \
0.393945281077, 0.393906388274, 0.393867343743, 0.393828147471, \
0.393788799441, 0.393749299638, 0.393709648048, 0.393669844655, \
0.393629889444, 0.3935897824, 0.393549523507, 0.39350911275, \
0.393468550113, 0.393427835581, 0.393386969139, 0.393345950769, \
0.393304780457, 0.393263458188, 0.393221983944, 0.39318035771, \
0.39313857947, 0.393096649209, 0.393054566909, 0.393012332555, \
0.392969946131, 0.392927407619, 0.392884717005, 0.392841874271, \
0.3927988794, 0.392755732377, 0.392712433185, 0.392668981806, \
0.392625378225, 0.392581622425, 0.392537714388, 0.392493654097, \
0.392449441536, 0.392405076688, 0.392360559535, 0.392315890061, \
0.392271068247, 0.392226094077, 0.392180967534, 0.392135688599, \
0.392090257256, 0.392044673487, 0.391998937274, 0.391953048599, \
0.391907007445, 0.391860813794, 0.391814467628, 0.39176796893, \
0.39172131768, 0.391674513861, 0.391627557456, 0.391580448445, \
0.39153318681, 0.391485772534, 0.391438205597, 0.391390485981, \
0.391342613669, 0.39129458864, 0.391246410877, 0.391198080361, \
0.391149597072, 0.391100960993, 0.391052172104, 0.391003230387, \
0.390954135822, 0.39090488839, 0.390855488072, 0.390805934848, \
0.390756228701, 0.390706369609, 0.390656357554, 0.390606192517, \
0.390555874477, 0.390505403415, 0.390454779312, 0.390404002147, \
0.390353071901, 0.390301988554, 0.390250752086, 0.390199362477, \
0.390147819707, 0.390096123756, 0.390044274604, 0.38999227223, \
0.389940116614, 0.389887807735, 0.389835345574, 0.389782730109, \
0.38972996132, 0.389677039187, 0.389623963688, 0.389570734804, \
0.389517352512, 0.389463816792, 0.389410127623, 0.389356284985, \
0.389302288855, 0.389248139213, 0.389193836037, 0.389139379307, \
0.389084769, 0.389030005096, 0.388975087572, 0.388920016407, \
0.38886479158, 0.388809413069, 0.388753880852, 0.388698194907, \
0.388642355212, 0.388586361746, 0.388530214485, 0.388473913409, \
0.388417458495, 0.38836084972, 0.388304087062, 0.3882471705, \
0.38819010001, 0.388132875569, 0.388075497156, 0.388017964748, \
0.387960278321, 0.387902437854, 0.387844443323, 0.387786294705, \
0.387727991978, 0.387669535118, 0.387610924101, 0.387552158906, \
0.387493239508, 0.387434165885, 0.387374938013, 0.387315555867, \
0.387256019426, 0.387196328665, 0.387136483561, 0.387076484089, \
0.387016330226, 0.386956021948, 0.386895559231, 0.386834942052, \
0.386774170385, 0.386713244207, 0.386652163494, 0.386590928221, \
0.386529538363, 0.386467993897, 0.386406294798, 0.386344441042, \
0.386282432603, 0.386220269457, 0.386157951579, 0.386095478944, \
0.386032851528, 0.385970069305, 0.38590713225, 0.385844040338, \
0.385780793545, 0.385717391843, 0.385653835209, 0.385590123617, \
0.385526257042, 0.385462235457, 0.385398058837, 0.385333727156, \
0.385269240389, 0.38520459851, 0.385139801493, 0.385074849312, \
0.38500974194, 0.384944479352, 0.384879061522, 0.384813488423, \
0.384747760028, 0.384681876312, 0.384615837248, 0.384549642809, \
0.384483292969, 0.384416787701, 0.384350126978, 0.384283310773, \
0.38421633906, 0.384149211811, 0.384081929, 0.384014490599, \
0.383946896581, 0.383879146918, 0.383811241584, 0.383743180551, \
0.383674963791, 0.383606591278, 0.383538062982, 0.383469378877, \
0.383400538934, 0.383331543127, 0.383262391426, 0.383193083804, \
0.383123620232, 0.383054000684, 0.382984225129, 0.382914293541, \
0.382844205891, 0.382773962149, 0.382703562288, 0.38263300628, \
0.382562294094, 0.382491425704, 0.382420401079, 0.382349220191, \
0.38227788301, 0.382206389509, 0.382134739657, 0.382062933426, \
0.381990970787, 0.381918851709, 0.381846576163, 0.381774144121, \
0.381701555551, 0.381628810426, 0.381555908715, 0.381482850387, \
0.381409635414, 0.381336263766, 0.381262735412, 0.381189050322, \
0.381115208466, 0.381041209813, 0.380967054335, 0.380892741999, \
0.380818272776, 0.380743646634, 0.380668863545, 0.380593923475, \
0.380518826396, 0.380443572275, 0.380368161083, 0.380292592787, \
0.380216867358, 0.380140984763, 0.380064944972, 0.379988747952, \
0.379912393674, 0.379835882104, 0.379759213212, 0.379682386967, \
0.379605403335, 0.379528262286, 0.379450963788, 0.379373507808, \
0.379295894315, 0.379218123276, 0.37914019466, 0.379062108433, \
0.378983864565, 0.378905463021, 0.378826903771, 0.37874818678, \
0.378669312017, 0.378590279449, 0.378511089043, 0.378431740766, \
0.378352234585, 0.378272570466, 0.378192748378, 0.378112768286, \
0.378032630158, 0.37795233396, 0.377871879658, 0.377791267219, \
0.377710496609, 0.377629567795, 0.377548480743, 0.377467235419, \
0.377385831788, 0.377304269818, 0.377222549474, 0.377140670721, \
0.377058633526, 0.376976437854, 0.376894083671, 0.376811570942, \
0.376728899632, 0.376646069707, 0.376563081133, 0.376479933873, \
0.376396627894, 0.37631316316, 0.376229539637, 0.376145757289, \
0.37606181608, 0.375977715976, 0.375893456941, 0.37580903894, \
0.375724461937, 0.375639725896, 0.375554830782, 0.375469776558, \
0.375384563189, 0.37529919064, 0.375213658872, 0.375127967852, \
0.375042117542, 0.374956107906, 0.374869938907, 0.374783610509, \
0.374697122676, 0.374610475371, 0.374523668557, 0.374436702197, \
0.374349576255, 0.374262290692, 0.374174845473, 0.37408724056, \
0.373999475916, 0.373911551503, 0.373823467284, 0.373735223221, \
0.373646819277, 0.373558255414, 0.373469531595, 0.373380647781, \
0.373291603934, 0.373202400017, 0.373113035991, 0.373023511819, \
0.372933827461, 0.372843982879, 0.372753978036, 0.372663812892, \
0.372573487408, 0.372483001547, 0.372392355268, 0.372301548534, \
0.372210581305, 0.372119453543, 0.372028165207, 0.371936716259, \
0.371845106659, 0.371753336368, 0.371661405347, 0.371569313555, \
0.371477060953, 0.371384647502, 0.371292073161, 0.37119933789, \
0.371106441649, 0.371013384399, 0.370920166098, 0.370826786707, \
0.370733246185, 0.370639544492, 0.370545681587, 0.370451657429, \
0.370357471977, 0.370263125191, 0.37016861703, 0.370073947452, \
0.369979116417, 0.369884123883, 0.369788969809, 0.369693654153, \
0.369598176874, 0.36950253793, 0.36940673728, 0.369310774882, \
0.369214650693, 0.369118364673, 0.369021916778, 0.368925306967, \
0.368828535197, 0.368731601427, 0.368634505613, 0.368537247713, \
0.368439827684, 0.368342245484, 0.36824450107, 0.368146594399, \
0.368048525428, 0.367950294114, 0.367851900414, 0.367753344284, \
0.36765462568, 0.367555744561, 0.367456700881, 0.367357494598, \
0.367258125667, 0.367158594044, 0.367058899687, 0.36695904255, \
0.366859022589, 0.366758839761, 0.36665849402, 0.366557985323, \
0.366457313625, 0.366356478881, 0.366255481046, 0.366154320077, \
0.366052995927, 0.365951508551, 0.365849857906, 0.365748043945, \
0.365646066623, 0.365543925894, 0.365441621714, 0.365339154037, \
0.365236522816, 0.365133728007, 0.365030769562, 0.364927647437, \
0.364824361585, 0.364720911959, 0.364617298515, 0.364513521204, \
0.364409579981, 0.364305474799, 0.364201205612, 0.364096772372, \
0.363992175032, 0.363887413546, 0.363782487867, 0.363677397947, \
0.36357214374, 0.363466725196, 0.36336114227, 0.363255394914, \
0.363149483079, 0.363043406718, 0.362937165783, 0.362830760226, \
0.36272419, 0.362617455054, 0.362510555343, 0.362403490816, \
0.362296261425, 0.362188867123, 0.362081307859, 0.361973583586, \
0.361865694253, 0.361757639813, 0.361649420216, 0.361541035413, \
0.361432485354, 0.36132376999, 0.361214889271, 0.361105843148, \
0.360996631571, 0.360887254489, 0.360777711854, 0.360668003615, \
0.360558129721, 0.360448090122, 0.360337884769, 0.36022751361, \
0.360116976594, 0.360006273672, 0.359895404792, 0.359784369903, \
0.359673168954, 0.359561801893, 0.359450268671, 0.359338569235, \
0.359226703533, 0.359114671515, 0.359002473128, 0.358890108321, \
0.358777577041, 0.358664879237, 0.358552014857, 0.358438983847, \
0.358325786157, 0.358212421732, 0.358098890522, 0.357985192472, \
0.357871327531, 0.357757295645, 0.357643096761, 0.357528730826, \
0.357414197787, 0.35729949759, 0.357184630183, 0.35706959551, \
0.356954393519, 0.356839024156, 0.356723487367, 0.356607783098, \
0.356491911294, 0.356375871902, 0.356259664866, 0.356143290133, \
0.356026747648, 0.355910037356, 0.355793159202, 0.355676113131, \
0.355558899089, 0.355441517019, 0.355323966867, 0.355206248577, \
0.355088362094, 0.354970307363, 0.354852084326, 0.354733692928, \
0.354615133114, 0.354496404828, 0.354377508012, 0.35425844261, \
0.354139208567, 0.354019805826, 0.353900234329, 0.35378049402, \
0.353660584842, 0.353540506738, 0.353420259651, 0.353299843523, \
0.353179258297, 0.353058503916, 0.352937580322, 0.352816487456, \
0.352695225262, 0.35257379368, 0.352452192653, 0.352330422124, \
0.352208482032, 0.35208637232, 0.351964092929, 0.3518416438, \
0.351719024875, 0.351596236094, 0.351473277398, 0.351350148728, \
0.351226850025, 0.351103381229, 0.350979742281, 0.35085593312, \
0.350731953687, 0.350607803923, 0.350483483765, 0.350358993156, \
0.350234332033, 0.350109500337, 0.349984498007, 0.349859324983, \
0.349733981203, 0.349608466606, 0.349482781131, 0.349356924717, \
0.349230897303, 0.349104698827, 0.348978329228, 0.348851788443, \
0.348725076411, 0.348598193069, 0.348471138357, 0.34834391221, \
0.348216514568, 0.348088945366, 0.347961204544, 0.347833292037, \
0.347705207784, 0.34757695172, 0.347448523782, 0.347319923909, \
0.347191152035, 0.347062208097, 0.346933092032, 0.346803803775, \
0.346674343264, 0.346544710432, 0.346414905218, 0.346284927555, \
0.346154777379, 0.346024454627, 0.345893959232, 0.34576329113, \
0.345632450257, 0.345501436546, 0.345370249932, 0.345238890351, \
0.345107357736, 0.344975652022, 0.344843773142, 0.344711721031, \
0.344579495623, 0.344447096851, 0.344314524649, 0.344181778951, \
0.344048859689, 0.343915766796, 0.343782500207, 0.343649059853, \
0.343515445668, 0.343381657583, 0.343247695533, 0.343113559448, \
0.342979249261, 0.342844764904, 0.342710106308, 0.342575273407, \
0.342440266131, 0.342305084412, 0.34216972818, 0.342034197368, \
0.341898491907, 0.341762611726, 0.341626556758, 0.341490326932, \
0.341353922179, 0.34121734243, 0.341080587614, 0.340943657662, \
0.340806552503, 0.340669272067, 0.340531816284, 0.340394185084, \
0.340256378395, 0.340118396147, 0.339980238268, 0.339841904688, \
0.339703395335, 0.339564710138, 0.339425849025, 0.339286811925, \
0.339147598766, 0.339008209475, 0.33886864398, 0.33872890221, \
0.338588984091, 0.33844888955, 0.338308618517, 0.338168170916, \
0.338027546675, 0.337886745721, 0.337745767981, 0.33760461338, \
0.337463281846, 0.337321773304, 0.33718008768, 0.3370382249, \
0.336896184891, 0.336753967576, 0.336611572882, 0.336469000734, \
0.336326251057, 0.336183323776, 0.336040218815, 0.335896936099, \
0.335753475553, 0.335609837101, 0.335466020667, 0.335322026174, \
0.335177853547, 0.33503350271, 0.334888973585, 0.334744266096, \
0.334599380166, 0.334454315719, 0.334309072676, 0.334163650961, \
0.334018050496, 0.333872271204, 0.333726313006, 0.333580175825, \
0.333433859582, 0.3332873642, 0.333140689599, 0.332993835701, \
0.332846802427, 0.332699589699, 0.332552197437, 0.332404625561, \
0.332256873993, 0.332108942652, 0.331960831459, 0.331812540334, \
0.331664069197, 0.331515417967, 0.331366586564, 0.331217574907, \
0.331068382916, 0.330919010508, 0.330769457604, 0.330619724122, \
0.33046980998, 0.330319715097, 0.330169439391, 0.330018982779, \
0.32986834518, 0.329717526511, 0.32956652669, 0.329415345633, \
0.329263983259, 0.329112439483, 0.328960714223, 0.328808807396, \
0.328656718917, 0.328504448702, 0.328351996669, 0.328199362732, \
0.328046546808, 0.327893548812, 0.327740368658, 0.327587006263, \
0.327433461542, 0.327279734408, 0.327125824778, 0.326971732564, \
0.326817457682, 0.326663000045, 0.326508359567, 0.326353536163, \
0.326198529745, 0.326043340226, 0.325887967521, 0.325732411541, \
0.3255766722, 0.325420749411, 0.325264643085, 0.325108353134, \
0.324951879472, 0.324795222009, 0.324638380658, 0.324481355329, \
0.324324145934, 0.324166752385, 0.324009174591, 0.323851412464, \
0.323693465915, 0.323535334852, 0.323377019188, 0.323218518831, \
0.323059833691, 0.322900963677, 0.322741908701, 0.322582668669, \
0.322423243491, 0.322263633077, 0.322103837334, 0.321943856171, \
0.321783689496, 0.321623337217, 0.321462799242, 0.321302075479, \
0.321141165834, 0.320980070215, 0.320818788529, 0.320657320683, \
0.320495666583, 0.320333826135, 0.320171799247, 0.320009585823, \
0.31984718577, 0.319684598993, 0.319521825397, 0.319358864888, \
0.31919571737, 0.319032382749, 0.318868860929, 0.318705151813, \
0.318541255307, 0.318377171315, 0.318212899739, 0.318048440483, \
0.317883793451, 0.317718958546, 0.31755393567, 0.317388724726, \
0.317223325617, 0.317057738245, 0.316891962512, 0.31672599832, \
0.31655984557, 0.316393504163, 0.316226974001, 0.316060254985, \
0.315893347016, 0.315726249993, 0.315558963818, 0.31539148839, \
0.315223823609, 0.315055969374, 0.314887925586, 0.314719692144, \
0.314551268945, 0.31438265589, 0.314213852877, 0.314044859803, \
0.313875676567, 0.313706303067, 0.313536739201, 0.313366984865, \
0.313197039958, 0.313026904375, 0.312856578014, 0.312686060772, \
0.312515352544, 0.312344453226, 0.312173362715, 0.312002080905, \
0.311830607693, 0.311658942973, 0.31148708664, 0.31131503859, \
0.311142798715, 0.310970366911, 0.310797743071, 0.31062492709, \
0.31045191886, 0.310278718275, 0.310105325228, 0.309931739611, \
0.309757961317, 0.309583990239, 0.309409826268, 0.309235469297, \
0.309060919216, 0.308886175918, 0.308711239292, 0.308536109231, \
0.308360785624, 0.308185268362, 0.308009557336, 0.307833652434, \
0.307657553547, 0.307481260563, 0.307304773373, 0.307128091864, \
0.306951215926, 0.306774145446, 0.306596880314, 0.306419420416, \
0.306241765641, 0.306063915875, 0.305885871006, 0.305707630921, \
0.305529195506, 0.305350564647, 0.305171738232, 0.304992716144, \
0.304813498271, 0.304634084497, 0.304454474707, 0.304274668786, \
0.304094666619, 0.30391446809, 0.303734073083, 0.303553481481, \
0.303372693168, 0.303191708028, 0.303010525942, 0.302829146795, \
0.302647570468, 0.302465796843, 0.302283825802, 0.302101657227, \
0.301919290999, 0.301736726999, 0.301553965108, 0.301371005207, \
0.301187847175, 0.301004490893, 0.30082093624, 0.300637183096, \
0.300453231339, 0.30026908085, 0.300084731505, 0.299900183184, \
0.299715435764, 0.299530489123, 0.299345343139, 0.299159997689, \
0.298974452649, 0.298788707897, 0.298602763308, 0.298416618759, \
0.298230274125, 0.298043729282, 0.297856984104, 0.297670038468, \
0.297482892246, 0.297295545315, 0.297107997546, 0.296920248815, \
0.296732298995, 0.296544147958, 0.296355795578, 0.296167241727, \
0.295978486277, 0.295789529101, 0.295600370069, 0.295411009053, \
0.295221445924, 0.295031680553, 0.294841712811, 0.294651542566, \
0.29446116969, 0.294270594051, 0.294079815519, 0.293888833963, \
0.293697649251, 0.293506261251, 0.293314669832, 0.29312287486, \
0.292930876204, 0.292738673731, 0.292546267306, 0.292353656796, \
0.292160842068, 0.291967822987, 0.291774599419, 0.291581171228, \
0.29138753828, 0.291193700439, 0.290999657568, 0.290805409533, \
0.290610956195, 0.290416297419, 0.290221433068, 0.290026363003, \
0.289831087087, 0.289635605182, 0.289439917148, 0.289244022849, \
0.289047922144, 0.288851614893, 0.288655100957, 0.288458380196, \
0.28826145247, 0.288064317636, 0.287866975555, 0.287669426085, \
0.287471669084, 0.287273704409, 0.287075531918, 0.286877151468, \
0.286678562916, 0.286479766119, 0.286280760932, 0.286081547211, \
0.285882124811, 0.285682493588, 0.285482653395, 0.285282604088, \
0.285082345521, 0.284881877546, 0.284681200017, 0.284480312788, \
0.284279215709, 0.284077908635, 0.283876391415, 0.283674663903, \
0.283472725948, 0.283270577402, 0.283068218114, 0.282865647935, \
0.282662866715, 0.282459874302, 0.282256670545, 0.282053255293, \
0.281849628394, 0.281645789695, 0.281441739045, 0.281237476289, \
0.281033001275, 0.280828313848, 0.280623413854, 0.280418301139, \
0.280212975549, 0.280007436926, 0.279801685116, 0.279595719963, \
0.27938954131, 0.279183149, 0.278976542875, 0.278769722779, \
0.278562688553, 0.278355440038, 0.278147977076, 0.277940299507, \
0.277732407172, 0.27752429991, 0.277315977561, 0.277107439965, \
0.276898686958, 0.276689718381, 0.276480534071, 0.276271133865, \
0.2760615176, 0.275851685114, 0.275641636241, 0.275431370818, \
0.275220888681, 0.275010189664, 0.274799273601, 0.274588140328, \
0.274376789677, 0.274165221481, 0.273953435575, 0.273741431789, \
0.273529209956, 0.273316769908, 0.273104111476, 0.272891234489, \
0.272678138779, 0.272464824175, 0.272251290507, 0.272037537604, \
0.271823565293, 0.271609373403, 0.271394961762, 0.271180330196, \
0.270965478533, 0.270750406598, 0.270535114218, 0.270319601217, \
0.270103867421, 0.269887912653, 0.269671736739, 0.2694553395, \
0.269238720761, 0.269021880344, 0.268804818071, 0.268587533763, \
0.268370027242, 0.268152298328, 0.267934346842, 0.267716172603, \
0.26749777543, 0.267279155143, 0.26706031156, 0.266841244498, \
0.266621953774, 0.266402439207, 0.266182700611, 0.265962737803, \
0.265742550598, 0.265522138811, 0.265301502256, 0.265080640748, \
0.2648595541, 0.264638242124, 0.264416704633, 0.264194941439, \
0.263972952353, 0.263750737186, 0.263528295749, 0.263305627851, \
0.263082733302, 0.26285961191, 0.262636263484, 0.262412687831, \
0.26218888476, 0.261964854076, 0.261740595585, 0.261516109095, \
0.261291394408, 0.261066451331, 0.260841279668, 0.260615879221, \
0.260390249794, 0.260164391189, 0.259938303209, 0.259711985655, \
0.259485438327, 0.259258661026, 0.259031653551, 0.258804415703, \
0.258576947278, 0.258349248077, 0.258121317895, 0.25789315653, \
0.257664763779, 0.257436139437, 0.2572072833, 0.256978195162, \
0.256748874817, 0.256519322059, 0.256289536681, 0.256059518475, \
0.255829267233, 0.255598782747, 0.255368064807, 0.255137113202, \
0.254905927723, 0.254674508159, 0.254442854297, 0.254210965925, \
0.253978842831, 0.253746484801, 0.253513891621, 0.253281063075, \
0.253047998949, 0.252814699027, 0.252581163092, 0.252347390927, \
0.252113382314, 0.251879137035, 0.251644654871, 0.251409935601, \
0.251174979007, 0.250939784865, 0.250704352956, 0.250468683057, \
0.250232774945, 0.249996628397, 0.249760243188, 0.249523619094, \
0.249286755888, 0.249049653346, 0.24881231124, 0.248574729343, \
0.248336907427, 0.248098845263, 0.247860542621, 0.247621999273, \
0.247383214985, 0.247144189529, 0.24690492267, 0.246665414177, \
0.246425663816, 0.246185671353, 0.245945436553, 0.24570495918, \
0.245464238999, 0.245223275772, 0.244982069261, 0.244740619229, \
0.244498925435, 0.244256987642, 0.244014805607, 0.24377237909, \
0.243529707849, 0.243286791641, 0.243043630222, 0.24280022335, \
0.242556570778, 0.242312672262, 0.242068527555, 0.241824136409, \
0.241579498578, 0.241334613813, 0.241089481863, 0.240844102481, \
0.240598475413, 0.240352600409, 0.240106477217, 0.239860105583, \
0.239613485254, 0.239366615975, 0.23911949749, 0.238872129544, \
0.238624511878, 0.238376644236, 0.238128526359, 0.237880157987, \
0.237631538861, 0.237382668719, 0.2371335473, 0.23688417434, \
0.236634549577, 0.236384672746, 0.236134543582, 0.23588416182, \
0.235633527192, 0.235382639431, 0.235131498268, 0.234880103436, \
0.234628454662, 0.234376551677, 0.234124394209, 0.233871981984, \
0.233619314731, 0.233366392173, 0.233113214036, 0.232859780045, \
0.232606089921, 0.232352143387, 0.232097940164, 0.231843479974, \
0.231588762534, 0.231333787564, 0.231078554782, 0.230823063904, \
0.230567314646, 0.230311306723, 0.23005503985, 0.229798513738, \
0.229541728101, 0.22928468265, 0.229027377095, 0.228769811145, \
0.22851198451, 0.228253896895, 0.227995548009, 0.227736937556, \
0.227478065241, 0.227218930768, 0.22695953384, 0.226699874157, \
0.226439951422, 0.226179765333, 0.22591931559, 0.22565860189, \
0.22539762393, 0.225136381406, 0.224874874012, 0.224613101442, \
0.224351063389, 0.224088759545, 0.2238261896, 0.223563353244, \
0.223300250165, 0.223036880051, 0.222773242589, 0.222509337463, \
0.222245164359, 0.22198072296, 0.221716012947, 0.221451034002, \
0.221185785805, 0.220920268035, 0.220654480371, 0.220388422488, \
0.220122094062, 0.219855494768, 0.21958862428, 0.21932148227, \
0.219054068409, 0.218786382367, 0.218518423813, 0.218250192415, \
0.21798168784, 0.217712909752, 0.217443857818, 0.217174531699, \
0.216904931057, 0.216635055555, 0.21636490485, 0.216094478602, \
0.215823776468, 0.215552798104, 0.215281543164, 0.215010011303, \
0.214738202173, 0.214466115425, 0.214193750709, 0.213921107674, \
0.213648185967, 0.213374985234, 0.213101505121, 0.21282774527, \
0.212553705325, 0.212279384926, 0.212004783714, 0.211729901327, \
0.211454737402, 0.211179291575, 0.21090356348, 0.210627552752, \
0.210351259021, 0.210074681919, 0.209797821075, 0.209520676117, \
0.209243246671, 0.208965532363, 0.208687532816, 0.208409247653, \
0.208130676495, 0.207851818961, 0.20757267467, 0.207293243239, \
0.207013524284, 0.206733517417, 0.206453222252, 0.2061726384, \
0.205891765471, 0.205610603072, 0.205329150811, 0.205047408293, \
0.204765375121, 0.204483050898, 0.204200435225, 0.203917527701, \
0.203634327924, 0.20335083549, 0.203067049994, 0.202782971029, \
0.202498598186, 0.202213931056, 0.201928969228, 0.201643712287, \
0.20135815982, 0.20107231141, 0.20078616664, 0.200499725089, \
0.200212986337, 0.199925949961, 0.199638615537, 0.199350982639, \
0.199063050838, 0.198774819706, 0.198486288812, 0.198197457722, \
0.197908326003, 0.197618893218, 0.197329158929, 0.197039122697, \
0.196748784081, 0.196458142637, 0.196167197921, 0.195875949485, \
0.195584396882, 0.195292539661, 0.19500037737, 0.194707909556, \
0.194415135763, 0.194122055533, 0.193828668408, 0.193534973925, \
0.193240971621, 0.192946661033, 0.192652041693, 0.192357113132, \
0.192061874879, 0.191766326463, 0.191470467408, 0.191174297238, \
0.190877815475, 0.190581021638, 0.190283915244, 0.189986495811, \
0.18968876285, 0.189390715873, 0.189092354391, 0.188793677911, \
0.188494685937, 0.188195377973, 0.187895753521, 0.18759581208, \
0.187295553146, 0.186994976215, 0.186694080779, 0.186392866329, \
0.186091332353, 0.185789478338, 0.185487303768, 0.185184808123, \
0.184881990885, 0.18457885153, 0.184275389534, 0.183971604368, \
0.183667495505, 0.183363062412, 0.183058304554, 0.182753221396, \
0.182447812399, 0.182142077022, 0.18183601472, 0.181529624949, \
0.18122290716, 0.180915860803, 0.180608485323, 0.180300780166, \
0.179992744774, 0.179684378585, 0.179375681037, 0.179066651564, \
0.178757289598, 0.178447594568, 0.178137565902, 0.177827203022, \
0.177516505351, 0.177205472308, 0.176894103309, 0.176582397766, \
0.176270355092, 0.175957974695, 0.175645255979, 0.175332198348, \
0.175018801202, 0.174705063937, 0.174390985949, 0.174076566628, \
0.173761805364, 0.173446701542, 0.173131254545, 0.172815463755, \
0.172499328546, 0.172182848295, 0.171866022373, 0.171548850146, \
0.171231330982, 0.170913464242, 0.170595249286, 0.170276685469, \
0.169957772145, 0.169638508664, 0.169318894373, 0.168998928615, \
0.168678610731, 0.168357940058, 0.16803691593, 0.167715537679, \
0.167393804631, 0.167071716111, 0.16674927144, 0.166426469936, \
0.166103310913, 0.165779793681, 0.165455917548, 0.165131681818, \
0.164807085792, 0.164482128766, 0.164156810034, 0.163831128886, \
0.163505084608, 0.163178676483, 0.162851903789, 0.162524765803, \
0.162197261796, 0.161869391036, 0.161541152788, 0.161212546311, \
0.160883570863, 0.160554225695, 0.160224510058, 0.159894423197, \
0.159563964351, 0.159233132759, 0.158901927654, 0.158570348265, \
0.158238393817, 0.15790606353, 0.157573356623, 0.157240272307, \
0.156906809791, 0.156572968279, 0.156238746972, 0.155904145066, \
0.155569161751, 0.155233796214, 0.15489804764, 0.154561915205, \
0.154225398084, 0.153888495445, 0.153551206454, 0.153213530271, \
0.152875466051, 0.152537012946, 0.152198170101, 0.151858936658, \
0.151519311753, 0.151179294519, 0.150838884082, 0.150498079565, \
0.150156880085, 0.149815284753, 0.149473292678, 0.149130902961, \
0.148788114699, 0.148444926984, 0.148101338903, 0.147757349537, \
0.147412957962, 0.147068163249, 0.146722964463, 0.146377360665, \
0.146031350908, 0.145684934242, 0.14533810971, 0.144990876349, \
0.144643233193, 0.144295179266, 0.14394671359, 0.143597835179, \
0.143248543043, 0.142898836183, 0.142548713597, 0.142198174276, \
0.141847217205, 0.141495841361, 0.141144045718, 0.140791829241, \
0.14043919089, 0.140086129618, 0.139732644373, 0.139378734095, \
0.139024397717, 0.138669634167, 0.138314442365, 0.137958821225, \
0.137602769653, 0.137246286551, 0.13688937081, 0.136532021316, \
0.13617423695, 0.135816016581, 0.135457359075, 0.13509826329, \
0.134738728074, 0.13437875227, 0.134018334713, 0.13365747423, \
0.133296169642, 0.132934419758, 0.132572223385, 0.132209579317, \
0.131846486342, 0.131482943242, 0.131118948787, 0.130754501741, \
0.13038960086, 0.130024244891, 0.129658432571, 0.129292162631, \
0.128925433793, 0.128558244767, 0.128190594259, 0.127822480963, \
0.127453903564, 0.127084860738, 0.126715351154, 0.126345373469, \
0.125974926331, 0.12560400838, 0.125232618245, 0.124860754545, \
0.12448841589, 0.124115600881, 0.123742308106, 0.123368536146, \
0.12299428357, 0.122619548937, 0.122244330795, 0.121868627682, \
0.121492438126, 0.121115760642, 0.120738593735, 0.120360935901, \
0.119982785621, 0.119604141367, 0.1192250016, 0.118845364768, \
0.118465229306, 0.118084593641, 0.117703456185, 0.117321815339, \
0.11693966949, 0.116557017014, 0.116173856276, 0.115790185624, \
0.115406003396, 0.115021307918, 0.1146360975, 0.11425037044, \
0.113864125022, 0.113477359516, 0.113090072181, 0.112702261257, \
0.112313924974, 0.111925061545, 0.111535669171, 0.111145746034, \
0.110755290307, 0.110364300142, 0.109972773679, 0.109580709043, \
0.10918810434, 0.108794957663, 0.108401267088, 0.108007030675, \
0.107612246467, 0.10721691249, 0.106821026753, 0.106424587249, \
0.106027591953, 0.10563003882, 0.105231925791, 0.104833250787, \
0.10443401171, 0.104034206444, 0.103633832853, 0.103232888785, \
0.102831372066, 0.102429280502, 0.102026611881, 0.101623363968, \
0.10121953451, 0.100815121233, 0.100410121841, 0.100004534016, \
0.0995983554201, 0.0991915836918, 0.098784216448, 0.098376251283, \
0.0979676857678, 0.0975585174503, 0.0971487438546, 0.0967383624809, \
0.0963273708049, 0.0959157662776, 0.095503546325, 0.0950907083474, \
0.0946772497194, 0.0942631677893, 0.0938484598786, 0.0934331232819, \
0.0930171552661, 0.0926005530704, 0.0921833139053, 0.0917654349529, \
0.0913469133655, 0.0909277462662, 0.0905079307475, 0.0900874638713, \
0.0896663426683, 0.0892445641375, 0.0888221252457, 0.0883990229268, \
0.0879752540816, 0.0875508155769, 0.0871257042449, 0.0866999168832, \
0.0862734502535, 0.0858463010813, 0.0854184660553, 0.0849899418268, \
0.084560725009, 0.0841308121761, 0.0837001998631, 0.0832688845646, \
0.0828368627345, 0.082404130785, 0.0819706850859, 0.081536521964, \
0.081101637702, 0.080666028538, 0.0802296906646, 0.0797926202279, \
0.0793548133268, 0.078916266012, 0.0784769742851, 0.0780369340979, \
0.077596141351, 0.0771545918932, 0.0767122815202, 0.076269205974, \
0.0758253609412, 0.0753807420524, 0.0749353448811, 0.074489164942, \
0.0740421976905, 0.0735944385211, 0.0731458827663, 0.0726965256949, \
0.0722463625114, 0.0717953883543, 0.0713435982942, 0.0708909873334, \
0.0704375504035, 0.0699832823643, 0.0695281780022, 0.0690722320286, \
0.0686154390782, 0.0681577937072, 0.0676992903918, 0.0672399235259, \
0.0667796874201, 0.0663185762986, 0.0658565842984, 0.0653937054663, \
0.0649299337573, 0.0644652630325, 0.0639996870564, 0.0635331994951, \
0.0630657939132, 0.0625974637723, 0.0621282024276, 0.0616580031256, \
0.0611868590013, 0.0607147630756, 0.060241708252, 0.0597676873138, \
0.059292692921, 0.058816717607, 0.0583397537752, 0.0578617936955, \
0.0573828295011, 0.0569028531841, 0.0564218565922, 0.0559398314244, \
0.0554567692269, 0.054972661389, 0.0544874991381, 0.0540012735356, \
0.053513975472, 0.0530255956613, 0.0525361246366, 0.052045552744, \
0.0515538701374, 0.0510610667724, 0.0505671324, 0.0500720565609, \
0.0495758285778, 0.0490784375496, 0.0485798723434, 0.048080121587, \
0.0475791736615, 0.0470770166928, 0.0465736385427, 0.0460690268006, \
0.0455631687734, 0.0450560514761, 0.0445476616215, 0.0440379856092, \
0.0435270095144, 0.0430147190762, 0.0425010996849, 0.0419861363691, \
0.0414698137822, 0.0409521161875, 0.0404330274434, 0.0399125309876, \
0.0393906098197, 0.0388672464844, 0.0383424230521, 0.0378161210995, \
0.037288321689, 0.0367590053466, 0.0362281520387, 0.0356957411476, \
0.0351617514458, 0.034626161068, 0.0340889474822, 0.0335500874589, \
0.033009557038, 0.032467331494, 0.0319233852988, 0.0313776920821, \
0.0308302245895, 0.0302809546372, 0.0297298530646, 0.0291768896823, \
0.0286220332182, 0.0280652512583, 0.0275065101844, 0.0269457751062, \
0.02638300979, 0.0258181765805, 0.0252512363179, 0.0246821482486, \
0.0241108699283, 0.0235373571186, 0.0229615636755, 0.0223834414285, \
0.0218029400512, 0.0212200069209, 0.0206345869682, 0.0200466225144, \
0.0194560530967, 0.0188628152806, 0.0182668424588, 0.0176680646372, \
0.0170664082066, 0.016461795704, 0.0158541455627, 0.015243371858, \
0.0146293840521, 0.0140120867521, 0.0133913794939, 0.0127671565806, \
0.0121393070128, 0.0115077145716, 0.0108722581496, 0.0102328124764, \
0.00958924947315, 0.00894144061242, 0.00828926090091, 0.00763259551896, \
0.00697135089651, 0.00630547338383, 0.00563498133279, 0.00496002177703, \
0.00428097439553, 0.00359865177269, 0.00291471045349, 0.00223256762114, \
0.00155968193081 ])
ncell = array([
0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 2, \
2, 2, 2, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, \
5, 5, 5, 5, 6, 6, 6, 6, 6, 6, 6, 7, 7, 7, 7, 7, 7, 7, 8, 8, \
8, 8, 8, 8, 8, 9, 9, 9, 9, 9, 9, 9, 10, 10, 10, 10, 10, 10, 10, 11, \
11, 11, 11, 11, 11, 12, 12, 12, 12, 12, 12, 12, 13, 13, 13, 13, 13, 13, 13, 14, \
14, 14, 14, 14, 14, 15, 15, 15, 15, 15, 15, 15, 16, 16, 16, 16, 16, 16, 17, 17, \
17, 17, 17, 17, 17, 18, 18, 18, 18, 18, 18, 19, 19, 19, 19, 19, 19, 20, 20, 20, \
20, 20, 20, 20, 21, 21, 21, 21, 21, 21, 22, 22, 22, 22, 22, 22, 23, 23, 23, 23, \
23, 23, 24, 24, 24, 24, 24, 24, 25, 25, 25, 25, 25, 25, 26, 26, 26, 26, 26, 26, \
27, 27, 27, 27, 27, 27, 28, 28, 28, 28, 28, 28, 29, 29, 29, 29, 29, 29, 30, 30, \
30, 30, 30, 30, 31, 31, 31, 31, 31, 31, 32, 32, 32, 32, 32, 32, 33, 33, 33, 33, \
33, 34, 34, 34, 34, 34, 34, 35, 35, 35, 35, 35, 35, 36, 36, 36, 36, 36, 37, 37, \
37, 37, 37, 37, 38, 38, 38, 38, 38, 39, 39, 39, 39, 39, 39, 40, 40, 40, 40, 40, \
40, 41, 41, 41, 41, 41, 42, 42, 42, 42, 42, 43, 43, 43, 43, 43, 43, 44, 44, 44, \
44, 44, 45, 45, 45, 45, 45, 46, 46, 46, 46, 46, 46, 47, 47, 47, 47, 47, 48, 48, \
48, 48, 48, 49, 49, 49, 49, 49, 50, 50, 50, 50, 50, 50, 51, 51, 51, 51, 51, 52, \
52, 52, 52, 52, 53, 53, 53, 53, 53, 54, 54, 54, 54, 54, 55, 55, 55, 55, 55, 56, \
56, 56, 56, 56, 57, 57, 57, 57, 57, 58, 58, 58, 58, 58, 59, 59, 59, 59, 59, 60, \
60, 60, 60, 60, 61, 61, 61, 61, 61, 62, 62, 62, 62, 63, 63, 63, 63, 63, 64, 64, \
64, 64, 64, 65, 65, 65, 65, 65, 66, 66, 66, 66, 66, 67, 67, 67, 67, 68, 68, 68, \
68, 68, 69, 69, 69, 69, 69, 70, 70, 70, 70, 71, 71, 71, 71, 71, 72, 72, 72, 72, \
72, 73, 73, 73, 73, 74, 74, 74, 74, 74, 75, 75, 75, 75, 76, 76, 76, 76, 76, 77, \
77, 77, 77, 78, 78, 78, 78, 78, 79, 79, 79, 79, 80, 80, 80, 80, 80, 81, 81, 81, \
81, 82, 82, 82, 82, 83, 83, 83, 83, 83, 84, 84, 84, 84, 85, 85, 85, 85, 86, 86, \
86, 86, 86, 87, 87, 87, 87, 88, 88, 88, 88, 89, 89, 89, 89, 89, 90, 90, 90, 90, \
91, 91, 91, 91, 92, 92, 92, 92, 93, 93, 93, 93, 94, 94, 94, 94, 95, 95, 95, 95, \
95, 96, 96, 96, 96, 97, 97, 97, 97, 98, 98, 98, 98, 99, 99, 99, 99, 100, 100, 100, \
100, 101, 101, 101, 101, 102, 102, 102, 102, 103, 103, 103, 103, 104, 104, 104, 104, 105, 105, 105, \
105, 106, 106, 106, 106, 107, 107, 107, 107, 108, 108, 108, 108, 109, 109, 109, 109, 110, 110, 110, \
110, 111, 111, 111, 111, 112, 112, 112, 113, 113, 113, 113, 114, 114, 114, 114, 115, 115, 115, 115, \
116, 116, 116, 116, 117, 117, 117, 117, 118, 118, 118, 119, 119, 119, 119, 120, 120, 120, 120, 121, \
121, 121, 121, 122, 122, 122, 123, 123, 123, 123, 124, 124, 124, 124, 125, 125, 125, 126, 126, 126, \
126, 127, 127, 127, 127, 128, 128, 128, 129, 129, 129, 129, 130, 130, 130, 130, 131, 131, 131, 132, \
132, 132, 132, 133, 133, 133, 134, 134, 134, 134, 135, 135, 135, 136, 136, 136, 136, 137, 137, 137, \
137, 138, 138, 138, 139, 139, 139, 139, 140, 140, 140, 141, 141, 141, 141, 142, 142, 142, 143, 143, \
143, 144, 144, 144, 144, 145, 145, 145, 146, 146, 146, 146, 147, 147, 147, 148, 148, 148, 148, 149, \
149, 149, 150, 150, 150, 151, 151, 151, 151, 152, 152, 152, 153, 153, 153, 154, 154, 154, 154, 155, \
155, 155, 156, 156, 156, 157, 157, 157, 157, 158, 158, 158, 159, 159, 159, 160, 160, 160, 160, 161, \
161, 161, 162, 162, 162, 163, 163, 163, 164, 164, 164, 164, 165, 165, 165, 166, 166, 166, 167, 167, \
167, 168, 168, 168, 169, 169, 169, 169, 170, 170, 170, 171, 171, 171, 172, 172, 172, 173, 173, 173, \
174, 174, 174, 175, 175, 175, 176, 176, 176, 176, 177, 177, 177, 178, 178, 178, 179, 179, 179, 180, \
180, 180, 181, 181, 181, 182, 182, 182, 183, 183, 183, 184, 184, 184, 185, 185, 185, 186, 186, 186, \
187, 187, 187, 188, 188, 188, 189, 189, 189, 190, 190, 190, 191, 191, 191, 192, 192, 192, 193, 193, \
193, 194, 194, 194, 195, 195, 195, 196, 196, 196, 197, 197, 197, 198, 198, 198, 199, 199, 199, 200, \
200, 200, 201, 201, 201, 202, 202, 202, 203, 203, 203, 204, 204, 204, 205, 205, 206, 206, 206, 207, \
207, 207, 208, 208, 208, 209, 209, 209, 210, 210, 210, 211, 211, 211, 212, 212, 213, 213, 213, 214, \
214, 214, 215, 215, 215, 216, 216, 216, 217, 217, 217, 218, 218, 219, 219, 219, 220, 220, 220, 221, \
221, 221, 222, 222, 223, 223, 223, 224, 224, 224, 225, 225, 225, 226, 226, 227, 227, 227, 228, 228, \
228, 229, 229, 229, 230, 230, 231, 231, 231, 232, 232, 232, 233, 233, 234, 234, 234, 235, 235, 235, \
236, 236, 237, 237, 237, 238, 238, 238, 239, 239, 240, 240, 240, 241, 241, 241, 242, 242, 243, 243, \
243, 244, 244, 244, 245, 245, 246, 246, 246, 247, 247, 248, 248, 248, 249, 249, 249, 250, 250, 251, \
251, 251, 252, 252, 253, 253, 253, 254, 254, 254, 255, 255, 256, 256, 256, 257, 257, 258, 258, 258, \
259, 259, 260, 260, 260, 261, 261, 262, 262, 262, 263, 263, 264, 264, 264, 265, 265, 266, 266, 266, \
267, 267, 268, 268, 268, 269, 269, 270, 270, 270, 271, 271, 272, 272, 272, 273, 273, 274, 274, 274, \
275, 275, 276, 276, 276, 277, 277, 278, 278, 278, 279, 279, 280, 280, 280, 281, 281, 282, 282, 283, \
283, 283, 284, 284, 285, 285, 285, 286, 286, 287, 287, 288, 288, 288, 289, 289, 290, 290, 290, 291, \
291, 292, 292, 293, 293, 293, 294, 294, 295, 295, 296, 296, 296, 297, 297, 298, 298, 298, 299, 299, \
300, 300, 301, 301, 301, 302, 302, 303, 303, 304, 304, 304, 305, 305, 306, 306, 307, 307, 307, 308, \
308, 309, 309, 310, 310, 311, 311, 311, 312, 312, 313, 313, 314, 314, 314, 315, 315, 316, 316, 317, \
317, 318, 318, 318, 319, 319, 320, 320, 321, 321, 322, 322, 322, 323, 323, 324, 324, 325, 325, 326, \
326, 326, 327, 327, 328, 328, 329, 329, 330, 330, 330, 331, 331, 332, 332, 333, 333, 334, 334, 335, \
335, 335, 336, 336, 337, 337, 338, 338, 339, 339, 340, 340, 340, 341, 341, 342, 342, 343, 343, 344, \
344, 345, 345, 346, 346, 346, 347, 347, 348, 348, 349, 349, 350, 350, 351, 351, 352, 352, 353, 353, \
353, 354, 354, 355, 355, 356, 356, 357, 357, 358, 358, 359, 359, 360, 360, 361, 361, 361, 362, 362, \
363, 363, 364, 364, 365, 365, 366, 366, 367, 367, 368, 368, 369, 369, 370, 370, 371, 371, 371, 372, \
372, 373, 373, 374, 374, 375, 375, 376, 376, 377, 377, 378, 378, 379, 379, 380, 380, 381, 381, 382, \
382, 383, 383, 384, 384, 385, 385, 386, 386, 387, 387, 388, 388, 389, 389, 390, 390, 391, 391, 392, \
392, 393, 393, 394, 394, 395, 395, 396, 396, 397, 397, 398, 398, 399, 399, 400, 400, 401, 401, 402, \
402, 403, 403, 404, 404, 405, 405, 406, 406, 407, 407, 408, 408, 409, 409, 410, 410, 411, 411, 412, \
412, 413, 413, 414, 414, 415, 415, 416, 416, 417, 417, 418, 418, 419, 419, 420, 420, 421, 421, 422, \
423, 423, 424, 424, 425, 425, 426, 426, 427, 427, 428, 428, 429, 429, 430, 430, 431, 431, 432, 432, \
433, 433, 434, 435, 435, 436, 436, 437, 437, 438, 438, 439, 439, 440, 440, 441, 441, 442, 442, 443, \
444, 444, 445, 445, 446, 446, 447, 447, 448, 448, 449, 449, 450, 450, 451, 452, 452, 453, 453, 454, \
454, 455, 455, 456, 456, 457, 458, 458, 459, 459, 460, 460, 461, 461, 462, 462, 463, 464, 464, 465, \
465, 466, 466, 467, 467, 468, 468, 469, 470, 470, 471, 471, 472, 472, 473, 473, 474, 475, 475, 476, \
476, 477, 477, 478, 478, 479, 480, 480, 481, 481, 482, 482, 483, 483, 484, 485, 485, 486, 486, 487, \
487, 488, 488, 489, 490, 490, 491, 491, 492, 492, 493, 494, 494, 495, 495, 496, 496, 497, 498, 498, \
499, 499, 500, 500, 501, 502, 502, 503, 503, 504, 504, 505, 506, 506, 507, 507, 508, 508, 509, 510, \
510, 511, 511, 512, 512, 513, 514, 514, 515, 515, 516, 517, 517, 518, 518, 519, 519, 520, 521, 521, \
522, 522, 523, 524, 524, 525, 525, 526, 526, 527, 528, 528, 529, 529, 530, 531, 531, 532, 532, 533, \
534, 534, 535, 535, 536, 537, 537, 538, 538, 539, 539, 540, 541, 541, 542, 542, 543, 544, 544, 545, \
545, 546, 547, 547, 548, 548, 549, 550, 550, 551, 551, 552, 553, 553, 554, 555, 555, 556, 556, 557, \
558, 558, 559, 559, 560, 561, 561, 562, 562, 563, 564, 564, 565, 565, 566, 567, 567, 568, 569, 569, \
570, 570, 571, 572, 572, 573, 573, 574, 575, 575, 576, 577, 577, 578, 578, 579, 580, 580, 581, 582, \
582, 583, 583, 584, 585, 585, 586, 586, 587, 588, 588, 589, 590, 590, 591, 591, 592, 593, 593, 594, \
595, 595, 596, 597, 597, 598, 598, 599, 600, 600, 601, 602, 602, 603, 603, 604, 605, 605, 606, 607, \
607, 608, 609, 609, 610, 610, 611, 612, 612, 613, 614, 614, 615, 616, 616, 617, 618, 618, 619, 619, \
620, 621, 621, 622, 623, 623, 624, 625, 625, 626, 627, 627, 628, 629, 629, 630, 630, 631, 632, 632, \
633, 634, 634, 635, 636, 636, 637, 638, 638, 639, 640, 640, 641, 642, 642, 643, 644, 644, 645, 646, \
646, 647, 647, 648, 649, 649, 650, 651, 651, 652, 653, 653, 654, 655, 655, 656, 657, 657, 658, 659, \
659, 660, 661, 661, 662, 663, 663, 664, 665, 665, 666, 667, 667, 668, 669, 669, 670, 671, 671, 672, \
673, 674, 674, 675, 676, 676, 677, 678, 678, 679, 680, 680, 681, 682, 682, 683, 684, 684, 685, 686, \
686, 687, 688, 688, 689, 690, 690, 691, 692, 693, 693, 694, 695, 695, 696, 697, 697, 698, 699, 699, \
700, 701, 701, 702, 703, 704, 704, 705, 706, 706, 707, 708, 708, 709, 710, 710, 711, 712, 713, 713, \
714, 715, 715, 716, 717, 717, 718, 719, 719, 720, 721, 722, 722, 723, 724, 724, 725, 726, 726, 727, \
728, 729, 729, 730, 731, 731, 732, 733, 734, 734, 735, 736, 736, 737, 738, 738, 739, 740, 741, 741, \
742, 743, 743, 744, 745, 746, 746, 747, 748, 748, 749, 750, 751, 751, 752, 753, 753, 754, 755, 756, \
756, 757, 758, 758, 759, 760, 761, 761, 762, 763, 763, 764, 765, 766, 766, 767, 768, 769, 769, 770, \
771, 771, 772, 773, 774, 774, 775, 776, 777, 777, 778, 779, 779, 780, 781, 782, 782, 783, 784, 785, \
785, 786, 787, 787, 788, 789, 790, 790, 791, 792, 793, 793, 794, 795, 796, 796, 797, 798, 798, 799, \
800, 801, 801, 802, 803, 804, 804, 805, 806, 807, 807, 808, 809, 810, 810, 811, 812, 813, 813, 814, \
815, 816, 816, 817, 818, 819, 819, 820, 821, 822, 822, 823, 824, 825, 825, 826, 827, 828, 828, 829, \
830, 831, 831, 832, 833, 834, 834, 835, 836, 837, 837, 838, 839, 840, 840, 841, 842, 843, 843, 844, \
845, 846, 846, 847, 848, 849, 849, 850, 851, 852, 853, 853, 854, 855, 856, 856, 857, 858, 859, 859, \
860, 861, 862, 862, 863, 864, 865, 866, 866, 867, 868, 869, 869, 870, 871, 872, 872, 873, 874, 875, \
876, 876, 877, 878, 879, 879, 880, 881, 882, 883, 883, 884, 885, 886, 886, 887, 888, 889, 890, 890, \
891, 892, 893, 893, 894, 895, 896, 897, 897, 898, 899, 900, 900, 901, 902, 903, 904, 904, 905, 906, \
907, 908, 908, 909, 910, 911, 911, 912, 913, 914, 915, 915, 916, 917, 918, 919, 919, 920, 921, 922, \
923, 923, 924, 925, 926, 927, 927, 928, 929, 930, 931, 931, 932, 933, 934, 935, 935, 936, 937, 938, \
939, 939, 940, 941, 942, 943, 943, 944, 945, 946, 947, 947, 948, 949, 950, 951, 951, 952, 953, 954, \
955, 955, 956, 957, 958, 959, 959, 960, 961, 962, 963, 963, 964, 965, 966, 967, 968, 968, 969, 970, \
971, 972, 972, 973, 974, 975, 976, 976, 977, 978, 979, 980, 981, 981, 982, 983, 984, 985, 985, 986, \
987, 988, 989, 990, 990, 991, 992, 993, 994, 994, 995, 996, 997, 998, 999, 999, 1000, 1001, 1002, 1003, \
1004, 1004, 1005, 1006, 1007, 1008, 1008, 1009, 1010, 1011, 1012, 1013, 1013, 1014, 1015, 1016, 1017, 1018, 1018, 1019, \
1020, 1021, 1022, 1023, 1023, 1024, 1025, 1026, 1027, 1028, 1028, 1029, 1030, 1031, 1032, 1033, 1033, 1034, 1035, 1036, \
1037, 1038, 1038, 1039, 1040, 1041, 1042, 1043, 1044, 1044, 1045, 1046, 1047, 1048, 1049, 1049, 1050, 1051, 1052, 1053, \
1054, 1054, 1055, 1056, 1057, 1058, 1059, 1060, 1060, 1061, 1062, 1063, 1064, 1065, 1065, 1066, 1067, 1068, 1069, 1070, \
1071, 1071, 1072, 1073, 1074, 1075, 1076, 1077, 1077, 1078, 1079, 1080, 1081, 1082, 1082, 1083, 1084, 1085, 1086, 1087, \
1088, 1088, 1089, 1090, 1091, 1092, 1093, 1094, 1094, 1095, 1096, 1097, 1098, 1099, 1100, 1100, 1101, 1102, 1103, 1104, \
1105, 1106, 1107, 1107, 1108, 1109, 1110, 1111, 1112, 1113, 1113, 1114, 1115, 1116, 1117, 1118, 1119, 1119, 1120, 1121, \
1122, 1123, 1124, 1125, 1126, 1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133, 1133, 1134, 1135, 1136, 1137, 1138, 1139, \
1139, 1140, 1141, 1142, 1143, 1144, 1145, 1146, 1146, 1147, 1148, 1149, 1150, 1151, 1152, 1153, 1153, 1154, 1155, 1156, \
1157, 1158, 1159, 1160, 1161, 1161, 1162, 1163, 1164, 1165, 1166, 1167, 1168, 1168, 1169, 1170, 1171, 1172, 1173, 1174, \
1175, 1176, 1176, 1177, 1178, 1179, 1180, 1181, 1182, 1183, 1183, 1184, 1185, 1186, 1187, 1188, 1189, 1190, 1191, 1191, \
1192, 1193, 1194, 1195, 1196, 1197, 1198, 1199, 1199, 1200, 1201, 1202, 1203, 1204, 1205, 1206, 1207, 1208, 1208, 1209, \
1210, 1211, 1212, 1213, 1214, 1215, 1216, 1216, 1217, 1218, 1219, 1220, 1221, 1222, 1223, 1224, 1225, 1225, 1226, 1227, \
1228, 1229, 1230, 1231, 1232, 1233, 1234, 1234, 1235, 1236, 1237, 1238, 1239, 1240, 1241, 1242, 1243, 1243, 1244, 1245, \
1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253, 1253, 1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261, 1262, 1263, 1263, \
1264, 1265, 1266, 1267, 1268, 1269, 1270, 1271, 1272, 1273, 1273, 1274, 1275, 1276, 1277, 1278, 1279, 1280, 1281, 1282, \
1283, 1283, 1284, 1285, 1286, 1287, 1288, 1289, 1290, 1291, 1292, 1293, 1294, 1294, 1295, 1296, 1297, 1298, 1299, 1300, \
1301, 1302, 1303, 1304, 1305, 1305, 1306, 1307, 1308, 1309, 1310, 1311, 1312, 1313, 1314, 1315, 1316, 1317, 1317, 1318, \
1319, 1320, 1321, 1322, 1323, 1324, 1325, 1326, 1327, 1328, 1329, 1329, 1330, 1331, 1332, 1333, 1334, 1335, 1336, 1337, \
1338, 1339, 1340, 1341, 1342, 1342, 1343, 1344, 1345, 1346, 1347, 1348, 1349, 1350, 1351, 1352, 1353, 1354, 1355, 1356, \
1356, 1357, 1358, 1359, 1360, 1361, 1362, 1363, 1364, 1365, 1366, 1367, 1368, 1369, 1370, 1370, 1371, 1372, 1373, 1374, \
1375, 1376, 1377, 1378, 1379, 1380, 1381, 1382, 1383, 1384, 1385, 1385, 1386, 1387, 1388, 1389, 1390, 1391, 1392, 1393, \
1394, 1395, 1396, 1397, 1398, 1399, 1400, 1401, 1401, 1402, 1403, 1404, 1405, 1406, 1407, 1408, 1409, 1410, 1411, 1412, \
1413, 1414, 1415, 1416, 1417, 1417, 1418, 1419, 1420, 1421, 1422, 1423, 1424, 1425, 1426, 1427, 1428, 1429, 1430, 1431, \
1432, 1433, 1434, 1435, 1435, 1436, 1437, 1438, 1439, 1440, 1441, 1442, 1443, 1444, 1445, 1446, 1447, 1448, 1449, 1450, \
1451, 1452, 1453, 1454, 1455, 1455, 1456, 1457, 1458, 1459, 1460, 1461, 1462, 1463, 1464, 1465, 1466, 1467, 1468, 1469, \
1470, 1471, 1472, 1473, 1474, 1475, 1476, 1476, 1477, 1478, 1479, 1480, 1481, 1482, 1483, 1484, 1485, 1486, 1487, 1488, \
1489, 1490, 1491, 1492, 1493, 1494, 1495, 1496, 1497, 1498, 1499, 1500, 1500, 1501, 1502, 1503, 1504, 1505, 1506, 1507, \
1508, 1509, 1510, 1511, 1512, 1513, 1514, 1515, 1516, 1517, 1518, 1519, 1520, 1521, 1522, 1523, 1524, 1525, 1526, 1526, \
1527, 1528, 1529, 1530, 1531, 1532, 1533, 1534, 1535, 1536, 1537, 1538, 1539, 1540, 1541, 1542, 1543, 1544, 1545, 1546, \
1547, 1548, 1549, 1550, 1551, 1552, 1553, 1554, 1555, 1556, 1556, 1557, 1558, 1559, 1560, 1561, 1562, 1563, 1564, 1565, \
1566, 1567, 1568, 1569, 1570, 1571, 1572, 1573, 1574, 1575, 1576, 1577, 1578, 1579, 1580, 1581, 1582, 1583, 1584, 1585, \
1586, 1587, 1588, 1589, 1590, 1591, 1592, 1592, 1593, 1594, 1595, 1596, 1597, 1598, 1599, 1600, 1601, 1602, 1603, 1604, \
1605, 1606, 1607, 1608, 1609, 1610, 1611, 1612, 1613, 1614, 1615, 1616, 1617, 1618, 1619, 1620, 1621, 1622, 1623, 1624, \
1625, 1626, 1627, 1628, 1629, 1630, 1631, 1632, 1633, 1634, 1635, 1636, 1637, 1637, 1638, 1639, 1640, 1641, 1642, 1643, \
1644, 1645, 1646, 1647, 1648, 1649, 1650, 1651, 1652, 1653, 1654, 1655, 1656, 1657, 1658, 1659, 1660, 1661, 1662, 1663, \
1664, 1665, 1666, 1667, 1668, 1669, 1670, 1671, 1672, 1673, 1674, 1675, 1676, 1677, 1678, 1679, 1680, 1681, 1682, 1683, \
1684, 1685, 1686, 1687, 1688, 1689, 1690, 1691, 1692, 1693, 1694, 1695, 1696, 1697, 1698, 1699, 1700, 1701, 1702, 1702, \
1703, 1704, 1705, 1706, 1707, 1708, 1709, 1710, 1711, 1712, 1713, 1714, 1715, 1716, 1717, 1718, 1719, 1720, 1721, 1722, \
1723, 1724, 1725, 1726, 1727, 1728, 1729, 1730, 1731, 1732, 1733, 1734, 1735, 1736, 1737, 1738, 1739, 1740, 1741, 1742, \
1743, 1744, 1745, 1746, 1747, 1748, 1749, 1750, 1751, 1752, 1753, 1754, 1755, 1756, 1757, 1758, 1759, 1760, 1761, 1762, \
1763, 1764, 1765, 1766, 1767, 1768, 1769, 1770, 1771, 1772, 1773, 1774, 1775, 1776, 1777, 1778, 1779, 1780, 1781, 1782, \
1783, 1784, 1785, 1786, 1787, 1788, 1789, 1790, 1791, 1792, 1793, 1794, 1795, 1796, 1797, 1798, 1799, 1800, 1801, 1802, \
1803, 1804, 1805, 1806, 1807, 1808, 1809, 1810, 1811, 1812, 1813, 1814, 1815, 1816, 1817, 1818, 1819, 1820, 1821, 1822, \
1823, 1824, 1825, 1826, 1827, 1828, 1829, 1830, 1831, 1832, 1833, 1834, 1835, 1836, 1837, 1838, 1839, 1840, 1841, 1842, \
1843, 1844, 1845, 1846, 1847, 1848, 1849, 1850, 1851, 1852, 1853, 1854, 1855, 1856, 1857, 1858, 1859, 1860, 1861, 1862, \
1863, 1864, 1865, 1866, 1867, 1868, 1869, 1870, 1871, 1872, 1873, 1874, 1875, 1876, 1877, 1878, 1879, 1880, 1881, 1882, \
1883, 1884, 1885, 1886, 1887, 1888, 1889, 1890, 1891, 1892, 1893, 1894, 1895, 1896, 1897, 1898, 1899, 1900, 1901, 1902, \
1903, 1904, 1905, 1906, 1907, 1908, 1909, 1910, 1911, 1912, 1913, 1914, 1915, 1916, 1917, 1918, 1919, 1920, 1921, 1922, \
1923, 1924, 1925, 1926, 1927, 1928, 1929, 1930, 1931, 1932, 1933, 1934, 1935, 1936, 1937, 1938, 1939, 1940, 1941, 1942, \
1943, 1944, 1945, 1946, 1947, 1948, 1949, 1950, 1951, 1952, 1953, 1954, 1955, 1955, 1956, 1957, 1958, 1959, 1960, 1961, \
1962, 1963, 1964, 1965, 1966, 1967, 1968, 1969, 1970, 1971, 1972, 1973, 1974, 1975, 1976, 1977, 1978, 1979, 1980, 1981, \
1982, 1983, 1984, 1985, 1986, 1987, 1988, 1989, 1990, 1991, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, \
2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013, 2014, 2015, 2016, 2017, 2018, 2019, 2020, 2021, \
2022, 2023, 2024, 2025, 2026, 2027, 2028, 2029, 2030, 2031, 2032, 2033, 2034, 2035, 2036, 2037, 2038, 2039, 2040, 2041, \
2042, 2043, 2044, 2045, 2046, 2047, 2048, 2049, 2050, 2051, 2052, 2053, 2054, 2055, 2056, 2057, 2058, 2059, 2060, 2061, \
2062, 2063, 2064, 2065, 2066, 2067, 2068, 2069, 2070, 2071, 2072, 2073, 2074, 2075, 2076, 2077, 2078, 2079, 2080, 2081, \
2082, 2083, 2084, 2085, 2086, 2087, 2088, 2089, 2090, 2091, 2092, 2093, 2094, 2095, 2096, 2097, 2098, 2099, 2100, 2101, \
2102, 2103, 2104, 2105, 2106, 2107, 2108, 2109, 2110, 2111, 2112, 2113, 2114, 2115, 2116, 2117, 2118, 2119, 2120, 2121, \
2122, 2123, 2124, 2125, 2126, 2127, 2128, 2129, 2130, 2131, 2132, 2133, 2134, 2135, 2136, 2137, 2138, 2139, 2140, 2141, \
2142, 2143, 2144, 2145, 2146, 2147, 2148, 2149, 2150, 2151, 2152, 2153, 2154, 2155, 2156, 2157, 2158, 2159, 2160, 2161, \
2162, 2163, 2164, 2165, 2166, 2167, 2168, 2169, 2170, 2171, 2172, 2173, 2174, 2175, 2176, 2177, 2178, 2179, 2180, 2181, \
2182, 2183, 2184, 2185, 2186, 2187, 2188, 2189, 2190, 2191, 2192, 2193, 2194, 2195, 2196, 2197, 2198, 2199, 2200, 2201, \
2202, 2203, 2204, 2205, 2205, 2206, 2207, 2208, 2209, 2210, 2211, 2212, 2213, 2214, 2215, 2216, 2217, 2218, 2219, 2220, \
2221, 2222, 2223, 2224, 2225, 2226, 2227, 2228, 2229, 2230, 2231, 2232, 2233, 2234, 2235, 2236, 2237, 2238, 2239, 2240, \
2241, 2242, 2243, 2244, 2245, 2246, 2247, 2248, 2249, 2250, 2251, 2252, 2253, 2254, 2255, 2256, 2257, 2258, 2259, 2260, \
2261, 2262, 2263, 2264, 2265, 2266, 2267, 2268, 2269, 2270, 2270, 2271, 2272, 2273, 2274, 2275, 2276, 2277, 2278, 2279, \
2280, 2281, 2282, 2283, 2284, 2285, 2286, 2287, 2288, 2289, 2290, 2291, 2292, 2293, 2294, 2295, 2296, 2297, 2298, 2299, \
2300, 2301, 2302, 2303, 2304, 2305, 2306, 2307, 2308, 2309, 2310, 2311, 2312, 2313, 2314, 2315, 2315, 2316, 2317, 2318, \
2319, 2320, 2321, 2322, 2323, 2324, 2325, 2326, 2327, 2328, 2329, 2330, 2331, 2332, 2333, 2334, 2335, 2336, 2337, 2338, \
2339, 2340, 2341, 2342, 2343, 2344, 2345, 2346, 2347, 2348, 2349, 2350, 2351, 2351, 2352, 2353, 2354, 2355, 2356, 2357, \
2358, 2359, 2360, 2361, 2362, 2363, 2364, 2365, 2366, 2367, 2368, 2369, 2370, 2371, 2372, 2373, 2374, 2375, 2376, 2377, \
2378, 2379, 2380, 2381, 2381, 2382, 2383, 2384, 2385, 2386, 2387, 2388, 2389, 2390, 2391, 2392, 2393, 2394, 2395, 2396, \
2397, 2398, 2399, 2400, 2401, 2402, 2403, 2404, 2405, 2406, 2407, 2407, 2408, 2409, 2410, 2411, 2412, 2413, 2414, 2415, \
2416, 2417, 2418, 2419, 2420, 2421, 2422, 2423, 2424, 2425, 2426, 2427, 2428, 2429, 2430, 2431, 2431, 2432, 2433, 2434, \
2435, 2436, 2437, 2438, 2439, 2440, 2441, 2442, 2443, 2444, 2445, 2446, 2447, 2448, 2449, 2450, 2451, 2452, 2452, 2453, \
2454, 2455, 2456, 2457, 2458, 2459, 2460, 2461, 2462, 2463, 2464, 2465, 2466, 2467, 2468, 2469, 2470, 2471, 2472, 2472, \
2473, 2474, 2475, 2476, 2477, 2478, 2479, 2480, 2481, 2482, 2483, 2484, 2485, 2486, 2487, 2488, 2489, 2490, 2490, 2491, \
2492, 2493, 2494, 2495, 2496, 2497, 2498, 2499, 2500, 2501, 2502, 2503, 2504, 2505, 2506, 2506, 2507, 2508, 2509, 2510, \
2511, 2512, 2513, 2514, 2515, 2516, 2517, 2518, 2519, 2520, 2521, 2522, 2522, 2523, 2524, 2525, 2526, 2527, 2528, 2529, \
2530, 2531, 2532, 2533, 2534, 2535, 2536, 2537, 2537, 2538, 2539, 2540, 2541, 2542, 2543, 2544, 2545, 2546, 2547, 2548, \
2549, 2550, 2551, 2551, 2552, 2553, 2554, 2555, 2556, 2557, 2558, 2559, 2560, 2561, 2562, 2563, 2564, 2565, 2565, 2566, \
2567, 2568, 2569, 2570, 2571, 2572, 2573, 2574, 2575, 2576, 2577, 2578, 2578, 2579, 2580, 2581, 2582, 2583, 2584, 2585, \
2586, 2587, 2588, 2589, 2590, 2590, 2591, 2592, 2593, 2594, 2595, 2596, 2597, 2598, 2599, 2600, 2601, 2602, 2602, 2603, \
2604, 2605, 2606, 2607, 2608, 2609, 2610, 2611, 2612, 2613, 2613, 2614, 2615, 2616, 2617, 2618, 2619, 2620, 2621, 2622, \
2623, 2624, 2624, 2625, 2626, 2627, 2628, 2629, 2630, 2631, 2632, 2633, 2634, 2634, 2635, 2636, 2637, 2638, 2639, 2640, \
2641, 2642, 2643, 2644, 2644, 2645, 2646, 2647, 2648, 2649, 2650, 2651, 2652, 2653, 2654, 2654, 2655, 2656, 2657, 2658, \
2659, 2660, 2661, 2662, 2663, 2664, 2664, 2665, 2666, 2667, 2668, 2669, 2670, 2671, 2672, 2673, 2673, 2674, 2675, 2676, \
2677, 2678, 2679, 2680, 2681, 2682, 2682, 2683, 2684, 2685, 2686, 2687, 2688, 2689, 2690, 2691, 2691, 2692, 2693, 2694, \
2695, 2696, 2697, 2698, 2699, 2699, 2700, 2701, 2702, 2703, 2704, 2705, 2706, 2707, 2708, 2708, 2709, 2710, 2711, 2712, \
2713, 2714, 2715, 2716, 2716, 2717, 2718, 2719, 2720, 2721, 2722, 2723, 2724, 2724, 2725, 2726, 2727, 2728, 2729, 2730, \
2731, 2731, 2732, 2733, 2734, 2735, 2736, 2737, 2738, 2739, 2739, 2740, 2741, 2742, 2743, 2744, 2745, 2746, 2746, 2747, \
2748, 2749, 2750, 2751, 2752, 2753, 2754, 2754, 2755, 2756, 2757, 2758, 2759, 2760, 2761, 2761, 2762, 2763, 2764, 2765, \
2766, 2767, 2768, 2768, 2769, 2770, 2771, 2772, 2773, 2774, 2774, 2775, 2776, 2777, 2778, 2779, 2780, 2781, 2781, 2782, \
2783, 2784, 2785, 2786, 2787, 2788, 2788, 2789, 2790, 2791, 2792, 2793, 2794, 2794, 2795, 2796, 2797, 2798, 2799, 2800, \
2800, 2801, 2802, 2803, 2804, 2805, 2806, 2807, 2807, 2808, 2809, 2810, 2811, 2812, 2813, 2813, 2814, 2815, 2816, 2817, \
2818, 2819, 2819, 2820, 2821, 2822, 2823, 2824, 2825, 2825, 2826, 2827, 2828, 2829, 2830, 2830, 2831, 2832, 2833, 2834, \
2835, 2836, 2836, 2837, 2838, 2839, 2840, 2841, 2842, 2842, 2843, 2844, 2845, 2846, 2847, 2847, 2848, 2849, 2850, 2851, \
2852, 2853, 2853, 2854, 2855, 2856, 2857, 2858, 2858, 2859, 2860, 2861, 2862, 2863, 2863, 2864, 2865, 2866, 2867, 2868, \
2869, 2869, 2870, 2871, 2872, 2873, 2874, 2874, 2875, 2876, 2877, 2878, 2879, 2879, 2880, 2881, 2882, 2883, 2884, 2884, \
2885, 2886, 2887, 2888, 2889, 2889, 2890, 2891, 2892, 2893, 2894, 2894, 2895, 2896, 2897, 2898, 2899, 2899, 2900, 2901, \
2902, 2903, 2903, 2904, 2905, 2906, 2907, 2908, 2908, 2909, 2910, 2911, 2912, 2913, 2913, 2914, 2915, 2916, 2917, 2917, \
2918, 2919, 2920, 2921, 2922, 2922, 2923, 2924, 2925, 2926, 2926, 2927, 2928, 2929, 2930, 2931, 2931, 2932, 2933, 2934, \
2935, 2935, 2936, 2937, 2938, 2939, 2939, 2940, 2941, 2942, 2943, 2944, 2944, 2945, 2946, 2947, 2948, 2948, 2949, 2950, \
2951, 2952, 2952, 2953, 2954, 2955, 2956, 2956, 2957, 2958, 2959, 2960, 2960, 2961, 2962, 2963, 2964, 2964, 2965, 2966, \
2967, 2968, 2968, 2969, 2970, 2971, 2972, 2972, 2973, 2974, 2975, 2976, 2976, 2977, 2978, 2979, 2980, 2980, 2981, 2982, \
2983, 2984, 2984, 2985, 2986, 2987, 2988, 2988, 2989, 2990, 2991, 2992, 2992, 2993, 2994, 2995, 2996, 2996, 2997, 2998, \
2999, 2999, 3000, 3001, 3002, 3003, 3003, 3004, 3005, 3006, 3007, 3007, 3008, 3009, 3010, 3010, 3011, 3012, 3013, 3014, \
3014, 3015, 3016, 3017, 3017, 3018, 3019, 3020, 3021, 3021, 3022, 3023, 3024, 3024, 3025, 3026, 3027, 3028, 3028, 3029, \
3030, 3031, 3031, 3032, 3033, 3034, 3035, 3035, 3036, 3037, 3038, 3038, 3039, 3040, 3041, 3041, 3042, 3043, 3044, 3045, \
3045, 3046, 3047, 3048, 3048, 3049, 3050, 3051, 3051, 3052, 3053, 3054, 3054, 3055, 3056, 3057, 3058, 3058, 3059, 3060, \
3061, 3061, 3062, 3063, 3064, 3064, 3065, 3066, 3067, 3067, 3068, 3069, 3070, 3070, 3071, 3072, 3073, 3073, 3074, 3075, \
3076, 3076, 3077, 3078, 3079, 3079, 3080, 3081, 3082, 3082, 3083, 3084, 3085, 3085, 3086, 3087, 3088, 3088, 3089, 3090, \
3091, 3091, 3092, 3093, 3094, 3094, 3095, 3096, 3097, 3097, 3098, 3099, 3100, 3100, 3101, 3102, 3103, 3103, 3104, 3105, \
3106, 3106, 3107, 3108, 3109, 3109, 3110, 3111, 3111, 3112, 3113, 3114, 3114, 3115, 3116, 3117, 3117, 3118, 3119, 3120, \
3120, 3121, 3122, 3122, 3123, 3124, 3125, 3125, 3126, 3127, 3128, 3128, 3129, 3130, 3130, 3131, 3132, 3133, 3133, 3134, \
3135, 3136, 3136, 3137, 3138, 3138, 3139, 3140, 3141, 3141, 3142, 3143, 3144, 3144, 3145, 3146, 3146, 3147, 3148, 3149, \
3149, 3150, 3151, 3151, 3152, 3153, 3154, 3154, 3155, 3156, 3156, 3157, 3158, 3159, 3159, 3160, 3161, 3161, 3162, 3163, \
3164, 3164, 3165, 3166, 3166, 3167, 3168, 3169, 3169, 3170, 3171, 3171, 3172, 3173, 3173, 3174, 3175, 3176, 3176, 3177, \
3178, 3178, 3179, 3180, 3181, 3181, 3182, 3183, 3183, 3184, 3185, 3185, 3186, 3187, 3188, 3188, 3189, 3190, 3190, 3191, \
3192, 3192, 3193, 3194, 3194, 3195, 3196, 3197, 3197, 3198, 3199, 3199, 3200, 3201, 3201, 3202, 3203, 3203, 3204, 3205, \
3206, 3206, 3207, 3208, 3208, 3209, 3210, 3210, 3211, 3212, 3212, 3213, 3214, 3214, 3215, 3216, 3217, 3217, 3218, 3219, \
3219, 3220, 3221, 3221, 3222, 3223, 3223, 3224, 3225, 3225, 3226, 3227, 3227, 3228, 3229, 3229, 3230, 3231, 3231, 3232, \
3233, 3233, 3234, 3235, 3236, 3236, 3237, 3238, 3238, 3239, 3240, 3240, 3241, 3242, 3242, 3243, 3244, 3244, 3245, 3246, \
3246, 3247, 3248, 3248, 3249, 3250, 3250, 3251, 3252, 3252, 3253, 3254, 3254, 3255, 3256, 3256, 3257, 3258, 3258, 3259, \
3260, 3260, 3261, 3261, 3262, 3263, 3263, 3264, 3265, 3265, 3266, 3267, 3267, 3268, 3269, 3269, 3270, 3271, 3271, 3272, \
3273, 3273, 3274, 3275, 3275, 3276, 3277, 3277, 3278, 3278, 3279, 3280, 3280, 3281, 3282, 3282, 3283, 3284, 3284, 3285, \
3286, 3286, 3287, 3288, 3288, 3289, 3289, 3290, 3291, 3291, 3292, 3293, 3293, 3294, 3295, 3295, 3296, 3297, 3297, 3298, \
3298, 3299, 3300, 3300, 3301, 3302, 3302, 3303, 3304, 3304, 3305, 3305, 3306, 3307, 3307, 3308, 3309, 3309, 3310, 3310, \
3311, 3312, 3312, 3313, 3314, 3314, 3315, 3316, 3316, 3317, 3317, 3318, 3319, 3319, 3320, 3321, 3321, 3322, 3322, 3323, \
3324, 3324, 3325, 3325, 3326, 3327, 3327, 3328, 3329, 3329, 3330, 3330, 3331, 3332, 3332, 3333, 3334, 3334, 3335, 3335, \
3336, 3337, 3337, 3338, 3338, 3339, 3340, 3340, 3341, 3342, 3342, 3343, 3343, 3344, 3345, 3345, 3346, 3346, 3347, 3348, \
3348, 3349, 3349, 3350, 3351, 3351, 3352, 3352, 3353, 3354, 3354, 3355, 3356, 3356, 3357, 3357, 3358, 3359, 3359, 3360, \
3360, 3361, 3362, 3362, 3363, 3363, 3364, 3365, 3365, 3366, 3366, 3367, 3368, 3368, 3369, 3369, 3370, 3370, 3371, 3372, \
3372, 3373, 3373, 3374, 3375, 3375, 3376, 3376, 3377, 3378, 3378, 3379, 3379, 3380, 3381, 3381, 3382, 3382, 3383, 3383, \
3384, 3385, 3385, 3386, 3386, 3387, 3388, 3388, 3389, 3389, 3390, 3390, 3391, 3392, 3392, 3393, 3393, 3394, 3395, 3395, \
3396, 3396, 3397, 3397, 3398, 3399, 3399, 3400, 3400, 3401, 3401, 3402, 3403, 3403, 3404, 3404, 3405, 3405, 3406, 3407, \
3407, 3408, 3408, 3409, 3409, 3410, 3411, 3411, 3412, 3412, 3413, 3413, 3414, 3415, 3415, 3416, 3416, 3417, 3417, 3418, \
3419, 3419, 3420, 3420, 3421, 3421, 3422, 3422, 3423, 3424, 3424, 3425, 3425, 3426, 3426, 3427, 3427, 3428, 3429, 3429, \
3430, 3430, 3431, 3431, 3432, 3432, 3433, 3434, 3434, 3435, 3435, 3436, 3436, 3437, 3437, 3438, 3439, 3439, 3440, 3440, \
3441, 3441, 3442, 3442, 3443, 3443, 3444, 3445, 3445, 3446, 3446, 3447, 3447, 3448, 3448, 3449, 3449, 3450, 3451, 3451, \
3452, 3452, 3453, 3453, 3454, 3454, 3455, 3455, 3456, 3457, 3457, 3458, 3458, 3459, 3459, 3460, 3460, 3461, 3461, 3462, \
3462, 3463, 3463, 3464, 3465, 3465, 3466, 3466, 3467, 3467, 3468, 3468, 3469, 3469, 3470, 3470, 3471, 3471, 3472, 3472, \
3473, 3474, 3474, 3475, 3475, 3476, 3476, 3477, 3477, 3478, 3478, 3479, 3479, 3480, 3480, 3481, 3481, 3482, 3482, 3483, \
3483, 3484, 3484, 3485, 3486, 3486, 3487, 3487, 3488, 3488, 3489, 3489, 3490, 3490, 3491, 3491, 3492, 3492, 3493, 3493, \
3494, 3494, 3495, 3495, 3496, 3496, 3497, 3497, 3498, 3498, 3499, 3499, 3500, 3500, 3501, 3501, 3502, 3502, 3503, 3503, \
3504, 3504, 3505, 3505, 3506, 3506, 3507, 3507, 3508, 3508, 3509, 3509, 3510, 3510, 3511, 3511, 3512, 3512, 3513, 3513, \
3514, 3514, 3515, 3515, 3516, 3516, 3517, 3517, 3518, 3518, 3519, 3519, 3520, 3520, 3521, 3521, 3522, 3522, 3523, 3523, \
3524, 3524, 3525, 3525, 3526, 3526, 3527, 3527, 3528, 3528, 3529, 3529, 3530, 3530, 3531, 3531, 3532, 3532, 3533, 3533, \
3534, 3534, 3535, 3535, 3536, 3536, 3536, 3537, 3537, 3538, 3538, 3539, 3539, 3540, 3540, 3541, 3541, 3542, 3542, 3543, \
3543, 3544, 3544, 3545, 3545, 3546, 3546, 3546, 3547, 3547, 3548, 3548, 3549, 3549, 3550, 3550, 3551, 3551, 3552, 3552, \
3553, 3553, 3554, 3554, 3554, 3555, 3555, 3556, 3556, 3557, 3557, 3558, 3558, 3559, 3559, 3560, 3560, 3561, 3561, 3561, \
3562, 3562, 3563, 3563, 3564, 3564, 3565, 3565, 3566, 3566, 3567, 3567, 3567, 3568, 3568, 3569, 3569, 3570, 3570, 3571, \
3571, 3572, 3572, 3572, 3573, 3573, 3574, 3574, 3575, 3575, 3576, 3576, 3577, 3577, 3577, 3578, 3578, 3579, 3579, 3580, \
3580, 3581, 3581, 3581, 3582, 3582, 3583, 3583, 3584, 3584, 3585, 3585, 3585, 3586, 3586, 3587, 3587, 3588, 3588, 3589, \
3589, 3589, 3590, 3590, 3591, 3591, 3592, 3592, 3593, 3593, 3593, 3594, 3594, 3595, 3595, 3596, 3596, 3596, 3597, 3597, \
3598, 3598, 3599, 3599, 3600, 3600, 3600, 3601, 3601, 3602, 3602, 3603, 3603, 3603, 3604, 3604, 3605, 3605, 3606, 3606, \
3606, 3607, 3607, 3608, 3608, 3609, 3609, 3609, 3610, 3610, 3611, 3611, 3611, 3612, 3612, 3613, 3613, 3614, 3614, 3614, \
3615, 3615, 3616, 3616, 3617, 3617, 3617, 3618, 3618, 3619, 3619, 3619, 3620, 3620, 3621, 3621, 3622, 3622, 3622, 3623, \
3623, 3624, 3624, 3624, 3625, 3625, 3626, 3626, 3627, 3627, 3627, 3628, 3628, 3629, 3629, 3629, 3630, 3630, 3631, 3631, \
3631, 3632, 3632, 3633, 3633, 3633, 3634, 3634, 3635, 3635, 3635, 3636, 3636, 3637, 3637, 3637, 3638, 3638, 3639, 3639, \
3639, 3640, 3640, 3641, 3641, 3641, 3642, 3642, 3643, 3643, 3643, 3644, 3644, 3645, 3645, 3645, 3646, 3646, 3647, 3647, \
3647, 3648, 3648, 3649, 3649, 3649, 3650, 3650, 3651, 3651, 3651, 3652, 3652, 3653, 3653, 3653, 3654, 3654, 3654, 3655, \
3655, 3656, 3656, 3656, 3657, 3657, 3658, 3658, 3658, 3659, 3659, 3659, 3660, 3660, 3661, 3661, 3661, 3662, 3662, 3663, \
3663, 3663, 3664, 3664, 3664, 3665, 3665, 3666, 3666, 3666, 3667, 3667, 3667, 3668, 3668, 3669, 3669, 3669, 3670, 3670, \
3670, 3671, 3671, 3672, 3672, 3672, 3673, 3673, 3673, 3674, 3674, 3675, 3675, 3675, 3676, 3676, 3676, 3677, 3677, 3678, \
3678, 3678, 3679, 3679, 3679, 3680, 3680, 3680, 3681, 3681, 3682, 3682, 3682, 3683, 3683, 3683, 3684, 3684, 3684, 3685, \
3685, 3686, 3686, 3686, 3687, 3687, 3687, 3688, 3688, 3688, 3689, 3689, 3690, 3690, 3690, 3691, 3691, 3691, 3692, 3692, \
3692, 3693, 3693, 3693, 3694, 3694, 3694, 3695, 3695, 3696, 3696, 3696, 3697, 3697, 3697, 3698, 3698, 3698, 3699, 3699, \
3699, 3700, 3700, 3700, 3701, 3701, 3701, 3702, 3702, 3703, 3703, 3703, 3704, 3704, 3704, 3705, 3705, 3705, 3706, 3706, \
3706, 3707, 3707, 3707, 3708, 3708, 3708, 3709, 3709, 3709, 3710, 3710, 3710, 3711, 3711, 3711, 3712, 3712, 3712, 3713, \
3713, 3713, 3714, 3714, 3714, 3715, 3715, 3715, 3716, 3716, 3716, 3717, 3717, 3717, 3718, 3718, 3718, 3719, 3719, 3719, \
3720, 3720, 3720, 3721, 3721, 3721, 3722, 3722, 3722, 3723, 3723, 3723, 3724, 3724, 3724, 3725, 3725, 3725, 3726, 3726, \
3726, 3727, 3727, 3727, 3728, 3728, 3728, 3729, 3729, 3729, 3730, 3730, 3730, 3731, 3731, 3731, 3731, 3732, 3732, 3732, \
3733, 3733, 3733, 3734, 3734, 3734, 3735, 3735, 3735, 3736, 3736, 3736, 3737, 3737, 3737, 3738, 3738, 3738, 3738, 3739, \
3739, 3739, 3740, 3740, 3740, 3741, 3741, 3741, 3742, 3742, 3742, 3743, 3743, 3743, 3743, 3744, 3744, 3744, 3745, 3745, \
3745, 3746, 3746, 3746, 3747, 3747, 3747, 3747, 3748, 3748, 3748, 3749, 3749, 3749, 3750, 3750, 3750, 3750, 3751, 3751, \
3751, 3752, 3752, 3752, 3753, 3753, 3753, 3753, 3754, 3754, 3754, 3755, 3755, 3755, 3756, 3756, 3756, 3756, 3757, 3757, \
3757, 3758, 3758, 3758, 3759, 3759, 3759, 3759, 3760, 3760, 3760, 3761, 3761, 3761, 3761, 3762, 3762, 3762, 3763, 3763, \
3763, 3763, 3764, 3764, 3764, 3765, 3765, 3765, 3766, 3766, 3766, 3766, 3767, 3767, 3767, 3768, 3768, 3768, 3768, 3769, \
3769, 3769, 3770, 3770, 3770, 3770, 3771, 3771, 3771, 3771, 3772, 3772, 3772, 3773, 3773, 3773, 3773, 3774, 3774, 3774, \
3775, 3775, 3775, 3775, 3776, 3776, 3776, 3777, 3777, 3777, 3777, 3778, 3778, 3778, 3778, 3779, 3779, 3779, 3780, 3780, \
3780, 3780, 3781, 3781, 3781, 3781, 3782, 3782, 3782, 3783, 3783, 3783, 3783, 3784, 3784, 3784, 3784, 3785, 3785, 3785, \
3786, 3786, 3786, 3786, 3787, 3787, 3787, 3787, 3788, 3788, 3788, 3788, 3789, 3789, 3789, 3790, 3790, 3790, 3790, 3791, \
3791, 3791, 3791, 3792, 3792, 3792, 3792, 3793, 3793, 3793, 3793, 3794, 3794, 3794, 3794, 3795, 3795, 3795, 3796, 3796, \
3796, 3796, 3797, 3797, 3797, 3797, 3798, 3798, 3798, 3798, 3799, 3799, 3799, 3799, 3800, 3800, 3800, 3800, 3801, 3801, \
3801, 3801, 3802, 3802, 3802, 3802, 3803, 3803, 3803, 3803, 3804, 3804, 3804, 3804, 3805, 3805, 3805, 3805, 3806, 3806, \
3806, 3806, 3807, 3807, 3807, 3807, 3808, 3808, 3808, 3808, 3809, 3809, 3809, 3809, 3810, 3810, 3810, 3810, 3811, 3811, \
3811, 3811, 3812, 3812, 3812, 3812, 3812, 3813, 3813, 3813, 3813, 3814, 3814, 3814, 3814, 3815, 3815, 3815, 3815, 3816, \
3816, 3816, 3816, 3817, 3817, 3817, 3817, 3818, 3818, 3818, 3818, 3818, 3819, 3819, 3819, 3819, 3820, 3820, 3820, 3820, \
3821, 3821, 3821, 3821, 3821, 3822, 3822, 3822, 3822, 3823, 3823, 3823, 3823, 3824, 3824, 3824, 3824, 3824, 3825, 3825, \
3825, 3825, 3826, 3826, 3826, 3826, 3827, 3827, 3827, 3827, 3827, 3828, 3828, 3828, 3828, 3829, 3829, 3829, 3829, 3829, \
3830, 3830, 3830, 3830, 3831, 3831, 3831, 3831, 3831, 3832, 3832, 3832, 3832, 3833, 3833, 3833, 3833, 3833, 3834, 3834, \
3834, 3834, 3835, 3835, 3835, 3835, 3835, 3836, 3836, 3836, 3836, 3836, 3837, 3837, 3837, 3837, 3838, 3838, 3838, 3838, \
3838, 3839, 3839, 3839, 3839, 3839, 3840, 3840, 3840, 3840, 3841, 3841, 3841, 3841, 3841, 3842, 3842, 3842, 3842, 3842, \
3843, 3843, 3843, 3843, 3843, 3844, 3844, 3844, 3844, 3844, 3845, 3845, 3845, 3845, 3846, 3846, 3846, 3846, 3846, 3847, \
3847, 3847, 3847, 3847, 3848, 3848, 3848, 3848, 3848, 3849, 3849, 3849, 3849, 3849, 3850, 3850, 3850, 3850, 3850, 3851, \
3851, 3851, 3851, 3851, 3852, 3852, 3852, 3852, 3852, 3853, 3853, 3853, 3853, 3853, 3854, 3854, 3854, 3854, 3854, 3855, \
3855, 3855, 3855, 3855, 3856, 3856, 3856, 3856, 3856, 3857, 3857, 3857, 3857, 3857, 3857, 3858, 3858, 3858, 3858, 3858, \
3859, 3859, 3859, 3859, 3859, 3860, 3860, 3860, 3860, 3860, 3861, 3861, 3861, 3861, 3861, 3861, 3862, 3862, 3862, 3862, \
3862, 3863, 3863, 3863, 3863, 3863, 3864, 3864, 3864, 3864, 3864, 3864, 3865, 3865, 3865, 3865, 3865, 3866, 3866, 3866, \
3866, 3866, 3867, 3867, 3867, 3867, 3867, 3867, 3868, 3868, 3868, 3868, 3868, 3868, 3869, 3869, 3869, 3869, 3869, 3870, \
3870, 3870, 3870, 3870, 3870, 3871, 3871, 3871, 3871, 3871, 3872, 3872, 3872, 3872, 3872, 3872, 3873, 3873, 3873, 3873, \
3873, 3873, 3874, 3874, 3874, 3874, 3874, 3875, 3875, 3875, 3875, 3875, 3875, 3876, 3876, 3876, 3876, 3876, 3876, 3877, \
3877, 3877, 3877, 3877, 3877, 3878, 3878, 3878, 3878, 3878, 3878, 3879, 3879, 3879, 3879, 3879, 3879, 3880, 3880, 3880, \
3880, 3880, 3880, 3881, 3881, 3881, 3881, 3881, 3881, 3882, 3882, 3882, 3882, 3882, 3882, 3883, 3883, 3883, 3883, 3883, \
3883, 3884, 3884, 3884, 3884, 3884, 3884, 3885, 3885, 3885, 3885, 3885, 3885, 3886, 3886, 3886, 3886, 3886, 3886, 3887, \
3887, 3887, 3887, 3887, 3887, 3887, 3888, 3888, 3888, 3888, 3888, 3888, 3889, 3889, 3889, 3889, 3889, 3889, 3890, 3890, \
3890, 3890, 3890, 3890, 3890, 3891, 3891, 3891, 3891, 3891, 3891, 3892, 3892, 3892, 3892, 3892, 3892, 3892, 3893, 3893, \
3893, 3893, 3893, 3893, 3894, 3894, 3894, 3894, 3894, 3894, 3894, 3895, 3895, 3895, 3895, 3895, 3895, 3895, 3896, 3896, \
3896, 3896, 3896, 3896, 3897, 3897, 3897, 3897, 3897, 3897, 3897, 3898, 3898, 3898, 3898, 3898, 3898, 3898, 3899, 3899, \
3899, 3899, 3899, 3899, 3899, 3900, 3900, 3900, 3900, 3900, 3900, 3900, 3901, 3901, 3901, 3901, 3901, 3901, 3901, 3902, \
3902, 3902, 3902, 3902, 3902, 3902, 3903, 3903, 3903, 3903, 3903, 3903, 3903, 3904, 3904, 3904, 3904, 3904, 3904, 3904, \
3905, 3905, 3905, 3905, 3905, 3905, 3905, 3906, 3906, 3906, 3906, 3906, 3906, 3906, 3906, 3907, 3907, 3907, 3907, 3907, \
3907, 3907, 3908, 3908, 3908, 3908, 3908, 3908, 3908, 3908, 3909, 3909, 3909, 3909, 3909, 3909, 3909, 3910, 3910, 3910, \
3910, 3910, 3910, 3910, 3910, 3911, 3911, 3911, 3911, 3911, 3911, 3911, 3912, 3912, 3912, 3912, 3912, 3912, 3912, 3912, \
3913, 3913, 3913, 3913, 3913, 3913, 3913, 3913, 3914, 3914, 3914, 3914, 3914, 3914, 3914, 3914, 3915, 3915, 3915, 3915, \
3915, 3915, 3915, 3915, 3916, 3916, 3916, 3916, 3916, 3916, 3916, 3916, 3917, 3917, 3917, 3917, 3917, 3917, 3917, 3917, \
3918, 3918, 3918, 3918, 3918, 3918, 3918, 3918, 3919, 3919, 3919, 3919, 3919, 3919, 3919, 3919, 3919, 3920, 3920, 3920, \
3920, 3920, 3920, 3920, 3920, 3921, 3921, 3921, 3921, 3921, 3921, 3921, 3921, 3922, 3922, 3922, 3922, 3922, 3922, 3922, \
3922, 3922, 3923, 3923, 3923, 3923, 3923, 3923, 3923, 3923, 3923, 3924, 3924, 3924, 3924, 3924, 3924, 3924, 3924, 3925, \
3925, 3925, 3925, 3925, 3925, 3925, 3925, 3925, 3926, 3926, 3926, 3926, 3926, 3926, 3926, 3926, 3926, 3927, 3927, 3927, \
3927, 3927, 3927, 3927, 3927, 3927, 3928, 3928, 3928, 3928, 3928, 3928, 3928, 3928, 3928, 3929, 3929, 3929, 3929, 3929, \
3929, 3929, 3929, 3929, 3929, 3930, 3930, 3930, 3930, 3930, 3930, 3930, 3930, 3930, 3931, 3931, 3931, 3931, 3931, 3931, \
3931, 3931, 3931, 3931, 3932, 3932, 3932, 3932, 3932, 3932, 3932, 3932, 3932, 3933, 3933, 3933, 3933, 3933, 3933, 3933, \
3933, 3933, 3933, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3934, 3935, 3935, 3935, 3935, 3935, 3935, 3935, \
3935, 3935, 3935, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3936, 3937, 3937, 3937, 3937, 3937, 3937, 3937, \
3937, 3937, 3937, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3938, 3939, 3939, 3939, 3939, 3939, 3939, \
3939, 3939, 3939, 3939, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3940, 3941, 3941, 3941, 3941, 3941, \
3941, 3941, 3941, 3941, 3941, 3941, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3942, 3943, 3943, 3943, \
3943, 3943, 3943, 3943, 3943, 3943, 3943, 3943, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3944, 3945, \
3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3945, 3946, 3946, 3946, 3946, 3946, 3946, 3946, 3946, 3946, \
3946, 3946, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3947, 3948, 3948, 3948, 3948, 3948, 3948, \
3948, 3948, 3948, 3948, 3948, 3948, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3949, 3950, \
3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3950, 3951, 3951, 3951, 3951, 3951, 3951, 3951, 3951, 3951, \
3951, 3951, 3951, 3951, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3952, 3953, 3953, 3953, \
3953, 3953, 3953, 3953, 3953, 3953, 3953, 3953, 3953, 3953, 3954, 3954, 3954, 3954, 3954, 3954, 3954, 3954, 3954, 3954, \
3954, 3954, 3954, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3955, 3956, 3956, 3956, \
3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3956, 3957, 3957, 3957, 3957, 3957, 3957, 3957, 3957, 3957, \
3957, 3957, 3957, 3957, 3957, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, 3958, \
3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3959, 3960, 3960, 3960, 3960, 3960, \
3960, 3960, 3960, 3960, 3960, 3960, 3960, 3960, 3960, 3960, 3961, 3961, 3961, 3961, 3961, 3961, 3961, 3961, 3961, 3961, \
3961, 3961, 3961, 3961, 3961, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, 3962, \
3962, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3963, 3964, 3964, 3964, \
3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3964, 3965, 3965, 3965, 3965, 3965, 3965, \
3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3965, 3966, 3966, 3966, 3966, 3966, 3966, 3966, 3966, 3966, \
3966, 3966, 3966, 3966, 3966, 3966, 3966, 3966, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, 3967, \
3967, 3967, 3967, 3967, 3967, 3967, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, 3968, \
3968, 3968, 3968, 3968, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, 3969, \
3969, 3969, 3969, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, 3970, \
3970, 3970, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, 3971, \
3971, 3971, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, 3972, \
3972, 3972, 3972, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, 3973, \
3973, 3973, 3973, 3973, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, 3974, \
3974, 3974, 3974, 3974, 3974, 3974, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, \
3975, 3975, 3975, 3975, 3975, 3975, 3975, 3975, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, \
3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3976, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, \
3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3977, 3978, 3978, 3978, 3978, \
3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, 3978, \
3978, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, 3979, \
3979, 3979, 3979, 3979, 3979, 3979, 3979, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, \
3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3980, 3981, 3981, 3981, 3981, 3981, 3981, \
3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, 3981, \
3981, 3981, 3981, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, \
3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3982, 3983, 3983, 3983, 3983, 3983, 3983, 3983, \
3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, 3983, \
3983, 3983, 3983, 3983, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, \
3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3984, 3985, 3985, 3985, \
3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, \
3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3985, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, \
3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, \
3986, 3986, 3986, 3986, 3986, 3986, 3986, 3986, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, \
3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, 3987, \
3987, 3987, 3987, 3987, 3987, 3987, 3987, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, \
3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, \
3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3988, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, \
3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, \
3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3989, 3990, 3990, 3990, 3990, 3990, 3990, 3990, \
3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, \
3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, 3990, \
3990, 3990, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, \
3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, \
3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3991, 3992, 3992, 3992, 3992, 3992, 3992, \
3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, \
3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, \
3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3992, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, \
3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, \
3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, \
3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3993, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, 3994, \
3994, 3994, 3994, 3994, 3994, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, 3995, \
3995, 3995, 3995, 3995, 3995, 3995, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, \
3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3996, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, \
3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3997, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, 3998, \
3998, 3998, 3998, 3998, 3998, 3998, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, 3999, \
3999, 3999, 3999, 3999, 3999, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, 4000, \
4000 ])
| mit | -588,227,687,337,240,200 | 74.253057 | 125 | 0.691327 | false | 2.051741 | false | false | false |
Shrobs/climulon | climulon/handlers/provision.py | 1 | 9907 |
import boto3
import botocore
import time
from handlers import utils
import dependency_engine as engine
import taskDefs
import services
from handlers.exceptions import (StackExistsError,
TaskDefExistsError,
StackUnresolvedDependency,
ExternalStackNotFound)
def provision_handler(args):
    # Handler for both the Codeship and regular CLI entry points;
    # provisions infrastructure using CloudFormation stacks
conf = args.conf
stackSubset = args.stacks
timeout = args.timeout
if timeout is None:
timeout = 60
dry_run = args.dry_run
run_provision(conf, stackSubset, timeout, dry_run)
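
# End-to-end provisioning flow: validate the config, run the pre-flight
# checks (external stacks, stack and task-def name collisions), create
# the CloudFormation stacks while threading each stack's outputs into
# the parameters of later templates, then register ECS task definitions
# and services for compute stacks.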
def run_provision(conf, stackSubset, timeout, dry_run):
(config, configParams, templates, tasksDefsContent,
servicesContent, externalStacks) = utils.check_and_get_conf(conf)
if stackSubset:
print("Stack list detected, will only provision this sub-set "
"of stacks :")
print(stackSubset)
else:
stackSubset = []
for template in templates:
stackSubset.append(template["StackName"])
ComputeStackFound = utils.verify_subset(stackSubset, templates)
utils.change_workdir(conf)
# Checking that all the external stacks exist
if externalStacks:
print("Checking if external templates exist")
for stack in externalStacks:
client = boto3.client('cloudformation', region_name=stack["StackRegion"])
try:
response = client.describe_stacks(StackName=stack["StackName"])
except botocore.exceptions.ClientError as e:
if (e.response['Error']['Code'] ==
'ValidationError' and
"does not exist" in
e.response['Error']['Message']):
raise ExternalStackNotFound(stack["StackName"])
else:
raise
    # Checking if there are existing stacks with the names of the templates
# to be created
print("Checking if there are CF stack with current names")
for template in templates:
if template["StackName"] not in stackSubset:
continue
client = boto3.client('cloudformation', region_name=template["StackRegion"])
listStacksResponse = client.list_stacks(
StackStatusFilter=[
'CREATE_IN_PROGRESS',
'CREATE_COMPLETE',
'ROLLBACK_IN_PROGRESS',
'ROLLBACK_FAILED',
'ROLLBACK_COMPLETE',
'DELETE_IN_PROGRESS',
'DELETE_FAILED',
'UPDATE_IN_PROGRESS',
'UPDATE_COMPLETE_CLEANUP_IN_PROGRESS',
'UPDATE_COMPLETE',
'UPDATE_ROLLBACK_IN_PROGRESS',
'UPDATE_ROLLBACK_FAILED',
'UPDATE_ROLLBACK_COMPLETE_CLEANUP_IN_PROGRESS',
'UPDATE_ROLLBACK_COMPLETE'
]
)
for stack in listStacksResponse["StackSummaries"]:
if stack["StackName"] == template["StackName"]:
raise StackExistsError(template["StackName"])
if template["ComputeStack"].lower() == "true":
# Checking if there are task defs with same names
print("Checking if there are task defs with current names")
client = boto3.client('ecs', region_name=template["StackRegion"])
for key, value in config["globalParameters"].items():
if "TASK_DEF_NAME" in key:
similarTaskDefs = client.list_task_definition_families(
familyPrefix=value, status="ACTIVE")
for task in similarTaskDefs['families']:
if value == task:
raise TaskDefExistsError(task)
print("Checks complete, ready for provisioning")
if dry_run is True:
return
# Stacks Creation
print("Creating Stacks...")
    # Filled with the outputs of each external stack as it is described;
    # merged into configParams once all external stacks are processed
extStacksOutput = {}
for stack in externalStacks:
extStackOutput = {}
client = boto3.client('cloudformation', region_name=stack["StackRegion"])
try:
describeStackResponse = client.describe_stacks(
StackName=stack["StackName"])
stack = describeStackResponse["Stacks"][0]
stackOutputs = stack["Outputs"]
print("Getting stack output from %s" % (stack["StackName"]))
for outputSet in stackOutputs:
extStackOutput[outputSet["OutputKey"]] = outputSet["OutputValue"]
utils.mergeOutputConfig(extStackOutput, extStacksOutput, stack)
except botocore.exceptions.ClientError as e:
if (e.response['Error']['Code'] ==
'ValidationError' and
"does not exist" in
e.response['Error']['Message']):
raise ExternalStackNotFound(stack["StackName"])
else:
raise
if externalStacks:
utils.mergeOutputConfig(extStacksOutput, configParams, stack)
configOutput = {}
for template in templates:
if template["StackName"] in stackSubset:
print("Creating stack : %s" % (template["StackName"]))
else:
print("Getting output from stack %s if it exists" %
(template["StackName"]))
print("Converting stack config for %s..." %
(template["StackName"]))
# Output of the current running stack, will be filled later
stackOutputs = None
if template["StackName"] in stackSubset:
missingRefs = engine.dependencyResolver(
target=template["StackParameters"],
resolve=True,
valueSources=[configParams])
if missingRefs:
raise StackUnresolvedDependency(
template["StackName"], missingRefs)
parameterList = []
for key in template["StackParameters"]:
param = {
'ParameterKey': key,
'ParameterValue': template["StackParameters"][key],
'UsePreviousValue': False
}
parameterList.append(param)
client = boto3.client('cloudformation', region_name=template["StackRegion"])
createStackResponse = client.create_stack(
StackName=template["StackName"],
TemplateBody=template["TemplateContent"],
Parameters=parameterList,
TimeoutInMinutes=timeout,
Capabilities=[
'CAPABILITY_IAM',
]
)
stackId = createStackResponse["StackId"]
print("Stack creation launched for stack : %s" %
(template["StackName"]))
print(stackId)
# Waiting for stack creation
while True:
describeStackResponse = client.describe_stacks(
StackName=template["StackName"])
stack = describeStackResponse["Stacks"][0]
if (stack["StackStatus"] == 'CREATE_FAILED' or
stack["StackStatus"] == 'ROLLBACK_COMPLETE'):
print("Stack creating failed")
print(stack["StackStatusReason"])
elif stack["StackStatus"] == 'CREATE_COMPLETE':
print("Stack creation complete")
if "Outputs" in stack:
print("Stack Output :")
stackOutputs = stack["Outputs"]
else:
print("Stack with no output to print")
stackOutputs = None
break
else:
print("Stack creation in progress")
time.sleep(20)
else:
print("Getting stack output")
for stack in listStacksResponse["StackSummaries"]:
if stack["StackName"] == template["StackName"]:
print("Stack found")
client = boto3.client('cloudformation', region_name=template["StackRegion"])
describeStackResponse = client.describe_stacks(
StackName=template["StackName"])
stack = describeStackResponse["Stacks"][0]
if "Outputs" in stack:
print("Stack Output :")
stackOutputs = stack["Outputs"]
else:
print("Stack with no output to print")
stackOutputs = None
if not stackOutputs:
print("Stack does not exist, ignoring step")
configOutput = {}
if stackOutputs:
print("Converting stack output...")
for outputSet in stackOutputs:
configOutput[outputSet["OutputKey"]
] = outputSet["OutputValue"]
print("Output parameters from stack:")
print(configOutput)
utils.mergeOutputConfig(configOutput, configParams, template)
if template["ComputeStack"].lower() == "true" and template["StackName"] in stackSubset:
tasksDefsContent = taskDefs.fill_taskDef_templates(
tasksDefsContent, configParams)
taskDefs.register_taskDef(tasksDefsContent, template["StackRegion"])
client = boto3.client('ecs', region_name=template["StackRegion"])
servicesContent = services.fill_service_templates(
servicesContent, configParams)
services.create_services(servicesContent, template["StackRegion"])
print("Provision process successful")
| gpl-3.0 | -8,367,918,283,107,603,000 | 38.47012 | 96 | 0.556576 | false | 5.154527 | true | false | false |
twisted/quotient | xquotient/test/test_scrubber.py | 1 | 4268 | from twisted.trial.unittest import TestCase
from twisted.web.microdom import parseString, getElementsByTagName
from twisted.web.domhelpers import gatherTextNodes
from xquotient.scrubber import scrub, scrubCIDLinks
class ScrubberTestCase(TestCase):
def test_scrubCIDLinksWithLinks(self):
"""
        Test L{xquotient.scrubber.Scrubber.scrubCIDLinks} with <a>s
"""
node = parseString("""
<html>
<a href="cid:foo">with a CID URI</a>
<a href="not a cid uri">without a CID URI</a>
</html>
""").documentElement
scrubCIDLinks(node)
(link,) = node.childNodes
self.assertEquals(link.attributes['href'], 'not a cid uri')
def test_scrubCIDLinksWithIFrames(self):
"""
Test L{xquotient.scrubber.Scrubber.scrubCIDLinks} with <iframe>s
"""
node = parseString("""
<html>
<IFRAME SRC="CID:foo">with a CID URL</IFRAME>
<IFRAME SRC="http://foo.bar">without a CID URI</IFRAME>
</html>
""").documentElement
scrubCIDLinks(node)
(iframe,) = node.childNodes
self.assertEquals(iframe.attributes['src'], 'http://foo.bar')
def test_scrubCIDLinksWithImages(self):
"""
Test L{xquotient.scrubber.Scrubber.scrubCIDLinks} with <img>s
"""
node = parseString("""
<html>
<img src="cid:foo" />
<img src="http://foo.bar" />
<img src="cid:bar" />
<img src="http://bar.baz" />
</html>
""").documentElement
scrubCIDLinks(node)
self.assertEquals(list(e.attributes['src'] for e in node.childNodes),
['http://foo.bar', 'http://bar.baz'])
def test_scrubCIDLinks(self):
"""
Test L{xquotient.scrubber.Scrubber.scrubCIDLinks} with a bunch of
different nodes
"""
node = parseString("""
<html>
<img src="cid:foo" />
<a href="x" name="1" />
<iframe src="cid:bar" />
<iframe name="2" />
<a href="cid:xxx" />
<img src="123" name="3" />
<link href="cid:foo" />
<link href="xyz" name="4" />
<script src="cid:baz" />
<script href="x" name="5" />
</html>""").documentElement
scrubCIDLinks(node)
self.assertEquals(
list(int(e.attributes['name']) for e in node.childNodes),
[1, 2, 3, 4, 5])
def test_scrubWithCIDLinkArg(self):
"""
Test that L{xquotient.scrubber.Scrubber.scrub} pays attention to
the C{filterCIDLinks} argument, when passed <a>s
"""
node = parseString("""
<html>
<a href="x" />
<a href="cid:foo" />
</html>
""").documentElement
scrubbed = scrub(node, filterCIDLinks=False)
self.assertEquals(
list(e.attributes['href'] for e in scrubbed.firstChild().childNodes),
['x', 'cid:foo'])
scrubbed = scrub(node, filterCIDLinks=True)
self.assertEquals(
list(e.attributes['href'] for e in scrubbed.firstChild().childNodes),
['x'])
def test_scrubTrustsSpan(self):
"""
Test that L{xquotient.scrubber.Scrubber} considers span to be a safe
tag. Added because of #1641.
"""
node = parseString("""
<html>
<span style='font-weight: bold; font-family:"Book Antiqua"'>
Hello
</span>
</html>
""").documentElement
scrubbed = scrub(node)
spans = getElementsByTagName(scrubbed, 'span')
self.assertEquals(len(spans), 1)
self.assertEquals(gatherTextNodes(spans[0]).strip(), "Hello")
def test_scrubTrustsH1(self):
"""
Test that L{xquotient.scrubber.Scrubber} considers h1 to be a safe tag.
Added because of #1895.
"""
node = parseString("<h1>Foo</h1>").documentElement
scrubbed = scrub(node)
h1s = getElementsByTagName(scrubbed, 'h1')
self.assertEquals(len(h1s), 1)
self.assertEquals(gatherTextNodes(h1s[0]).strip(), "Foo")
| mit | 7,923,884,959,483,736,000 | 29.269504 | 85 | 0.545923 | false | 3.598651 | true | false | false |
simpeg/discretize | discretize/utils/mesh_utils.py | 1 | 36741 | import numpy as np
import scipy.ndimage as ndi
import scipy.sparse as sp
from discretize.utils.matrix_utils import ndgrid
from discretize.utils.code_utils import as_array_n_by_dim, is_scalar
from scipy.spatial import cKDTree, Delaunay
from scipy import interpolate
import discretize
from discretize.utils.code_utils import deprecate_function
import warnings
num_types = [int, float]
def random_model(shape, seed=None, anisotropy=None, its=100, bounds=None):
"""Create random tensor model.
Creates a random tensor model by convolving a kernel function with a
uniformly distributed model. The user specifies the number of cells
along the x, (y and z) directions with the input argument *shape* and
the function outputs a tensor model with the same shape. Afterwards,
the user may use the :py:func:`~discretize.utils.mkvc` function
to convert the tensor to a vector which can be plotting on a
corresponding tensor mesh.
Parameters
----------
shape : tuple
shape of the model.
seed : int
pick which model to produce, prints the seed if you don't choose
anisotropy : numpy.ndarray
this is the (3 x n) blurring kernel that is used
its : int
number of smoothing iterations
bounds : list
Lower and upper bounds on the model. Has the form [lower_bound, upper_bound].
Returns
-------
numpy.ndarray
        A randomly generated model whose shape is specified by the input parameter *shape*
Examples
--------
Here, we generate a random model for a 2D tensor mesh and plot.
>>> from discretize import TensorMesh
>>> from discretize.utils import random_model, mkvc
>>> import matplotlib as mpl
>>> import matplotlib.pyplot as plt
>>>
>>> h = [(1., 50)]
>>> vmin, vmax = 0., 1.
>>> mesh = TensorMesh([h, h])
>>> model = random_model(mesh.shape_cells, seed=4, bounds=[vmin, vmax])
>>> model = mkvc(model)
>>>
>>> fig = plt.figure(figsize=(5, 4))
>>> ax1 = fig.add_axes([0.1, 0.1, 0.7, 0.8])
>>> ax2 = fig.add_axes([0.83, 0.1, 0.03, 0.8])
>>> mesh.plot_image(model, grid=False, ax=ax1)
>>> norm = mpl.colors.Normalize(vmin=vmin, vmax=vmax)
>>> mpl.colorbar.ColorbarBase(ax2, norm=norm)
>>> ax1.set_title('Random Tensor Model')
"""
if bounds is None:
bounds = [0, 1]
if seed is None:
seed = np.random.randint(1e3)
print("Using a seed of: ", seed)
if type(shape) in num_types:
shape = (shape,) # make it a tuple for consistency
np.random.seed(seed)
mr = np.random.rand(*shape)
if anisotropy is None:
if len(shape) == 1:
smth = np.array([1, 10.0, 1], dtype=float)
elif len(shape) == 2:
smth = np.array([[1, 7, 1], [2, 10, 2], [1, 7, 1]], dtype=float)
elif len(shape) == 3:
kernal = np.array([1, 4, 1], dtype=float).reshape((1, 3))
smth = np.array(
sp.kron(sp.kron(kernal, kernal.T).todense()[:], kernal).todense()
).reshape((3, 3, 3))
else:
if len(anisotropy.shape) != len(shape):
raise ValueError("Anisotropy must be the same shape.")
smth = np.array(anisotropy, dtype=float)
smth = smth / smth.sum() # normalize
mi = mr
for i in range(its):
mi = ndi.convolve(mi, smth)
# scale the model to live between the bounds.
mi = (mi - mi.min()) / (mi.max() - mi.min()) # scaled between 0 and 1
mi = mi * (bounds[1] - bounds[0]) + bounds[0]
return mi
def unpack_widths(value):
"""Unpack a condensed representation of cell widths or time steps.
For a list of numbers, if the same value is repeat or expanded by a constant
factor, it may be represented in a condensed form using list of floats
and/or tuples. **unpack_widths** takes a list of floats and/or tuples in
condensed form, e.g.:
[ float, (cellSize, numCell), (cellSize, numCell, factor) ]
and expands the representation to a list containing all widths in order. That is:
[ w1, w2, w3, ..., wn ]
Parameters
----------
value : list of floats and/or tuples
The list of floats and/or tuples that are to be unpacked
Returns
-------
list
The unpacked list with all widths in order
Examples
--------
Time stepping for time-domain codes can be represented in condensed form, e.g.:
[ (1e-5, 10), (1e-4, 4), 1e-3 ]
The above means to take 10 steps at a step width of 1e-5 s and then
4 more at 1e-4 s, and then one step of 1e-3 s. When unpacked, the output is
of length 15 and is given by:
[1e-5, ..., 1e-5, 1e-4, 1e-4, 1e-4, 1e-4, 1e-3]
Each axis of a tensor mesh can also be defined as a condensed list of floats
and/or tuples. When a third number is defined in any tuple, the width value
is successively expanded by that factor, e.g.:
[ 6., 8., (10.0, 3), (8.0, 4, 2.) ]
If we unpacked this, we would obtain:
    [ 6., 8., 10., 10., 10., 16., 32., 64., 128. ]
"""
if type(value) is not list:
raise Exception("unpack_widths must be a list of scalars and tuples.")
proposed = []
for v in value:
if is_scalar(v):
proposed += [float(v)]
elif type(v) is tuple and len(v) == 2:
proposed += [float(v[0])] * int(v[1])
elif type(v) is tuple and len(v) == 3:
start = float(v[0])
num = int(v[1])
factor = float(v[2])
pad = ((np.ones(num) * np.abs(factor)) ** (np.arange(num) + 1)) * start
if factor < 0:
pad = pad[::-1]
proposed += pad.tolist()
else:
raise Exception(
"unpack_widths must contain only scalars and len(2) or len(3) tuples."
)
return np.array(proposed)
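# Illustrative note (added; not part of the original docstring): for a length-3
# tuple the first expanded width is h*f, not h, since the code computes
# start * factor**(1..num). For example, unpack_widths([(10.0, 3, 2.0)]) gives
# [20., 40., 80.], and a negative factor such as (10.0, 3, -2.0) reverses the
# order to [80., 40., 20.].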
def closest_points_index(mesh, pts, grid_loc="CC", **kwargs):
"""Find the indicies for the nearest cell center, node, face or edge for a set of points.
Parameters
----------
mesh : discretize.BaseMesh
An instance of *discretize.BaseMesh*
pts : numpy.ndarray
Points being moved. Has shape (n, dim)
grid_loc : str
Specifies the grid on which points are being moved to. Choose one
of {'CC', 'N', 'Fx', 'Fy', 'Fz', 'Ex', 'Ex', 'Ey', 'Ez'}
Returns
-------
numpy.ndarray
        Vector of length *n* containing the indices for the closest
respective cell center, node, face or edge.
Examples
--------
Here we define a set of random (x, y) locations and find the closest
cell centers and nodes on a mesh.
>>> from discretize import TensorMesh
>>> from discretize.utils import closest_points_index
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>>
>>> h = 2*np.ones(5)
>>> mesh = TensorMesh([h, h], x0='00')
>>>
>>> # Random locations, grid cell centers and grid nodes
>>> xy_random = np.random.uniform(0, 10, size=(4,2))
>>> xy_centers = mesh.cell_centers
>>> xy_nodes = mesh.nodes
>>>
    >>> # Find indices of closest cell centers and nodes
>>> ind_centers = closest_points_index(mesh, xy_random, 'CC')
>>> ind_nodes = closest_points_index(mesh, xy_random, 'N')
>>>
>>> # Plot closest cell centers and nodes
>>> fig = plt.figure(figsize=(5, 5))
>>> ax = fig.add_axes([0.1, 0.1, 0.8, 0.8])
>>> mesh.plot_grid(ax=ax)
>>> ax.scatter(xy_random[:, 0], xy_random[:, 1], 50, 'k')
>>> ax.scatter(xy_centers[ind_centers, 0], xy_centers[ind_centers, 1], 50, 'r')
>>> ax.scatter(xy_nodes[ind_nodes, 0], xy_nodes[ind_nodes, 1], 50, 'b')
>>> plt.show()
"""
if "gridLoc" in kwargs:
warnings.warn(
"The gridLoc keyword argument has been deprecated, please use grid_loc. "
"This will be removed in discretize 1.0.0",
DeprecationWarning,
)
grid_loc = kwargs["gridLoc"]
pts = as_array_n_by_dim(pts, mesh.dim)
grid = getattr(mesh, "grid" + grid_loc)
nodeInds = np.empty(pts.shape[0], dtype=int)
for i, pt in enumerate(pts):
if mesh.dim == 1:
nodeInds[i] = ((pt - grid) ** 2).argmin()
else:
nodeInds[i] = (
((np.tile(pt, (grid.shape[0], 1)) - grid) ** 2).sum(axis=1).argmin()
)
return nodeInds
def extract_core_mesh(xyzlim, mesh, mesh_type="tensor"):
"""Extracts the core mesh from a global mesh.
Parameters
----------
xyzlim : numpy.ndarray
2D array defining the x, y and z cutoffs for the core mesh region. Each
row contains the minimum and maximum limit for the x, y and z axis, respectively.
Thus the array has shape (ndim, 2)
mesh : discretize.BaseMesh
The mesh
mesh_type : str, optional
Unused currently
Returns
-------
tuple: (**active_index**, **core_mesh**)
        **active_index** is a boolean array that maps from the global mesh
        to the core mesh. **core_mesh** is a *discretize.BaseMesh* object representing
the core mesh.
Examples
--------
Here, we define a 2D tensor mesh that has both a core region and padding.
We use the function **extract_core_mesh** to return a mesh which contains
only the core region.
>>> from discretize.utils import extract_core_mesh
>>> from discretize import TensorMesh
>>> import numpy as np
>>> import matplotlib.pyplot as plt
>>> import matplotlib as mpl
>>>
>>> mpl.rcParams.update({"font.size": 14})
>>>
>>> # Corners of a uniform cube
>>> h = [(1., 5, -1.5), (1., 20), (1., 5, 1.5)]
>>> mesh = TensorMesh([h, h], origin='CC')
>>>
>>> # Plot original mesh
>>> fig = plt.figure(figsize=(7, 7))
>>> ax = fig.add_subplot(111)
>>> mesh.plot_grid(ax=ax)
>>> ax.set_title('Original Tensor Mesh')
>>> plt.show()
>>>
>>> # Set the limits for the cutoff of the core mesh (dim, 2)
>>> xlim = np.c_[-10., 10]
>>> ylim = np.c_[-10., 10]
>>> core_limits = np.r_[xlim, ylim]
>>>
>>> # Extract indices of core mesh cells and the core mesh, then plot
>>> core_ind, core_mesh = extract_core_mesh(core_limits, mesh)
>>> fig = plt.figure(figsize=(4, 4))
>>> ax = fig.add_subplot(111)
>>> core_mesh.plot_grid(ax=ax)
>>> ax.set_title('Core Mesh')
"""
if not isinstance(mesh, discretize.TensorMesh):
raise Exception("Only implemented for class TensorMesh")
if mesh.dim == 1:
xyzlim = xyzlim.flatten()
xmin, xmax = xyzlim[0], xyzlim[1]
xind = np.logical_and(mesh.cell_centers_x > xmin, mesh.cell_centers_x < xmax)
xc = mesh.cell_centers_x[xind]
hx = mesh.h[0][xind]
origin = [xc[0] - hx[0] * 0.5]
meshCore = discretize.TensorMesh([hx], origin=origin)
actind = (mesh.cell_centers > xmin) & (mesh.cell_centers < xmax)
elif mesh.dim == 2:
xmin, xmax = xyzlim[0, 0], xyzlim[0, 1]
ymin, ymax = xyzlim[1, 0], xyzlim[1, 1]
xind = np.logical_and(mesh.cell_centers_x > xmin, mesh.cell_centers_x < xmax)
yind = np.logical_and(mesh.cell_centers_y > ymin, mesh.cell_centers_y < ymax)
xc = mesh.cell_centers_x[xind]
yc = mesh.cell_centers_y[yind]
hx = mesh.h[0][xind]
hy = mesh.h[1][yind]
origin = [xc[0] - hx[0] * 0.5, yc[0] - hy[0] * 0.5]
meshCore = discretize.TensorMesh([hx, hy], origin=origin)
actind = (
(mesh.cell_centers[:, 0] > xmin)
& (mesh.cell_centers[:, 0] < xmax)
& (mesh.cell_centers[:, 1] > ymin)
& (mesh.cell_centers[:, 1] < ymax)
)
elif mesh.dim == 3:
xmin, xmax = xyzlim[0, 0], xyzlim[0, 1]
ymin, ymax = xyzlim[1, 0], xyzlim[1, 1]
zmin, zmax = xyzlim[2, 0], xyzlim[2, 1]
xind = np.logical_and(mesh.cell_centers_x > xmin, mesh.cell_centers_x < xmax)
yind = np.logical_and(mesh.cell_centers_y > ymin, mesh.cell_centers_y < ymax)
zind = np.logical_and(mesh.cell_centers_z > zmin, mesh.cell_centers_z < zmax)
xc = mesh.cell_centers_x[xind]
yc = mesh.cell_centers_y[yind]
zc = mesh.cell_centers_z[zind]
hx = mesh.h[0][xind]
hy = mesh.h[1][yind]
hz = mesh.h[2][zind]
origin = [xc[0] - hx[0] * 0.5, yc[0] - hy[0] * 0.5, zc[0] - hz[0] * 0.5]
meshCore = discretize.TensorMesh([hx, hy, hz], origin=origin)
actind = (
(mesh.cell_centers[:, 0] > xmin)
& (mesh.cell_centers[:, 0] < xmax)
& (mesh.cell_centers[:, 1] > ymin)
& (mesh.cell_centers[:, 1] < ymax)
& (mesh.cell_centers[:, 2] > zmin)
& (mesh.cell_centers[:, 2] < zmax)
)
else:
raise Exception("Not implemented!")
return actind, meshCore
def mesh_builder_xyz(
xyz,
h,
padding_distance=[[0, 0], [0, 0], [0, 0]],
base_mesh=None,
depth_core=None,
expansion_factor=1.3,
mesh_type="tensor",
):
"""Generate a tensor or tree mesh using a cloud of points.
For a cloud of (x,y[,z]) locations and specified minimum cell widths
(hx,hy,[hz]), this function creates a tensor or a tree mesh.
The lateral extent of the core region is determine by the cloud of points.
Other properties of the mesh can be defined automatically or by the user.
If *base_mesh* is an instance of :class:`~discretize.TensorMesh` or
:class:`~discretize.TreeMesh`, the core cells will be centered
on the underlying mesh to reduce interpolation errors.
Parameters
----------
xyz : numpy.ndarray
Location points [n x dim]
h : list
Cell size(s) for the core mesh [1 x ndim]
padding_distance : list
Padding distances [[W,E], [N,S], [Down,Up]]
base_mesh : discretize.TensorMesh or discretize.TreeMesh
discretize mesh used to center the core mesh
depth_core : float
Depth of core mesh below xyz
expansion_factor : float
        Expansion factor for padding cells. Ignored if *mesh_type* = *tree*
mesh_type : str
Specify output mesh type. Choose from {'tensor", "tree"}
Returns
--------
discretize.TensorMesh or discretize.TreeMesh
Mesh of type specified by *mesh_type*
Examples
--------
>>> import discretize
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>>
>>> xyLoc = np.random.randn(8,2)
>>>
>>> mesh = discretize.utils.mesh_builder_xyz(
>>> xyLoc, [0.1, 0.1], depth_core=0.5,
>>> padding_distance=[[1,2], [1,0]],
>>> mesh_type='tensor',
>>> )
>>>
>>> axs = plt.subplot()
>>> mesh.plot_image(mesh.cell_volumes, grid=True, ax=axs)
>>> axs.scatter(xyLoc[:,0], xyLoc[:,1], 15, c='w', zorder=3)
>>> axs.set_aspect('equal')
>>> plt.show()
"""
if mesh_type.lower() not in ["tensor", "tree"]:
raise ValueError("Revise mesh_type. Only TENSOR | TREE mesh are implemented")
# Get extent of points
limits = []
center = []
nC = []
for dim in range(xyz.shape[1]):
max_min = np.r_[xyz[:, dim].max(), xyz[:, dim].min()]
limits += [max_min]
center += [np.mean(max_min)]
nC += [int((max_min[0] - max_min[1]) / h[dim])]
if depth_core is not None:
nC[-1] += int(depth_core / h[-1])
limits[-1][1] -= depth_core
if mesh_type.lower() == "tensor":
# Figure out padding cells from distance
def expand(dx, pad):
length = 0
nc = 0
while length < pad:
nc += 1
length = np.sum(dx * expansion_factor ** (np.asarray(range(nc)) + 1))
return nc
# Define h along each dimension
h_dim = []
nC_origin = []
for dim in range(xyz.shape[1]):
h_dim += [
[
(
h[dim],
expand(h[dim], padding_distance[dim][0]),
-expansion_factor,
),
(h[dim], nC[dim]),
(
h[dim],
expand(h[dim], padding_distance[dim][1]),
expansion_factor,
),
]
]
nC_origin += [h_dim[-1][0][1]]
# Create mesh
mesh = discretize.TensorMesh(h_dim)
elif mesh_type.lower() == "tree":
# Figure out full extent required from input
h_dim = []
nC_origin = []
for ii, cc in enumerate(nC):
extent = limits[ii][0] - limits[ii][1] + np.sum(padding_distance[ii])
# Number of cells at the small octree level
maxLevel = int(np.log2(extent / h[ii])) + 1
h_dim += [np.ones(2 ** maxLevel) * h[ii]]
# Define the mesh and origin
mesh = discretize.TreeMesh(h_dim)
for ii, cc in enumerate(nC):
core = limits[ii][0] - limits[ii][1]
pad2 = int(np.log2(padding_distance[ii][0] / h[ii] + 1))
nC_origin += [int(np.ceil((mesh.h[ii].sum() - core) / h[ii] / 2))]
# Set origin
origin = []
for ii, hi in enumerate(mesh.h):
origin += [limits[ii][1] - np.sum(hi[: nC_origin[ii]])]
mesh.origin = np.hstack(origin)
# Shift mesh if global mesh is used based on closest to centroid
axis = ["x", "y", "z"]
if base_mesh is not None:
for dim in range(base_mesh.dim):
cc_base = getattr(
base_mesh,
"cell_centers_{orientation}".format(orientation=axis[dim]),
)
cc_local = getattr(
mesh, "cell_centers_{orientation}".format(orientation=axis[dim])
)
shift = (
cc_base[np.max([np.searchsorted(cc_base, center[dim]) - 1, 0])]
- cc_local[np.max([np.searchsorted(cc_local, center[dim]) - 1, 0])]
)
origin[dim] += shift
mesh.origin = np.hstack(origin)
return mesh
def refine_tree_xyz(
mesh,
xyz,
method="radial",
octree_levels=[1, 1, 1],
octree_levels_padding=None,
finalize=False,
min_level=0,
max_distance=np.inf,
):
"""Refine region within a :class:`~discretize.TreeMesh`
This function refines the specified region of a tree mesh using
one of several methods. These are summarized below:
**radial:** refines based on radial distances from a set of xy[z] locations.
Consider a tree mesh whose smallest cell size has a width of *h* . And
*octree_levels = [nc1, nc2, nc3, ...]* . Within a distance of *nc1 x h*
from any of the points supplied, the smallest cell size is used. Within a distance of
*nc2 x (2h)* , the cells will have a width of *2h* . Within a distance of *nc3 x (4h)* ,
the cells will have a width of *4h* . Etc...
**surface:** refines downward from a triangulated surface.
Consider a tree mesh whose smallest cell size has a width of *h*. And
*octree_levels = [nc1, nc2, nc3, ...]* . Within a downward distance of *nc1 x h*
from the topography (*xy[z]* ) supplied, the smallest cell size is used. The
topography is triangulated if the points supplied are coarser than the cell
size. No refinement is done above the topography. Within a vertical distance of
*nc2 x (2h)* , the cells will have a width of *2h* . Within a vertical distance
of *nc3 x (4h)* , the cells will have a width of *4h* . Etc...
**box:** refines inside the convex hull defined by the xy[z] locations.
Consider a tree mesh whose smallest cell size has a width of *h*. And
*octree_levels = [nc1, nc2, nc3, ...]* . Within the convex hull defined by *xyz* ,
the smallest cell size is used. Within a distance of *nc2 x (2h)* from that convex
hull, the cells will have a width of *2h* . Within a distance of *nc3 x (4h)* ,
the cells will have a width of *4h* . Etc...
Parameters
----------
mesh : discretize.TreeMesh
The tree mesh object to be refined
xyz : numpy.ndarray
2D array of points (n, dim)
method : str
Method used to refine the mesh based on xyz locations. Choose from
{'radial', 'surface', 'box'}.
- *radial:* Based on radial distance xy[z] and cell centers
- *surface:* Refines downward from a triangulated surface
- *box:* Inside limits defined by outer xy[z] locations
octree_levels : list of int
Minimum number of cells around points in each *k* octree level
starting from the smallest cells size; i.e. *[nc(k), nc(k-1), ...]* .
        Note that you *can* set entries to 0; e.g., if you don't want to discretize
        using the smallest cell size.
octree_levels_padding : list of int
Padding cells added to extend the region of refinement at each level.
Used for *method = surface* and *box*. Has the form *[nc(k), nc(k-1), ...]*
finalize : bool
Finalize the tree mesh {True, False}. (*Default = False*)
min_level : int
Sets the largest cell size allowed in the mesh. The default is *0* ,
which allows the largest cell size to be used.
max_distance : float
Maximum refinement distance from xy[z] locations.
Used if *method* = "surface" to reduce interpolation distance
Returns
--------
discretize.TreeMesh
Tree mesh
Examples
--------
Here we use the **refine_tree_xyz** function refine a tree mesh
based on topography as well as a cluster of points.
>>> from discretize import TreeMesh
>>> from discretize.utils import mkvc, refine_tree_xyz
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>>
>>> dx = 5 # minimum cell width (base mesh cell width) in x
>>> dy = 5 # minimum cell width (base mesh cell width) in y
>>>
>>> x_length = 300.0 # domain width in x
>>> y_length = 300.0 # domain width in y
>>>
>>> # Compute number of base mesh cells required in x and y
>>> nbcx = 2 ** int(np.round(np.log(x_length / dx) / np.log(2.0)))
>>> nbcy = 2 ** int(np.round(np.log(y_length / dy) / np.log(2.0)))
>>>
>>> # Define the base mesh
>>> hx = [(dx, nbcx)]
>>> hy = [(dy, nbcy)]
>>> mesh = TreeMesh([hx, hy], x0="CC")
>>>
>>> # Refine surface topography
>>> xx = mesh.vectorNx
>>> yy = -3 * np.exp((xx ** 2) / 100 ** 2) + 50.0
>>> pts = np.c_[mkvc(xx), mkvc(yy)]
>>> mesh = refine_tree_xyz(
>>> mesh, pts, octree_levels=[2, 4], method="surface", finalize=False
>>> )
>>>
>>> # Refine mesh near points
>>> xx = np.array([-10.0, 10.0, 10.0, -10.0])
>>> yy = np.array([-40.0, -40.0, -60.0, -60.0])
>>> pts = np.c_[mkvc(xx), mkvc(yy)]
>>> mesh = refine_tree_xyz(mesh, pts, octree_levels=[4, 2], method="radial", finalize=False)
>>>
>>> mesh.finalize()
>>>
>>> fig = plt.figure(figsize=(6, 6))
>>> ax = fig.add_subplot(111)
>>> mesh.plotGrid(ax=ax)
>>> ax.set_xbound(mesh.x0[0], mesh.x0[0] + np.sum(mesh.hx))
>>> ax.set_ybound(mesh.x0[1], mesh.x0[1] + np.sum(mesh.hy))
>>> ax.set_title("QuadTree Mesh")
>>> plt.show()
"""
if octree_levels_padding is not None:
if len(octree_levels_padding) != len(octree_levels):
raise ValueError(
"'octree_levels_padding' must be the length %i" % len(octree_levels)
)
else:
octree_levels_padding = np.zeros_like(octree_levels)
octree_levels = np.asarray(octree_levels)
octree_levels_padding = np.asarray(octree_levels_padding)
# Trigger different refine methods
if method.lower() == "radial":
# Compute the outer limits of each octree level
rMax = np.cumsum(
mesh.h[0].min() * octree_levels * 2 ** np.arange(len(octree_levels))
)
rs = np.ones(xyz.shape[0])
level = np.ones(xyz.shape[0], dtype=np.int32)
for ii, nC in enumerate(octree_levels):
# skip "zero" sized balls
if rMax[ii] > 0:
mesh.refine_ball(
xyz, rs * rMax[ii], level * (mesh.max_level - ii), finalize=False
)
if finalize:
mesh.finalize()
elif method.lower() == "surface":
# Compute centroid
centroid = np.mean(xyz, axis=0)
if mesh.dim == 2:
rOut = np.abs(centroid[0] - xyz).max()
hz = mesh.h[1].min()
else:
# Largest outer point distance
rOut = np.linalg.norm(
np.r_[
np.abs(centroid[0] - xyz[:, 0]).max(),
np.abs(centroid[1] - xyz[:, 1]).max(),
]
)
hz = mesh.h[2].min()
# Compute maximum depth of refinement
zmax = np.cumsum(hz * octree_levels * 2 ** np.arange(len(octree_levels)))
# Compute maximum horizontal padding offset
padWidth = np.cumsum(
mesh.h[0].min()
* octree_levels_padding
* 2 ** np.arange(len(octree_levels_padding))
)
# Increment the vertical offset
zOffset = 0
xyPad = -1
depth = zmax[-1]
# Cycle through the Tree levels backward
for ii in range(len(octree_levels) - 1, -1, -1):
dx = mesh.h[0].min() * 2 ** ii
if mesh.dim == 3:
dy = mesh.h[1].min() * 2 ** ii
dz = mesh.h[2].min() * 2 ** ii
else:
dz = mesh.h[1].min() * 2 ** ii
# Increase the horizontal extent of the surface
if xyPad != padWidth[ii]:
xyPad = padWidth[ii]
# Calculate expansion for padding XY cells
expansion_factor = (rOut + xyPad) / rOut
xLoc = (xyz - centroid) * expansion_factor + centroid
if mesh.dim == 3:
# Create a new triangulated surface
tri2D = Delaunay(xLoc[:, :2])
F = interpolate.LinearNDInterpolator(tri2D, xLoc[:, 2])
else:
F = interpolate.interp1d(
xLoc[:, 0], xLoc[:, 1], fill_value="extrapolate"
)
limx = np.r_[xLoc[:, 0].max(), xLoc[:, 0].min()]
nCx = int(np.ceil((limx[0] - limx[1]) / dx))
if mesh.dim == 3:
limy = np.r_[xLoc[:, 1].max(), xLoc[:, 1].min()]
nCy = int(np.ceil((limy[0] - limy[1]) / dy))
# Create a grid at the octree level in xy
CCx, CCy = np.meshgrid(
np.linspace(limx[1], limx[0], nCx),
np.linspace(limy[1], limy[0], nCy),
)
xy = np.c_[CCx.reshape(-1), CCy.reshape(-1)]
# Only keep points within triangulation
indexTri = tri2D.find_simplex(xy)
else:
xy = np.linspace(limx[1], limx[0], nCx)
indexTri = np.ones_like(xy, dtype="bool")
# Interpolate the elevation linearly
z = F(xy[indexTri != -1])
newLoc = np.c_[xy[indexTri != -1], z]
# Only keep points within max_distance
tree = cKDTree(xyz)
r, ind = tree.query(newLoc)
# Apply vertical padding for current octree level
dim = mesh.dim - 1
zOffset = 0
while zOffset < depth:
indIn = r < (max_distance + padWidth[ii])
nnz = int(np.sum(indIn))
if nnz > 0:
mesh.insert_cells(
np.c_[newLoc[indIn, :dim], newLoc[indIn, -1] - zOffset],
np.ones(nnz) * mesh.max_level - ii,
finalize=False,
)
zOffset += dz
depth -= dz * octree_levels[ii]
if finalize:
mesh.finalize()
elif method.lower() == "box":
# Define the data extent [bottom SW, top NE]
bsw = np.min(xyz, axis=0)
tne = np.max(xyz, axis=0)
hs = np.asarray([h.min() for h in mesh.h])
hx = hs[0]
hz = hs[-1]
# Pre-calculate outer extent of each level
# x_pad
padWidth = np.cumsum(
hx * octree_levels_padding * 2 ** np.arange(len(octree_levels))
)
if mesh.dim == 3:
# y_pad
hy = hs[1]
padWidth = np.c_[
padWidth,
np.cumsum(
hy * octree_levels_padding * 2 ** np.arange(len(octree_levels))
),
]
# Pre-calculate max depth of each level
padWidth = np.c_[
padWidth,
np.cumsum(
hz
* np.maximum(octree_levels - 1, 0)
* 2 ** np.arange(len(octree_levels))
),
]
levels = []
BSW = []
TNE = []
for ii, octZ in enumerate(octree_levels):
if octZ > 0:
levels.append(mesh.max_level - ii)
BSW.append(bsw - padWidth[ii])
TNE.append(tne + padWidth[ii])
mesh.refine_box(BSW, TNE, levels, finalize=finalize)
else:
raise NotImplementedError(
"Only method= 'radial', 'surface'" " or 'box' have been implemented"
)
return mesh
def active_from_xyz(mesh, xyz, grid_reference="CC", method="linear"):
"""Return boolean array indicating which cells are below surface
For a set of locations defining a surface, **active_from_xyz** outputs a
    boolean array indicating which mesh cells lie below the surface points.
This method uses SciPy's interpolation routine to interpolate between
location points defining the surface. Nearest neighbour interpolation
is used for cells outside the convex hull of the surface points.
Parameters
----------
mesh : discretize.TensorMesh or discretize.TreeMesh or discretize.CylindricalMesh
Mesh object. If *mesh* is a cylindrical mesh, it must be symmetric
xyz : numpy.ndarray
Points defining the surface topography (*, dim).
grid_reference : str {'CC', 'N'}
Define where the cell is defined relative to surface. Choose between {'CC','N'}
- If 'CC' is used, cells are active if their centers are below the surface.
- If 'N' is used, cells are active if they lie entirely below the surface.
method : str {'linear', 'nearest'}
Interpolation method for locations between the xyz points.
Returns
-------
numpy.ndarray of bool
1D mask array of *bool* for the active cells below xyz.
Examples
--------
Here we define the active cells below a parabola. We demonstrate the differences
that appear when using the 'CC' and 'N' options for *reference_grid*.
>>> import matplotlib.pyplot as plt
>>> import numpy as np
>>> from discretize import TensorMesh
>>> from discretize.utils import active_from_xyz
>>>
>>> mesh = TensorMesh([5, 5])
>>> topo_func = lambda x: -3*(x-0.2)*(x-0.8)+.5
>>> topo_points = np.linspace(0, 1)
>>> topo_vals = topo_func(topo_points)
>>>
>>> active_cc = active_from_xyz(mesh, np.c_[topo_points, topo_vals], grid_reference='CC')
>>> ax = plt.subplot(121)
>>> mesh.plot_image(active_cc, ax=ax)
>>> mesh.plot_grid(centers=True, ax=ax)
>>> ax.plot(np.linspace(0,1), topo_func(np.linspace(0,1)), color='C3')
>>> ax.set_title("CC")
>>>
>>> active_n = active_from_xyz(mesh, np.c_[topo_points, topo_vals], grid_reference='N')
>>> ax = plt.subplot(122)
>>> mesh.plot_image(active_n, ax=ax)
>>> mesh.plot_grid(nodes=True, ax=ax)
>>> ax.plot(np.linspace(0,1), topo_func(np.linspace(0,1)), color='C3')
>>> ax.set_title("N")
>>> plt.show()
"""
try:
if not mesh.is_symmetric:
raise NotImplementedError(
"Unsymmetric CylindricalMesh is not yet supported"
)
except AttributeError:
pass
if grid_reference not in ["N", "CC"]:
raise ValueError(
"Value of grid_reference must be 'N' (nodal) or 'CC' (cell center)"
)
dim = mesh.dim - 1
if mesh.dim == 3:
if xyz.shape[1] != 3:
raise ValueError("xyz locations of shape (*, 3) required for 3D mesh")
if method == "linear":
tri2D = Delaunay(xyz[:, :2])
z_interpolate = interpolate.LinearNDInterpolator(tri2D, xyz[:, 2])
else:
z_interpolate = interpolate.NearestNDInterpolator(xyz[:, :2], xyz[:, 2])
elif mesh.dim == 2:
if xyz.shape[1] != 2:
raise ValueError("xyz locations of shape (*, 2) required for 2D mesh")
z_interpolate = interpolate.interp1d(
xyz[:, 0], xyz[:, 1], bounds_error=False, fill_value=np.nan, kind=method
)
else:
if xyz.ndim != 1:
raise ValueError("xyz locations of shape (*, ) required for 1D mesh")
if grid_reference == "CC":
# this should work for all 4 mesh types...
locations = mesh.cell_centers
if mesh.dim == 1:
active = np.zeros(mesh.nC, dtype="bool")
active[np.searchsorted(mesh.cell_centers_x, xyz).max() :] = True
return active
elif grid_reference == "N":
try:
# try for Cyl, Tensor, and Tree operations
if mesh.dim == 3:
locations = np.vstack(
[
mesh.cell_centers
+ (np.c_[-1, 1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[-1, -1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[1, 1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[1, -1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
]
)
elif mesh.dim == 2:
locations = np.vstack(
[
mesh.cell_centers
+ (np.c_[-1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
mesh.cell_centers
+ (np.c_[1, 1][:, None] * mesh.h_gridded / 2.0).squeeze(),
]
)
else:
active = np.zeros(mesh.nC, dtype="bool")
active[np.searchsorted(mesh.nodes_x, xyz).max() :] = True
return active
except AttributeError:
# Try for Curvilinear Mesh
gridN = mesh.gridN.reshape((*mesh.vnN, mesh.dim), order="F")
if mesh.dim == 3:
locations = np.vstack(
[
gridN[:-1, 1:, 1:].reshape((-1, mesh.dim), order="F"),
gridN[:-1, :-1, 1:].reshape((-1, mesh.dim), order="F"),
gridN[1:, 1:, 1:].reshape((-1, mesh.dim), order="F"),
gridN[1:, :-1, 1:].reshape((-1, mesh.dim), order="F"),
]
)
elif mesh.dim == 2:
locations = np.vstack(
[
gridN[:-1, 1:].reshape((-1, mesh.dim), order="F"),
gridN[1:, 1:].reshape((-1, mesh.dim), order="F"),
]
)
# Interpolate z values on CC or N
z_xyz = z_interpolate(locations[:, :-1]).squeeze()
# Apply nearest neighbour if in extrapolation
ind_nan = np.isnan(z_xyz)
if any(ind_nan):
tree = cKDTree(xyz)
_, ind = tree.query(locations[ind_nan, :])
z_xyz[ind_nan] = xyz[ind, dim]
# Create an active bool of all True
active = np.all(
(locations[:, dim] < z_xyz).reshape((mesh.nC, -1), order="F"), axis=1
)
return active.ravel()
meshTensor = deprecate_function(unpack_widths, "meshTensor", removal_version="1.0.0")
closestPoints = deprecate_function(
closest_points_index, "closestPoints", removal_version="1.0.0"
)
ExtractCoreMesh = deprecate_function(
extract_core_mesh, "ExtractCoreMesh", removal_version="1.0.0"
)
closest_points = deprecate_function(
closest_points_index, "closest_points", removal_version="1.0.0"
)
| mit | -6,004,870,361,446,141,000 | 33.145911 | 96 | 0.544269 | false | 3.540619 | false | false | false |
saseumn/website | migrations/versions/e7410a32fd81_.py | 1 | 1218 | """Add resumes.
Revision ID: e7410a32fd81
Revises: 4453e9c3a9be
Create Date: 2018-01-17 11:49:53.974612
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e7410a32fd81'
down_revision = '4453e9c3a9be'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('resume_permissions',
sa.Column('id', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('resumes',
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('name', sa.Unicode(length=32), nullable=True),
sa.Column('hashed', sa.Unicode(length=64), nullable=True),
sa.Column('date_created', sa.DateTime(), nullable=True),
sa.Column('date_updated', sa.DateTime(), nullable=True),
sa.Column('user_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
sa.PrimaryKeyConstraint('id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('resumes')
op.drop_table('resume_permissions')
# ### end Alembic commands ###
| mit | 9,013,851,385,332,065,000 | 28 | 65 | 0.666667 | false | 3.327869 | false | false | false |
fedhere/fedsastroutils | myastrotools.py | 1 | 8101 | #!/usr/bin/env python
# myastrotools
# some useful astro functions
# not really "MY" astro toold either. mostly inheritade, as they propagate through generations of astronomers. couldnt tell who was the author of which function....
import datetime
# chomp
####################
def absmag(appmag,d,dunits='pc'):
from numpy import log10
if dunits == 'Mpc':
d=d*1.0e6
return appmag - 2.5 *log10( (d/10)**2 )
def chomp(ifile):
lines = []
for l in [l.strip() for l in open(ifile).readlines()]:
if len(l)<1:
continue
lines.append(l)
return lines
##################
# compare lists of ra,decl
#####################
def match_radeclists(data1, data2, tol):
import math
mdata = []
for d1 in data1:
x1 = d1[0]
y1 = d1[1]
for d2 in data2:
x2 = d2[0]
y2 = d2[1]
print x1,x2,y1,y2
dist = math.sqrt( (x1-x2)*(x1-x2) + (y1-y2)*(y1-y2) )
if dist < tol:
# print x1, y1, mag1, x2, y2, mag2, dist/0.0002
mdata.append([dist, x1, y1, x2, y2])
print mdata
return mdata
##################
# print offset
#####################
def offsets(data1, data2):
import math
mdata = []
for d1 in data1:
x1 = d1[0]
y1 = d1[1]
for d2 in data2:
x2 = d2[0]
y2 = d2[1]
# print x1,x2,y1,y2,
offx= x1-x2
offy = y1-y2
#print offx,offy
return offx,offy
##################
# ra in seg to deg
#####################
def raseg2deg(raseg):
if type(raseg) is float:
return raseg
st = raseg.split(':') # hr, min sec
# print 'ra2deg: st--> ', st
if len(st)<1:
return -999.0
return 15.0*(float(st[0]) + float(st[1])/60.0 + float(st[2])/3600.0)
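# Illustrative example (added comment): raseg2deg('10:30:00') returns
# 15 * (10 + 30/60.) = 157.5 degrees.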
#####################
# dec in seg to degree
#####################
def decseg2deg(decseg):
if type(decseg) is float:
return decseg
decseg = decseg.replace(' ','')
if not (decseg[0] == '+' or decseg[0] == '-'):
decseg='+'+decseg
st = decseg[1:].split(':') # deg, min, sec
# print 'dec2deg: st--> ', st
if len(st)<1:
return -999.0
parity = decseg[0]
decdeg = float(st[0])+ float(st[1])/60.0 + float(st[2])/3600.0
if parity == '-':
decdeg *= -1.0
return decdeg
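# Illustrative example (added comment): decseg2deg('-05:30:00') returns
# -(5 + 30/60.) = -5.5 degrees.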
##################
# ra in deg to seg
def radeg2seg(ra):
# ra
ra /= 15.0
try:
rhr = int(ra)
except:
rhr=list(map(int, ra))
ra -= rhr
ra *= 60.0
try:
rmn = int(ra)
except:
rmn=list(map(int, ra))
ra -= rmn
ra *= 60.0
rsc = ra
try:
return(':'.join([str('%02d' % rhr), str('%02d' % rmn), '%02d' % (int(rsc)) + ('%.3f' % (rsc-int(rsc)))[-4:]]))
except:
newlist=[]
for i,hr in enumerate(rhr):
newlist.append(':'.join([str('%02d' % hr), str('%02d' % rmn[i]), '%02d' % (int(rsc[i])) + ('%.3f' % (rsc[i]-int(rsc[i])))[-4:]]))
return(newlist)
##################
# dec in deg to seg
def decdeg2seg(dec):
# dec
iamneg = 0
try:
if dec<0:
iamneg = 1
dec *= -1.0
ddeg = int(dec)
parity = '+'
if iamneg==1:
parity = '-'
except:
ddeg=list(map(int, dec))
parity=['+']*len(ddeg)
for i,d in enumerate(ddeg):
if d<0:
parity[i]='-'
dec -= ddeg
dec *= 60.0
try:
dmn = int(dec)
except:
dmn=list(map(int, dec))
dec -= dmn
dec *= 60.0
dsc = dec
try:
return(parity + ':'.join([str(ddeg), str('%02d' % dmn), '%02d' % (int(dsc)) + ('%.2f' % (dsc-int(dsc)))[-3:]]))
except:
newlist=[]
for i,dg in enumerate(ddeg):
newlist.append('%s' % str(parity[i])+':'.join([str('%02d' % dg), str('%02d' % dmn[i]), '%02d' % (int(dsc[i])) + ('%.3f' % (dsc[i]-int(dsc[i])))[-4:]]))
return(newlist)
##################
# list of ra, decl in seg to deg
#####################
def posseg2deg(pos):
raseg = pos[0]
decseg = pos[1]
radeg = raseg2deg(raseg)
decdeg = decseg2deg(decseg)
    ans = [radeg, decdeg]
return(ans)
##################
# list of ra, decl in deg to seg
#####################
def posdeg2seg(pos):
radeg = pos[0]
decdeg = pos[1]
raseg = radeg2seg(radeg)
decseg = decdeg2seg(decdeg)
ans = [raseg, decseg]
return(ans)
#######################
def gregorian_to_ut_mjd(date):
d0 = datetime.datetime(1858, 11, 17)
if type(date)==datetime.date:
d0 = datetime.date(1858, 11, 17)
date=date-d0
# print date
return date.days+ (date.seconds)/86400.
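# Illustrative example (added comment): the MJD epoch is 1858-11-17 00:00 UT, so
# gregorian_to_ut_mjd(datetime.datetime(2000, 1, 1)) returns 51544.0.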
#######################
def jd2gdate(myjd):
"""Julian date to Gregorian calendar date and time of day.
The input and output are for the proleptic Gregorian calendar.
Parameters
----------
myjd:
julian date (float).
Returns
-------
y, m, d, f : int, int, int, float
Four element tuple containing year, month, day and the
fractional part of the day in the Gregorian calendar. The first
three are integers, and the last part is a float.
"""
from math import modf
jd_i = int(myjd)
f = myjd-jd_i
# Set JD to noon of the current date. Fractional part is the
# fraction from midnight of the current date.
if -0.5 < f < 0.5:
f += 0.5
elif f >= 0.5:
jd_i += 1
f -= 0.5
elif f <= -0.5:
jd_i -= 1
f += 1.5
l = jd_i + 68569
n = int((4 * l) / 146097.0)
l -= int(((146097 * n) + 3) / 4.0)
i = int((4000 * (l + 1)) / 1461001)
l -= int((1461 * i) / 4.0) - 31
j = int((80 * l) / 2447.0)
day = l - int((2447 * j) / 80.0)
l = int(j / 11.0)
month = j + 2 - (12 * l)
year = 100 * (n - 49) + i + l
return int(year), int(month), int(day), f
#######################
def get_mjdoff(dt):
mjdoff = 60*60*dt.hour + 60*dt.minute + dt.second
mjdoff /= 24.0*3600
return mjdoff
#######################
def get_cur_epoch(pmjd):
unow = datetime.datetime.utcnow()
nmjd = gregorian_to_ut_mjd(unow)
mjdoff = get_mjdoff(unow)
nmjd += mjdoff
if pmjd<0:
return [nmjd, -1.0]
# return nmjd-pmjd
return [nmjd, '%.2f' % (nmjd-pmjd)]
# vacuum to air conversion from SDSS-III website
def vac2air(x):
''' vacuum to air conversion
as given on the SDSS-III website
x in Angstroms
'''
tmp = 1.0 +\
2.735182e-4 +\
131.4182/x**2 +\
2.76249e8/x**4
return x/tmp
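# Illustrative example (added comment): vac2air(5000.0) returns ~4998.60,
# i.e. a vacuum wavelength of 5000 A corresponds to ~4998.6 A in air
# (effective index of refraction ~1.000279).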
# vacuum to air conversion from SDSS-III website
def indexOfRefraction_makee(x):
''' index of refraction at 0C as given by makee website
x in Angstroms
'''
n = (2875.66 + 13.412/(x**2*1e-8) + 0.3777/(x**4*1e-16))*1e-7
return n+1
def indexOfRefraction_morton(x):
''' index of refraction at 0C as given by Morton 1991
x in Angstroms
'''
s = 1.0e4/x
tmp = 6.4328e-5 + 2.94981e-2/(146-s**2) + 2.5540e-4/(41-s**2)
return 1+tmp
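# Illustrative example (added comment): at x = 5000 Angstroms, s = 2.0 and
# n ~ 1.000279, consistent with the vac2air() conversion above.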
def makeGaussian2d(sizex, fwhm = 20):
""" Make a square gaussian kernel.
size is the length of a side of the square
fwhm is full-width-half-maximum, which
can be thought of as an effective radius.
"""
from scipy import arange,newaxis, exp, log
x=arange(sizex)
y=x[:,newaxis]
x0,y0=sizex/2,sizex/2
g=exp(-4*log(2)*((x-x0)**2+(y-y0)**2)/fwhm**2)
return g
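# Illustrative example (added comment): makeGaussian2d(64, fwhm=10) returns a
# 64x64 array peaking at 1.0 at the center pixel (32, 32) and falling to 0.5
# at a radius of 5 pixels (half the FWHM).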
#def indexOfRefraction_makee(x):
# ''' index of refraction as given in makee website
# '''
# n = ((2875.66 + 13.412/(w**2*1e-8) + 0.3777/(w**4*1e-16))*1e-7)
# return n+1
def print_timing(func):
import time
print "timingit"
print func
def wrapper(*arg):
t1 = time.time()
res = func(*arg)
t2 = time.time()
print '%s took %0.3f ms' % (func.func_name, (t2-t1)*1000.0)
return res
return wrapper
# declare the @ decorator just before the function, invokes print_timing()
| mit | -8,112,262,724,564,647,000 | 24.080495 | 164 | 0.504876 | false | 2.846451 | false | false | false |
VanceKingSaxbeA/GoldSVM | GoldSaxSVM.py | 1 | 16983 | # Owner & Copyrights: Vance King Saxbe. A.
""" Copyright (c) <2014> Author Vance King Saxbe. A, and contributors Power Dominion Enterprise, Precieux Consulting and other contributors. Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager."""'''
Modelled, Architected and designed by Vance King Saxbe. A. with the geeks from GoldSax Consulting, GoldSax Money, GoldSax Treasury, GoldSax Finance, GoldSax Banking and GoldSax Technologies email @vsaxbe@yahoo.com. Development teams from Power Dominion Enterprise, Precieux Consulting. Project sponsored by GoldSax Foundation, GoldSax Group and executed by GoldSax Manager.
'''
from numpy import *
from time import sleep
class GoldSaxSVM:
def loadDataSet(fileName):
dataMat = []; labelMat = []
fr = open(fileName)
for line in fr.readlines():
lineArr = line.strip().split('\t')
dataMat.append([float(lineArr[0]), float(lineArr[1])])
labelMat.append(float(lineArr[2]))
return dataMat,labelMat
def selectJrand(i,m):
j=i #we want to select any J not equal to i
while (j==i):
j = int(random.uniform(0,m))
return j
def clipAlpha(aj,H,L):
if aj > H:
aj = H
if L > aj:
aj = L
return aj
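    # Illustrative example (added comment): clipAlpha(5.0, H=4.0, L=0.0) -> 4.0
    # and clipAlpha(-1.0, H=4.0, L=0.0) -> 0.0, i.e. alpha is clamped to [L, H].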
def smoSimple(dataMatIn, classLabels, C, toler, maxIter):
dataMatrix = mat(dataMatIn); labelMat = mat(classLabels).transpose()
b = 0; m,n = shape(dataMatrix)
alphas = mat(zeros((m,1)))
iter = 0
while (iter < maxIter):
alphaPairsChanged = 0
for i in range(m):
fXi = float(multiply(alphas,labelMat).T*(dataMatrix*dataMatrix[i,:].T)) + b
Ei = fXi - float(labelMat[i])#if checks if an example violates KKT conditions
if ((labelMat[i]*Ei < -toler) and (alphas[i] < C)) or ((labelMat[i]*Ei > toler) and (alphas[i] > 0)):
j = selectJrand(i,m)
fXj = float(multiply(alphas,labelMat).T*(dataMatrix*dataMatrix[j,:].T)) + b
Ej = fXj - float(labelMat[j])
alphaIold = alphas[i].copy(); alphaJold = alphas[j].copy();
if (labelMat[i] != labelMat[j]):
L = max(0, alphas[j] - alphas[i])
H = min(C, C + alphas[j] - alphas[i])
else:
L = max(0, alphas[j] + alphas[i] - C)
H = min(C, alphas[j] + alphas[i])
if L==H: print "L==H"; continue
eta = 2.0 * dataMatrix[i,:]*dataMatrix[j,:].T - dataMatrix[i,:]*dataMatrix[i,:].T - dataMatrix[j,:]*dataMatrix[j,:].T
if eta >= 0: print "eta>=0"; continue
alphas[j] -= labelMat[j]*(Ei - Ej)/eta
alphas[j] = clipAlpha(alphas[j],H,L)
if (abs(alphas[j] - alphaJold) < 0.00001): print "j not moving enough"; continue
alphas[i] += labelMat[j]*labelMat[i]*(alphaJold - alphas[j])#update i by the same amount as j
#the update is in the oppostie direction
b1 = b - Ei- labelMat[i]*(alphas[i]-alphaIold)*dataMatrix[i,:]*dataMatrix[i,:].T - labelMat[j]*(alphas[j]-alphaJold)*dataMatrix[i,:]*dataMatrix[j,:].T
b2 = b - Ej- labelMat[i]*(alphas[i]-alphaIold)*dataMatrix[i,:]*dataMatrix[j,:].T - labelMat[j]*(alphas[j]-alphaJold)*dataMatrix[j,:]*dataMatrix[j,:].T
if (0 < alphas[i]) and (C > alphas[i]): b = b1
elif (0 < alphas[j]) and (C > alphas[j]): b = b2
else: b = (b1 + b2)/2.0
alphaPairsChanged += 1
print "iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged)
if (alphaPairsChanged == 0): iter += 1
else: iter = 0
print "iteration number: %d" % iter
return b,alphas
def kernelTrans(X, A, kTup): #calc the kernel or transform data to a higher dimensional space
m,n = shape(X)
K = mat(zeros((m,1)))
if kTup[0]=='lin': K = X * A.T #linear kernel
elif kTup[0]=='rbf':
for j in range(m):
deltaRow = X[j,:] - A
K[j] = deltaRow*deltaRow.T
K = exp(K/(-1*kTup[1]**2)) #divide in NumPy is element-wise not matrix like Matlab
else: raise NameError('Houston We Have a Problem -- \
That Kernel is not recognized')
return K
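    # Added note (not in the original source): for kTup = ('rbf', sigma) the
    # loop above computes the Gaussian radial basis kernel
    #     K(x_j, A) = exp(-||x_j - A||**2 / sigma**2)
    # i.e. the book-style variant without the factor of 2 in the denominator.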
class optStruct:
def __init__(self,dataMatIn, classLabels, C, toler, kTup): # Initialize the structure with the parameters
self.X = dataMatIn
self.labelMat = classLabels
self.C = C
self.tol = toler
self.m = shape(dataMatIn)[0]
self.alphas = mat(zeros((self.m,1)))
self.b = 0
self.eCache = mat(zeros((self.m,2))) #first column is valid flag
self.K = mat(zeros((self.m,self.m)))
for i in range(self.m):
self.K[:,i] = kernelTrans(self.X, self.X[i,:], kTup)
def calcEk(oS, k):
fXk = float(multiply(oS.alphas,oS.labelMat).T*oS.K[:,k] + oS.b)
Ek = fXk - float(oS.labelMat[k])
return Ek
def selectJ(i, oS, Ei): #this is the second choice -heurstic, and calcs Ej
maxK = -1; maxDeltaE = 0; Ej = 0
oS.eCache[i] = [1,Ei] #set valid #choose the alpha that gives the maximum delta E
validEcacheList = nonzero(oS.eCache[:,0].A)[0]
if (len(validEcacheList)) > 1:
for k in validEcacheList: #loop through valid Ecache values and find the one that maximizes delta E
if k == i: continue #don't calc for i, waste of time
Ek = calcEk(oS, k)
deltaE = abs(Ei - Ek)
if (deltaE > maxDeltaE):
maxK = k; maxDeltaE = deltaE; Ej = Ek
return maxK, Ej
else: #in this case (first time around) we don't have any valid eCache values
j = selectJrand(i, oS.m)
Ej = calcEk(oS, j)
return j, Ej
def updateEk(oS, k):#after any alpha has changed update the new value in the cache
Ek = calcEk(oS, k)
oS.eCache[k] = [1,Ek]
def innerL(i, oS):
Ei = calcEk(oS, i)
if ((oS.labelMat[i]*Ei < -oS.tol) and (oS.alphas[i] < oS.C)) or ((oS.labelMat[i]*Ei > oS.tol) and (oS.alphas[i] > 0)):
j,Ej = selectJ(i, oS, Ei) #this has been changed from selectJrand
alphaIold = oS.alphas[i].copy(); alphaJold = oS.alphas[j].copy();
if (oS.labelMat[i] != oS.labelMat[j]):
L = max(0, oS.alphas[j] - oS.alphas[i])
H = min(oS.C, oS.C + oS.alphas[j] - oS.alphas[i])
else:
L = max(0, oS.alphas[j] + oS.alphas[i] - oS.C)
H = min(oS.C, oS.alphas[j] + oS.alphas[i])
if L==H: print "L==H"; return 0
eta = 2.0 * oS.K[i,j] - oS.K[i,i] - oS.K[j,j] #changed for kernel
if eta >= 0: print "eta>=0"; return 0
oS.alphas[j] -= oS.labelMat[j]*(Ei - Ej)/eta
oS.alphas[j] = clipAlpha(oS.alphas[j],H,L)
updateEk(oS, j) #added this for the Ecache
if (abs(oS.alphas[j] - alphaJold) < 0.00001): print "j not moving enough"; return 0
oS.alphas[i] += oS.labelMat[j]*oS.labelMat[i]*(alphaJold - oS.alphas[j])#update i by the same amount as j
updateEk(oS, i) #added this for the Ecache #the update is in the oppostie direction
b1 = oS.b - Ei- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.K[i,i] - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.K[i,j]
b2 = oS.b - Ej- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.K[i,j]- oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.K[j,j]
if (0 < oS.alphas[i]) and (oS.C > oS.alphas[i]): oS.b = b1
elif (0 < oS.alphas[j]) and (oS.C > oS.alphas[j]): oS.b = b2
else: oS.b = (b1 + b2)/2.0
return 1
else: return 0
def smoP(dataMatIn, classLabels, C, toler, maxIter,kTup=('lin', 0)): #full Platt SMO
oS = optStruct(mat(dataMatIn),mat(classLabels).transpose(),C,toler, kTup)
iter = 0
entireSet = True; alphaPairsChanged = 0
while (iter < maxIter) and ((alphaPairsChanged > 0) or (entireSet)):
alphaPairsChanged = 0
if entireSet: #go over all
for i in range(oS.m):
alphaPairsChanged += innerL(i,oS)
print "fullSet, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged)
iter += 1
else:#go over non-bound (railed) alphas
nonBoundIs = nonzero((oS.alphas.A > 0) * (oS.alphas.A < C))[0]
for i in nonBoundIs:
alphaPairsChanged += innerL(i,oS)
print "non-bound, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged)
iter += 1
if entireSet: entireSet = False #toggle entire set loop
elif (alphaPairsChanged == 0): entireSet = True
print "iteration number: %d" % iter
return oS.b,oS.alphas
def calcWs(alphas,dataArr,classLabels):
X = mat(dataArr); labelMat = mat(classLabels).transpose()
m,n = shape(X)
w = zeros((n,1))
for i in range(m):
w += multiply(alphas[i]*labelMat[i],X[i,:].T)
return w
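    # Usage sketch (added comment; variable names are illustrative): after
    # training with smoP, classify sample i of a data matrix datMat via
    #   w = calcWs(alphas, dataArr, labelArr)
    #   label_i = sign(datMat[i] * mat(w) + b)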
def testRbf(k1=1.3):
dataArr,labelArr = loadDataSet('testSetRBF.txt')
b,alphas = smoP(dataArr, labelArr, 200, 0.0001, 10000, ('rbf', k1)) #C=200 important
datMat=mat(dataArr); labelMat = mat(labelArr).transpose()
svInd=nonzero(alphas.A>0)[0]
sVs=datMat[svInd] #get matrix of only support vectors
labelSV = labelMat[svInd];
print "there are %d Support Vectors" % shape(sVs)[0]
m,n = shape(datMat)
errorCount = 0
for i in range(m):
kernelEval = kernelTrans(sVs,datMat[i,:],('rbf', k1))
predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
if sign(predict)!=sign(labelArr[i]): errorCount += 1
print "the training error rate is: %f" % (float(errorCount)/m)
dataArr,labelArr = loadDataSet('testSetRBF2.txt')
errorCount = 0
datMat=mat(dataArr); labelMat = mat(labelArr).transpose()
m,n = shape(datMat)
for i in range(m):
kernelEval = kernelTrans(sVs,datMat[i,:],('rbf', k1))
predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
if sign(predict)!=sign(labelArr[i]): errorCount += 1
print "the test error rate is: %f" % (float(errorCount)/m)
def img2vector(filename):
returnVect = zeros((1,1024))
fr = open(filename)
for i in range(32):
lineStr = fr.readline()
for j in range(32):
returnVect[0,32*i+j] = int(lineStr[j])
return returnVect
def loadImages(dirName):
from os import listdir
hwLabels = []
trainingFileList = listdir(dirName) #load the training set
m = len(trainingFileList)
trainingMat = zeros((m,1024))
for i in range(m):
fileNameStr = trainingFileList[i]
fileStr = fileNameStr.split('.')[0] #take off .txt
classNumStr = int(fileStr.split('_')[0])
if classNumStr == 9: hwLabels.append(-1)
else: hwLabels.append(1)
trainingMat[i,:] = img2vector('%s/%s' % (dirName, fileNameStr))
return trainingMat, hwLabels
def testDigits(kTup=('rbf', 10)):
dataArr,labelArr = loadImages('trainingDigits')
b,alphas = smoP(dataArr, labelArr, 200, 0.0001, 10000, kTup)
datMat=mat(dataArr); labelMat = mat(labelArr).transpose()
svInd=nonzero(alphas.A>0)[0]
sVs=datMat[svInd]
labelSV = labelMat[svInd];
print "there are %d Support Vectors" % shape(sVs)[0]
m,n = shape(datMat)
errorCount = 0
for i in range(m):
kernelEval = kernelTrans(sVs,datMat[i,:],kTup)
predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
if sign(predict)!=sign(labelArr[i]): errorCount += 1
print "the training error rate is: %f" % (float(errorCount)/m)
dataArr,labelArr = loadImages('testDigits')
errorCount = 0
datMat=mat(dataArr); labelMat = mat(labelArr).transpose()
m,n = shape(datMat)
for i in range(m):
kernelEval = kernelTrans(sVs,datMat[i,:],kTup)
predict=kernelEval.T * multiply(labelSV,alphas[svInd]) + b
if sign(predict)!=sign(labelArr[i]): errorCount += 1
print "the test error rate is: %f" % (float(errorCount)/m)
'''#######********************************
Non-Kernel Versions below
'''#######********************************
class optStructK:
def __init__(self,dataMatIn, classLabels, C, toler): # Initialize the structure with the parameters
self.X = dataMatIn
self.labelMat = classLabels
self.C = C
self.tol = toler
self.m = shape(dataMatIn)[0]
self.alphas = mat(zeros((self.m,1)))
self.b = 0
self.eCache = mat(zeros((self.m,2))) #first column is valid flag
def calcEkK(oS, k):
fXk = float(multiply(oS.alphas,oS.labelMat).T*(oS.X*oS.X[k,:].T)) + oS.b
Ek = fXk - float(oS.labelMat[k])
return Ek
def selectJK(i, oS, Ei): #this is the second choice -heurstic, and calcs Ej
maxK = -1; maxDeltaE = 0; Ej = 0
oS.eCache[i] = [1,Ei] #set valid #choose the alpha that gives the maximum delta E
validEcacheList = nonzero(oS.eCache[:,0].A)[0]
if (len(validEcacheList)) > 1:
for k in validEcacheList: #loop through valid Ecache values and find the one that maximizes delta E
if k == i: continue #don't calc for i, waste of time
Ek = calcEk(oS, k)
deltaE = abs(Ei - Ek)
if (deltaE > maxDeltaE):
maxK = k; maxDeltaE = deltaE; Ej = Ek
return maxK, Ej
else: #in this case (first time around) we don't have any valid eCache values
j = selectJrand(i, oS.m)
Ej = calcEk(oS, j)
return j, Ej
def updateEkK(oS, k):#after any alpha has changed update the new value in the cache
Ek = calcEk(oS, k)
oS.eCache[k] = [1,Ek]
def innerLK(i, oS):
    Ei = calcEkK(oS, i)
if ((oS.labelMat[i]*Ei < -oS.tol) and (oS.alphas[i] < oS.C)) or ((oS.labelMat[i]*Ei > oS.tol) and (oS.alphas[i] > 0)):
        j,Ej = selectJK(i, oS, Ei) #this has been changed from selectJrand
alphaIold = oS.alphas[i].copy(); alphaJold = oS.alphas[j].copy();
if (oS.labelMat[i] != oS.labelMat[j]):
L = max(0, oS.alphas[j] - oS.alphas[i])
H = min(oS.C, oS.C + oS.alphas[j] - oS.alphas[i])
else:
L = max(0, oS.alphas[j] + oS.alphas[i] - oS.C)
H = min(oS.C, oS.alphas[j] + oS.alphas[i])
if L==H: print "L==H"; return 0
eta = 2.0 * oS.X[i,:]*oS.X[j,:].T - oS.X[i,:]*oS.X[i,:].T - oS.X[j,:]*oS.X[j,:].T
if eta >= 0: print "eta>=0"; return 0
oS.alphas[j] -= oS.labelMat[j]*(Ei - Ej)/eta
oS.alphas[j] = clipAlpha(oS.alphas[j],H,L)
        updateEkK(oS, j) #added this for the Ecache
if (abs(oS.alphas[j] - alphaJold) < 0.00001): print "j not moving enough"; return 0
oS.alphas[i] += oS.labelMat[j]*oS.labelMat[i]*(alphaJold - oS.alphas[j])#update i by the same amount as j
        updateEkK(oS, i) #added this for the Ecache                    #the update is in the opposite direction
b1 = oS.b - Ei- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.X[i,:]*oS.X[i,:].T - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.X[i,:]*oS.X[j,:].T
b2 = oS.b - Ej- oS.labelMat[i]*(oS.alphas[i]-alphaIold)*oS.X[i,:]*oS.X[j,:].T - oS.labelMat[j]*(oS.alphas[j]-alphaJold)*oS.X[j,:]*oS.X[j,:].T
if (0 < oS.alphas[i]) and (oS.C > oS.alphas[i]): oS.b = b1
elif (0 < oS.alphas[j]) and (oS.C > oS.alphas[j]): oS.b = b2
else: oS.b = (b1 + b2)/2.0
return 1
else: return 0
def smoPK(dataMatIn, classLabels, C, toler, maxIter): #full Platt SMO
    oS = optStructK(mat(dataMatIn),mat(classLabels).transpose(),C,toler)
iter = 0
entireSet = True; alphaPairsChanged = 0
while (iter < maxIter) and ((alphaPairsChanged > 0) or (entireSet)):
alphaPairsChanged = 0
if entireSet: #go over all
for i in range(oS.m):
                alphaPairsChanged += innerLK(i,oS)
print "fullSet, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged)
iter += 1
else:#go over non-bound (railed) alphas
nonBoundIs = nonzero((oS.alphas.A > 0) * (oS.alphas.A < C))[0]
for i in nonBoundIs:
                alphaPairsChanged += innerLK(i,oS)
print "non-bound, iter: %d i:%d, pairs changed %d" % (iter,i,alphaPairsChanged)
iter += 1
if entireSet: entireSet = False #toggle entire set loop
elif (alphaPairsChanged == 0): entireSet = True
print "iteration number: %d" % iter
return oS.b,oS.alphas
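# Illustrative usage of the non-kernel SMO above (not part of the original
# file); it assumes the book's loadDataSet helper and a data file exist:
#   dataArr, labelArr = loadDataSet('testSet.txt')
#   b, alphas = smoPK(dataArr, labelArr, 0.6, 0.001, 40)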
| mit | -3,275,144,992,903,995,000 | 47.945245 | 497 | 0.58217 | false | 2.913536 | true | false | false |
danzek/nlhbi-malware-extractor | fileurl.py | 1 | 4765 | #!/usr/bin/env python
# ----------------------------------------------------------------------
# This file is part of PEframe.
#
# PEframe is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# PEframe is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PEframe. If not, see <http://www.gnu.org/licenses/>.
# ----------------------------------------------------------------------
# Dan O'Day, 2014, modifications made for use in project to extract NL host-based indicators from malware
import re
import string
# Extract Strings
printable = set(string.printable)
def get_process(stream):
found_str = ""
while True:
data = stream.read(1024*4)
if not data:
break
for char in data:
if char in printable:
found_str += char
elif len(found_str) >= 4:
yield found_str
found_str = ""
else:
found_str = ""
def get_file_names_and_urls(filename):
PEtoStr = open(filename, 'rb')
array = [] # word raw
arrayURL = [] # url
arrayFILE = [] # file raw
arrayFileNames = [] # description, filename
for found_str in get_process(PEtoStr):
fname = re.findall("(.+\.([a-z]{2,3}$))+", found_str, re.IGNORECASE | re.MULTILINE)
if fname:
word = fname[0][0]
array.append(word)
for elem in sorted(set(array)): # Dan O'Day: the regex below is poor, so I do it again (differently) with strings
match = re.search("^http:|^ftp:|^sftp:|^ssh:|^www|.com$|.org$|.it$|.co.uk$|.ru$|.jp$|.net$|.ly$|.gl$|^([0-9]{1,3})(?:\.[0-9]{1,3}){3}$", elem, re.IGNORECASE)
if match and len(elem) > 6: # len(c.it) = 4 <- false positive
arrayURL.append(elem)
else:
arrayFILE.append(elem)
for elem in sorted(set(arrayFILE)):
        # Map extension -> description. (The original mapped description ->
        # extension, so repeated keys such as "Video" or "Image" silently
        # overwrote one another and most extensions were never checked.)
        file_type = {
            ".3gp": "Video",
            ".7z": "Compressed",
            ".asf": "Video",
            ".asp": "Web Page",
            ".aspx": "Web Page",
            ".asx": "Video",
            ".avi": "Video",
            ".bak": "Backup",
            ".bin": "Binary",
            ".bmp": "Image",
            ".cab": "Cabinet",
            ".dat": "Data",
            ".db": "Database",
            ".doc": "Word",
            ".docx": "Word",
            ".dll": "Library",
            ".dwg": "Autocad",
            ".exe": "Executable",
            ".eml": "Email",
            ".flv": "Video",
            ".ftp": "FTP Config",
            ".gif": "Image",
            ".gz": "Compressed",
            ".htm": "Web Page",
            ".html": "Web Page",
            ".iso": "Disc Image",
            ".log": "Log",
            ".jar": "Archive Java",
            ".jpg": "Image",
            ".jpeg": "Image",
            ".mp3": "Audio",
            ".mp4": "Video",
            ".mpg": "Video",
            ".mpeg": "Video",
            ".mov": "Video",
            ".msi": "Installer",
            ".oca": "Object",
            ".ocx": "Object",
            ".olb": "Autogen",
            ".old": "Backup",
            ".reg": "Registry",
            ".pdf": "Portable",
            ".php": "Web Page",
            ".png": "Image",
            ".pps": "Slideshow",
            ".ppt": "Presentation",
            ".psd": "Image",
            ".pst": "Email",
            ".pub": "Document",
            ".rar": "Compressed",
            ".rtf": "Text",
            ".sql": "Query DB",
            ".swf": "Adobe Flash",
            ".tif": "Image",
            ".tmp": "Temporary",
            ".txt": "Text",
            ".tgz": "Compressed",
            ".wav": "Audio",
            ".wma": "Audio",
            ".wmv": "Video",
            ".xls": "Excel",
            ".xlsx": "Excel",
            ".zip": "Compressed"
        }
        for ext, descr in file_type.items():
            # re.escape keeps the dot literal; files whose extensions are not
            # listed above are intentionally skipped.
            if re.search(re.escape(ext) + "$", elem, re.IGNORECASE):
                arrayFileNames.append([descr, elem])
filelist = []
if arrayFileNames:
"""
arrayFileNames ->
[ ['Web Page', 'gate.php'],
['Binary', 'core_x86.bin'],
['Binary', 'dropper_x86.bin'],
['Library', 'IPHLPAPI.DLL'],
['Library', 'WININET.dll'] ]
"""
# Get unique tuple from list
uniq_descr = []
[item for item in arrayFileNames if item[0] not in uniq_descr and not uniq_descr.append(item[0])]
# uniq_descr -> ['Web Page', 'Library', 'Binary']
found = {}
match = []
for descr in uniq_descr:
for elem in arrayFileNames:
if elem[0] == descr:
match.append(elem[1])
found[descr] = match
match = []
filelist = found.items()
"""
'print found' -> Dictionary {}
{ 'Binary': ['core_x86.bin', 'dropper_x86.bin'],
'Web Page': ['gate.php'],
'Library': ['IPHLPAPI.DLL', 'WININET.dll'] }
'print found.items()' -> List []
[ ('Binary', ['core_x86.bin', 'dropper_x86.bin']),
('Web Page', ['gate.php']),
('Library', ['IPHLPAPI.DLL', 'WININET.dll']) ]
"""
return filelist, arrayURL
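# Illustrative usage (not part of the original script; names are hypothetical):
#   filelist, urls = get_file_names_and_urls('sample.bin')
#   # filelist -> [('Binary', ['core_x86.bin', ...]), ...]
#   # urls     -> ['www.example.com', ...]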
| gpl-2.0 | 2,750,757,777,264,042,000 | 25.325967 | 159 | 0.556558 | false | 2.944994 | false | false | false |
commoncrawl/cc-crawl-statistics | plot/mimetype_detected.py | 1 | 1073 | import sys
from plot.mimetype import MimeTypeStats
from crawlstats import CST, MonthlyCrawl
class MimeTypeDetectedStats(MimeTypeStats):
def __init__(self):
super().__init__()
self.MAX_TYPE_VALUES = MimeTypeStats.MAX_MIME_TYPES
def norm_value(self, mimetype):
return mimetype
def add(self, key, val):
self.add_check_type(key, val, CST.mimetype_detected)
if __name__ == '__main__':
plot_crawls = sys.argv[1:]
plot_name = 'mimetypes_detected'
column_header = 'mimetype_detected'
if len(plot_crawls) == 0:
plot_crawls = MonthlyCrawl.get_latest(3)
print(plot_crawls)
else:
plot_name += '-' + '-'.join(plot_crawls)
plot = MimeTypeDetectedStats()
plot.read_data(sys.stdin)
plot.transform_data(MimeTypeStats.MAX_MIME_TYPES,
MimeTypeStats.MIN_AVERAGE_COUNT,
None)
plot.save_data_percentage(plot_name, dir_name='plots', type_name='mimetype_detected')
plot.plot(plot_crawls, plot_name, column_header, ['tablesearcher'])
| apache-2.0 | -2,455,439,897,108,466,700 | 29.657143 | 89 | 0.634669 | false | 3.291411 | false | false | false |
shixiaobo8/yjy_django_omsa | Myapp/learn/admin.py | 1 | 3110 | #! /usr/bin/env python
# -*- coding:utf-8 -*-
from django.contrib import admin
from .models import Nav, MyUser
from .forms import NewForm, LoginForm
from django import forms
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.forms import ReadOnlyPasswordHashField
from learn.models import MyUser
class UserCreationForm(forms.ModelForm):
"""A form for creating new users. Includes all the required
fields, plus a repeated password."""
password1 = forms.CharField(label='Password', widget=forms.PasswordInput)
password2 = forms.CharField(label='Password confirmation', widget=forms.PasswordInput)
class Meta:
model = MyUser
fields = ('email', 'phone')
def clean_password2(self):
# Check that the two password entries match
password1 = self.cleaned_data.get("password1")
password2 = self.cleaned_data.get("password2")
if password1 and password2 and password1 != password2:
raise forms.ValidationError("Passwords don't match")
return password2
def save(self, commit=True):
# Save the provided password in hashed format
user = super(UserCreationForm, self).save(commit=False)
user.set_password(self.cleaned_data["password1"])
if commit:
user.save()
return user
class UserChangeForm(forms.ModelForm):
"""A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field.
"""
password = ReadOnlyPasswordHashField()
class Meta:
model = MyUser
fields = ('email', 'username','password', 'phone', 'is_active', 'is_admin')
def clean_password(self):
# Regardless of what the user provides, return the initial value.
# This is done here, rather than on the field, because the
# field does not have access to the initial value
return self.initial["password"]
class MyUserAdmin(UserAdmin):
# The forms to add and change user instances
form = UserChangeForm
add_form = UserCreationForm
# The fields to be used in displaying the User model.
# These override the definitions on the base UserAdmin
# that reference specific fields on auth.User.
list_display = ('email', 'phone', 'username','is_admin')
list_filter = ('is_admin',)
fieldsets = (
(None, {'fields': ('email', 'password','username',)}),
('Personal info', {'fields': ('phone','username',)}),
('Permissions', {'fields': ('is_admin',)}),
)
# add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
# overrides get_fieldsets to use this attribute when creating a user.
add_fieldsets = (
(None, {
'classes': ('wide',),
'fields': ('email', 'username','phone', 'password1', 'password2')}
),
)
search_fields = ('email',)
ordering = ('email',)
filter_horizontal = ()
admin.site.register(MyUser, MyUserAdmin)
admin.site.unregister(Group)
admin.site.register(Nav)
| apache-2.0 | 4,027,522,227,069,869,600 | 34.747126 | 90 | 0.663987 | false | 4.180108 | false | false | false |
rwhite226/Graphalyzer | graphalyzer-server/src/websocketserver/handlers/GetGraphChunkHandler.py | 1 | 5345 | from itertools import chain
from websocketserver.handlers.ErrorHandler import *
from py2neo import Graph
import math
import logging
import concurrent.futures
class GetGraphChunkHandler(HandleInterface):
"""Class to handle sending whole graph to client."""
_payload = ""
_request = ""
def __init__(self, request, payload):
self._payload = payload
self._request = request
"""Sends data to frontend. Specificly built for chuncking"""
def __send(self, socket: WebSocketServerProtocol, nodes, edges, cur, total):
jsonmsg = {}
graph = {}
        #Check if nodes or edges are empty and correct the JSON if they are not.
if(nodes != ""):
nodes = nodes[:-1]
nodes += "]"
graph["nodes"] = json.loads(nodes, strict=False)
if(edges != ""):
edges = edges[:-1]
edges += "]"
graph["edges"] = json.loads(edges, strict=False)
jsonmsg["message_id"] = "".join(
random.choice(string.ascii_uppercase + string.digits) for _ in
range(0, 15))
jsonmsg["sender_id"] = "server"
jsonmsg["time"] = int(time.time())
jsonmsg["request"] = "response"
jsonmsg["status"] = "success"
jsonmsg["error"] = ""
jsonmsg["payload"] = graph
message = {}
message["client_request_type"] = self._request
message["currchunk"] = cur
message["totalchunk"] = total
jsonmsg["message"] = message
socket.sendMessage(json.dumps(jsonmsg,
separators=(',', ':')).encode('utf8'))
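    # Illustrative shape of one chunk message produced by __send above (all
    # field values here are hypothetical):
    #   {"message_id": "K3J6A...", "sender_id": "server", "time": 1449000000,
    #    "request": "response", "status": "success", "error": "",
    #    "payload": {"nodes": [...], "edges": [...]},
    #    "message": {"client_request_type": "...", "currchunk": "1",
    #                "totalchunk": "12"}}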
def __getGraphCount(self, socket, chunksize, graphid):
neo4j = Graph()
query = "START n=node(*) MATCH n WHERE n.graphid='" \
+ graphid + "' RETURN COUNT(n)"
for record in neo4j.cypher.execute(query):
nodenum = record[0]
query = "START n=node(*) MATCH (n {graphid:'" + graphid \
+ "'})-[r{graphid:'" + graphid + "'}]->(m{graphid:'" \
+ graphid + "'}) RETURN COUNT(r)"
for record in neo4j.cypher.execute(query):
edgenum = record[0]
total = int(nodenum) + int(edgenum)
return int(math.ceil(total/chunksize))
def __queryNeo4J(self, query):
neo4j = Graph()
return neo4j.cypher.stream(query)
def handle(self, socket: WebSocketServerProtocol):
graphid = self._payload
chunksize = 100
if graphid == "":
ErrorHandler("No graph specified", "").handle(socket)
return
# noinspection PyBroadException
try:
with concurrent.futures.ThreadPoolExecutor(max_workers=3) as executor:
# Get total number of nodes and edges in the graph.
numofchunks = executor.submit(self.__getGraphCount, socket, chunksize, graphid)
query = "START n=node(*) MATCH n WHERE n.graphid='" \
+ graphid + "' RETURN n"
nodequery = executor.submit(self.__queryNeo4J, query)
query = "START n=node(*) MATCH (n {graphid:'" + graphid \
+ "'})-[r{graphid:'" + graphid + "'}]->(m{graphid:'" \
+ graphid + "'}) RETURN r"
edgequery = executor.submit(self.__queryNeo4J, query)
nodes = "["
edges = "["
currchunk = 1
counter = 0
for record in nodequery.result():
nodes += "{"
for key in record[0].properties:
if key == "graphid":
continue
nodes += "\"" + key + "\":\"" + record[0].properties[key] \
+ "\","
nodes = nodes[:-1]
nodes += "},"
counter += 1
if(counter >= chunksize):
self.__send(socket, nodes, "", str(currchunk), str(numofchunks.result()))
currchunk += 1
nodes = "["
counter = 0
if(nodes == "["):
nodes = ""
for record in edgequery.result():
edges += "{"
for key in record[0].properties:
if key == "graphid":
continue
edges += "\"" + key + "\":\"" + record[0].properties[key] \
+ "\","
edges += "\"from\":\"" + \
record[0].start_node.properties["id"] + \
"\",\"to\":\"" + record[0].end_node.properties["id"] \
+ "\"},"
counter += 1
if(counter >= chunksize):
self.__send(socket, nodes, edges, str(currchunk), str(numofchunks.result()))
currchunk += 1
edges = "["
nodes = ""
counter = 0
# Send final chunk
self.__send(socket, nodes, edges, str(currchunk), str(numofchunks.result()))
except Exception as e:
logging.error(e)
logging.error("Unable to connect to neo4j")
ErrorHandler(self._request, "Unable to connect to neo4j", "").handle(socket)
return
| apache-2.0 | 2,174,785,616,006,924,500 | 38.014599 | 96 | 0.481946 | false | 4.338474 | false | false | false |
pabigot/coapy | coapy/util.py | 1 | 12842 | # -*- coding: utf-8 -*-
# Copyright 2013, Peter A. Bigot
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain a
# copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility classes and functions used within CoAPy.
:copyright: Copyright 2013, Peter A. Bigot
:license: Apache-2.0
"""
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
from __future__ import division
import logging
_log = logging.getLogger(__name__)
import sys
import coapy
import unicodedata
import functools
import bisect
import time
import datetime
import calendar
import urllib
class ClassReadOnly (object):
"""A marker to indicate an attribute of a class should be
read-only within the class as well as instances of the class.
Effective only if the metaclass is (or is derived from)
:class:`ReadOnlyMeta`.
Example::
class C(Object):
__metaclass__ = ReadOnlyMeta
Zero = ClassReadOnly(0)
instance = C()
assert 0 == C.Zero
assert 0 == instance.Zero
# This will raise an exception:
C.Zero = 4
# As will this:
instance.Zero = 4
"""
def __init__(self, value):
self.value = value
class ReadOnlyMeta (type):
"""Metaclass for supporting read-only values in classes.
When used as a metaclass, this inserts an intermediary type that
prevents assignment to certain attributes at both the instance and
class levels. Any attribute in the class that is initialized in
the class body with a value of type :class:`ClassReadOnly` is made
read-only.
See example at :class:`ClassReadOnly`.
"""
def __new__(cls, name, bases, namespace):
# Provide a unique type that can hold the read-only class
# values.
class ReadOnly (cls):
pass
nsdup = namespace.copy()
for (n, v) in namespace.iteritems():
if isinstance(v, ClassReadOnly):
mp = property(lambda self_or_cls, _v=v.value: _v)
nsdup[n] = mp
setattr(ReadOnly, n, mp)
return super(ReadOnlyMeta, cls).__new__(ReadOnly, name, bases, nsdup)
@functools.total_ordering
class TimeDueOrdinal(object):
"""Base class for elements that are sorted by time.
The intent is that information related to an activity that should
occur at or after a particular time be held in a subclass of
:class:`TimeDueOrdinal`. The priority queue of upcoming activity
is implemented using a sorted list, as instances of (subclasses
of) :class:`TimeDueOrdinal` are ordered by increasing value of
:attr:`time_due` using the features of :mod:`python:bisect`.
Insertion, removal, and repositioning of elements in the priority
queue may be accomplished using :meth:`queue_insert`,
:meth:`queue_remove`, and :meth:`queue_reposition`.
*time_due* as a keyword parameter initializes :attr:`time_due` and
is removed from *kw*. Any positional parameters and remaining
keyword parameters are passed to the next superclass.
"""
time_due = None
"""The time at which the subclass instance becomes relevant.
This is a value in the ordinal space defined by
:func:`coapy.clock`.
"""
def __init__(self, *args, **kw):
self.time_due = kw.pop('time_due', None)
super(TimeDueOrdinal, self).__init__(*args, **kw)
def __eq__(self, other):
return (self.time_due is not None) and (self.time_due == other.time_due)
# total_ordering doesn't handle eq/ne inference, so need both
def __ne__(self, other):
return self.time_due != other.time_due
def __lt__(self, other):
return self.time_due < other.time_due
def queue_reposition(self, queue):
"""Reposition this entry within *queue*.
*self* must already be in the queue; only its position changes
(if necessary).
"""
bisect.insort(queue, queue.pop(queue.index(self)))
def queue_insert(self, queue):
"""Insert this entry into *queue*."""
bisect.insort(queue, self)
def queue_remove(self, queue):
"""Remove this entry from *queue*."""
queue.remove(self)
@staticmethod
def queue_ready_prefix(queue, now=None):
"""Return the elements of *queue* that are due.
*queue* is a sorted list of :class:`TimeDueOrdinal` instances.
*now* is the timestamp, and defaults to :func:`coapy.clock`.
Elements are due when :attr:`time_due` <= *now*.
"""
if now is None:
now = coapy.clock()
ub = 0
while ub < len(queue) and (queue[ub].time_due <= now):
ub += 1
return list(queue[:ub])
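# Illustrative sketch (not part of the original module): a minimal priority
# queue built from TimeDueOrdinal instances.
#
#   queue = []
#   a = TimeDueOrdinal(time_due=5)
#   b = TimeDueOrdinal(time_due=2)
#   a.queue_insert(queue)
#   b.queue_insert(queue)
#   assert queue[0] is b          # earliest time_due sorts first
#   b.time_due = 9
#   b.queue_reposition(queue)     # b now sorts after a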
def to_net_unicode(text):
"""Convert text to Net-Unicode (:rfc:`5198`) data.
This normalizes *text* to ensure all characters are their own
canonical equivalent in the NFC form (section 3 of :rfc:`5198`).
The result is encoded in UTF-8 and returned as data.
The operation currently does not handle newline normalization
(section 2 item 2), since its use in CoAP is currently limited to
values of options with format :class:`coapy.option.format_string`
and diagnostic payloads.
"""
# At first blush, this is Net-Unicode.
return unicodedata.normalize('NFC', text).encode('utf-8')
def to_display_text(data):
"""Return *data* as human-readable text.
This is intended for diagnostic messages for values like tokens
and payloads that are sometimes text, and sometimes raw data. If
*data* is :class:`bytes` but all its characters are
:data:`printable<python:string.printable>` return it as text,
otherwise return it as hex-encoded data (wrapped in square
brackets to distinguish the encoding, e.g.: ``[01020304]`` for
``b'\\x01\\x02\\x03\\x04'``).
Non-bytes data is simply converted to Unicode and returned in that
format. (If *data* is already text, even if it's Unicode, we
assume it's displayable. If it isn't, select a better terminal
configuration.)
"""
if isinstance(data, bytes):
import string
need_binascii = True
if sys.version_info < (3, 0):
need_binascii = not all(_c in string.printable for _c in data)
else:
need_binascii = not all(chr(_c) in string.printable for _c in data)
if need_binascii:
import binascii
return '[{0}]'.format(binascii.hexlify(data).decode('utf-8'))
data = data.decode('utf-8')
return unicode(data)
def url_quote(text, safe='/'):
"""Perform URL percent encoding on *text*.
If *text* is Unicode, it is first converted to
:func:`Net-Unicode<to_net_unicode>`. *text* may also be data.
Unsafe characters are percent-escaped, and the result is returned
as text containing only ASCII characters.
*safe* is as in :func:`python:urllib.parse`.
Encapsulated because in Python 3 :func:`python:urllib.parse.quote`
works directly on Unicode strings, while in Python 2 the
corresponding :func:`python:urllib.quote` does not tolerate
Unicode characters and does not like *safe* to be a Unicode
string as it is since we use unicode_literals).
"""
if isinstance(text, unicode):
text = to_net_unicode(text)
if sys.version_info < (3, 0):
# Python 2 quote does not like having a Unicode safe string
safe = str(safe)
quoted = urllib.quote(text, safe)
return quoted
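# Illustrative behaviour (not part of the original module): non-ASCII text is
# NFC-normalized, UTF-8 encoded, then percent-escaped:
#   url_quote('/sensors/t\u00e9l\u00e9') == '/sensors/t%C3%A9l%C3%A9'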
def url_unquote(quoted):
"""Perform URL percent decoding on *quoted*.
Encapsulated because in Python 3
:func:`python:urllib.parse.unquote` works directly on Unicode
strings, while in Python 2 the corresponding
:func:`python:urllib.unquote` does not tolerate Unicode
characters.
"""
if sys.version_info < (3, 0):
data = bytes(quoted)
encoded = urllib.unquote(data)
text = encoded.decode('utf-8')
else:
text = urllib.unquote(quoted)
return text
def format_time(tval=None, format='iso'):
"""Convert a date/time value to a standard representation and
validity duration.
The return value ``(rep, vsec)`` provides the representation of
*tval* using style *format*. Representation *rep* is expected to
be unchanged for *vsec* seconds after the represented time.
*tval* is a :class:`python:datetime.datetime`,
:class:`python:time.struct_time`, or POSIX ordinal as from
:func:`python:time.time`. It is interpreted as being a universal
time (i.e., conversions do not account for time zone).
*format* is one of the following:
====== =================== ===== =========================================
Format rep vsec Description
====== =================== ===== =========================================
iso 2013-10-11T10:46:23 0 ISO 8601 combined date and time
ord 2013-284 47617 ISO 8601 ordinal date
pgd 735152 47617 Proleptic Gregorian Ordinal Day
jd 2456576.94888 0 Julian Date
mjd 56576.4488773 0 Modified Julian Date
tjd 16576.4488773 0 Truncated Julian Date
jdn 2456576 4417 Julian Day Number
doy 284 47617 Day-of-year
dow 5 47617 Day-of-week (ISO: Mon=1 Sun=7)
mod 646 37 Minute-of-day
posix 1381488383 0 Seconds since POSIX epoch 1970-01-01T00:00:00
====== =================== ===== =========================================
"""
if tval is None:
tval = datetime.datetime.utcnow()
if isinstance(tval, datetime.datetime):
dt = tval
elif isinstance(tval, (time.struct_time, tuple)):
dt = datetime.datetime(*tval[:7])
elif isinstance(tval, (float, int, long)):
dt = datetime.datetime.utcfromtimestamp(tval)
else:
raise ValueError(tval)
tt = dt.timetuple()
pt = calendar.timegm(tt)
jd = 2440587.5 + (pt / 86400.0)
exp = 0
sod = dt.second + 60 * (dt.minute + 60 * dt.hour)
# 86400 seconds per day, 43200 seconds per half-day
if 'iso' == format: # ISO8601 combined date and time
rep = dt.isoformat()
elif 'ord' == format: # ISO 8601 ordinal date
rep = '{0:d}-{1:03d}'.format(dt.year, dt.timetuple().tm_yday)
exp = 86400 - sod
elif 'pgd' == format: # Proleptic Gregorian Day
rep = dt.toordinal()
exp = 86400 - sod
elif 'jd' == format: # Julian Date
rep = jd
elif 'mjd' == format: # Modified Julian Date
rep = jd - 2400000.5
elif 'tjd' == format: # Truncated Julian Date
rep = jd - 2440000.5
elif 'jdn' == format: # Julian Day Number
rep = int(jd)
exp = 43200 - sod
if 0 > exp:
exp += 86400
elif 'doy' == format: # Day of Year
rep = dt.timetuple().tm_yday
exp = 86400 - sod
elif 'dow' == format: # Day of Week (ISO M=1 Su=7)
rep = dt.isoweekday()
exp = 86400 - sod
elif 'mod' == format: # Minute of day
rep = dt.minute + 60 * (dt.hour)
exp = 60 - dt.second
elif 'posix' == format: # Seconds since POSIX epoch 1970-01-01T00:00:00
rep = calendar.timegm(tt)
else:
raise ValueError(format)
# Don't add CJD, which is local civil time not UT
return (rep, exp)
if '__main__' == __name__:
styles = (
('iso', 'ISO 8601 combined date and time'),
('ord', 'ISO 8601 ordinal date'),
('pgd', 'Proleptic Gregorian Ordinal Day'),
('jd', 'Julian Date'),
('mjd', 'Modified Julian Date'),
('tjd', 'Truncated Julian Date'),
('jdn', 'Julian Day Number'),
('doy', 'Day-of-year'),
('dow', 'Day-of-week (ISO: Mon=1 Sun=7)'),
('mod', 'Minute-of-day'),
('posix', 'Seconds since POSIX epoch 1970-01-01T00:00:00'),
)
tt = time.gmtime()
ts = calendar.timegm(tt)
dt = datetime.datetime.utcfromtimestamp(ts)
print('tt: {tt}\nts: {ts}\ndt: {dt}'.format(tt=tt, ts=ts, dt=dt))
for (s, d) in styles:
(rep, exp) = format_time(dt, s)
print(' {0:6s} {1!s:20s} {2:5d} {3}'.format(s, rep, exp, d))
| apache-2.0 | 248,601,265,982,407,840 | 34.573407 | 85 | 0.611276 | false | 3.773729 | false | false | false |
flavio-casacurta/Nat2Py | Adabas/demo/ticker.py | 1 | 4117 | """ ticker.py -- Store Timestamp into Ticker File every 60 seconds
The Ticker file has the following field
01,TI,20,A,DE,NU
Each minute will have a separate record with ISN=minute of day.
At most there will be 1440 ISNs.
    If the interval is i seconds rather than 60, there are 86400/i records
    per day (e.g. 2880 records for a 30-second interval).
Usage: python [-O] ticker.py --dbid <dbid> --fnr <fnr> --repeat <num>
--interval <sec>
      -O run optimized, debug code not generated
-d <dbid> dbid
-f <fnr> file number of ticker file
-r <num> specifies the number of ticks to write
otherwise runs forever
-i <sec> interval in seconds (default = 60)
Options:
-h, --help display this help
Example (short parameter form):
python ticker.py -d 241 -f 12 -r 5
$Date: 2008-08-29 18:20:18 +0200 (Fri, 29 Aug 2008) $
$Rev: 75 $
"""
# Copyright 2004-2008 Software AG
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import time
import string
import adabas
import adabas.dump
from adabas.api import *
from adabas.datamap import *
import getopt
def usage():
print __doc__
FNR=0
DBID=0
COUNT=1987543210 # very high number
SLEEPINT=60 # sleep interval in seconds
try:
opts, args = getopt.getopt(sys.argv[1:],
'hd:f:i:r:',
['help','dbid=','fnr=','interval=','repeat='])
except getopt.GetoptError:
usage()
sys.exit(2)
for opt, arg in opts:
if opt in ('-h', '--help'):
usage()
sys.exit()
elif opt in ('-d', '--dbid'):
DBID=int(arg)
elif opt in ('-f', '--fnr'):
FNR=int(arg)
elif opt in ('-i', '--interval'):
SLEEPINT=int(arg)
elif opt in ('-r', '--repeat'):
COUNT=int(arg)
if FNR==0 or DBID==0 or COUNT <0:
usage()
sys.exit(2)
lastTic = -1
lastHour = -1
lastMin = -1
#fields = '01,TI,20,A,DE,NU %'
c1=Adabas(fbl=64,rbl=128)
c1.dbid=DBID
c1.cb.fnr=FNR
count=COUNT
try:
c1.open(mode=UPD)
c1.cb.cid=12
c1.fb.value='TI,20,A.' # set format
while count>0:
t=time.localtime()
if lastHour != t[3]:
lastHour = t[3]
print time.strftime('\n %Y-%m-%d %H:', t),
lastMin = -1
x = t[5] +60*t[4] + 3600*t[3] # sec + 60*minute + 3600*hour
currTic = int(x/SLEEPINT)
if lastTic < currTic:
lastTic = currTic
newRecord=0
try:
c1.get(isn=currTic+1, hold=1)
except DatabaseError, (line, apa):
if apa.cb.rsp == 113:
newRecord=1
else:
raise
c1.rb[0:20]=time.strftime(' %Y-%m-%d %H:%M:%S',t)
if newRecord == 0:
c1.update()
else:
c1.store(isn=currTic+1)
c1.et()
if lastMin != t[4]:
lastMin = t[4]
print lastMin, # print minute
else:
print '.', # print ticks within minute
count-=1 # count down
time.sleep(SLEEPINT/2.) # make sure we don't miss a minute
# print time.strftime('%Y-%m-%d %H:%M:%S',t),lastTic, currTic
c1.close()
print '\nTerminated after %d ticks' % COUNT
except DatabaseError, (line, apa):
print line
dump.dump(apa.acb, header='Control Block')
raise
except KeyboardInterrupt:
# clean up
c1.close()
print '\nNow terminating due to KeyboardInterrupt after %d ticks.' % (COUNT-count,)
#
| mit | 5,492,103,608,900,403,000 | 25.56129 | 87 | 0.560117 | false | 3.341721 | false | false | false |
MapofLife/MOL | earthengine/google-api-python-client/samples/searchforshopping/crowding.py | 1 | 1667 | #!/usr/bin/python2.4
# -*- coding: utf-8 -*-
#
# Copyright 2010 Google Inc. All Rights Reserved.
"""Query with grouping against the shopping search API"""
import pprint
from apiclient.discovery import build
SHOPPING_API_VERSION = 'v1'
DEVELOPER_KEY = 'AIzaSyACZJW4JwcWwz5taR2gjIMNQrtgDLfILPc'
def main():
"""Get and print a feed of public products in the United States mathing a
text search query for 'digital camera' and grouped by the 8 top brands.
The list method of the resource should be called with the "crowdBy"
  parameter. Each rule should be designated as <attribute>:<occurrence>,
where <occurrence> is the number of that <attribute> that will be used. For
example, to crowd by the 5 top brands, the parameter would be "brand:5". The
possible rules for crowding are currently:
account_id:<occurrence> (eg account_id:5)
brand:<occurrence> (eg brand:5)
condition:<occurrence> (eg condition:3)
gtin:<occurrence> (eg gtin:10)
price:<occurrence> (eg price:10)
Multiple crowding rules should be specified by separating them with a comma,
for example to crowd by the top 5 brands and then condition of those items,
the parameter should be crowdBy="brand:5,condition:3"
"""
client = build('shopping', SHOPPING_API_VERSION, developerKey=DEVELOPER_KEY)
resource = client.products()
# The crowdBy parameter to the list method causes the results to be grouped,
# in this case by the top 8 brands.
request = resource.list(source='public', country='US', q=u'digital camera',
crowdBy='brand:8')
response = request.execute()
pprint.pprint(response)
if __name__ == '__main__':
main()
| bsd-3-clause | 7,437,159,467,282,523,000 | 33.729167 | 78 | 0.718056 | false | 3.402041 | false | false | false |
OpenCode/l10n-italy | __unported__/l10n_it_base_crm/crm/crm.py | 15 | 2181 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2013 Agile Business Group sagl (<http://www.agilebg.com>)
# Author: Nicola Malcontenti <nicola.malcontenti@agilebg.com>
# Copyright (C) 2013 Associazione OpenERP Italia
# (<http://www.openerp-italia.org>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
from openerp.osv import fields
class crm_lead(orm.Model):
_inherit = 'crm.lead'
def on_change_city(self, cr, uid, ids, city):
return self.pool.get('res.partner').on_change_city(cr, uid, ids, city)
def _lead_create_contact(self, cr, uid, lead, name, is_company,
parent_id=False, context=None):
if lead:
partner_id = super(crm_lead, self)._lead_create_contact(
cr, uid, lead, name, is_company,
parent_id=parent_id, context=context
)
if partner_id:
partner = self.pool.get('res.partner')
vals = {
'province': lead.province.id,
'region': lead.region.id,
}
partner.write(cr, uid, partner_id, vals, context=context)
return partner_id
_columns = {
'province': fields.many2one('res.province', string='Provincia'),
'region': fields.many2one('res.region', string='Region'),
}
| agpl-3.0 | 3,865,640,962,493,148,000 | 40.150943 | 78 | 0.574049 | false | 4.001835 | false | false | false |
davidljung/rl-glue-ext | projects/codecs/Python/src/tests/test_speed_environment.py | 8 | 1962 | #
# Copyright (C) 2008, Brian Tanner
#
#http://rl-glue-ext.googlecode.com/
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# $Revision$
# $Date$
# $Author$
# $HeadURL$
import random
import sys
from rlglue.environment.Environment import Environment
from rlglue.environment import EnvironmentLoader as EnvironmentLoader
from rlglue.types import Observation
from rlglue.types import Action
from rlglue.types import Reward_observation_terminal
class test_speed_environment(Environment):
stepCount=0
whichEpisode=0
o=Observation()
def env_init(self):
return ""
def env_start(self):
self.stepCount=0
self.whichEpisode=self.whichEpisode+1
return Observation()
def env_step(self,action):
self.stepCount=self.stepCount+1
if self.whichEpisode % 2 == 0:
self.o.intArray=range(0,50000)
#cheating, might break something
self.o.doubleArray=range(0,50000)
terminal=0
if self.stepCount==200:
terminal=1
ro=Reward_observation_terminal()
ro.r=1.0
ro.o=self.o
ro.terminal=terminal
return ro
self.o.intArray=range(0,5)
#cheating, might break something
self.o.doubleArray=range(0,5)
terminal=0
if self.stepCount==5000:
terminal=1
ro=Reward_observation_terminal()
ro.r=1.0
ro.o=self.o
ro.terminal=terminal
return ro
def env_cleanup(self):
pass
def env_message(self,inMessage):
return None;
if __name__=="__main__":
EnvironmentLoader.loadEnvironment(test_speed_environment())
| apache-2.0 | 4,876,467,533,458,294,000 | 23.525 | 74 | 0.733435 | false | 3.1392 | false | false | false |
eman/iotrelay-influxdb | setup.py | 1 | 1249 | #!/usr/bin/env python
import os
from setuptools import setup
project_dir = os.path.abspath(os.path.dirname(__file__))
long_descriptions = []
for rst in ('README.rst', 'LICENSE.rst'):
with open(os.path.join(project_dir, rst), 'r') as f:
long_descriptions.append(f.read())
setup(name='iotrelay-influxdb',
version='1.7.1',
description='InfluxDB handler module for iotrelay',
long_description='\n\n'.join(long_descriptions),
author='Emmanuel Levijarvi',
author_email='emansl@gmail.com',
url='https://github.com/eman/iotrelay-influxdb',
license='BSD',
py_modules=['iotrelay_influxdb'],
test_suite='tests',
install_requires=['iotrelay>=1.2', 'influxdb>=3.0.0'],
tests_require=['iotrelay>=1.2', 'influxdb>=3.0.0'],
classifiers=[
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Topic :: Home Automation',
'Topic :: Utilities',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2.7',
],
keywords='influxdb IoT',
entry_points={
'iotrelay': ['handler=iotrelay_influxdb:Handler']
})
| bsd-2-clause | -43,143,843,708,971,230 | 33.694444 | 60 | 0.603683 | false | 3.459834 | false | false | false |
jaety/image-river | indexer/inspector.py | 1 | 2144 | import subprocess
import logutil as log
import itertools
import re
import csv
def group(iterable, n, fillvalue=None):
args = [iter(iterable)] * n
return itertools.izip_longest(fillvalue=fillvalue, *args)
@log.timed()
def list_images(subdir):
cmd = [
"/bin/sh", "-c",
"mdfind -onlyin %s 'kMDItemContentType == public.jpeg'" % subdir
]
result = subprocess.check_output(cmd).split("\n")
log.info("found %d images in %s" % (len(result), subdir))
return result
@log.timed(show_args = False)
def image_info(image_paths):
keys = [
"kMDItemContentCreationDate", "kMDItemDisplayName", "kMDItemPixelHeight", "kMDItemPixelWidth"
]
images = " ".join(['"%s"' % s for s in image_paths if s is not None and len(s) > 0])
cmd = [
"/bin/sh", "-c",
"mdls %s %s" % (" ".join(["-name %s" % s for s in keys]), images)
]
result = [s for s in subprocess.check_output(cmd).split("\n") if len(s) > 0]
matcher = re.compile(r"^(.+)\s+=\s+(.+)$")
out = []
for (fn, items) in itertools.izip(image_paths, group(result, len(keys))):
cleaned_items = [matcher.match(item).group(2) for item in items]
out.append([fn] + cleaned_items)
return {
"keys": ["filename"] + keys,
"values": out
}
# data: { keys: Seq[String], values: Seq[Seq[Any]]}
def write_csv(fn, data, write_header=True, append=False):
flags = 'ab' if append else 'wb'
with open(fn, flags) as csvfile:
writer = csv.writer(csvfile)
if write_header:
writer.writerow(data["keys"])
for line in data["values"]:
if line is not None:
writer.writerow(line)
if __name__ == "__main__":
lines = list_images("/Users/jaety/Pictures")
fn = '/Users/jaety/projects/image-river/indexer/out/image_info.csv'
info = image_info(lines[:1000])
write_csv(fn, info)
for grp in group(lines[1000:], 1000):
info = image_info(grp)
write_csv(fn, info, False, True)
# with open(fn) as f:
# for lin in f:
# print lin
# for line in lines[:5]:
# print line
| apache-2.0 | -1,579,843,456,569,222,400 | 29.628571 | 101 | 0.583022 | false | 3.190476 | false | false | false |
williamd1k0/shiori | shiori/plugins/cafeteria/products.py | 1 | 3774 |
"""
MIT License
Copyright (c) 2016 William Tumeo
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
class Product(object):
def __init__(self, name, full, terms, items,
quantity=0, unit=['unit', 'units'],
buy='Vou comprar mais {name}',
done='Aqui está o seu {name}, {user}'
):
self.name = name
self.full = full
self.terms = terms
self.quantity = quantity
self.items = items
self.unit = unit
self.buy = buy
self.done = done
def __repr__(self):
return self.name
def __str__(self):
return self.name
def consume_one(self):
pass
def consume_all(self):
pass
def is_empty(self):
return self.quantity <= 0
def is_full(self):
return self.quantity >= self.full
def make(self):
self.quantity = self.full
class ProductManager(object):
def __init__(self):
self.products = {}
def add_product(self, product: Product):
if not product.name in self.products:
self.products[product.name] = product
def check_order(self, name, content):
content = content.lower()
for context in self.products[name].terms:
for item in self.products[name].items:
if '{unit}' in context:
for unit in self.products[name].unit:
if context.format(unit=unit, item=item) in content:
return True
elif context.format(item=item) in content:
return True
return False
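# Illustrative check (not part of the original module), assuming a manager with
# the Coffee product from __main__ below registered under the name "cafe":
#   mg.check_order("cafe", "Quero um cafe, por favor")  # -> True
#   mg.check_order("cafe", "bom dia")                   # -> False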
class Drink(Product):
def __init__(self, name, full, items):
terms = [
'quero {item}',
'quero um {item}',
'queria {item}',
'queria um {item}',
'preciso de {item}',
'preciso de um {item}',
'gimme {item}',
'gimme a {item}',
'want a {item}',
'wanna a {item}',
'tomar {item}',
'tomar um {item}',
'uma {unit} de {item}'
]
units = ['xícara', 'xícaras', 'xicara', 'xicaras']
buy = 'Vou preparar mais {name}, {user}'
done = 'Aqui está seu {name}, {user}'
super().__init__(name, full, terms, items, quantity=0, unit=units, buy=buy, done=done)
class Coffee(Drink):
def consume_one(self):
self.quantity -= 0.2
if __name__ == '__main__':
coffee = Coffee("cafe", 1.5, ["cafe", "café", "coffee"])
coffee_p = Coffee("cafe preto", 1.5, ["cafe preto", "café preto", "black coffee"])
coffee_l = Coffee("cafe com leite", 1.5, ["cafe com leite", "café com leite", "milky coffee"])
mg = ProductManager()
mg.add_product(coffee)
| mit | -4,556,619,460,703,059,000 | 26.698529 | 98 | 0.594107 | false | 3.871531 | false | false | false |
makism/dyfunconn | dyconnmap/chronnectomics/dwell_time.py | 1 | 1964 | # -*- coding: utf-8 -*-
""" Dwell Time
Dwell time measures the time (when used in the context of functional connectivity
microstates) for which a state remains active across consecutive temporal segments (Dimitriadis2019_).
|
.. [Dimitriadis2019] Dimitriadis, S. I., López, M. E., Maestu, F., & Pereda, E. (2019). Modeling the Switching behavior of Functional Connectivity Microstates (FCμstates) as a Novel Biomarker for Mild Cognitive Impairment. Frontiers in Neuroscience, 13.
"""
# Author: Avraam Marimpis <avraam.marimpis@gmail.com>
import numpy as np
def dwell_time(x):
""" Dwell Time
Compute the dwell time for the given symbolic, 1d time series.
Parameters
----------
x : array-like, shape(N)
Input symbolic time series.
Returns
-------
dwell : dictionary
KVP, where K=symbol id and V=array of dwell time.
mean : dictionary
KVP, where K=symbol id and V=mean dwell time.
std : dictionary
KVP, where K=symbol id and V=std dwell time.
"""
data = x
symbols = np.unique(data)
dwell = {}
dwell_mean = {}
dwell_std = {}
for symbol in symbols:
r = np.where(data == symbol)[0]
r_diff = np.diff(r)
r_diff_without_one = np.where(r_diff != 1)
x = r[r_diff_without_one]
segments = len(x)
dur = np.zeros((segments, 1))
len_r = len(r)
tmp1 = np.squeeze(x)
tmp2 = r[len_r - 1]
xx = np.hstack([tmp1, tmp2])
for l in range(segments - 1):
r1 = np.where(r == xx[l + 1])[0]
r2 = np.where(r == xx[l])[0]
dur[l] = r1 - r2
r1 = np.where(r == xx[segments])[0]
r2 = np.where(r == xx[segments - 1])[0]
dur[segments - 1] = r1 - r2 + 1
dwell[symbol] = dur / len(data)
dwell_mean[symbol] = np.mean(dur) / len(data)
dwell_std[symbol] = np.std(dur) / len(data)
return (dwell, dwell_mean, dwell_std)
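# Illustrative usage (not part of the original module):
#   >>> x = np.array([0, 1, 1, 0, 1, 1, 0, 0])
#   >>> dwell, dwell_mean, dwell_std = dwell_time(x)
#   >>> sorted(dwell_mean.keys())
#   [0, 1]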
| bsd-3-clause | 6,350,344,845,959,433,000 | 24.480519 | 253 | 0.576453 | false | 3.060842 | false | false | false |
levibostian/myBlanky | googleAppEngine/google/appengine/ext/endpoints/__init__.py | 6 | 1954 | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Deprecated endpoints module from google.appengine.ext."""
import itertools
import logging
import os
import re
import sys
logging.warning('Importing endpoints from google.appengine.ext is deprecated '
'and will be removed. Add the endpoints library to '
'app.yaml, then endpoints can be imported simply with '
'"import endpoints".')
if 'APPENGINE_RUNTIME' not in os.environ:
if not hasattr(sys, 'version_info'):
raise RuntimeError('Endpoints library isn\'t available in older Python '
'runtime environments. Use the python27 runtime.')
version_tuple = tuple(sys.version_info[:2])
if version_tuple < (2, 7):
raise RuntimeError('Endpoints library isn\'t available in python %d.%d. '
'Use version 2.7 or greater.' % version_tuple)
elif os.environ['APPENGINE_RUNTIME'] == 'python':
raise RuntimeError('Endpoints library isn\'t available in python 2.5 '
'runtime. Use the python27 runtime instead.')
for path in sys.path:
lib_path, final_dir = os.path.split(path)
if re.match('webapp2-.+', final_dir):
endpoints_path = os.path.join(lib_path, 'endpoints-1.0')
if endpoints_path not in sys.path:
sys.path.append(endpoints_path)
break
from endpoints import *
__version__ = '1.0'
| mit | -6,254,910,070,703,599,000 | 26.138889 | 78 | 0.682702 | false | 3.955466 | false | false | false |
lmanzurv/sassy_coffee | sassy_coffee/__init__.py | 1 | 1688 | from django.conf import settings
DJANGO_PATH = None
if hasattr(settings, 'PROJECT_PATH') and settings.PROJECT_PATH:
DJANGO_PATH = settings.PROJECT_PATH
else:
DJANGO_PATH = settings.BASE_DIR
from django.utils import autoreload
from sassy_coffee import utils, compilers
import sys, os
formats_to_compile = [format.lower() for format in getattr(settings, 'DJANGO_SASSY_COFFEE_FORMATS', list())]
exclusions = getattr(settings, 'DJANGO_SASSY_COFFEE_EXCLUSIONS', list())
if settings.DEBUG:
_mtimes = {}
_win = (sys.platform == 'win32')
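    # Keep references to Django's original autoreload hooks so the patched
    # versions below can delegate to them after recompiling SASS/CoffeeScript.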
m = autoreload.main
c = autoreload.code_changed
def code_changed():
global _mtimes, _win
for format in formats_to_compile:
format = '*.%s' % format
files = utils.locate_folders_to_monitor(format)
for folder in files:
stat = os.stat(folder)
mtime = stat.st_mtime
if _win:
mtime -= stat.st_ctime
if folder not in _mtimes:
_mtimes[folder] = mtime
continue
if mtime != _mtimes[folder]:
_mtimes = {}
return True
return c()
def main(main_func, args=None, kwargs=None):
if os.environ.get('RUN_MAIN') == 'true':
def recompile_files(func):
def wrap(*args, **kws):
compilers.compile_files()
return func(*args, **kws)
return wrap
main_func = recompile_files(main_func)
return m(main_func, args, kwargs)
autoreload.main = main
autoreload.code_changed = code_changed
| apache-2.0 | -4,550,573,180,800,013,000 | 29.142857 | 108 | 0.562204 | false | 3.953162 | false | false | false |
nitesh1989/tools-iuc | tools/ngsutils/ngsutils/support/stats.py | 4 | 2967 | '''
various statistical tests and methods...
'''
import math
from ngsutils.support import memoize
def median(vals):
'''
>>> median([1,2,3])
2
>>> median([1,2,3,4])
2.5
'''
vals.sort()
if len(vals) % 2 == 1:
return vals[len(vals) / 2]
else:
a = vals[(len(vals) / 2) - 1]
b = vals[(len(vals) / 2)]
return float(a + b) / 2
def mean_stdev(l):
'''
>>> mean_stdev([1,2,2,2])
(1.75, 0.5)
>>> mean_stdev([2,2,2,2])
(2.0, 0.0)
'''
acc = 0
for el in l:
acc += el
mean = float(acc) / len(l)
acc = 0
for el in l:
acc += (el - mean) ** 2
if len(l) > 2:
stdev = math.sqrt(float(acc) / (len(l) - 1))
else:
stdev = 0.0
return (mean, stdev)
def counts_median(d):
'''
Calculate the median from counts stored in a dictionary
>>> counts_median({ 1: 4, 2: 1, 3: 4 })
2
>>> counts_median({ 1: 4, 3: 4 })
2
'''
count = 0
for k in d:
count += d[k]
if count == 0:
return 0
acc = 0.0
last = 0
for k in sorted(d):
if last:
return (last + k) / 2
acc += d[k]
if acc / count == 0.5:
last = k
elif acc / count > 0.5:
return k
def counts_mean_stdev(d):
'''
calc mean / stdev when data is stored as counts in a dictionary
Ex:
{ 1: 4, 2: 1, 3: 4 } = [1, 1, 1, 1, 2, 3, 3, 3, 3]
>>> counts_mean_stdev({ 1: 4, 2: 1, 3: 4 })
(2.0, 1.0)
'''
acc = 0
count = 0
for k in d:
acc += k * d[k]
count += d[k]
mean = float(acc) / count
acc = 0
for k in d:
acc += (((k - mean) ** 2) * d[k])
if count > 2:
stdev = math.sqrt(float(acc) / (count - 1))
else:
stdev = 0.0
return (mean, stdev)
@memoize
def poisson_prob(x, mean):
'''
Return the probability that you could get x counts in
a Poisson test with a mean value.
prob(x) = sum(i=1..x){poisson(i)}
>>> poisson_prob(6,10)
0.1300960209527205
>>> poisson_prob(8,10)
0.33277427882095645
'''
acc = 0.0
for i in xrange(1, x + 1):
acc += poisson_func(i, mean)
return acc
@memoize
def poisson_func(mu, lambd):
'''
This is the Poisson distribution function
p(mu) = (lambda^mu * e^(-lambda)) / (mu!)
mu is a count
lambd is the mean
>>> poisson_func(1,10)
0.00045399929762484856
>>> poisson_func(2,10)
0.0022699964881242427
>>> poisson_func(3,10)
0.007566654960414142
'''
return (lambd ** mu) * (math.exp(-1 * lambd)) / _factorial(mu)
@memoize
def _factorial(x):
'''
>>> _factorial(1)
1
>>> _factorial(2)
2
>>> _factorial(3)
6
'''
return math.factorial(x)
if __name__ == '__main__':
import doctest
doctest.testmod()
| mit | 5,921,527,001,767,970,000 | 16.981818 | 67 | 0.470509 | false | 2.961078 | false | false | false |
abinit/abinit | abimkdocs_tests/tools.py | 1 | 2845 | from __future__ import division, print_function, unicode_literals, absolute_import
import sys
import os
from unittest import TestCase
_PATCH_DONE = False
def patch_syspath():
global _PATCH_DONE
if _PATCH_DONE: return
# We don't install with setup.py hence we have to add the directory [...]/abinit/tests to $PYTHONPATH
pack_dir = os.path.dirname(os.path.abspath(__file__))
pack_dir = os.path.join(pack_dir, "..")
sys.path.insert(0, pack_dir)
# This needed to import doc.tests
sys.path.insert(0, os.path.join(pack_dir, "doc"))
_PATCH_DONE = True
class AbimkdocsTest(TestCase):
@staticmethod
def get_abinit_varnames_from_f90():
# construct list of input keywords that appear in chkvars.F90
home_dir = os.path.join(os.path.dirname(__file__) , "..")
path = os.path.join(home_dir, "src/44_abitypes_defs/m_dtset.F90")
in_block = False
words = []
with open(path, "rt") as fh:
for line in fh:
if line.find("admitted variable names") > 0: in_block = True
if line.find("Extra token") > 0: in_block = False
if in_block and line.find("list_var") > 0:
line_words = (line.split("'")[1]).split()
for i in range(len(line_words)):
words.append(line_words[i])
if not words:
print("Found empty list of words in %s " % path)
print("Perhaps someone changed the format of the file?")
print("Please modify the code in " + __file__)
raise RuntimeError("")
return set(words)
@staticmethod
def get_anaddb_varnames_from_f90():
# Scan the source and search for the calls to intagm. Parse the arguments
# and extract the name of the variable. The prototype of intagm is:
# call intagm(dprarr,intarr,jdtset,marr,1,string(1:lenstr),'brav',tread,'INT')
import re
re_call = re.compile(r'\s*call\s+intagm\((.+)\)\w*', re.I)
# construct list of key words appearing in anaddb input
home_dir = os.path.join(os.path.dirname(__file__) , "..")
path = os.path.join(home_dir, "src/77_ddb/m_anaddb_dataset.F90")
words = []
with open(path, "rt") as fh:
for line in fh:
m = re_call.match(line)
if m:
tokens = m.group(1).split(",")
assert len(tokens) == 9
words.append(tokens[-3].replace("'","").replace('"',""))
if not words:
print("Found empty list of words in file %s" % path)
print("Perhaps someone changed the format of the file?")
print("Please modify the code in " + __file__)
raise RuntimeError()
return set(words)
| gpl-3.0 | 812,320,049,273,765,500 | 36.434211 | 105 | 0.564148 | false | 3.675711 | true | false | false |
MadsJensen/RP_scripts | graph_path-strength_ada_post.py | 1 | 2045 | import numpy as np
import bct
from sklearn.externals import joblib
from my_settings import (bands, source_folder)
from sklearn.ensemble import AdaBoostClassifier
from sklearn.cross_validation import (StratifiedKFold, cross_val_score)
from sklearn.grid_search import GridSearchCV
subjects = [
"0008", "0009", "0010", "0012", "0014", "0015", "0016", "0017", "0018",
"0019", "0020", "0021", "0022"
]
cls_all = []
pln_all = []
scores_all = np.empty([4, 6])
for subject in subjects:
cls = np.load(source_folder + "graph_data/%s_classic_pow_post.npy" %
subject).item()
pln = np.load(source_folder + "graph_data/%s_plan_pow_post.npy" %
subject).item()
cls_all.append(cls)
pln_all.append(pln)
for k, band in enumerate(bands.keys()):
data_cls = []
for j in range(len(cls_all)):
tmp = cls_all[j][band]
data_cls.append(
np.asarray([bct.strengths_und(g) for g in tmp]).mean(axis=0))
data_pln = []
for j in range(len(pln_all)):
tmp = pln_all[j][band]
data_pln.append(
np.asarray([bct.strengths_und(g) for g in tmp]).mean(axis=0))
data_cls = np.asarray(data_cls)
data_pln = np.asarray(data_pln)
X = np.vstack([data_cls, data_pln])
y = np.concatenate([np.zeros(len(data_cls)), np.ones(len(data_pln))])
cv = StratifiedKFold(y, n_folds=6, shuffle=True)
cv_params = {
"learning_rate": np.arange(0.1, 1.1, 0.1),
'n_estimators': np.arange(1, 80, 2)
}
grid = GridSearchCV(
AdaBoostClassifier(),
cv_params,
scoring='accuracy',
cv=cv,
n_jobs=1,
verbose=1)
grid.fit(X, y)
ada_cv = grid.best_estimator_
scores = cross_val_score(ada_cv, X, y, cv=cv)
scores_all[k, :] = scores
# save the classifier
joblib.dump(ada_cv, source_folder +
"graph_data/sk_models/path-strength_ada_post_%s.plk" % band)
np.save(source_folder + "graph_data/path-strength_scores_all_post.npy",
scores_all)
| bsd-3-clause | -2,531,493,747,815,670,300 | 27.013699 | 76 | 0.599022 | false | 2.97671 | false | false | false |
nlucent/flansible | Flansible/flansible/ansible_task_output.py | 1 | 1661 | from flask_restful import Resource, Api
from flask_restful_swagger import swagger
from flask import render_template, make_response, Response
from flansible import app
from flansible import api, app, celery, auth
from ModelClasses import AnsibleCommandModel, AnsiblePlaybookModel, AnsibleRequestResultModel, AnsibleExtraArgsModel
import celery_runner
import time
class AnsibleTaskOutput(Resource):
@swagger.operation(
notes='Get the output of an Ansible task/job',
nickname='ansibletaskoutput',
parameters=[
{
"name": "task_id",
"description": "The ID of the task/job to get status for",
"required": True,
"allowMultiple": False,
"dataType": 'string',
"paramType": "path"
}
])
@auth.login_required
def get(self, task_id):
title = "Playbook Results"
task = celery_runner.do_long_running_task.AsyncResult(task_id)
if task.state == 'PENDING':
result = "Task not found"
resp = app.make_response((result, 404))
return resp
if task.state == "PROGRESS":
result = task.info['output']
else:
result = task.info['output']
result = result.replace('\n', '<br>\n')
refresh = 5
if "RECAP" in result or "ERROR" in result:
# disable refresh in template
refresh = 1000
response = make_response(render_template('status.j2', title=title, status=result, refresh=refresh))
response.headers['Content-Type'] = 'text/html'
return response
api.add_resource(AnsibleTaskOutput, '/api/ansibletaskoutput/<string:task_id>')
| mit | -7,640,351,620,178,015,000 | 33.604167 | 116 | 0.63817 | false | 4.071078 | false | false | false |