code (string, 2 to 1.05M chars) | repo_name (string, 5 to 104) | path (string, 4 to 251) | language (1 class) | license (15 classes) | size (int32, 2 to 1.05M)
---|---|---|---|---|---
#!/usr/bin/env python
from django.core.management import setup_environ
import settings
setup_environ(settings)
import socket
from trivia.models import *
irc = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
irc.connect((settings.IRC_SERVER, settings.IRC_PORT))
def send(msg):
irc.send(msg + "\r\n")
print "{SENT} " + msg
return
def msg(user, msg):
send("PRIVMSG " + user + " :" + msg)
return
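# Raw IRC lines look like ':prefix COMMAND arg1 arg2 :trailing text' (or
# 'PING :token'); splitting once on ' :' peels off the trailing argument and
# the remainder splits on spaces into the command and its parameters.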
def processline(line):
parts = line.split(' :',1)
args = parts[0].split(' ')
if (len(parts) > 1):
args.append(parts[1])
if args[0] == "PING":
send("PONG :" + args[1])
return
try:
if args[3] == "!questions":
questions = str(Question.objects.all())
msg(args[2], questions)
return
except IndexError:
return
# When we're done, remember to return.
return
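# Per RFC 1459, USER takes four parameters (username, hostname, servername,
# realname); reusing the nickname for all four is a quick shortcut here.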
send("USER " + (settings.IRC_NICKNAME + " ")*4)
send("NICK " + settings.IRC_NICKNAME)
for channel in settings.IRC_CHANNELS:
send("JOIN " + channel)
while True:
# EXIST
line = irc.recv(1024).rstrip()
if "\r\n" in line:
        linesep = line.split("\r\n")  # one recv may carry several IRC lines
for l in linesep:
processline(l)
continue
processline(line)
| relrod/pib | bot.py | Python | bsd-3-clause | 1,205 |
"""
Commands that are available from the connect screen.
"""
import re
import traceback
from django.conf import settings
from src.players.models import PlayerDB
from src.objects.models import ObjectDB
from src.server.models import ServerConfig
from src.comms.models import Channel
from src.utils import create, logger, utils, ansi
from src.commands.default.muxcommand import MuxCommand
from src.commands.cmdhandler import CMD_LOGINSTART
# limit symbol import for API
__all__ = ("Magic", "Connect", "Create", "CmdUnconnectedQuit", "CmdUnconnectedLook", "CmdUnconnectedHelp")
CONNECTION_SCREEN_MODULE = settings.CONNECTION_SCREEN_MODULE
CONNECTION_SCREEN = ""
try:
CONNECTION_SCREEN = ansi.parse_ansi(utils.string_from_module(CONNECTION_SCREEN_MODULE))
except Exception:
pass
if not CONNECTION_SCREEN:
    CONNECTION_SCREEN = "\nEvennia: Error in CONNECTION_SCREEN_MODULE (the randomly picked connection-screen variable is not a string).\nEnter 'help' for aid."
class Magic(MuxCommand):
"""
Hidden command for the web client's magic cookie authenticator.
"""
key = "magic"
def func(self):
session = self.caller
player = PlayerDB.objects.player_search(self.lhs)
if len(player) != 1:
player = None
else:
player = player[0]
if player.name.lower() != self.lhs.lower():
                player = None
pswd = None
if player:
pswd = self.rhs == player.db.magic_cookie
if not (player and pswd):
# No playername or password match
session.msg("Could not verify Magic Cookie. Please email the server administrator for assistance.")
return
# Check IP and/or name bans
bans = ServerConfig.objects.conf("server_bans")
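        # Each ban entry is assumed to be a tuple whose first element is the
        # banned player name and whose third element, when set, is a compiled
        # regex matched against the connecting IP address.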
if bans and (any(tup[0]==player.name for tup in bans)
or
any(tup[2].match(session.address[0]) for tup in bans if tup[2])):
# this is a banned IP or name!
string = "{rYou have been banned and cannot continue from here."
string += "\nIf you feel this ban is in error, please email an admin.{x"
session.msg(string)
session.execute_cmd("quit")
return
session.sessionhandler.login(session, player)
class Connect(MuxCommand):
"""
Connect to the game.
Usage (at login screen):
connect playername password
connect "player name" "pass word"
Use the create command to first create an account before logging in.
If you have spaces in your name, enclose it in quotes.
"""
key = "connect"
aliases = ["conn", "con", "co"]
locks = "cmd:all()" # not really needed
def func(self):
"""
Uses the Django admin api. Note that unlogged-in commands
have a unique position in that their func() receives
a session object instead of a source_object like all
other types of logged-in commands (this is because
there is no object yet before the player has logged in)
"""
session = self.caller
args = self.args
# extract quoted parts
parts = [part.strip() for part in re.split(r"\"|\'", args) if part.strip()]
if len(parts) == 1:
# this was (hopefully) due to no quotes being found
parts = parts[0].split(None, 1)
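        # e.g. connect "Anna the Barbarian" c67jHL8p  ->
        # ['Anna the Barbarian', 'c67jHL8p']; unquoted input falls through to
        # the whitespace split above.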
if len(parts) != 2:
session.msg("\n\r Usage (without <>): connect <name> <password>")
return
playername, password = parts
# Match account name and check password
player = PlayerDB.objects.player_search(playername)
if len(player) != 1:
player = None
else:
player = player[0]
if player.name.lower() != playername.lower():
                player = None
pswd = None
if player:
pswd = player.check_password(password)
if not (player and pswd):
# No playername or password match
string = "Wrong login information given.\nIf you have spaces in your name or "
string += "password, don't forget to enclose it in quotes. Also capitalization matters."
string += "\nIf you are new you should first create a new account "
string += "using the 'create' command."
session.msg(string)
return
# Check IP and/or name bans
bans = ServerConfig.objects.conf("server_bans")
if bans and (any(tup[0]==player.name for tup in bans)
or
any(tup[2].match(session.address[0]) for tup in bans if tup[2])):
# this is a banned IP or name!
string = "{rYou have been banned and cannot continue from here."
string += "\nIf you feel this ban is in error, please email an admin.{x"
session.msg(string)
session.execute_cmd("quit")
return
# actually do the login. This will call all other hooks:
# session.at_init()
# if character:
# at_first_login() # only once
# at_pre_login()
# player.at_post_login() - calls look if no character is set
# character.at_post_login() - this calls look command by default
session.sessionhandler.login(session, player)
class Create(MuxCommand):
"""
Create a new account.
Usage (at login screen):
create <playername> <password>
create "player name" "pass word"
This creates a new player account.
If you have spaces in your name, enclose it in quotes.
"""
key = "create"
aliases = ["cre", "cr"]
locks = "cmd:all()"
def func(self):
"Do checks and create account"
session = self.caller
args = self.args.strip()
# extract quoted parts
parts = [part.strip() for part in re.split(r"\"|\'", args) if part.strip()]
if len(parts) == 1:
# this was (hopefully) due to no quotes being found
parts = parts[0].split(None, 1)
if len(parts) != 2:
string = "\n Usage (without <>): create <name> <password>"
string += "\nIf <name> or <password> contains spaces, enclose it in quotes."
session.msg(string)
return
playername, password = parts
print "playername '%s', password: '%s'" % (playername, password)
# sanity checks
if not re.findall('^[\w. @+-]+$', playername) or not (0 < len(playername) <= 30):
            # this echoes the restrictions made by django's auth module,
            # except that spaces are also allowed here (runs of whitespace
            # are collapsed to single spaces below).
            string = "\n\r Playername must be 30 characters or fewer. Letters, spaces, digits and @/./+/-/_ only."
session.msg(string)
return
# strip excessive spaces in playername
playername = re.sub(r"\s+", " ", playername).strip()
if PlayerDB.objects.filter(user__username__iexact=playername) or PlayerDB.objects.filter(username__iexact=playername):
# player already exists (we also ignore capitalization here)
session.msg("Sorry, there is already a player with the name '%s'." % playername)
return
if not re.findall('^[\w. @+-]+$', password) or not (3 < len(password)):
string = "\n\r Password should be longer than 3 characers. Letters, spaces, digits and @\.\+\-\_ only."
string += "\nFor best security, make it longer than 8 characters. You can also use a phrase of"
string += "\nmany words if you enclose the password in quotes."
session.msg(string)
return
# everything's ok. Create the new player account.
try:
default_home = ObjectDB.objects.get_id(settings.CHARACTER_DEFAULT_HOME)
typeclass = settings.BASE_CHARACTER_TYPECLASS
permissions = settings.PERMISSION_PLAYER_DEFAULT
try:
new_character = create.create_player(playername, None, password,
permissions=permissions,
character_typeclass=typeclass,
character_location=default_home,
character_home=default_home)
except Exception:
session.msg("There was an error creating the default Character/Player:\n%s\n If this problem persists, contact an admin.")
return
new_player = new_character.player
# This needs to be called so the engine knows this player is logging in for the first time.
# (so it knows to call the right hooks during login later)
utils.init_new_player(new_player)
# join the new player to the public channel
pchanneldef = settings.CHANNEL_PUBLIC
if pchanneldef:
pchannel = Channel.objects.get_channel(pchanneldef[0])
if not pchannel.connect_to(new_player):
string = "New player '%s' could not connect to public channel!" % new_player.key
logger.log_errmsg(string)
# allow only the character itself and the player to puppet this character (and Immortals).
new_character.locks.add("puppet:id(%i) or pid(%i) or perm(Immortals) or pperm(Immortals)" %
(new_character.id, new_player.id))
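            # e.g. for character #42 owned by player #7 this yields:
            #   "puppet:id(42) or pid(7) or perm(Immortals) or pperm(Immortals)"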
# If no description is set, set a default description
if not new_character.db.desc:
new_character.db.desc = "This is a Player."
# tell the caller everything went well.
string = "A new account '%s' was created. Welcome!"
if " " in playername:
string += "\n\nYou can now log in with the command 'connect \"%s\" <your password>'."
else:
string += "\n\nYou can now log with the command 'connect %s <your password>'."
session.msg(string % (playername, playername))
except Exception:
# We are in the middle between logged in and -not, so we have to handle tracebacks
# ourselves at this point. If we don't, we won't see any errors at all.
string = "%s\nThis is a bug. Please e-mail an admin if the problem persists."
session.msg(string % (traceback.format_exc()))
logger.log_errmsg(traceback.format_exc())
class CmdUnconnectedQuit(MuxCommand):
"""
We maintain a different version of the quit command
here for unconnected players for the sake of simplicity. The logged in
version is a bit more complicated.
"""
key = "quit"
aliases = ["q", "qu"]
locks = "cmd:all()"
def func(self):
"Simply close the connection."
session = self.caller
session.msg("Good bye! Disconnecting ...")
session.session_disconnect()
class CmdUnconnectedLook(MuxCommand):
"""
This is an unconnected version of the look command for simplicity.
This is called by the server and kicks everything in gear.
All it does is display the connect screen.
"""
key = CMD_LOGINSTART
aliases = ["look", "l"]
locks = "cmd:all()"
def func(self):
"Show the connect screen."
self.caller.msg(CONNECTION_SCREEN)
class CmdUnconnectedHelp(MuxCommand):
"""
This is an unconnected version of the help command,
for simplicity. It shows a pane of info.
"""
key = "help"
aliases = ["h", "?"]
locks = "cmd:all()"
def func(self):
"Shows help"
string = \
"""
You are not yet logged into the game. Commands available at this point:
{wcreate, connect, look, help, quit{n
To login to the system, you need to do one of the following:
{w1){n If you have no previous account, you need to use the 'create'
command.
{wcreate Anna c67jHL8p{n
    Note that if you use spaces in your name, you have to enclose it in quotes.
{wcreate "Anna the Barbarian" c67jHL8p{n
It's always a good idea (not only here, but everywhere on the net)
to not use a regular word for your password. Make it longer than
6 characters or write a passphrase.
{w2){n If you have an account already, either because you just created
one in {w1){n above or you are returning, use the 'connect' command:
{wconnect Anna c67jHL8p{n
(Again, if there are spaces in the name you have to enclose it in quotes).
This should log you in. Run {whelp{n again once you're logged in
to get more aid. Hope you enjoy your stay!
You can use the {wlook{n command if you want to see the connect screen again.
"""
self.caller.msg(string)
| TaliesinSkye/evennia | wintersoasis-master/commands/unloggedin.py | Python | bsd-3-clause | 12,835 |
#!/usr/bin/python
# Copyright (c) 2012 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import os
import subprocess
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import pynacl.platform
python = sys.executable
bash = '/bin/bash'
echo = 'echo'
BOT_ASSIGNMENT = {
######################################################################
# Buildbots.
######################################################################
'xp-newlib-opt':
python + ' buildbot\\buildbot_standard.py opt 32 newlib --no-gyp',
'xp-glibc-opt':
python + ' buildbot\\buildbot_standard.py opt 32 glibc --no-gyp',
'xp-bare-newlib-opt':
python + ' buildbot\\buildbot_standard.py opt 32 newlib --no-gyp',
'xp-bare-glibc-opt':
python + ' buildbot\\buildbot_standard.py opt 32 glibc --no-gyp',
'precise-64-validator-opt':
python + ' buildbot/buildbot_standard.py opt 64 glibc --validator',
# Clang.
'precise_64-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 64 newlib --clang',
'mac10.7-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 32 newlib --clang',
# ASan.
'precise_64-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 64 newlib --asan',
'mac10.7-newlib-dbg-asan':
python + ' buildbot/buildbot_standard.py opt 32 newlib --asan',
# PNaCl.
'oneiric_32-newlib-arm_hw-pnacl-panda-dbg':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-dbg',
'oneiric_32-newlib-arm_hw-pnacl-panda-opt':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-opt',
'precise_64-newlib-arm_qemu-pnacl-dbg':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-dbg',
'precise_64-newlib-arm_qemu-pnacl-opt':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-opt',
'precise_64-newlib-x86_32-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'precise_64-newlib-x86_64-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'mac10.8-newlib-opt-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'win7-64-newlib-opt-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'precise_64-newlib-mips-pnacl':
echo + ' "TODO(mseaborn): add mips"',
# PNaCl Spec
'precise_64-newlib-arm_qemu-pnacl-buildonly-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-arm-buildonly',
'oneiric_32-newlib-arm_hw-pnacl-panda-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-arm-hw',
'lucid_64-newlib-x86_32-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-x8632',
'lucid_64-newlib-x86_64-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-x8664',
# NaCl Spec
'lucid_64-newlib-x86_32-spec':
bash + ' buildbot/buildbot_spec2k.sh nacl-x8632',
'lucid_64-newlib-x86_64-spec':
bash + ' buildbot/buildbot_spec2k.sh nacl-x8664',
# Valgrind bots.
'precise-64-newlib-dbg-valgrind':
echo + ' "Valgrind bots are disabled: see '
'https://code.google.com/p/nativeclient/issues/detail?id=3158"',
'precise-64-glibc-dbg-valgrind':
echo + ' "Valgrind bots are disabled: see '
'https://code.google.com/p/nativeclient/issues/detail?id=3158"',
# Coverage.
'mac10.6-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'precise-64-32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
'precise-64-64-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'xp-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
######################################################################
# Trybots.
######################################################################
'nacl-precise64_validator_opt':
python + ' buildbot/buildbot_standard.py opt 64 glibc --validator',
'nacl-precise64_newlib_dbg_valgrind':
bash + ' buildbot/buildbot_valgrind.sh newlib',
'nacl-precise64_glibc_dbg_valgrind':
bash + ' buildbot/buildbot_valgrind.sh glibc',
# Coverage trybots.
'nacl-mac10.6-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'nacl-precise-64-32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
'nacl-precise-64-64-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 64 newlib --coverage'),
'nacl-win32-newlib-coverage':
python + (' buildbot/buildbot_standard.py '
'coverage 32 newlib --coverage'),
# Clang trybots.
'nacl-precise_64-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 64 newlib --clang',
'nacl-mac10.6-newlib-dbg-clang':
python + ' buildbot/buildbot_standard.py dbg 32 newlib --clang',
# Pnacl main trybots
'nacl-precise_64-newlib-arm_qemu-pnacl':
bash + ' buildbot/buildbot_pnacl.sh mode-trybot-qemu',
'nacl-precise_64-newlib-x86_32-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'nacl-precise_64-newlib-x86_64-pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
'nacl-precise_64-newlib-mips-pnacl':
echo + ' "TODO(mseaborn): add mips"',
'nacl-arm_opt_panda':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-try',
'nacl-arm_hw_opt_panda':
bash + ' buildbot/buildbot_pnacl.sh mode-buildbot-arm-hw-try',
'nacl-mac10.8_newlib_opt_pnacl':
python + ' buildbot/buildbot_pnacl.py opt 32 pnacl',
'nacl-win7_64_newlib_opt_pnacl':
python + ' buildbot/buildbot_pnacl.py opt 64 pnacl',
# Pnacl spec2k trybots
'nacl-precise_64-newlib-x86_32-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-x8632',
'nacl-precise_64-newlib-x86_64-pnacl-spec':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-x8664',
'nacl-arm_perf_panda':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-arm-buildonly',
'nacl-arm_hw_perf_panda':
bash + ' buildbot/buildbot_spec2k.sh pnacl-trybot-arm-hw',
# Toolchain glibc.
'precise64-glibc': bash + ' buildbot/buildbot_linux-glibc-makefile.sh',
'mac-glibc': bash + ' buildbot/buildbot_mac-glibc-makefile.sh',
'win7-glibc': 'buildbot\\buildbot_windows-glibc-makefile.bat',
# Toolchain newlib x86.
'win7-toolchain_x86': 'buildbot\\buildbot_toolchain_win.bat',
'mac-toolchain_x86': bash + ' buildbot/buildbot_toolchain.sh mac',
'precise64-toolchain_x86': bash + ' buildbot/buildbot_toolchain.sh linux',
# Toolchain newlib arm.
'win7-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --buildbot',
'mac-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --buildbot',
'precise64-toolchain_arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --buildbot',
# BIONIC toolchain builders.
'precise64-toolchain_bionic':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build_bionic'
' --buildbot',
# Pnacl toolchain builders.
'linux-armtools-x86_32':
bash + ' buildbot/buildbot_toolchain_arm_trusted.sh',
'linux-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'linux-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'precise-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'precise-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
'mac-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
# TODO(robertm): Delete this once we are using win-pnacl-x86_64
'win-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
# TODO(robertm): use this in favor or the misnamed win-pnacl-x86_32
'win-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --buildbot',
# Pnacl toolchain testers
'linux-pnacl-x86_64-tests-x86_64':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot x86-64',
'linux-pnacl-x86_64-tests-x86_32':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot x86-32',
'linux-pnacl-x86_64-tests-arm':
bash + ' buildbot/buildbot_pnacl_toolchain_tests.sh tc-test-bot arm',
# MIPS toolchain buildbot.
'linux-pnacl-x86_32-tests-mips':
bash + ' buildbot/buildbot_toolchain_mips_trusted.sh',
# Toolchain trybots.
'nacl-toolchain-precise64-newlib':
bash + ' buildbot/buildbot_toolchain.sh linux',
'nacl-toolchain-mac-newlib': bash + ' buildbot/buildbot_toolchain.sh mac',
'nacl-toolchain-win7-newlib': 'buildbot\\buildbot_toolchain_win.bat',
'nacl-toolchain-precise64-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --trybot',
'nacl-toolchain-mac-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --trybot',
'nacl-toolchain-win7-newlib-arm':
python +
' buildbot/buildbot_toolchain_build.py'
' toolchain_build'
' --trybot',
'nacl-toolchain-precise64-glibc':
bash + ' buildbot/buildbot_linux-glibc-makefile.sh',
'nacl-toolchain-mac-glibc':
bash + ' buildbot/buildbot_mac-glibc-makefile.sh',
'nacl-toolchain-win7-glibc':
'buildbot\\buildbot_windows-glibc-makefile.bat',
# Pnacl toolchain trybots.
'nacl-toolchain-linux-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-linux-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-linux-pnacl-mips': echo + ' "TODO(mseaborn)"',
'nacl-toolchain-precise-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-precise-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-precise-pnacl-mips': echo + ' "TODO(mseaborn)"',
'nacl-toolchain-mac-pnacl-x86_32':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
'nacl-toolchain-win7-pnacl-x86_64':
python + ' buildbot/buildbot_pnacl_toolchain.py --trybot',
}
special_for_arm = [
'win7_64',
'win7-64',
'lucid-64',
'lucid64',
'precise-64',
'precise64'
]
for platform in [
'vista', 'win7', 'win8', 'win',
'mac10.6', 'mac10.7', 'mac10.8',
'lucid', 'precise'] + special_for_arm:
if platform in special_for_arm:
arch_variants = ['arm']
else:
arch_variants = ['', '32', '64', 'arm']
for arch in arch_variants:
arch_flags = ''
real_arch = arch
arch_part = '-' + arch
# Disable GYP build for win32 bots and arm cross-builders. In this case
# "win" means Windows XP, not Vista, Windows 7, etc.
#
# Building via GYP always builds all toolchains by default, but the win32
# XP pnacl builds are pathologically slow (e.g. ~38 seconds per compile on
# the nacl-win32_glibc_opt trybot). There are other builders that test
# Windows builds via gyp, so the reduced test coverage should be slight.
if arch == 'arm' or (platform == 'win' and arch == '32'):
arch_flags += ' --no-gyp'
if arch == '':
arch_part = ''
real_arch = '32'
# Test with Breakpad tools only on basic Linux builds.
if sys.platform.startswith('linux'):
arch_flags += ' --use-breakpad-tools'
for mode in ['dbg', 'opt']:
for libc in ['newlib', 'glibc']:
# Buildbots.
for bare in ['', '-bare']:
name = platform + arch_part + bare + '-' + libc + '-' + mode
assert name not in BOT_ASSIGNMENT, name
BOT_ASSIGNMENT[name] = (
python + ' buildbot/buildbot_standard.py ' +
mode + ' ' + real_arch + ' ' + libc + arch_flags)
# Trybots
for arch_sep in ['', '-', '_']:
name = 'nacl-' + platform + arch_sep + arch + '_' + libc + '_' + mode
assert name not in BOT_ASSIGNMENT, name
BOT_ASSIGNMENT[name] = (
python + ' buildbot/buildbot_standard.py ' +
mode + ' ' + real_arch + ' ' + libc + arch_flags)
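# The loops above expand to entries such as:
#   'precise-64-newlib-dbg'     -> python + ' buildbot/buildbot_standard.py dbg 64 newlib'
#   'nacl-precise_64_glibc_opt' -> python + ' buildbot/buildbot_standard.py opt 64 glibc'
# (plus ' --use-breakpad-tools' when the table is generated on a Linux host).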
def EscapeJson(data):
return '"' + json.dumps(data).replace('"', r'\"') + '"'
def Main():
builder = os.environ.get('BUILDBOT_BUILDERNAME')
build_number = os.environ.get('BUILDBOT_BUILDNUMBER')
slave_type = os.environ.get('BUILDBOT_SLAVE_TYPE')
cmd = BOT_ASSIGNMENT.get(builder)
if not cmd:
sys.stderr.write('ERROR - unset/invalid builder name\n')
sys.exit(1)
env = os.environ.copy()
# Don't write out .pyc files because in cases in which files move around or
# the PYTHONPATH / sys.path change, old .pyc files can be mistakenly used.
# This avoids the need for admin changes on the bots in this case.
env['PYTHONDONTWRITEBYTECODE'] = '1'
# Use .boto file from home-dir instead of buildbot supplied one.
if 'AWS_CREDENTIAL_FILE' in env:
del env['AWS_CREDENTIAL_FILE']
env['BOTO_CONFIG'] = os.path.expanduser('~/.boto')
env['GSUTIL'] = '/b/build/third_party/gsutil/gsutil'
# When running from cygwin, we sometimes want to use a native python.
# The native python will use the depot_tools version by invoking python.bat.
if pynacl.platform.IsWindows():
env['NATIVE_PYTHON'] = 'python.bat'
else:
env['NATIVE_PYTHON'] = 'python'
if sys.platform == 'win32':
# If the temp directory is not on the same drive as the working directory,
# there can be random failures when cleaning up temp directories, so use
# a directory on the current drive. Use __file__ here instead of os.getcwd()
# because toolchain_main picks its working directories relative to __file__
filedrive, _ = os.path.splitdrive(__file__)
tempdrive, _ = os.path.splitdrive(env['TEMP'])
if tempdrive != filedrive:
env['TEMP'] = filedrive + '\\temp'
env['TMP'] = env['TEMP']
if not os.path.exists(env['TEMP']):
os.mkdir(env['TEMP'])
# Run through runtest.py to get upload of perf data.
build_properties = {
'buildername': builder,
'mastername': 'client.nacl',
'buildnumber': str(build_number),
}
factory_properties = {
'perf_id': builder,
'show_perf_results': True,
'step_name': 'naclperf', # Seems unused, but is required.
'test_name': 'naclperf', # Really "Test Suite"
}
  # Locate the buildbot build directory by relative path, as its absolute
  # location varies by platform and configuration.
  buildbot_build_dir = os.path.join(*[os.pardir] * 4)
runtest = os.path.join(buildbot_build_dir, 'scripts', 'slave', 'runtest.py')
# For builds with an actual build number, require that the script is present
# (i.e. that we're run from an actual buildbot).
if build_number is not None and not os.path.exists(runtest):
raise Exception('runtest.py script not found at: %s\n' % runtest)
cmd_exe = cmd.split(' ')[0]
cmd_exe_ext = os.path.splitext(cmd_exe)[1]
# Do not wrap these types of builds with runtest.py:
# - tryjobs
# - commands beginning with 'echo '
# - batch files
# - debug builders
if not (slave_type == 'Trybot' or
cmd_exe == echo or
cmd_exe_ext == '.bat' or
'-dbg' in builder):
# Perf dashboards are now generated by output scraping that occurs in the
# script runtest.py, which lives in the buildbot repository.
# Non-trybot builds should be run through runtest, allowing it to upload
# perf data if relevant.
cmd = ' '.join([
python, runtest,
'--build-dir=src/out',
'--results-url=https://chromeperf.appspot.com',
'--annotate=graphing',
'--no-xvfb', # We provide our own xvfb invocation.
'--factory-properties', EscapeJson(factory_properties),
'--build-properties', EscapeJson(build_properties),
cmd,
])
print "%s runs: %s\n" % (builder, cmd)
retcode = subprocess.call(cmd, env=env, shell=True)
sys.exit(retcode)
if __name__ == '__main__':
Main()
| wilsonianb/nacl_contracts | buildbot/buildbot_selector.py | Python | bsd-3-clause | 16,846 |
from contextlib import contextmanager
from _pytest.python import FixtureRequest
import mock
from mock import Mock
import pyramid.testing
from webob.multidict import MultiDict
import pyramid_swagger
import pyramid_swagger.tween
import pytest
import simplejson
from pyramid.config import Configurator
from pyramid.interfaces import IRoutesMapper
from pyramid.registry import Registry
from pyramid.response import Response
from pyramid.urldispatch import RoutesMapper
from webtest import AppError
from .request_test import test_app
from pyramid_swagger.exceptions import ResponseValidationError
from pyramid_swagger.ingest import compile_swagger_schema
from pyramid_swagger.ingest import get_resource_listing
from pyramid_swagger.tween import validation_tween_factory
class CustomResponseValidationException(Exception):
pass
class EnhancedDummyRequest(pyramid.testing.DummyRequest):
"""
pyramid.testing.DummyRequest doesn't support MultiDicts like the real
pyramid.request.Request so this is the next best thing.
"""
def __init__(self, **kw):
super(EnhancedDummyRequest, self).__init__(**kw)
self.GET = MultiDict(self.GET)
        # Make sure the content_type attr exists even when not passed in via **kw
self.content_type = getattr(self, 'content_type', None)
@contextmanager
def validation_context(request, response=None):
try:
yield
except Exception:
raise CustomResponseValidationException
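# pyramid_swagger is pointed at this context manager through the
# 'pyramid_swagger.validation_context_path' setting (see the last test in
# this module); it wraps response validation so failures surface as our
# custom exception.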
validation_ctx_path = 'tests.acceptance.response_test.validation_context'
def get_registry(settings):
registry = Registry('testing')
config = Configurator(registry=registry)
if getattr(registry, 'settings', None) is None:
config._set_settings(settings)
registry.registerUtility(RoutesMapper(), IRoutesMapper)
config.commit()
return registry
def get_swagger_schema(schema_dir='tests/sample_schemas/good_app/'):
return compile_swagger_schema(
schema_dir,
get_resource_listing(schema_dir, False)
)
def _validate_against_tween(request, response=None, **overrides):
"""
Acceptance testing helper for testing the validation tween with Swagger 1.2
responses.
:param request: pytest fixture
:param response: standard fixture by default
"""
def handler(request):
return response or Response()
settings = dict({
'pyramid_swagger.swagger_versions': ['1.2'],
'pyramid_swagger.enable_swagger_spec_validation': False,
'pyramid_swagger.schema_directory': 'tests/sample_schemas/good_app/'},
**overrides
)
settings['pyramid_swagger.schema12'] = get_swagger_schema()
settings['pyramid_swagger.schema20'] = None
registry = get_registry(settings)
# Let's make request validation a no-op so we can focus our tests.
with mock.patch.object(pyramid_swagger.tween, 'validate_request'):
validation_tween_factory(handler, registry)(request)
def test_response_validation_enabled_by_default():
request = EnhancedDummyRequest(
method='GET',
path='/sample/path_arg1/resource',
params={'required_arg': 'test'},
matchdict={'path_arg': 'path_arg1'},
)
# Omit the logging_info key from the response. If response validation
# occurs, we'll fail it.
response = Response(
body=simplejson.dumps({'raw_response': 'foo'}),
headers={'Content-Type': 'application/json; charset=UTF-8'},
)
with pytest.raises(ResponseValidationError) as excinfo:
_validate_against_tween(request, response=response)
assert "'logging_info' is a required property" in str(excinfo.value)
def test_500_when_response_is_missing_required_field():
request = EnhancedDummyRequest(
method='GET',
path='/sample/path_arg1/resource',
params={'required_arg': 'test'},
matchdict={'path_arg': 'path_arg1'},
)
# Omit the logging_info key from the response.
response = Response(
body=simplejson.dumps({'raw_response': 'foo'}),
headers={'Content-Type': 'application/json; charset=UTF-8'},
)
with pytest.raises(ResponseValidationError) as excinfo:
_validate_against_tween(request, response=response)
assert "'logging_info' is a required property" in str(excinfo.value)
def test_200_when_response_is_void_with_none_response():
request = EnhancedDummyRequest(
method='GET',
path='/sample/nonstring/{int_arg}/{float_arg}/{boolean_arg}',
params={'required_arg': 'test'},
matchdict={'int_arg': '1', 'float_arg': '2.0', 'boolean_arg': 'true'},
)
response = Response(
body=simplejson.dumps(None),
headers={'Content-Type': 'application/json; charset=UTF-8'},
)
_validate_against_tween(request, response=response)
def test_200_when_response_is_void_with_empty_response():
request = EnhancedDummyRequest(
method='GET',
path='/sample/nonstring/{int_arg}/{float_arg}/{boolean_arg}',
params={'required_arg': 'test'},
matchdict={'int_arg': '1', 'float_arg': '2.0', 'boolean_arg': 'true'},
)
response = Response(body='{}')
_validate_against_tween(request, response=response)
def test_500_when_response_arg_is_wrong_type():
request = EnhancedDummyRequest(
method='GET',
path='/sample/path_arg1/resource',
params={'required_arg': 'test'},
matchdict={'path_arg': 'path_arg1'},
)
response = Response(
body=simplejson.dumps({
'raw_response': 1.0,
'logging_info': {'foo': 'bar'}
}),
headers={'Content-Type': 'application/json; charset=UTF-8'},
)
with pytest.raises(ResponseValidationError) as excinfo:
_validate_against_tween(request, response=response)
assert "1.0 is not of type 'string'" in str(excinfo.value)
def test_500_for_bad_validated_array_response():
request = EnhancedDummyRequest(
method='GET',
path='/sample_array_response',
)
response = Response(
body=simplejson.dumps([{"enum_value": "bad_enum_value"}]),
headers={'Content-Type': 'application/json; charset=UTF-8'},
)
with pytest.raises(ResponseValidationError) as excinfo:
_validate_against_tween(request, response=response)
assert "is not one of ['good_enum_value']" in str(excinfo.value)
def test_200_for_good_validated_array_response():
request = EnhancedDummyRequest(
method='GET',
path='/sample_array_response',
)
response = Response(
body=simplejson.dumps([{"enum_value": "good_enum_value"}]),
headers={'Content-Type': 'application/json; charset=UTF-8'},
)
_validate_against_tween(request, response=response)
def test_200_for_normal_response_validation():
app = test_app(
request=Mock(spec=FixtureRequest, param=['1.2']),
**{'pyramid_swagger.enable_response_validation': True}
)
response = app.post_json('/sample', {'foo': 'test', 'bar': 'test'})
assert response.status_code == 200
def test_200_skip_validation_for_excluded_path():
# FIXME(#64): This test is broken and doesn't check anything.
app = test_app(
request=Mock(spec=FixtureRequest, param=['1.2']),
**{'pyramid_swagger.exclude_paths': [r'^/sample/?']}
)
response = app.get(
'/sample/path_arg1/resource',
params={'required_arg': 'test'}
)
assert response.status_code == 200
def test_app_error_if_path_not_in_spec_and_path_validation_disabled():
"""If path missing and validation is disabled we want to let something else
handle the error. TestApp throws an AppError, but Pyramid would throw a
HTTPNotFound exception.
"""
with pytest.raises(AppError):
app = test_app(
request=Mock(spec=FixtureRequest, param=['1.2']),
**{'pyramid_swagger.enable_path_validation': False}
)
assert app.get('/this/path/doesnt/exist')
def test_response_validation_context():
request = EnhancedDummyRequest(
method='GET',
path='/sample/path_arg1/resource',
params={'required_arg': 'test'},
matchdict={'path_arg': 'path_arg1'},
)
# Omit the logging_info key from the response.
response = Response(
body=simplejson.dumps({'raw_response': 'foo'}),
headers={'Content-Type': 'application/json; charset=UTF-8'},
)
with pytest.raises(CustomResponseValidationException):
_validate_against_tween(
request,
response=response,
**{'pyramid_swagger.validation_context_path': validation_ctx_path}
)
| prat0318/pyramid_swagger | tests/acceptance/response_test.py | Python | bsd-3-clause | 8,642 |
import ghcnpy
# Provide introduction
ghcnpy.intro()
# Print Latest Version
ghcnpy.get_ghcnd_version()
# Testing Search Capabilities
print("\nTESTING SEARCH CAPABILITIES")
ghcnpy.find_station("Asheville")
# Testing Pull Capabilities
print("\nTESTING PULL CAPABILITIES")
outfile = ghcnpy.get_data_station("USW00003812")
print(outfile, "has been downloaded")
| jjrennie/GHCNpy | test.py | Python | bsd-3-clause | 360 |
# -*- coding: utf-8 -*-
"""
eve.io.media
~~~~~~~~~~~~
Media storage for Eve-powered APIs.
:copyright: (c) 2014 by Nicola Iarocci.
:license: BSD, see LICENSE for more details.
"""
class MediaStorage(object):
""" The MediaStorage class provides a standardized API for storing files,
along with a set of default behaviors that all other storage systems can
inherit or override as necessary.
    .. versionadded:: 0.3
"""
def __init__(self, app=None):
"""
:param app: the flask application (eve itself). This can be used by
the class to access, amongst other things, the app.config object to
retrieve class-specific settings.
"""
self.app = app
def get(self, id_or_filename):
""" Opens the file given by name or unique id. Note that although the
returned file is guaranteed to be a File object, it might actually be
some subclass. Returns None if no file was found.
"""
raise NotImplementedError
def put(self, content, filename=None, content_type=None):
""" Saves a new file using the storage system, preferably with the name
        specified. If a file with this name already exists, the
storage system may modify the filename as necessary to get a unique
name. Depending on the storage system, a unique id or the actual name
of the stored file will be returned. The content type argument is used
to appropriately identify the file when it is retrieved.
.. versionchanged:: 0.5
Allow filename to be optional (#414).
"""
raise NotImplementedError
def delete(self, id_or_filename):
""" Deletes the file referenced by name or unique id. If deletion is
not supported on the target storage system this will raise
NotImplementedError instead
"""
raise NotImplementedError
def exists(self, id_or_filename):
""" Returns True if a file referenced by the given name or unique id
already exists in the storage system, or False if the name is available
for a new file.
"""
raise NotImplementedError
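# --- Illustrative sketch (not part of Eve): a minimal filesystem-backed
# --- subclass showing how a concrete storage system might satisfy this API.
# --- The root path, naming scheme and collision policy here are assumptions.
import os
class ExampleFileSystemStorage(MediaStorage):
    def __init__(self, app=None, root='/tmp/eve-media'):
        super(ExampleFileSystemStorage, self).__init__(app)
        self.root = root
        if not os.path.isdir(root):
            os.makedirs(root)
    def _path(self, name):
        return os.path.join(self.root, name)
    def get(self, id_or_filename):
        # Return an open file object, or None when the file is missing.
        try:
            return open(self._path(id_or_filename), 'rb')
        except IOError:
            return None
    def put(self, content, filename=None, content_type=None):
        # Accept a file-like object or raw bytes; return the name stored under.
        name = filename or 'unnamed'
        data = content.read() if hasattr(content, 'read') else content
        with open(self._path(name), 'wb') as f:
            f.write(data)
        return name
    def delete(self, id_or_filename):
        if self.exists(id_or_filename):
            os.remove(self._path(id_or_filename))
    def exists(self, id_or_filename):
        return os.path.isfile(self._path(id_or_filename))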
| opticode/eve | eve/io/media.py | Python | bsd-3-clause | 2,207 |
from django.utils.translation import ugettext as _
from django.db import models
from jmbo.models import ModelBase
class Superhero(ModelBase):
name = models.CharField(max_length=256, editable=False)
class Meta:
verbose_name_plural = _("Superheroes")
| praekelt/jmbo-superhero | superhero/models.py | Python | bsd-3-clause | 269 |
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
''' Processor functions for images '''
import numpy as np
def squeeze_image(img):
''' Return image, remove axes length 1 at end of image shape
For example, an image may have shape (10,20,30,1,1). In this case
squeeze will result in an image with shape (10,20,30). See doctests
for further description of behavior.
Parameters
----------
img : ``SpatialImage``
Returns
-------
squeezed_img : ``SpatialImage``
Copy of img, such that data, and data shape have been squeezed,
for dimensions > 3rd, and at the end of the shape list
Examples
--------
>>> import nipype.externals.pynifti as nf
>>> shape = (10,20,30,1,1)
>>> data = np.arange(np.prod(shape)).reshape(shape)
>>> affine = np.eye(4)
>>> img = nf.Nifti1Image(data, affine)
>>> img.get_shape()
(10, 20, 30, 1, 1)
>>> img2 = squeeze_image(img)
>>> img2.get_shape()
(10, 20, 30)
If the data are 3D then last dimensions of 1 are ignored
>>> shape = (10,1,1)
>>> data = np.arange(np.prod(shape)).reshape(shape)
>>> img = nf.ni1.Nifti1Image(data, affine)
>>> img.get_shape()
(10, 1, 1)
>>> img2 = squeeze_image(img)
>>> img2.get_shape()
(10, 1, 1)
Only *final* dimensions of 1 are squeezed
>>> shape = (1, 1, 5, 1, 2, 1, 1)
>>> data = data.reshape(shape)
>>> img = nf.ni1.Nifti1Image(data, affine)
>>> img.get_shape()
(1, 1, 5, 1, 2, 1, 1)
>>> img2 = squeeze_image(img)
>>> img2.get_shape()
(1, 1, 5, 1, 2)
'''
klass = img.__class__
shape = img.get_shape()
slen = len(shape)
if slen < 4:
return klass.from_image(img)
for bdim in shape[3::][::-1]:
if bdim == 1:
            slen -= 1
else:
break
if slen == len(shape):
return klass.from_image(img)
shape = shape[:slen]
data = img.get_data()
data = data.reshape(shape)
return klass(data,
img.get_affine(),
img.get_header(),
img.extra)
def concat_images(images):
''' Concatenate images in list to single image, along last dimension '''
n_imgs = len(images)
img0 = images[0]
i0shape = img0.get_shape()
affine = img0.get_affine()
header = img0.get_header()
out_shape = (n_imgs, ) + i0shape
out_data = np.empty(out_shape)
for i, img in enumerate(images):
if not np.all(img.get_affine() == affine):
raise ValueError('Affines do not match')
out_data[i] = img.get_data()
out_data = np.rollaxis(out_data, 0, len(i0shape)+1)
klass = img0.__class__
return klass(out_data, affine, header)
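# Usage sketch for concat_images (illustrative, mirroring the doctest imports
# above): three (10, 20, 30) volumes stack along a new last axis.
#
#   >>> img = nf.Nifti1Image(np.zeros((10, 20, 30)), np.eye(4))
#   >>> concat_images([img, img, img]).get_shape()
#   (10, 20, 30, 3)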
| satra/NiPypeold | nipype/externals/pynifti/funcs.py | Python | bsd-3-clause | 2,788 |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2021, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from qiime2.plugin import SemanticType
from ..plugin_setup import plugin
from . import AlphaDiversityDirectoryFormat
SampleData = SemanticType('SampleData', field_names='type')
AlphaDiversity = SemanticType('AlphaDiversity',
variant_of=SampleData.field['type'])
plugin.register_semantic_types(SampleData, AlphaDiversity)
plugin.register_semantic_type_to_format(
SampleData[AlphaDiversity],
artifact_format=AlphaDiversityDirectoryFormat
)
| qiime2/q2-types | q2_types/sample_data/_type.py | Python | bsd-3-clause | 832 |
from ..base import BaseTopazTest
class TestMarshal(BaseTopazTest):
def test_version_constants(self, space):
w_res = space.execute("return Marshal::MAJOR_VERSION")
assert space.int_w(w_res) == 4
w_res = space.execute("return Marshal::MINOR_VERSION")
assert space.int_w(w_res) == 8
w_res = space.execute("return Marshal.dump('test')[0].ord")
assert space.int_w(w_res) == 4
w_res = space.execute("return Marshal.dump('test')[1].ord")
assert space.int_w(w_res) == 8
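    # In the dumps below, "\x04\b" is the Marshal 4.8 version header and the
    # next byte tags the payload type: '0' nil, 'T' true, 'F' false,
    # 'i' Fixnum, ':' Symbol, '[' Array, '{' Hash, '"' String, 'f' Float
    # ('I' marks an object carrying instance variables, e.g. a string's
    # encoding).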
def test_dump_constants(self, space):
w_res = space.execute("return Marshal.dump(nil)")
assert space.str_w(w_res) == "\x04\b0"
w_res = space.execute("return Marshal.dump(true)")
assert space.str_w(w_res) == "\x04\bT"
w_res = space.execute("return Marshal.dump(false)")
assert space.str_w(w_res) == "\x04\bF"
def test_load_constants(self, space):
w_res = space.execute("return Marshal.load('\x04\b0')")
assert w_res == space.w_nil
w_res = space.execute("return Marshal.load('\x04\bT')")
assert w_res == space.w_true
w_res = space.execute("return Marshal.load('\x04\bF')")
assert w_res == space.w_false
def test_constants(self, space):
w_res = space.execute("return Marshal.load(Marshal.dump(nil))")
assert w_res == space.w_nil
w_res = space.execute("return Marshal.load(Marshal.dump(true))")
assert w_res == space.w_true
w_res = space.execute("return Marshal.load(Marshal.dump(false))")
assert w_res == space.w_false
def test_dump_tiny_integer(self, space):
w_res = space.execute("return Marshal.dump(5)")
assert space.str_w(w_res) == "\x04\bi\n"
w_res = space.execute("return Marshal.dump(100)")
assert space.str_w(w_res) == "\x04\bii"
w_res = space.execute("return Marshal.dump(0)")
assert space.str_w(w_res) == "\x04\bi\x00"
w_res = space.execute("return Marshal.dump(-1)")
assert space.str_w(w_res) == "\x04\bi\xFA"
w_res = space.execute("return Marshal.dump(-123)")
assert space.str_w(w_res) == "\x04\bi\x80"
w_res = space.execute("return Marshal.dump(122)")
assert space.str_w(w_res) == "\x04\bi\x7F"
def test_load_tiny_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\n')")
assert space.int_w(w_res) == 5
w_res = space.execute("return Marshal.load('\x04\bii')")
assert space.int_w(w_res) == 100
#w_res = space.execute('return Marshal.load("\x04\bi\x00")')
w_res = space.execute('return Marshal.load(Marshal.dump(0))')
assert space.int_w(w_res) == 0
w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
assert space.int_w(w_res) == -1
w_res = space.execute("return Marshal.load('\x04\bi\x80')")
assert space.int_w(w_res) == -123
w_res = space.execute("return Marshal.load('\x04\bi\x7F')")
assert space.int_w(w_res) == 122
def test_dump_array(self, space):
w_res = space.execute("return Marshal.dump([])")
assert space.str_w(w_res) == "\x04\b[\x00"
w_res = space.execute("return Marshal.dump([nil])")
assert space.str_w(w_res) == "\x04\b[\x060"
w_res = space.execute("return Marshal.dump([nil, true, false])")
assert space.str_w(w_res) == "\x04\b[\b0TF"
w_res = space.execute("return Marshal.dump([1, 2, 3])")
assert space.str_w(w_res) == "\x04\b[\x08i\x06i\x07i\x08"
w_res = space.execute("return Marshal.dump([1, [2, 3], 4])")
assert space.str_w(w_res) == "\x04\b[\bi\x06[\ai\ai\bi\t"
w_res = space.execute("return Marshal.dump([:foo, :bar])")
assert space.str_w(w_res) == "\x04\b[\a:\bfoo:\bbar"
def test_load_array(self, space):
#w_res = space.execute("return Marshal.load('\x04\b[\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump([]))")
assert self.unwrap(space, w_res) == []
w_res = space.execute("return Marshal.load('\x04\b[\x060')")
assert self.unwrap(space, w_res) == [None]
w_res = space.execute("return Marshal.load('\x04\b[\b0TF')")
assert self.unwrap(space, w_res) == [None, True, False]
w_res = space.execute("return Marshal.load('\x04\b[\x08i\x06i\x07i\x08')")
assert self.unwrap(space, w_res) == [1, 2, 3]
w_res = space.execute("return Marshal.load('\x04\b[\bi\x06[\ai\ai\bi\t')")
assert self.unwrap(space, w_res) == [1, [2, 3], 4]
w_res = space.execute("return Marshal.load('\x04\b[\a:\bfoo:\bbar')")
assert self.unwrap(space, w_res) == ["foo", "bar"]
def test_dump_symbol(self, space):
w_res = space.execute("return Marshal.dump(:abc)")
assert space.str_w(w_res) == "\x04\b:\babc"
w_res = space.execute("return Marshal.dump(('hello' * 25).to_sym)")
assert space.str_w(w_res) == "\x04\b:\x01}" + "hello" * 25
w_res = space.execute("return Marshal.dump(('hello' * 100).to_sym)")
assert space.str_w(w_res) == "\x04\b:\x02\xF4\x01" + "hello" * 100
def test_load_symbol(self, space):
w_res = space.execute("return Marshal.load('\x04\b:\babc')")
assert space.symbol_w(w_res) == "abc"
w_res = space.execute("return Marshal.load('\x04\b:\x01}' + 'hello' * 25)")
assert space.symbol_w(w_res) == "hello" * 25
def test_dump_hash(self, space):
w_res = space.execute("return Marshal.dump({})")
assert space.str_w(w_res) == "\x04\b{\x00"
w_res = space.execute("return Marshal.dump({1 => 2, 3 => 4})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x06i\ai\bi\t"
w_res = space.execute("return Marshal.dump({1 => {2 => 3}, 4 => 5})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x06{\x06i\ai\bi\ti\n"
w_res = space.execute("return Marshal.dump({1234 => {23456 => 3456789}, 4 => 5})")
assert self.unwrap(space, w_res) == "\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n"
def test_load_hash(self, space):
#w_res = space.execute("return Marshal.load('\x04\b{\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump({}))")
assert self.unwrap(space, w_res) == {}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x06i\ai\bi\t')")
assert self.unwrap(space, w_res) == {1: 2, 3: 4}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x06{\x06i\ai\bi\ti\n')")
assert self.unwrap(space, w_res) == {1: {2: 3}, 4: 5}
w_res = space.execute("return Marshal.load('\x04\b{\ai\x02\xD2\x04{\x06i\x02\xA0[i\x03\x15\xBF4i\ti\n')")
assert self.unwrap(space, w_res) == {1234: {23456: 3456789}, 4: 5}
def test_dump_integer(self, space):
w_res = space.execute("return Marshal.dump(123)")
assert space.str_w(w_res) == "\x04\bi\x01{"
w_res = space.execute("return Marshal.dump(255)")
assert space.str_w(w_res) == "\x04\bi\x01\xFF"
w_res = space.execute("return Marshal.dump(256)")
assert space.str_w(w_res) == "\x04\bi\x02\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 16 - 2)")
assert space.str_w(w_res) == "\x04\bi\x02\xFE\xFF"
w_res = space.execute("return Marshal.dump(2 ** 16 - 1)")
assert space.str_w(w_res) == "\x04\bi\x02\xFF\xFF"
w_res = space.execute("return Marshal.dump(2 ** 16)")
assert space.str_w(w_res) == "\x04\bi\x03\x00\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 16 + 1)")
assert space.str_w(w_res) == "\x04\bi\x03\x01\x00\x01"
w_res = space.execute("return Marshal.dump(2 ** 30 - 1)")
assert space.str_w(w_res) == "\x04\bi\x04\xFF\xFF\xFF?"
        # TODO: test too big numbers (they give a warning and inf)
def test_load_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\x01{')")
assert space.int_w(w_res) == 123
w_res = space.execute("return Marshal.load('\x04\bi\x01\xFF')")
assert space.int_w(w_res) == 255
#w_res = space.execute("return Marshal.load('\x04\bi\x02\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(256))")
assert space.int_w(w_res) == 256
w_res = space.execute("return Marshal.load('\x04\bi\x02\xFE\xFF')")
assert space.int_w(w_res) == 2 ** 16 - 2
w_res = space.execute("return Marshal.load('\x04\bi\x02\xFF\xFF')")
assert space.int_w(w_res) == 2 ** 16 - 1
#w_res = space.execute("return Marshal.load('\x04\bi\x03\x00\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16))")
assert space.int_w(w_res) == 2 ** 16
#w_res = space.execute("return Marshal.load('\x04\bi\x03\x01\x00\x01')")
w_res = space.execute("return Marshal.load(Marshal.dump(2 ** 16 + 1))")
assert space.int_w(w_res) == 2 ** 16 + 1
w_res = space.execute("return Marshal.load('\x04\bi\x04\xFF\xFF\xFF?')")
assert space.int_w(w_res) == 2 ** 30 - 1
def test_dump_negative_integer(self, space):
w_res = space.execute("return Marshal.dump(-1)")
assert space.str_w(w_res) == "\x04\bi\xFA"
w_res = space.execute("return Marshal.dump(-123)")
assert space.str_w(w_res) == "\x04\bi\x80"
w_res = space.execute("return Marshal.dump(-124)")
assert space.str_w(w_res) == "\x04\bi\xFF\x84"
w_res = space.execute("return Marshal.dump(-256)")
assert space.str_w(w_res) == "\x04\bi\xFF\x00"
w_res = space.execute("return Marshal.dump(-257)")
assert space.str_w(w_res) == "\x04\bi\xFE\xFF\xFE"
w_res = space.execute("return Marshal.dump(-(2 ** 30))")
assert space.str_w(w_res) == "\x04\bi\xFC\x00\x00\x00\xC0"
def test_load_negative_integer(self, space):
w_res = space.execute("return Marshal.load('\x04\bi\xFA')")
assert space.int_w(w_res) == -1
w_res = space.execute("return Marshal.load('\x04\bi\x80')")
assert space.int_w(w_res) == -123
w_res = space.execute("return Marshal.load('\x04\bi\xFF\x84')")
assert space.int_w(w_res) == -124
#w_res = space.execute("return Marshal.load('\x04\bi\xFF\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-256))")
assert space.int_w(w_res) == -256
w_res = space.execute("return Marshal.load('\x04\bi\xFE\xFF\xFE')")
assert space.int_w(w_res) == -257
#w_res = space.execute("return Marshal.load('\x04\bi\xFE\x00\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 16)))")
assert space.int_w(w_res) == -(2 ** 16)
w_res = space.execute("return Marshal.load('\x04\bi\xFD\xFF\xFF\xFE')")
assert space.int_w(w_res) == -(2 ** 16 + 1)
#w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 24)))")
assert space.int_w(w_res) == -(2 ** 24)
w_res = space.execute("return Marshal.load('\x04\bi\xFC\xFF\xFF\xFF\xFE')")
assert space.int_w(w_res) == -(2 ** 24 + 1)
#w_res = space.execute("return Marshal.load('\x04\bi\xFC\x00\x00\x00\xC0')")
w_res = space.execute("return Marshal.load(Marshal.dump(-(2 ** 30)))")
assert space.int_w(w_res) == -(2 ** 30)
def test_dump_float(self, space):
w_res = space.execute("return Marshal.dump(0.0)")
assert space.str_w(w_res) == "\x04\bf\x060"
w_res = space.execute("return Marshal.dump(0.1)")
assert space.str_w(w_res) == "\x04\bf\b0.1"
w_res = space.execute("return Marshal.dump(1.0)")
assert space.str_w(w_res) == "\x04\bf\x061"
w_res = space.execute("return Marshal.dump(1.1)")
assert space.str_w(w_res) == "\x04\bf\b1.1"
w_res = space.execute("return Marshal.dump(1.001)")
assert space.str_w(w_res) == "\x04\bf\n1.001"
#w_res = space.execute("return Marshal.dump(123456789.123456789)")
#assert space.str_w(w_res) == "\x04\bf\x17123456789.12345679"
#w_res = space.execute("return Marshal.dump(-123456789.123456789)")
#assert space.str_w(w_res) == "\x04\bf\x18-123456789.12345679"
#w_res = space.execute("return Marshal.dump(-0.0)")
#assert space.str_w(w_res) == "\x04\bf\a-0"
def test_load_float(self, space):
w_res = space.execute("return Marshal.load('\x04\bf\x060')")
assert space.float_w(w_res) == 0.0
w_res = space.execute("return Marshal.load('\x04\bf\b0.1')")
assert space.float_w(w_res) == 0.1
w_res = space.execute("return Marshal.load('\x04\bf\x061')")
assert space.float_w(w_res) == 1.0
w_res = space.execute("return Marshal.load('\x04\bf\b1.1')")
assert space.float_w(w_res) == 1.1
w_res = space.execute("return Marshal.load('\x04\bf\n1.001')")
assert space.float_w(w_res) == 1.001
#w_res = space.execute("return Marshal.load('\x04\bf\x17123456789.12345679')")
#assert space.float_w(w_res) == 123456789.123456789
#w_res = space.execute("return Marshal.load('\x04\bf\x18-123456789.12345679')")
#assert space.float_w(w_res) == -123456789.123456789
#w_res = space.execute("return Marshal.load('\x04\bf\a-0')")
#assert repr(space.float_w(w_res)) == repr(-0.0)
def test_dump_string(self, space):
w_res = space.execute("return Marshal.dump('')")
assert space.str_w(w_res) == "\x04\bI\"\x00\x06:\x06ET"
w_res = space.execute("return Marshal.dump('abc')")
assert space.str_w(w_res) == "\x04\bI\"\babc\x06:\x06ET"
w_res = space.execute("return Marshal.dump('i am a longer string')")
assert space.str_w(w_res) == "\x04\bI\"\x19i am a longer string\x06:\x06ET"
def test_load_string(self, space):
#w_res = space.execute("return Marshal.load('\x04\bI\"\x00\x06:\x06ET')")
w_res = space.execute("return Marshal.load(Marshal.dump(''))")
assert space.str_w(w_res) == ""
w_res = space.execute("return Marshal.load('\x04\bI\"\babc\x06:\x06ET')")
assert space.str_w(w_res) == "abc"
w_res = space.execute("return Marshal.load('\x04\bI\"\x19i am a longer string\x06:\x06ET')")
assert space.str_w(w_res) == "i am a longer string"
def test_array(self, space):
w_res = space.execute("return Marshal.load(Marshal.dump([1, 2, 3]))")
assert self.unwrap(space, w_res) == [1, 2, 3]
w_res = space.execute("return Marshal.load(Marshal.dump([1, [2, 3], 4]))")
assert self.unwrap(space, w_res) == [1, [2, 3], 4]
w_res = space.execute("return Marshal.load(Marshal.dump([130, [2, 3], 4]))")
assert self.unwrap(space, w_res) == [130, [2, 3], 4]
w_res = space.execute("return Marshal.load(Marshal.dump([-10000, [2, 123456], -9000]))")
assert self.unwrap(space, w_res) == [-10000, [2, 123456], -9000]
w_res = space.execute("return Marshal.load(Marshal.dump([:foo, :bar]))")
assert self.unwrap(space, w_res) == ["foo", "bar"]
w_res = space.execute("return Marshal.load(Marshal.dump(['foo', 'bar']))")
assert self.unwrap(space, w_res) == ["foo", "bar"]
def test_incompatible_format(self, space):
with self.raises(
space,
"TypeError",
"incompatible marshal file format (can't be read)\n"
"format version 4.8 required; 97.115 given"
):
space.execute("Marshal.load('asd')")
def test_short_data(self, space):
with self.raises(space, "ArgumentError", "marshal data too short"):
space.execute("Marshal.load('')")
def test_parameters(self, space):
with self.raises(space, "TypeError", "instance of IO needed"):
space.execute("Marshal.load(4)")
def test_io(self, space, tmpdir):
f = tmpdir.join("testfile")
w_res = space.execute("""
Marshal.dump('hallo', File.new('%s', 'wb'))
file = File.open('%s', 'rb')
return Marshal.load(file.read)
""" % (f, f))
assert space.str_w(w_res) == "hallo"
w_res = space.execute("""
Marshal.dump('hallo', File.new('%s', 'wb'))
file = File.open('%s', 'rb')
return Marshal.load(file)
""" % (f, f))
assert space.str_w(w_res) == "hallo"
| babelsberg/babelsberg-r | tests/modules/test_marshal.py | Python | bsd-3-clause | 16,593 |
##########################################################################
#
# Copyright (c) 2013-2015, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import os
import unittest
import IECore
import Gaffer
import GafferImage
import GafferImageTest
class ObjectToImageTest( GafferImageTest.ImageTestCase ) :
fileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checker.exr" )
negFileName = os.path.expandvars( "$GAFFER_ROOT/python/GafferImageTest/images/checkerWithNegativeDataWindow.200x150.exr" )
def test( self ) :
i = IECore.Reader.create( self.fileName ).read()
n = GafferImage.ObjectToImage()
n["object"].setValue( i )
self.assertEqual( n["out"].image(), i )
def testImageWithANegativeDataWindow( self ) :
i = IECore.Reader.create( self.negFileName ).read()
n = GafferImage.ObjectToImage()
n["object"].setValue( i )
self.assertEqual( n["out"].image(), i )
def testHashVariesPerTileAndChannel( self ) :
n = GafferImage.ObjectToImage()
n["object"].setValue( IECore.Reader.create( self.fileName ).read() )
self.assertNotEqual(
n["out"].channelDataHash( "R", IECore.V2i( 0 ) ),
n["out"].channelDataHash( "G", IECore.V2i( 0 ) )
)
self.assertNotEqual(
n["out"].channelDataHash( "R", IECore.V2i( 0 ) ),
n["out"].channelDataHash( "R", IECore.V2i( GafferImage.ImagePlug.tileSize() ) )
)
if __name__ == "__main__":
unittest.main()
| chippey/gaffer | python/GafferImageTest/ObjectToImageTest.py | Python | bsd-3-clause | 3,067 |
#!/usr/bin/env python2
from __future__ import print_function
import sys
import os
import urllib
import argparse
import xml.etree.ElementTree as ET
def warn(*msgs):
for x in msgs: print('[WARNING]:', x, file=sys.stderr)
class PDBTM:
def __init__(self, filename):
#self.tree = ET.parse(filename)
#self.root = self.tree.getroot()
def strsum(l):
s = ''
for x in l: s += x.rstrip() + '\n'
return s
f = open(filename)
s = []
for l in f: s.append(l)
#s = strsum(s[1:-1]).strip()
s = strsum(s).strip()
self.root = ET.fromstring(s)
		print(self.root)
def get_database(prefix='.'):
if not prefix.endswith('/'): prefix += '/'
print('Fetching database...', file=sys.stderr)
db = urllib.urlopen('http://pdbtm.enzim.hu/data/pdbtmall')
print('Saving database...', file=sys.stderr)
f = open('%s/pdbtmall' % prefix, 'w')
for l in db: f.write(l)
#f.write(db.read())
db.close()
f.close()
def build_database(fn, prefix):
print('Unpacking database...', file=sys.stderr)
f = open(fn)
db = f.read()
f.close()
firstline = 1
header = ''
entries = []
pdbids = []
for l in db.split('\n'):
if firstline:
header += l
firstline -= 1
continue
if 'PDBTM>' in l: continue
if l.startswith('<?'): continue
if l.startswith('<pdbtm'):
a = l.find('ID=') + 4
b = a + 4
pdbids.append(l[a:b])
entries.append(header)
entries[-1] += '\n' + l
if not prefix.endswith('/'): prefix += '/'
if not os.path.isdir(prefix): os.mkdir(prefix)
for entry in zip(pdbids, entries):
f = open(prefix + entry[0] + '.xml', 'w')
f.write(entry[1])
f.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Manages PDBTM databases. Automatically fetches the PDBTM database if no options are specified. Run without any arguments, dbtool will retrieve the PDBTM database, store it in pdbtm, and unpack it.')
parser.add_argument('-d', '--db', default='pdbtmall', help='name of concatenated database file {default:pdbtmall}')
	parser.add_argument('-b', '--build-db', action='store_true', help='(re)build database from an existing pdbtmall file (available at http://pdbtm.enzim.hu/data/pdbtmall)')
parser.add_argument('directory', nargs='?', default='pdbtm', help='directory to store database in')
parser.add_argument('-f', '--force-refresh', action='store_true', help='force overwrite of existing database. Functionally equivalent to removing the old database and rerunning.')
#parser.add_argument('-n', metavar='bundle_size', type=int, help='size to cut bundles into')
args = parser.parse_args()
if args.build_db: build_database(args.db, args.directory)
else: #db = PDBTM(args.db)
if not os.path.isdir(args.directory): os.mkdir(args.directory)
if args.force_refresh or not os.path.isfile('%s/%s' % (args.directory, args.db)): get_database(args.directory)
build_database('%s/%s' % (args.directory, args.db), args.directory)
#http://pdbtm.enzim.hu/data/pdbtmall
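# Example invocations (added sketch, not part of the original script):
#   python2 dbtool.py                    # fetch the dump and unpack into ./pdbtm/
#   python2 dbtool.py -b -d pdbtmall     # rebuild ./pdbtm/ from an existing dump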
| khendarg/pdbtmtop | dbtool.py | Python | bsd-3-clause | 2,933 |
from django.template import Library, Node, resolve_variable, TemplateSyntaxError
from django.core.urlresolvers import reverse
register = Library()
@register.simple_tag
def active(request, pattern):
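    """
    Return 'active' when the current request path matches ``pattern``,
    otherwise an empty string. Intended for highlighting navigation links;
    a hypothetical template usage (not from the original file):
        <li class="{% active request "^/about/" %}"><a href="/about/">About</a></li>
    """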
import re
if re.search(pattern, request.get_full_path()):
return 'active'
return '' | Kami/munin_exchange | munin_exchange/apps/core/templatetags/navclass.py | Python | bsd-3-clause | 307 |
'''
Given a number, find the next higher number using only the digits in the given number.
For example if the given number is 1234, next higher number with same digits is 1243
'''
def FindNext(num):
number = str(num)
length = len(number)
for i in range(length-2,-1,-1):
current = number[i]
right = number[i+1]
if current < right:
            temp = sorted(number[i:])
            # Use the smallest digit to the right that is strictly greater
            # than the current digit; temp.index(current)+1 fails with
            # duplicate digits (e.g. 121 would yield 112 instead of 211).
            Next = min(d for d in temp if d > current)
temp.remove(Next)
temp = ''.join(temp)
return int(number[:i]+Next+temp)
return num
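if __name__ == '__main__':
    # Added sanity checks (not part of the original snippet).
    assert FindNext(1234) == 1243
    assert FindNext(121) == 211    # duplicate digits
    assert FindNext(4321) == 4321  # already the largest arrangement
    print('All FindNext checks passed')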
| jenniferwx/Programming_Practice | FindNextHigherNumberWithSameDigits.py | Python | bsd-3-clause | 578 |
import requests
class Status(object):
SKIP_LOCALES = ['en_US']
def __init__(self, url, app=None, highlight=None):
self.url = url
self.app = app
self.highlight = highlight or []
self.data = []
self.created = None
def get_data(self):
if self.data:
return
resp = requests.get(self.url)
if resp.status_code != 200:
resp.raise_for_status()
self.data = resp.json()
self.created = self.data[-1]['created']
def summary(self):
"""Generates summary data of today's state"""
self.get_data()
highlight = self.highlight
last_item = self.data[-1]
output = {}
output['app'] = self.app or 'ALL'
data = last_item['locales']
if self.app:
get_item = lambda x: x['apps'][self.app]
else:
get_item = lambda x: x
apps = data.items()[0][1]['apps'].keys()
apps.sort()
output['apps'] = apps
items = [item for item in data.items() if item[0] not in highlight]
hitems = [item for item in data.items() if item[0] in highlight]
highlighted = []
if hitems:
for loc, loc_data in sorted(hitems, key=lambda x: -x[1]['percent']):
if loc in self.SKIP_LOCALES:
continue
item = get_item(loc_data)
total = item.get('total', -1)
translated = item.get('translated', -1)
percent = item.get('percent', -1)
untranslated_words = item.get('untranslated_words', -1)
highlighted.append({
'locale': loc,
'percent': percent,
'total': total,
'translated': translated,
'untranslated': total - translated,
'untranslated_words': untranslated_words
})
output['highlighted'] = highlighted
locales = []
for loc, loc_data in sorted(items, key=lambda x: -x[1]['percent']):
if loc in self.SKIP_LOCALES:
continue
item = get_item(loc_data)
total = item.get('total', -1)
translated = item.get('translated', -1)
percent = item.get('percent', -1)
untranslated_words = item.get('untranslated_words', -1)
locales.append({
'locale': loc,
'percent': percent,
'total': total,
'translated': translated,
'untranslated': total - translated,
'untranslated_words': untranslated_words
})
output['locales'] = locales
output['created'] = self.created
return output
def _mark_movement(self, data):
"""For each item, converts to a tuple of (movement, item)"""
ret = []
prev_day = None
for i, day in enumerate(data):
if i == 0:
ret.append(('', day))
prev_day = day
continue
if prev_day > day:
item = ('down', day)
elif prev_day < day:
item = ('up', day)
else:
item = ('equal', day)
prev_day = day
ret.append(item)
return ret
def history(self):
self.get_data()
data = self.data
highlight = self.highlight
app = self.app
# Get a list of the locales we'll iterate through
locales = sorted(data[-1]['locales'].keys())
num_days = 14
# Truncate the data to what we want to look at
data = data[-num_days:]
if app:
get_data = lambda x: x['apps'][app]['percent']
else:
get_data = lambda x: x['percent']
hlocales = [loc for loc in locales if loc in highlight]
locales = [loc for loc in locales if loc not in highlight]
output = {}
output['app'] = self.app or 'All'
output['headers'] = [item['created'] for item in data]
output['highlighted'] = sorted(
(loc, self._mark_movement(get_data(day['locales'][loc]) for day in data))
for loc in hlocales
)
output['locales'] = sorted(
(loc, self._mark_movement(get_data(day['locales'].get(loc, {'percent': 0.0})) for day in data))
for loc in locales
)
output['created'] = self.created
return output
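if __name__ == '__main__':
    # Added usage sketch; the URL, app name and locales below are assumptions,
    # not part of the original module.
    status = Status('https://example.com/postatus.json', app='django',
                    highlight=['de', 'fr'])
    summary = status.summary()
    for loc in summary['locales']:
        print '%(locale)s: %(percent)s%% translated' % loc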
| willkg/postatus | postatus/status.py | Python | bsd-3-clause | 4,559 |
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'débarquer'."""
from math import sqrt
from primaires.interpreteur.commande.commande import Commande
from secondaires.navigation.constantes import *
class CmdDebarquer(Commande):
"""Commande 'debarquer'"""
def __init__(self):
"""Constructeur de la commande"""
Commande.__init__(self, "debarquer", "debark")
self.nom_categorie = "navire"
self.aide_courte = "débarque du navire"
self.aide_longue = \
"Cette commande permet de débarquer du navire sur lequel " \
"on se trouve. On doit se trouver assez prêt d'une côte " \
"pour débarquer dessus."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
salle = personnage.salle
if not hasattr(salle, "navire") or salle.navire is None:
personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
return
navire = salle.navire
if navire.etendue is None:
personnage << "|err|Vous n'êtes pas sur un navire.|ff|"
return
personnage.agir("bouger")
        # Go find the closest room
        etendue = navire.etendue
        # Look for the closest coastal room where one can land
        d_salle = None # the destination room
distance = 2
x, y, z = salle.coords.tuple()
for t_salle in etendue.cotes.values():
if t_salle.coords.z == z:
t_x, t_y, t_z = t_salle.coords.tuple()
t_distance = sqrt((x - t_x) ** 2 + (y - t_y) ** 2)
if t_distance < distance and t_salle.nom_terrain in \
TERRAINS_ACCOSTABLES:
d_salle = t_salle
distance = t_distance
if d_salle is None:
personnage << "|err|Aucun quai n'a pu être trouvé à " \
"proximité.|ff|"
return
personnage.salle = d_salle
personnage << "Vous sautez sur {}.".format(
d_salle.titre.lower())
personnage << d_salle.regarder(personnage)
d_salle.envoyer("{{}} arrive en sautant depuis {}.".format(
navire.nom), personnage)
salle.envoyer("{{}} saute sur {}.".format(
d_salle.titre.lower()), personnage)
importeur.hook["personnage:deplacer"].executer(
personnage, d_salle, None, 0)
if not hasattr(d_salle, "navire") or d_salle.navire is None:
personnage.envoyer_tip("N'oubliez pas d'amarrer votre navire " \
"avec %amarre% %amarre:attacher%.")
| vlegoff/tsunami | src/secondaires/navigation/commandes/debarquer/__init__.py | Python | bsd-3-clause | 4,221 |
#
# GtkMain.py -- pygtk threading help routines.
#
# Eric Jeschke (eric@naoj.org)
#
# Copyright (c) Eric R. Jeschke. All rights reserved.
# This is open-source software licensed under a BSD license.
# Please see the file LICENSE.txt for details.
#
"""
GUI threading help routines.
Usage:
import GtkMain
# See constructor for GtkMain for options
self.mygtk = GtkMain.GtkMain()
# NOT THIS
#gtk.main()
# INSTEAD, main thread calls this:
self.mygtk.mainloop()
# (asynchronous call)
self.mygtk.gui_do(method, arg1, arg2, ... argN, kwd1=val1, ..., kwdN=valN)
# OR
# (synchronous call)
res = self.mygtk.gui_call(method, arg1, arg2, ... argN, kwd1=val1, ..., kwdN=valN)
# To cause the GUI thread to terminate the mainloop
self.mygtk.gui_quit()
"""
import sys, traceback
import thread, threading
import logging
import Queue as que
import gtk
from ginga.misc import Task, Future
class GtkMain(object):
def __init__(self, queue=None, logger=None, ev_quit=None):
# You can pass in a queue if you prefer to do so
if not queue:
queue = que.Queue()
self.gui_queue = queue
# You can pass in a logger if you prefer to do so
        if logger is None:
logger = logging.getLogger('GtkHelper')
self.logger = logger
if not ev_quit:
ev_quit = threading.Event()
self.ev_quit = ev_quit
self.gui_thread_id = None
def update_pending(self, timeout=0.0):
"""Process all pending GTK events and return. _timeout_ is a tuning
parameter for performance.
"""
# Process "out-of-band" GTK events
try:
while gtk.events_pending():
#gtk.main_iteration(False)
gtk.main_iteration()
finally:
pass
done = False
while not done:
# Process "in-band" GTK events
try:
future = self.gui_queue.get(block=True,
timeout=timeout)
# Execute the GUI method
try:
try:
res = future.thaw(suppress_exception=False)
except Exception, e:
future.resolve(e)
self.logger.error("gui error: %s" % str(e))
try:
(type, value, tb) = sys.exc_info()
tb_str = "".join(traceback.format_tb(tb))
self.logger.error("Traceback:\n%s" % (tb_str))
except Exception, e:
self.logger.error("Traceback information unavailable.")
finally:
pass
except que.Empty:
done = True
except Exception, e:
self.logger.error("Main GUI loop error: %s" % str(e))
# Process "out-of-band" GTK events again
try:
while gtk.events_pending():
#gtk.main_iteration(False)
gtk.main_iteration()
finally:
pass
def gui_do(self, method, *args, **kwdargs):
"""General method for asynchronously calling into the GUI.
It makes a future to call the given (method) with the given (args)
and (kwdargs) inside the gui thread. If the calling thread is a
non-gui thread the future is returned.
"""
future = Future.Future()
future.freeze(method, *args, **kwdargs)
self.gui_queue.put(future)
my_id = thread.get_ident()
if my_id != self.gui_thread_id:
return future
def gui_call(self, method, *args, **kwdargs):
"""General method for synchronously calling into the GUI.
This waits until the method has completed before returning.
"""
my_id = thread.get_ident()
if my_id == self.gui_thread_id:
return method(*args, **kwdargs)
else:
future = self.gui_do(method, *args, **kwdargs)
return future.wait()
def gui_do_future(self, future):
self.gui_queue.put(future)
return future
def nongui_do(self, method, *args, **kwdargs):
task = Task.FuncTask(method, args, kwdargs, logger=self.logger)
return self.nongui_do_task(task)
def nongui_do_cb(self, tup, method, *args, **kwdargs):
task = Task.FuncTask(method, args, kwdargs, logger=self.logger)
task.register_callback(tup[0], args=tup[1:])
return self.nongui_do_task(task)
def nongui_do_future(self, future):
task = Task.FuncTask(future.thaw, (), {}, logger=self.logger)
return self.nongui_do_task(task)
def nongui_do_task(self, task):
try:
task.init_and_start(self)
return task
except Exception, e:
self.logger.error("Error starting task: %s" % (str(e)))
raise(e)
def assert_gui_thread(self):
my_id = thread.get_ident()
assert my_id == self.gui_thread_id, \
Exception("Non-GUI thread (%d) is executing GUI code!" % (
my_id))
def assert_nongui_thread(self):
my_id = thread.get_ident()
assert my_id != self.gui_thread_id, \
Exception("GUI thread (%d) is executing non-GUI code!" % (
my_id))
def mainloop(self, timeout=0.001):
# Mark our thread id
self.gui_thread_id = thread.get_ident()
while not self.ev_quit.isSet():
self.update_pending(timeout=timeout)
def gui_quit(self):
"Call this to cause the GUI thread to quit the mainloop."""
self.ev_quit.set()
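if __name__ == '__main__':
    # Added usage sketch (not part of the original module): run the mainloop
    # in the main thread and ask the GUI thread to quit after one second.
    mygtk = GtkMain()
    threading.Timer(1.0, lambda: mygtk.gui_do(mygtk.gui_quit)).start()
    mygtk.mainloop()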
# END
| Rbeaty88/ginga | ginga/gtkw/GtkMain.py | Python | bsd-3-clause | 5,866 |
# coding: utf-8
# This file is part of Thomas Aquinas.
#
# Thomas Aquinas is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Thomas Aquinas is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Thomas Aquinas. If not, see <http://www.gnu.org/licenses/>.
#
# veni, Sancte Spiritus.
import ctypes
import logging
| shackra/thomas-aquinas | summa/audio/system.py | Python | bsd-3-clause | 776 |
#<pycode(py_choose)>
class Choose:
"""
Choose - class for choose() with callbacks
"""
def __init__(self, list, title, flags=0, deflt=1, icon=37):
self.list = list
self.title = title
self.flags = flags
self.x0 = -1
self.x1 = -1
self.y0 = -1
self.y1 = -1
self.width = -1
self.deflt = deflt
self.icon = icon
# HACK: Add a circular reference for non-modal choosers. This prevents the GC
# from collecting the class object the callbacks need. Unfortunately this means
# that the class will never be collected, unless refhack is set to None explicitly.
if (flags & Choose2.CH_MODAL) == 0:
self.refhack = self
def sizer(self):
"""
Callback: sizer - returns the length of the list
"""
return len(self.list)
def getl(self, n):
"""
Callback: getl - get one item from the list
"""
if n == 0:
return self.title
if n <= self.sizer():
return str(self.list[n-1])
else:
return "<Empty>"
def ins(self):
pass
def update(self, n):
pass
def edit(self, n):
pass
def enter(self, n):
print "enter(%d) called" % n
def destroy(self):
pass
def get_icon(self, n):
pass
def choose(self):
"""
choose - Display the choose dialogue
"""
old = set_script_timeout(0)
n = _idaapi.choose_choose(
self,
self.flags,
self.x0,
self.y0,
self.x1,
self.y1,
self.width,
self.deflt,
self.icon)
set_script_timeout(old)
return n
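# Example (added sketch, not part of the original wrapper): subclass Choose,
# override the callbacks you need, then display the list.
#
#   class MyChoose(Choose):
#       def __init__(self):
#           Choose.__init__(self, ["first", "second"], "Pick one")
#       def enter(self, n):
#           print "entered %d" % n
#
#   n = MyChoose().choose()  # index of the selection, or a value <= 0 on cancel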
#</pycode(py_choose)>
| nihilus/src | pywraps/py_choose.py | Python | bsd-3-clause | 1,595 |
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use Amazon's S3 for storing static files and uploaded media
- Use sendgrid to send emails
- Use MEMCACHIER on Heroku
'''
from configurations import values
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
try:
from S3 import CallingFormat
AWS_CALLING_FORMAT = CallingFormat.SUBDOMAIN
except ImportError:
    # TODO: Fix this; this import runs even in Dev, where S3 may be absent.
pass
from .common import Common
class Production(Common):
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# INSTALLED_APPS
INSTALLED_APPS = Common.INSTALLED_APPS
# END INSTALLED_APPS
# SECRET KEY
SECRET_KEY = values.SecretValue()
# END SECRET KEY
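    # NOTE (added): django-configurations resolves SecretValue from the
    # environment with the default DJANGO_ prefix, so deployments must export
    # DJANGO_SECRET_KEY (e.g. `heroku config:set DJANGO_SECRET_KEY=...`).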
# django-secure
INSTALLED_APPS += ("djangosecure", )
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = values.BooleanValue(True)
SECURE_FRAME_DENY = values.BooleanValue(True)
SECURE_CONTENT_TYPE_NOSNIFF = values.BooleanValue(True)
SECURE_BROWSER_XSS_FILTER = values.BooleanValue(True)
SESSION_COOKIE_SECURE = values.BooleanValue(False)
SESSION_COOKIE_HTTPONLY = values.BooleanValue(True)
SECURE_SSL_REDIRECT = values.BooleanValue(True)
# end django-secure
# SITE CONFIGURATION
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
INSTALLED_APPS += ("gunicorn", )
# STORAGE CONFIGURATION
# See: http://django-storages.readthedocs.org/en/latest/index.html
INSTALLED_APPS += (
'storages',
)
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
STATICFILES_STORAGE = DEFAULT_FILE_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
# See: http://django-storages.readthedocs.org/en/latest/backends/amazon-S3.html#settings
AWS_ACCESS_KEY_ID = values.SecretValue()
AWS_SECRET_ACCESS_KEY = values.SecretValue()
AWS_STORAGE_BUCKET_NAME = values.SecretValue()
AWS_AUTO_CREATE_BUCKET = True
AWS_QUERYSTRING_AUTH = False
# see: https://github.com/antonagestam/collectfast
AWS_PRELOAD_METADATA = True
INSTALLED_APPS += ('collectfast', )
# AWS cache settings, don't change unless you know what you're doing:
AWS_EXPIRY = 60 * 60 * 24 * 7
AWS_HEADERS = {
'Cache-Control': 'max-age=%d, s-maxage=%d, must-revalidate' % (
AWS_EXPIRY, AWS_EXPIRY)
}
# See: https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = 'https://s3.amazonaws.com/%s/' % AWS_STORAGE_BUCKET_NAME
# END STORAGE CONFIGURATION
# EMAIL
DEFAULT_FROM_EMAIL = values.Value('tco2 <noreply@example.com>')
EMAIL_HOST = values.Value('smtp.sendgrid.com')
EMAIL_HOST_PASSWORD = values.SecretValue(environ_prefix="", environ_name="SENDGRID_PASSWORD")
EMAIL_HOST_USER = values.SecretValue(environ_prefix="", environ_name="SENDGRID_USERNAME")
EMAIL_PORT = values.IntegerValue(587, environ_prefix="", environ_name="EMAIL_PORT")
EMAIL_SUBJECT_PREFIX = values.Value('[tco2] ', environ_name="EMAIL_SUBJECT_PREFIX")
EMAIL_USE_TLS = True
SERVER_EMAIL = EMAIL_HOST_USER
# END EMAIL
# TEMPLATE CONFIGURATION
# See: https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
# END TEMPLATE CONFIGURATION
# CACHING
# Only do this here because thanks to django-pylibmc-sasl and pylibmc
# memcacheify is painful to install on windows.
try:
# See: https://github.com/rdegges/django-heroku-memcacheify
from memcacheify import memcacheify
CACHES = memcacheify()
except ImportError:
CACHES = values.CacheURLValue(default="memcached://127.0.0.1:11211")
# END CACHING
# Your production stuff: Below this line define 3rd party library settings
| tpugsley/tco2 | tco2/config/production.py | Python | bsd-3-clause | 4,340 |
# Author: Immanuel Bayer
# License: BSD 3 clause
import ffm
import numpy as np
from .base import FactorizationMachine
from sklearn.utils.testing import assert_array_equal
from .validation import check_array, assert_all_finite
class FMRecommender(FactorizationMachine):
""" Factorization Machine Recommender with pairwise (BPR) loss solver.
Parameters
----------
    n_iter : int, optional
        The number of iterations over individual samples.
    init_stdev: float, optional
        Sets the stdev for the initialization of the parameters
random_state: int, optional
The seed of the pseudo random number generator that
initializes the parameters and mcmc chain.
rank: int
The rank of the factorization used for the second order interactions.
    l2_reg_w : float
        L2 penalty weight for linear coefficients.
    l2_reg_V : float
        L2 penalty weight for pairwise (second order) coefficients.
    l2_reg : float
        L2 penalty weight for all coefficients (default=0).
    step_size : float
        Stepsize for the SGD solver; the solver uses a fixed step size and
        might require tuning of the number of iterations `n_iter`.
    Attributes
    ----------
w0_ : float
bias term
w_ : float | array, shape = (n_features)
Coefficients for linear combination.
V_ : float | array, shape = (rank_pair, n_features)
Coefficients of second order factor matrix.
"""
def __init__(self, n_iter=100, init_stdev=0.1, rank=8, random_state=123,
l2_reg_w=0.1, l2_reg_V=0.1, l2_reg=0, step_size=0.1):
super(FMRecommender, self).\
__init__(n_iter=n_iter, init_stdev=init_stdev, rank=rank,
random_state=random_state)
if (l2_reg != 0):
self.l2_reg_V = l2_reg
self.l2_reg_w = l2_reg
else:
self.l2_reg_w = l2_reg_w
self.l2_reg_V = l2_reg_V
self.step_size = step_size
self.task = "ranking"
def fit(self, X, pairs):
""" Fit model with specified loss.
Parameters
----------
X : scipy.sparse.csc_matrix, (n_samples, n_features)
        pairs : ndarray, shape = (n_compares, 2)
            Each row `i` defines a pair of sample indices such that
            the first is ranked higher than the second, i.e.
            FM(X[pairs[i, 0]]) > FM(X[pairs[i, 1]]).
"""
X = X.T
X = check_array(X, accept_sparse="csc", dtype=np.float64)
assert_all_finite(pairs)
pairs = pairs.astype(np.float64)
        # check that pairs contain no fractional values
assert_array_equal(pairs, pairs.astype(np.int32))
assert pairs.max() <= X.shape[1]
assert pairs.min() >= 0
self.w0_, self.w_, self.V_ = ffm.ffm_fit_sgd_bpr(self, X, pairs)
return self
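if __name__ == '__main__':
    # Added usage sketch; the toy data below is made up purely for
    # illustration and assumes the compiled ffm extension is available.
    import scipy.sparse as sp
    X = sp.csc_matrix(np.array([[1., 0.], [0., 1.], [1., 1.]]))
    # Each row of `pairs` is (higher ranked sample, lower ranked sample).
    pairs = np.array([[0, 1], [2, 1]])
    fm = FMRecommender(n_iter=100, rank=2, random_state=11).fit(X, pairs)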
| ibayer/fastFM-fork | fastFM/bpr.py | Python | bsd-3-clause | 2,859 |
#------------------------------------------------------------------------------
# Copyright (c) 2007, Riverbank Computing Limited
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD license.
# However, when used with the GPL version of PyQt the additional terms described in the PyQt GPL exception also apply
#
# Author: Riverbank Computing Limited
# Description: <Enthought pyface package component>
#------------------------------------------------------------------------------
# Standard library imports.
import sys
# Major package imports.
from pyface.qt import QtCore, QtGui
# Enthought library imports.
from traits.api import Bool, Event, provides, Unicode
# Local imports.
from pyface.i_python_editor import IPythonEditor, MPythonEditor
from pyface.key_pressed_event import KeyPressedEvent
from pyface.widget import Widget
from pyface.ui.qt4.code_editor.code_widget import AdvancedCodeWidget
@provides(IPythonEditor)
class PythonEditor(MPythonEditor, Widget):
""" The toolkit specific implementation of a PythonEditor. See the
IPythonEditor interface for the API documentation.
"""
#### 'IPythonEditor' interface ############################################
dirty = Bool(False)
path = Unicode
show_line_numbers = Bool(True)
#### Events ####
changed = Event
key_pressed = Event(KeyPressedEvent)
###########################################################################
# 'object' interface.
###########################################################################
def __init__(self, parent, **traits):
super(PythonEditor, self).__init__(**traits)
self.control = self._create_control(parent)
###########################################################################
# 'PythonEditor' interface.
###########################################################################
def load(self, path=None):
""" Loads the contents of the editor.
"""
if path is None:
path = self.path
# We will have no path for a new script.
if len(path) > 0:
f = open(self.path, 'r')
text = f.read()
f.close()
else:
text = ''
self.control.code.setPlainText(text)
self.dirty = False
def save(self, path=None):
""" Saves the contents of the editor.
"""
if path is None:
path = self.path
f = open(path, 'w')
f.write(self.control.code.toPlainText())
f.close()
self.dirty = False
def select_line(self, lineno):
""" Selects the specified line.
"""
self.control.code.set_line_column(lineno, 0)
self.control.code.moveCursor(QtGui.QTextCursor.EndOfLine,
QtGui.QTextCursor.KeepAnchor)
###########################################################################
# Trait handlers.
###########################################################################
def _path_changed(self):
self._changed_path()
def _show_line_numbers_changed(self):
if self.control is not None:
self.control.code.line_number_widget.setVisible(
self.show_line_numbers)
self.control.code.update_line_number_width()
###########################################################################
# Private interface.
###########################################################################
def _create_control(self, parent):
""" Creates the toolkit-specific control for the widget.
"""
self.control = control = AdvancedCodeWidget(parent)
self._show_line_numbers_changed()
# Install event filter to trap key presses.
event_filter = PythonEditorEventFilter(self, self.control)
self.control.installEventFilter(event_filter)
self.control.code.installEventFilter(event_filter)
# Connect signals for text changes.
control.code.modificationChanged.connect(self._on_dirty_changed)
control.code.textChanged.connect(self._on_text_changed)
# Load the editor's contents.
self.load()
return control
def _on_dirty_changed(self, dirty):
""" Called whenever a change is made to the dirty state of the
document.
"""
self.dirty = dirty
def _on_text_changed(self):
""" Called whenever a change is made to the text of the document.
"""
self.changed = True
class PythonEditorEventFilter(QtCore.QObject):
""" A thin wrapper around the advanced code widget to handle the key_pressed
Event.
"""
def __init__(self, editor, parent):
super(PythonEditorEventFilter, self).__init__(parent)
self.__editor = editor
def eventFilter(self, obj, event):
""" Reimplemented to trap key presses.
"""
if self.__editor.control and obj == self.__editor.control and \
event.type() == QtCore.QEvent.FocusOut:
# Hack for Traits UI compatibility.
self.__editor.control.emit(QtCore.SIGNAL('lostFocus'))
elif self.__editor.control and obj == self.__editor.control.code and \
event.type() == QtCore.QEvent.KeyPress:
# Pyface doesn't seem to be Unicode aware. Only keep the key code
# if it corresponds to a single Latin1 character.
kstr = event.text()
try:
kcode = ord(str(kstr))
except:
kcode = 0
mods = event.modifiers()
            # The key_pressed trait lives on the editor, not on this filter.
            self.__editor.key_pressed = KeyPressedEvent(
alt_down = ((mods & QtCore.Qt.AltModifier) ==
QtCore.Qt.AltModifier),
control_down = ((mods & QtCore.Qt.ControlModifier) ==
QtCore.Qt.ControlModifier),
shift_down = ((mods & QtCore.Qt.ShiftModifier) ==
QtCore.Qt.ShiftModifier),
key_code = kcode,
event = event)
return super(PythonEditorEventFilter, self).eventFilter(obj, event)
| geggo/pyface | pyface/ui/qt4/python_editor.py | Python | bsd-3-clause | 6,248 |
import argparse
import glob
import hashlib
import json
import os
IRMAS_INDEX_PATH = '../mirdata/indexes/irmas_index.json'
# The original script referenced IRMAS_TEST_INDEX_PATH without defining it;
# the path below is an assumption by analogy with IRMAS_INDEX_PATH.
IRMAS_TEST_INDEX_PATH = '../mirdata/indexes/irmas_test_index.json'
def md5(file_path):
"""Get md5 hash of a file.
Parameters
----------
file_path: str
File path.
Returns
-------
md5_hash: str
md5 hash of data in file_path
"""
hash_md5 = hashlib.md5()
with open(file_path, 'rb') as fhandle:
for chunk in iter(lambda: fhandle.read(4096), b''):
hash_md5.update(chunk)
return hash_md5.hexdigest()
def strip_first_dir(full_path):
return os.path.join(*(full_path.split(os.path.sep)[1:]))
def make_irmas_index(irmas_data_path):
count = 0
irmas_dict = dict()
for root, dirs, files in os.walk(irmas_data_path):
for directory in dirs:
if 'Train' in directory:
for root_, dirs_, files_ in os.walk(
os.path.join(irmas_data_path, directory)
):
for directory_ in dirs_:
for root__, dirs__, files__ in os.walk(
os.path.join(irmas_data_path, directory, directory_)
):
for file in files__:
if file.endswith('.wav'):
if 'dru' in file:
irmas_id_dru = file.split(']')[3] # Obtain id
irmas_id_dru_no_wav = irmas_id_dru.split('.')[
0
] # Obtain id without '.wav'
irmas_dict[irmas_id_dru_no_wav] = os.path.join(
directory, directory_, file
)
if 'nod' in file:
irmas_id_nod = file.split(']')[3] # Obtain id
irmas_id_nod_no_wav = irmas_id_nod.split('.')[
0
] # Obtain id without '.wav'
irmas_dict[irmas_id_nod_no_wav] = os.path.join(
directory, directory_, file
)
else:
irmas_id = file.split(']')[2] # Obtain id
irmas_id_no_wav = irmas_id.split('.')[
0
] # Obtain id without '.wav'
irmas_dict[irmas_id_no_wav] = os.path.join(
directory, directory_, file
)
irmas_test_dict = dict()
for root, dirs, files in os.walk(irmas_data_path):
for directory in dirs:
if 'Test' in directory:
for root_, dirs_, files_ in os.walk(
os.path.join(irmas_data_path, directory)
):
for directory_ in dirs_:
for root__, dirs__, files__ in os.walk(
os.path.join(irmas_data_path, directory, directory_)
):
for file in files__:
if file.endswith('.wav'):
file_name = os.path.join(
directory, directory_, file
)
track_name = str(file_name.split('.wa')[0]) + '.txt'
irmas_test_dict[count] = [file_name, track_name]
count += 1
irmas_id_list = sorted(irmas_dict.items()) # Sort strokes by id
irmas_index = {}
for inst in irmas_id_list:
print(inst[1])
audio_checksum = md5(os.path.join(irmas_data_path, inst[1]))
irmas_index[inst[0]] = {
'audio': (inst[1], audio_checksum),
'annotation': (inst[1], audio_checksum),
}
index = 1
for inst in irmas_test_dict.values():
audio_checksum = md5(os.path.join(irmas_data_path, inst[0]))
annotation_checksum = md5(os.path.join(irmas_data_path, inst[1]))
irmas_index[index] = {
'audio': (inst[0], audio_checksum),
'annotation': (inst[1], annotation_checksum),
}
index += 1
with open(IRMAS_INDEX_PATH, 'w') as fhandle:
json.dump(irmas_index, fhandle, indent=2)
def make_irmas_test_index(irmas_data_path):
count = 1
irmas_dict = dict()
for root, dirs, files in os.walk(irmas_data_path):
for directory in dirs:
if 'Test' in directory:
for root_, dirs_, files_ in os.walk(
os.path.join(irmas_data_path, directory)
):
for directory_ in dirs_:
for root__, dirs__, files__ in os.walk(
os.path.join(irmas_data_path, directory, directory_)
):
for file in files__:
if file.endswith('.wav'):
file_name = os.path.join(
directory, directory_, file
)
track_name = str(file_name.split('.wa')[0]) + '.txt'
irmas_dict[count] = [file_name, track_name]
count += 1
irmas_index = {}
index = 1
for inst in irmas_dict.values():
audio_checksum = md5(os.path.join(irmas_data_path, inst[0]))
annotation_checksum = md5(os.path.join(irmas_data_path, inst[1]))
irmas_index[index] = {
'audio': (inst[0], audio_checksum),
'annotation': (inst[1], annotation_checksum),
}
index += 1
with open(IRMAS_TEST_INDEX_PATH, 'w') as fhandle:
json.dump(irmas_index, fhandle, indent=2)
def main(args):
make_irmas_index(args.irmas_data_path)
# make_irmas_test_index(args.irmas_data_path)
if __name__ == '__main__':
PARSER = argparse.ArgumentParser(description='Make IRMAS index file.')
PARSER.add_argument('irmas_data_path', type=str, help='Path to IRMAS data folder.')
main(PARSER.parse_args())
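    # Example (added): python make_irmas_index.py /path/to/IRMAS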
| mir-dataset-loaders/mirdata | scripts/legacy/make_irmas_index.py | Python | bsd-3-clause | 6,590 |
import unittest
import time
import pprint
import logging
import scanner.logSetup as logSetup
import pyximport
print("Have Cython")
pyximport.install()
import dbPhashApi
class TestCompareDatabaseInterface(unittest.TestCase):
def __init__(self, *args, **kwargs):
logSetup.initLogging()
super().__init__(*args, **kwargs)
def setUp(self):
# We set up and tear down the tree a few times to validate the dropTree function
self.log = logging.getLogger("Main.TestCompareDatabaseInterface")
self.tree = dbPhashApi.PhashDbApi()
self.tree.forceReload()
def dist_check(self, distance, dbid, phash):
qtime1 = time.time()
have1 = self.tree.getWithinDistance_db(phash, distance=distance)
qtime2 = time.time()
qtime3 = time.time()
have2 = self.tree.getIdsWithinDistance(phash, distance=distance)
qtime4 = time.time()
# print(dbid, have1)
if have1 != have2:
self.log.error("Mismatch!")
for line in pprint.pformat(have1).split("\n"):
self.log.error(line)
for line in pprint.pformat(have2).split("\n"):
self.log.error(line)
self.assertTrue(dbid in have1)
self.assertTrue(dbid in have2)
self.assertEqual(have1, have2)
self.log.info('Dist %s %s, %s', distance, qtime2-qtime1, qtime4-qtime3)
def test_0(self):
rand_r = self.tree.getRandomPhashRows(0.001)
self.log.info("Have %s items to test with", len(rand_r))
stepno = 0
for dbid, phash in rand_r:
self.dist_check(1, dbid, phash)
self.dist_check(2, dbid, phash)
self.dist_check(3, dbid, phash)
self.dist_check(4, dbid, phash)
self.dist_check(5, dbid, phash)
self.dist_check(6, dbid, phash)
self.dist_check(7, dbid, phash)
self.dist_check(8, dbid, phash)
stepno += 1
self.log.info("On step %s of %s", stepno, len(rand_r))
| fake-name/IntraArchiveDeduplicator | Tests/Test_db_BKTree_Compare.py | Python | bsd-3-clause | 1,765 |
from numpy import array, zeros, ones, sqrt, ravel, mod, random, inner, conjugate
from scipy.sparse import csr_matrix, csc_matrix, coo_matrix, bmat, eye
from scipy import rand, mat, real, imag, linspace, hstack, vstack, exp, cos, sin, pi
from pyamg.util.linalg import norm
import pyamg
from scipy.optimize import fminbound, fmin
__all__ = ['one_D_helmholtz', 'min_wave']
def min_wave(A, omega, x, tol=1e-5, maxiter=25):
'''
parameters
----------
A {matrix}
1D Helmholtz Operator
omega {scalar}
Wavenumber used to discretize Helmholtz problem
x {array}
1D mesh for the problem
tol {scalar}
minimization tolerance
    maxiter {integer}
        maximum iterations for the minimization algorithm
returns
-------
    The omega shift that minimizes <Ac, c> / <c, c> for
    c = cosine((omega+shift)x), i.e., the numerically lowest energy
    wavenumber for the matrix A
'''
x = ravel(x)
# Define scalar objective function, ignoring the
# boundaries by only considering A*c at [1:-1]
def obj_fcn(alpha):
c = cos((omega+alpha)*x)
Ac = (A*c)[1:-1]
return norm(Ac)/norm(c[1:-1])
(xopt, fval, ierr, numfunc) = fminbound(obj_fcn, -0.99*omega, \
0.99*omega, xtol=tol, maxfun=maxiter, full_output=True, disp=0)
#print "Minimizer = %1.4f, Function Value at Min = %1.4e\nError Flag = %d,\
# Number of function evals = %d" % (xopt, fval, ierr, numfunc)
return xopt
def one_D_helmholtz(h, omega=1.0, nplane_waves=2):
'''
parameters
----------
    h {int}
        Number of grid points for the 1-D Helmholtz discretization
omega {float}
Defines Helmholtz wave number
nplane_waves {int}
Defines the number of planewaves used for the near null-space modes, B.
1: B = [ exp(ikx) ]
2: B = [ real(exp(ikx)), complex(exp(ikx)) ]
returns
-------
dictionary containing:
A {matrix-like}
LHS of linear system for Helmholtz problem,
-laplace(u) - omega^2 u = f
mesh_h {float}
mesh size
vertices {array-like}
[X, Y]
elements {None}
None, just using 1-D finite-differencing
'''
# Ensure Repeatability of "random" initial guess
random.seed(10)
# Mesh Spacing
mesh_h = 1.0/(float(h)-1.0)
# Construct Real Operator
reA = pyamg.gallery.poisson( (h,), format='csr')
reA = reA - mesh_h*mesh_h*omega*omega*\
eye(reA.shape[0], reA.shape[1], format='csr')
dimen = reA.shape[0]
# Construct Imaginary Operator
imA = csr_matrix( coo_matrix( (array([2.0*mesh_h*omega]), \
(array([0]), array([0]))), shape=reA.shape) )
# Enforce Radiation Boundary Conditions at first grid point
reA.data[1] = -2.0
# In order to maintain symmetry scale the first equation by 1/2
reA.data[0] = 0.5*reA.data[0]
reA.data[1] = 0.5*reA.data[1]
imA.data[0] = 0.5*imA.data[0]
# Create complex-valued system
complexA = reA + 1.0j*imA
    # This is the CG (continuous Galerkin) case, so the default elements and
    # vertices are used; there is no DG mesh to speak of
elements = None
vertices = hstack((linspace(-1.0,1.0,h).reshape(-1,1), zeros((h,1))))
# Near null-space modes are 1-D Plane waves: [exp(ikx), i exp(ikx)]
B = zeros( (dimen, nplane_waves), dtype=complex )
shift = min_wave(complexA, omega, vertices[:,0], tol=1e-9, maxiter=15)
if nplane_waves == 1:
B[:,0] = exp(1.0j*(omega+shift)*vertices[:,0])
elif nplane_waves == 2:
B[:,0] = cos((omega+shift)*vertices[:,0])
B[:,1] = sin((omega+shift)*vertices[:,0])
return {'A' : complexA, 'B' : B, 'mesh_h' : mesh_h, \
'elements' : elements, 'vertices' : vertices}
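if __name__ == '__main__':
    # Added usage sketch (not part of the original example): build a small
    # 1-D Helmholtz system and report its size.
    data = one_D_helmholtz(64, omega=10.0, nplane_waves=2)
    A, B = data['A'], data['B']
    print "System is %d x %d with %d near null-space modes" % \
        (A.shape[0], A.shape[1], B.shape[1])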
| pombreda/pyamg | Examples/ComplexSymmetric/one_D_helmholtz.py | Python | bsd-3-clause | 3,892 |
from bluebottle.projects.serializers import ProjectPreviewSerializer
from bluebottle.quotes.serializers import QuoteSerializer
from bluebottle.slides.serializers import SlideSerializer
from bluebottle.statistics.serializers import StatisticSerializer
from rest_framework import serializers
class HomePageSerializer(serializers.Serializer):
id = serializers.CharField()
quotes = QuoteSerializer(many=True)
slides = SlideSerializer(many=True)
statistics = StatisticSerializer(many=True)
projects = ProjectPreviewSerializer(many=True)
| jfterpstra/bluebottle | bluebottle/homepage/serializers.py | Python | bsd-3-clause | 554 |
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package contenant la commande 'scripting alerte info'."""
from primaires.interpreteur.masque.parametre import Parametre
from primaires.format.fonctions import echapper_accolades
from primaires.format.date import get_date
class PrmInfo(Parametre):
"""Commande 'scripting alerte info'"""
def __init__(self):
"""Constructeur du paramètre."""
Parametre.__init__(self, "info", "info")
self.schema = "<nombre>"
self.aide_courte = "affiche des informations sur l'alerte"
self.aide_longue = \
"Affiche des informations sur l'alerte permettant de la corriger."
def interpreter(self, personnage, dic_masques):
"""Méthode d'interprétation de commande"""
nombre = dic_masques["nombre"].nombre
try:
alerte = type(self).importeur.scripting.alertes[nombre]
except KeyError:
personnage << "|err|Ce numéro d'alerte est invalide.|ff|"
else:
msg = "Informations sur l'alerte {} :".format(alerte.no)
msg += "\n S'est produit sur {} {}".format(alerte.type,
alerte.objet) + " " + get_date(alerte.date.timetuple())
msg += "\n Evenement {}, test {}, ligne {}".format(
alerte.evenement, echapper_accolades(alerte.test),
alerte.no_ligne)
msg += "\n {}\n".format(echapper_accolades(alerte.ligne))
msg += "\n Message d'erreur : |err|{}|ff|".format(
echapper_accolades(alerte.message))
if personnage.nom_groupe == "administrateur":
msg += "\n Traceback Python :\n {}".format(
echapper_accolades(alerte.traceback))
personnage << msg
| vlegoff/tsunami | src/primaires/scripting/commandes/scripting/alerte_info.py | Python | bsd-3-clause | 3,352 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
def helloworld():
"""
    Hello world routine!
"""
print("Hello world!")
| aboucaud/python-euclid2016 | euclid/euclid/hello.py | Python | bsd-3-clause | 135 |
import os
import os.path as op
import pytest
import numpy as np
from numpy.testing import (assert_array_equal, assert_equal, assert_allclose,
assert_array_less, assert_almost_equal)
import itertools
import mne
from mne.datasets import testing
from mne.fixes import _get_img_fdata
from mne import read_trans, write_trans
from mne.io import read_info
from mne.transforms import (invert_transform, _get_trans,
rotation, rotation3d, rotation_angles, _find_trans,
combine_transforms, apply_trans, translation,
get_ras_to_neuromag_trans, _pol_to_cart,
quat_to_rot, rot_to_quat, _angle_between_quats,
_find_vector_rotation, _sph_to_cart, _cart_to_sph,
_topo_to_sph, _average_quats,
_SphericalSurfaceWarp as SphericalSurfaceWarp,
rotation3d_align_z_axis, _read_fs_xfm,
_write_fs_xfm, _quat_real, _fit_matched_points,
_quat_to_euler, _euler_to_quat,
_quat_to_affine, _compute_r2, _validate_pipeline)
from mne.utils import requires_nibabel, requires_dipy
data_path = testing.data_path(download=False)
fname = op.join(data_path, 'MEG', 'sample', 'sample_audvis_trunc-trans.fif')
fname_eve = op.join(data_path, 'MEG', 'sample',
'sample_audvis_trunc_raw-eve.fif')
subjects_dir = op.join(data_path, 'subjects')
fname_t1 = op.join(subjects_dir, 'fsaverage', 'mri', 'T1.mgz')
base_dir = op.join(op.dirname(__file__), '..', 'io', 'tests', 'data')
fname_trans = op.join(base_dir, 'sample-audvis-raw-trans.txt')
test_fif_fname = op.join(base_dir, 'test_raw.fif')
ctf_fname = op.join(base_dir, 'test_ctf_raw.fif')
hp_fif_fname = op.join(base_dir, 'test_chpi_raw_sss.fif')
def test_tps():
"""Test TPS warping."""
az = np.linspace(0., 2 * np.pi, 20, endpoint=False)
pol = np.linspace(0, np.pi, 12)[1:-1]
sph = np.array(np.meshgrid(1, az, pol, indexing='ij'))
sph.shape = (3, -1)
assert_equal(sph.shape[1], 200)
source = _sph_to_cart(sph.T)
destination = source.copy()
destination *= 2
destination[:, 0] += 1
# fit with 100 points
warp = SphericalSurfaceWarp()
assert 'no ' in repr(warp)
warp.fit(source[::3], destination[::2])
assert 'oct5' in repr(warp)
destination_est = warp.transform(source)
assert_allclose(destination_est, destination, atol=1e-3)
@testing.requires_testing_data
def test_get_trans():
"""Test converting '-trans.txt' to '-trans.fif'."""
trans = read_trans(fname)
trans = invert_transform(trans) # starts out as head->MRI, so invert
trans_2 = _get_trans(fname_trans)[0]
assert trans.__eq__(trans_2, atol=1e-5)
@testing.requires_testing_data
def test_io_trans(tmpdir):
"""Test reading and writing of trans files."""
tempdir = str(tmpdir)
os.mkdir(op.join(tempdir, 'sample'))
pytest.raises(RuntimeError, _find_trans, 'sample', subjects_dir=tempdir)
trans0 = read_trans(fname)
fname1 = op.join(tempdir, 'sample', 'test-trans.fif')
trans0.save(fname1)
assert fname1 == _find_trans('sample', subjects_dir=tempdir)
trans1 = read_trans(fname1)
# check all properties
assert trans0 == trans1
# check reading non -trans.fif files
pytest.raises(IOError, read_trans, fname_eve)
# check warning on bad filenames
fname2 = op.join(tempdir, 'trans-test-bad-name.fif')
with pytest.warns(RuntimeWarning, match='-trans.fif'):
write_trans(fname2, trans0)
def test_get_ras_to_neuromag_trans():
"""Test the coordinate transformation from ras to neuromag."""
# create model points in neuromag-like space
rng = np.random.RandomState(0)
anterior = [0, 1, 0]
left = [-1, 0, 0]
right = [.8, 0, 0]
up = [0, 0, 1]
rand_pts = rng.uniform(-1, 1, (3, 3))
pts = np.vstack((anterior, left, right, up, rand_pts))
# change coord system
rx, ry, rz, tx, ty, tz = rng.uniform(-2 * np.pi, 2 * np.pi, 6)
trans = np.dot(translation(tx, ty, tz), rotation(rx, ry, rz))
pts_changed = apply_trans(trans, pts)
# transform back into original space
nas, lpa, rpa = pts_changed[:3]
hsp_trans = get_ras_to_neuromag_trans(nas, lpa, rpa)
pts_restored = apply_trans(hsp_trans, pts_changed)
err = "Neuromag transformation failed"
assert_allclose(pts_restored, pts, atol=1e-6, err_msg=err)
def _cartesian_to_sphere(x, y, z):
"""Convert using old function."""
hypotxy = np.hypot(x, y)
r = np.hypot(hypotxy, z)
elev = np.arctan2(z, hypotxy)
az = np.arctan2(y, x)
return az, elev, r
def _sphere_to_cartesian(theta, phi, r):
"""Convert using old function."""
z = r * np.sin(phi)
rcos_phi = r * np.cos(phi)
x = rcos_phi * np.cos(theta)
y = rcos_phi * np.sin(theta)
return x, y, z
def test_sph_to_cart():
"""Test conversion between sphere and cartesian."""
# Simple test, expected value (11, 0, 0)
r, theta, phi = 11., 0., np.pi / 2.
z = r * np.cos(phi)
rsin_phi = r * np.sin(phi)
x = rsin_phi * np.cos(theta)
y = rsin_phi * np.sin(theta)
coord = _sph_to_cart(np.array([[r, theta, phi]]))[0]
assert_allclose(coord, (x, y, z), atol=1e-7)
assert_allclose(coord, (r, 0, 0), atol=1e-7)
rng = np.random.RandomState(0)
# round-trip test
coords = rng.randn(10, 3)
assert_allclose(_sph_to_cart(_cart_to_sph(coords)), coords, atol=1e-5)
# equivalence tests to old versions
for coord in coords:
sph = _cart_to_sph(coord[np.newaxis])
cart = _sph_to_cart(sph)
sph_old = np.array(_cartesian_to_sphere(*coord))
cart_old = _sphere_to_cartesian(*sph_old)
sph_old[1] = np.pi / 2. - sph_old[1] # new convention
assert_allclose(sph[0], sph_old[[2, 0, 1]], atol=1e-7)
assert_allclose(cart[0], cart_old, atol=1e-7)
assert_allclose(cart[0], coord, atol=1e-7)
def _polar_to_cartesian(theta, r):
"""Transform polar coordinates to cartesian."""
x = r * np.cos(theta)
y = r * np.sin(theta)
return x, y
def test_polar_to_cartesian():
"""Test helper transform function from polar to cartesian."""
r = 1
theta = np.pi
# expected values are (-1, 0)
x = r * np.cos(theta)
y = r * np.sin(theta)
coord = _pol_to_cart(np.array([[r, theta]]))[0]
# np.pi is an approx since pi is irrational
assert_allclose(coord, (x, y), atol=1e-7)
assert_allclose(coord, (-1, 0), atol=1e-7)
assert_allclose(coord, _polar_to_cartesian(theta, r), atol=1e-7)
rng = np.random.RandomState(0)
r = rng.randn(10)
theta = rng.rand(10) * (2 * np.pi)
polar = np.array((r, theta)).T
assert_allclose([_polar_to_cartesian(p[1], p[0]) for p in polar],
_pol_to_cart(polar), atol=1e-7)
def _topo_to_phi_theta(theta, radius):
"""Convert using old function."""
sph_phi = (0.5 - radius) * 180
sph_theta = -theta
return sph_phi, sph_theta
def test_topo_to_sph():
"""Test topo to sphere conversion."""
rng = np.random.RandomState(0)
angles = rng.rand(10) * 360
radii = rng.rand(10)
angles[0] = 30
radii[0] = 0.25
# new way
sph = _topo_to_sph(np.array([angles, radii]).T)
new = _sph_to_cart(sph)
new[:, [0, 1]] = new[:, [1, 0]] * [-1, 1]
# old way
for ii, (angle, radius) in enumerate(zip(angles, radii)):
sph_phi, sph_theta = _topo_to_phi_theta(angle, radius)
if ii == 0:
assert_allclose(_topo_to_phi_theta(angle, radius), [45, -30])
azimuth = sph_theta / 180.0 * np.pi
elevation = sph_phi / 180.0 * np.pi
assert_allclose(sph[ii], [1., azimuth, np.pi / 2. - elevation],
atol=1e-7)
r = np.ones_like(radius)
x, y, z = _sphere_to_cartesian(azimuth, elevation, r)
pos = [-y, x, z]
if ii == 0:
expected = np.array([1. / 2., np.sqrt(3) / 2., 1.])
expected /= np.sqrt(2)
assert_allclose(pos, expected, atol=1e-7)
assert_allclose(pos, new[ii], atol=1e-7)
def test_rotation():
"""Test conversion between rotation angles and transformation matrix."""
tests = [(0, 0, 1), (.5, .5, .5), (np.pi, 0, -1.5)]
for rot in tests:
x, y, z = rot
m = rotation3d(x, y, z)
m4 = rotation(x, y, z)
assert_array_equal(m, m4[:3, :3])
back = rotation_angles(m)
assert_almost_equal(actual=back, desired=rot, decimal=12)
back4 = rotation_angles(m4)
assert_almost_equal(actual=back4, desired=rot, decimal=12)
def test_rotation3d_align_z_axis():
"""Test rotation3d_align_z_axis."""
# The more complex z axis fails the assert presumably due to tolerance
#
inp_zs = [[0, 0, 1], [0, 1, 0], [1, 0, 0], [0, 0, -1],
[-0.75071668, -0.62183808, 0.22302888]]
exp_res = [[[1., 0., 0.], [0., 1., 0.], [0., 0., 1.]],
[[1., 0., 0.], [0., 0., 1.], [0., -1., 0.]],
[[0., 0., 1.], [0., 1., 0.], [-1., 0., 0.]],
[[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]],
[[0.53919688, -0.38169517, -0.75071668],
[-0.38169517, 0.683832, -0.62183808],
[0.75071668, 0.62183808, 0.22302888]]]
for res, z in zip(exp_res, inp_zs):
assert_allclose(res, rotation3d_align_z_axis(z), atol=1e-7)
@testing.requires_testing_data
def test_combine():
"""Test combining transforms."""
trans = read_trans(fname)
inv = invert_transform(trans)
combine_transforms(trans, inv, trans['from'], trans['from'])
pytest.raises(RuntimeError, combine_transforms, trans, inv,
trans['to'], trans['from'])
pytest.raises(RuntimeError, combine_transforms, trans, inv,
trans['from'], trans['to'])
pytest.raises(RuntimeError, combine_transforms, trans, trans,
trans['from'], trans['to'])
def test_quaternions():
"""Test quaternion calculations."""
rots = [np.eye(3)]
for fname in [test_fif_fname, ctf_fname, hp_fif_fname]:
rots += [read_info(fname)['dev_head_t']['trans'][:3, :3]]
# nasty numerical cases
rots += [np.array([
[-0.99978541, -0.01873462, -0.00898756],
[-0.01873462, 0.62565561, 0.77987608],
[-0.00898756, 0.77987608, -0.62587152],
])]
rots += [np.array([
[0.62565561, -0.01873462, 0.77987608],
[-0.01873462, -0.99978541, -0.00898756],
[0.77987608, -0.00898756, -0.62587152],
])]
rots += [np.array([
[-0.99978541, -0.00898756, -0.01873462],
[-0.00898756, -0.62587152, 0.77987608],
[-0.01873462, 0.77987608, 0.62565561],
])]
for rot in rots:
assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
rtol=1e-5, atol=1e-5)
rot = rot[np.newaxis, np.newaxis, :, :]
assert_allclose(rot, quat_to_rot(rot_to_quat(rot)),
rtol=1e-5, atol=1e-5)
# let's make sure our angle function works in some reasonable way
for ii in range(3):
for jj in range(3):
a = np.zeros(3)
b = np.zeros(3)
a[ii] = 1.
b[jj] = 1.
expected = np.pi if ii != jj else 0.
assert_allclose(_angle_between_quats(a, b), expected, atol=1e-5)
y_180 = np.array([[-1, 0, 0], [0, 1, 0], [0, 0, -1.]])
assert_allclose(_angle_between_quats(rot_to_quat(y_180),
np.zeros(3)), np.pi)
h_180_attitude_90 = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1.]])
assert_allclose(_angle_between_quats(rot_to_quat(h_180_attitude_90),
np.zeros(3)), np.pi)
def test_vector_rotation():
"""Test basic rotation matrix math."""
x = np.array([1., 0., 0.])
y = np.array([0., 1., 0.])
rot = _find_vector_rotation(x, y)
assert_array_equal(rot,
[[0, -1, 0], [1, 0, 0], [0, 0, 1]])
quat_1 = rot_to_quat(rot)
quat_2 = rot_to_quat(np.eye(3))
assert_allclose(_angle_between_quats(quat_1, quat_2), np.pi / 2.)
def test_average_quats():
"""Test averaging of quaternions."""
sq2 = 1. / np.sqrt(2.)
quats = np.array([[0, sq2, sq2],
[0, sq2, sq2],
[0, sq2, 0],
[0, 0, sq2],
[sq2, 0, 0]], float)
# In MATLAB:
# quats = [[0, sq2, sq2, 0]; [0, sq2, sq2, 0];
# [0, sq2, 0, sq2]; [0, 0, sq2, sq2]; [sq2, 0, 0, sq2]];
expected = [quats[0],
quats[0],
[0, 0.788675134594813, 0.577350269189626],
[0, 0.657192299694123, 0.657192299694123],
[0.100406058540540, 0.616329446922803, 0.616329446922803]]
# Averaging the first two should give the same thing:
for lim, ex in enumerate(expected):
assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7)
quats[1] *= -1 # same quaternion (hidden value is zero here)!
rot_0, rot_1 = quat_to_rot(quats[:2])
assert_allclose(rot_0, rot_1, atol=1e-7)
for lim, ex in enumerate(expected):
assert_allclose(_average_quats(quats[:lim + 1]), ex, atol=1e-7)
# Assert some symmetry
count = 0
extras = [[sq2, sq2, 0]] + list(np.eye(3))
for quat in np.concatenate((quats, expected, extras)):
if np.isclose(_quat_real(quat), 0., atol=1e-7): # can flip sign
count += 1
angle = _angle_between_quats(quat, -quat)
assert_allclose(angle, 0., atol=1e-7)
rot_0, rot_1 = quat_to_rot(np.array((quat, -quat)))
assert_allclose(rot_0, rot_1, atol=1e-7)
assert count == 4 + len(extras)
@testing.requires_testing_data
@pytest.mark.parametrize('subject', ('fsaverage', 'sample'))
def test_fs_xfm(subject, tmpdir):
"""Test reading and writing of Freesurfer transforms."""
fname = op.join(data_path, 'subjects', subject, 'mri', 'transforms',
'talairach.xfm')
xfm, kind = _read_fs_xfm(fname)
if subject == 'fsaverage':
assert_allclose(xfm, np.eye(4), atol=1e-5) # fsaverage is in MNI
assert kind == 'MNI Transform File'
tempdir = str(tmpdir)
fname_out = op.join(tempdir, 'out.xfm')
_write_fs_xfm(fname_out, xfm, kind)
xfm_read, kind_read = _read_fs_xfm(fname_out)
assert kind_read == kind
assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5)
# Some wacky one
xfm[:3] = np.random.RandomState(0).randn(3, 4)
_write_fs_xfm(fname_out, xfm, 'foo')
xfm_read, kind_read = _read_fs_xfm(fname_out)
assert kind_read == 'foo'
assert_allclose(xfm, xfm_read, rtol=1e-5, atol=1e-5)
# degenerate conditions
with open(fname_out, 'w') as fid:
fid.write('foo')
with pytest.raises(ValueError, match='Failed to find'):
_read_fs_xfm(fname_out)
_write_fs_xfm(fname_out, xfm[:2], 'foo')
with pytest.raises(ValueError, match='Could not find'):
_read_fs_xfm(fname_out)
@pytest.fixture()
def quats():
"""Make some unit quats."""
quats = np.random.RandomState(0).randn(5, 3)
quats[:, 0] = 0 # identity
quats /= 2 * np.linalg.norm(quats, axis=1, keepdims=True) # some real part
return quats
def _check_fit_matched_points(
p, x, weights, do_scale, angtol=1e-5, dtol=1e-5, stol=1e-7):
__tracebackhide__ = True
mne.coreg._ALLOW_ANALITICAL = False
try:
params = mne.coreg.fit_matched_points(
p, x, weights=weights, scale=do_scale, out='params')
finally:
mne.coreg._ALLOW_ANALITICAL = True
quat_an, scale_an = _fit_matched_points(p, x, weights, scale=do_scale)
assert len(params) == 6 + int(do_scale)
q_co = _euler_to_quat(params[:3])
translate_co = params[3:6]
angle = np.rad2deg(_angle_between_quats(quat_an[:3], q_co))
dist = np.linalg.norm(quat_an[3:] - translate_co)
assert 0 <= angle < angtol, 'angle'
assert 0 <= dist < dtol, 'dist'
if do_scale:
scale_co = params[6]
assert_allclose(scale_an, scale_co, rtol=stol, err_msg='scale')
# errs
trans = _quat_to_affine(quat_an)
trans[:3, :3] *= scale_an
weights = np.ones(1) if weights is None else weights
err_an = np.linalg.norm(
weights[:, np.newaxis] * apply_trans(trans, p) - x)
trans = mne.coreg._trans_from_params((True, True, do_scale), params)
err_co = np.linalg.norm(
weights[:, np.newaxis] * apply_trans(trans, p) - x)
if err_an > 1e-14:
assert err_an < err_co * 1.5
return quat_an, scale_an
@pytest.mark.parametrize('scaling', [0.25, 1])
@pytest.mark.parametrize('do_scale', (True, False))
def test_fit_matched_points(quats, scaling, do_scale):
"""Test analytical least-squares matched point fitting."""
if scaling != 1 and not do_scale:
return # no need to test this, it will not be good
rng = np.random.RandomState(0)
fro = rng.randn(10, 3)
translation = rng.randn(3)
for qi, quat in enumerate(quats):
to = scaling * np.dot(quat_to_rot(quat), fro.T).T + translation
for corrupted in (False, True):
# mess up a point
if corrupted:
to[0, 2] += 100
weights = np.ones(len(to))
weights[0] = 0
else:
weights = None
est, scale_est = _check_fit_matched_points(
fro, to, weights=weights, do_scale=do_scale)
assert_allclose(scale_est, scaling, rtol=1e-5)
assert_allclose(est[:3], quat, atol=1e-14)
assert_allclose(est[3:], translation, atol=1e-14)
# if we don't adjust for the corruption above, it should get worse
angle = dist = None
for weighted in (False, True):
if not weighted:
weights = None
dist_bounds = (5, 20)
if scaling == 1:
angle_bounds = (5, 95)
angtol, dtol, stol = 1, 15, 3
else:
angle_bounds = (5, 105)
angtol, dtol, stol = 20, 15, 3
else:
weights = np.ones(len(to))
weights[0] = 10 # weighted=True here means "make it worse"
angle_bounds = (angle, 180) # unweighted values as new min
dist_bounds = (dist, 100)
if scaling == 1:
# XXX this angtol is not great but there is a hard to
# identify linalg/angle calculation bug on Travis...
angtol, dtol, stol = 180, 70, 3
else:
angtol, dtol, stol = 50, 70, 3
est, scale_est = _check_fit_matched_points(
fro, to, weights=weights, do_scale=do_scale,
angtol=angtol, dtol=dtol, stol=stol)
assert not np.allclose(est[:3], quat, atol=1e-5)
assert not np.allclose(est[3:], translation, atol=1e-5)
angle = np.rad2deg(_angle_between_quats(est[:3], quat))
assert_array_less(angle_bounds[0], angle)
assert_array_less(angle, angle_bounds[1])
dist = np.linalg.norm(est[3:] - translation)
assert_array_less(dist_bounds[0], dist)
assert_array_less(dist, dist_bounds[1])
def test_euler(quats):
"""Test euler transformations."""
euler = _quat_to_euler(quats)
quats_2 = _euler_to_quat(euler)
assert_allclose(quats, quats_2, atol=1e-14)
quat_rot = quat_to_rot(quats)
euler_rot = np.array([rotation(*e)[:3, :3] for e in euler])
assert_allclose(quat_rot, euler_rot, atol=1e-14)
@requires_nibabel()
@requires_dipy()
@pytest.mark.slowtest
@testing.requires_testing_data
def test_volume_registration():
"""Test volume registration."""
import nibabel as nib
from dipy.align import resample
T1 = nib.load(fname_t1)
affine = np.eye(4)
affine[0, 3] = 10
T1_resampled = resample(moving=T1.get_fdata(),
static=T1.get_fdata(),
moving_affine=T1.affine,
static_affine=T1.affine,
between_affine=np.linalg.inv(affine))
for pipeline in ('rigids', ('translation', 'sdr')):
reg_affine, sdr_morph = mne.transforms.compute_volume_registration(
T1_resampled, T1, pipeline=pipeline, zooms=10, niter=[5])
assert_allclose(affine, reg_affine, atol=0.25)
T1_aligned = mne.transforms.apply_volume_registration(
T1_resampled, T1, reg_affine, sdr_morph)
r2 = _compute_r2(_get_img_fdata(T1_aligned), _get_img_fdata(T1))
assert 99.9 < r2
# check that all orders of the pipeline work
for pipeline_len in range(1, 5):
for pipeline in itertools.combinations(
('translation', 'rigid', 'affine', 'sdr'), pipeline_len):
_validate_pipeline(pipeline)
_validate_pipeline(list(pipeline))
with pytest.raises(ValueError, match='Steps in pipeline are out of order'):
_validate_pipeline(('sdr', 'affine'))
with pytest.raises(ValueError,
match='Steps in pipeline should not be repeated'):
_validate_pipeline(('affine', 'affine'))
| bloyl/mne-python | mne/tests/test_transforms.py | Python | bsd-3-clause | 21,423 |
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from brambling.utils.payment import dwolla_update_tokens
class Command(BaseCommand):
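    # Shown by `manage.py help`; summarizes what this command does.
    help = 'Update Dwolla refresh tokens expiring within the given number of days.'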
option_list = BaseCommand.option_list + (
make_option(
'--days',
action='store',
dest='days',
default=15,
help='Number of days ahead of time to update refresh tokens.'),
)
def handle(self, *args, **options):
try:
days = int(options['days'])
except ValueError:
raise CommandError("Days must be an integer value.")
self.stdout.write("Updating dwolla tokens...")
self.stdout.flush()
count, test_count = dwolla_update_tokens(days)
self.stdout.write("Test tokens updated: {}".format(count))
self.stdout.write("Live tokens updated: {}".format(test_count))
self.stdout.flush()
| j-po/django-brambling | brambling/management/commands/update_tokens.py | Python | bsd-3-clause | 931 |
# -*- coding: utf-8 -*-
"""
.. _tut-set-eeg-ref:
Setting the EEG reference
=========================
This tutorial describes how to set or change the EEG reference in MNE-Python.
.. contents:: Page contents
:local:
:depth: 2
As usual we'll start by importing the modules we need, loading some
:ref:`example data <sample-dataset>`, and cropping it to save memory. Since
this tutorial deals specifically with EEG, we'll also restrict the dataset to
just a few EEG channels so the plots are easier to see:
"""
import os
import mne
sample_data_folder = mne.datasets.sample.data_path()
sample_data_raw_file = os.path.join(sample_data_folder, 'MEG', 'sample',
'sample_audvis_raw.fif')
raw = mne.io.read_raw_fif(sample_data_raw_file, verbose=False)
raw.crop(tmax=60).load_data()
raw.pick(['EEG 0{:02}'.format(n) for n in range(41, 60)])
###############################################################################
# Background
# ^^^^^^^^^^
#
# EEG measures a voltage (difference in electric potential) between each
# electrode and a reference electrode. This means that whatever signal is
# present at the reference electrode is effectively subtracted from all the
# measurement electrodes. Therefore, an ideal reference signal is one that
# captures *none* of the brain-specific fluctuations in electric potential,
# while capturing *all* of the environmental noise/interference that is being
# picked up by the measurement electrodes.
#
# In practice, this means that the reference electrode is often placed in a
# location on the subject's body and close to their head (so that any
# environmental interference affects the reference and measurement electrodes
# similarly) but as far away from the neural sources as possible (so that the
# reference signal doesn't pick up brain-based fluctuations). Typical reference
# locations are the subject's earlobe, nose, mastoid process, or collarbone.
# Each of these has advantages and disadvantages regarding how much brain
# signal it picks up (e.g., the mastoids pick up a fair amount compared to the
# others), and regarding the environmental noise it picks up (e.g., earlobe
# electrodes may shift easily, and have signals more similar to electrodes on
# the same side of the head).
#
# Even in cases where no electrode is specifically designated as the reference,
# EEG recording hardware will still treat one of the scalp electrodes as the
# reference, and the recording software may or may not display it to you (it
# might appear as a completely flat channel, or the software might subtract out
# the average of all signals before displaying, making it *look like* there is
# no reference).
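#
# Re-referencing is arithmetically just a subtraction. Here is a minimal
# sketch of the idea with NumPy (hypothetical toy numbers, not this
# tutorial's data)::
#
#     import numpy as np
#     data = np.array([[1., 2.], [3., 4.], [5., 6.]])  # channels x times
#     new_ref = data[0]              # signal at the chosen reference channel
#     rereferenced = data - new_ref  # subtract it from every channel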
#
#
# Setting or changing the reference channel
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If you want to recompute your data with a different reference than was used
# when the raw data were recorded and/or saved, MNE-Python provides the
# :meth:`~mne.io.Raw.set_eeg_reference` method on :class:`~mne.io.Raw` objects
# as well as the :func:`mne.add_reference_channels` function. To use an
# existing channel as the new reference, use the
# :meth:`~mne.io.Raw.set_eeg_reference` method; you can also designate multiple
# existing electrodes as reference channels, as is sometimes done with mastoid
# references:
# code lines below are commented out because the sample data doesn't have
# earlobe or mastoid channels, so this is just for demonstration purposes:
# use a single channel reference (left earlobe)
# raw.set_eeg_reference(ref_channels=['A1'])
# use average of mastoid channels as reference
# raw.set_eeg_reference(ref_channels=['M1', 'M2'])
###############################################################################
# If a scalp electrode was used as reference but was not saved alongside the
# raw data (reference channels often aren't), you may wish to add it back to
# the dataset before re-referencing. For example, if your EEG system recorded
# with channel ``Fp1`` as the reference but did not include ``Fp1`` in the data
# file, using :meth:`~mne.io.Raw.set_eeg_reference` to set (say) ``Cz`` as the
# new reference will then subtract out the signal at ``Cz`` *without restoring
# the signal at* ``Fp1``. In this situation, you can add back ``Fp1`` as a flat
# channel prior to re-referencing using :func:`~mne.add_reference_channels`.
# (Since our example data doesn't use the `10-20 electrode naming system`_, the
# example below adds ``EEG 999`` as the missing reference, then sets the
# reference to ``EEG 050``.) Here's how the data looks in its original state:
raw.plot()
###############################################################################
# By default, :func:`~mne.add_reference_channels` returns a copy, so we can go
# back to our original ``raw`` object later. If you wanted to alter the
# existing :class:`~mne.io.Raw` object in-place you could specify
# ``copy=False``.
# add new reference channel (all zero)
raw_new_ref = mne.add_reference_channels(raw, ref_channels=['EEG 999'])
raw_new_ref.plot()
###############################################################################
# .. KEEP THESE BLOCKS SEPARATE SO FIGURES ARE BIG ENOUGH TO READ
# set reference to `EEG 050`
raw_new_ref.set_eeg_reference(ref_channels=['EEG 050'])
raw_new_ref.plot()
###############################################################################
# Notice that the new reference (``EEG 050``) is now flat, while the original
# reference channel that we added back to the data (``EEG 999``) has a non-zero
# signal. Notice also that ``EEG 053`` (which is marked as "bad" in
# ``raw.info['bads']``) is not affected by the re-referencing.
#
#
# Setting average reference
# ^^^^^^^^^^^^^^^^^^^^^^^^^
#
# To set a "virtual reference" that is the average of all channels, you can use
# :meth:`~mne.io.Raw.set_eeg_reference` with ``ref_channels='average'``. Just
# as above, this will not affect any channels marked as "bad", nor will it
# include bad channels when computing the average. However, it does modify the
# :class:`~mne.io.Raw` object in-place, so we'll make a copy first so we can
# still go back to the unmodified :class:`~mne.io.Raw` object later:
# sphinx_gallery_thumbnail_number = 4
# use the average of all channels as reference
raw_avg_ref = raw.copy().set_eeg_reference(ref_channels='average')
raw_avg_ref.plot()
###############################################################################
# Creating the average reference as a projector
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If using an average reference, it is possible to create the reference as a
# :term:`projector` rather than subtracting the reference from the data
# immediately by specifying ``projection=True``:
raw.set_eeg_reference('average', projection=True)
print(raw.info['projs'])
###############################################################################
# Creating the average reference as a projector has a few advantages:
#
# 1. It is possible to turn projectors on or off when plotting, so it is easy
# to visualize the effect that the average reference has on the data.
#
# 2. If additional channels are marked as "bad" or if a subset of channels are
# later selected, the projector will be re-computed to take these changes
# into account (thus guaranteeing that the signal is zero-mean).
#
# 3. If there are other unapplied projectors affecting the EEG channels (such
# as SSP projectors for removing heartbeat or blink artifacts), EEG
# re-referencing cannot be performed until those projectors are either
# applied or removed; adding the EEG reference as a projector is not subject
# to that constraint. (The reason this wasn't a problem when we applied the
# non-projector average reference to ``raw_avg_ref`` above is that the
# empty-room projectors included in the sample data :file:`.fif` file were
# only computed for the magnetometers.)
for title, proj in zip(['Original', 'Average'], [False, True]):
fig = raw.plot(proj=proj, n_channels=len(raw))
# make room for title
fig.subplots_adjust(top=0.9)
fig.suptitle('{} reference'.format(title), size='xx-large', weight='bold')
###############################################################################
# EEG reference and source modeling
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#
# If you plan to perform source modeling (either with EEG or combined EEG/MEG
# data), it is **strongly recommended** to use the
# average-reference-as-projection approach. It is important to use an average
# reference because using a specific
# reference sensor (or even an average of a few sensors) spreads the forward
# model error from the reference sensor(s) into all sensors, effectively
# amplifying the importance of the reference sensor(s) when computing source
# estimates. In contrast, using the average of all EEG channels as reference
# spreads the forward modeling error evenly across channels, so no one channel
# is weighted more strongly during source estimation. See also this `FieldTrip
# FAQ on average referencing`_ for more information.
#
# The main reason for specifying the average reference as a projector was
# mentioned in the previous section: an average reference projector adapts if
# channels are dropped, ensuring that the signal will always be zero-mean when
# the source modeling is performed. In contrast, applying an average reference
# by the traditional subtraction method offers no such guarantee.
#
# For these reasons, when performing inverse imaging, *MNE-Python will
# automatically average-reference the EEG channels if they are present and no
# reference strategy has been specified*. If you want to perform inverse
# imaging and do not want to use an average reference (and hence you accept the
# risks presented in the previous paragraphs), you can force MNE-Python to
# relax its average reference requirement by passing an empty list to
# :meth:`~mne.io.Raw.set_eeg_reference` (i.e., by calling
# ``raw.set_eeg_reference(ref_channels=[])``) prior to performing inverse
# imaging.
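#
# A minimal sketch (not executed here, since this tutorial performs no
# inverse imaging)::
#
#     raw.set_eeg_reference(ref_channels=[])  # disable automatic re-referencing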
#
#
# .. LINKS
#
# .. _`FieldTrip FAQ on average referencing`:
# http://www.fieldtriptoolbox.org/faq/why_should_i_use_an_average_reference_for_eeg_source_reconstruction/
# .. _`10-20 electrode naming system`:
# https://en.wikipedia.org/wiki/10%E2%80%9320_system_(EEG)
| mne-tools/mne-tools.github.io | 0.19/_downloads/0162af27293b0c7e7c35ef85531280ea/plot_55_setting_eeg_reference.py | Python | bsd-3-clause | 10,338 |
import base64
import json
from twisted.internet.defer import inlineCallbacks, DeferredQueue, returnValue
from twisted.web.http_headers import Headers
from twisted.web import http
from twisted.web.server import NOT_DONE_YET
from vumi.config import ConfigContext
from vumi.message import TransportUserMessage, TransportEvent
from vumi.tests.helpers import VumiTestCase
from vumi.tests.utils import MockHttpServer, LogCatcher
from vumi.transports.vumi_bridge.client import StreamingClient
from vumi.utils import http_request_full
from go.apps.http_api.resource import (
StreamResourceMixin, StreamingConversationResource)
from go.apps.tests.helpers import AppWorkerHelper
from go.apps.http_api.vumi_app import StreamingHTTPWorker
class TestStreamingHTTPWorker(VumiTestCase):
@inlineCallbacks
def setUp(self):
self.app_helper = self.add_helper(AppWorkerHelper(StreamingHTTPWorker))
self.config = {
'health_path': '/health/',
'web_path': '/foo',
'web_port': 0,
'metrics_prefix': 'metrics_prefix.',
'conversation_cache_ttl': 0,
}
self.app = yield self.app_helper.get_app_worker(self.config)
self.addr = self.app.webserver.getHost()
self.url = 'http://%s:%s%s' % (
self.addr.host, self.addr.port, self.config['web_path'])
conv_config = {
'http_api': {
'api_tokens': [
'token-1',
'token-2',
'token-3',
],
'metric_store': 'metric_store',
}
}
conversation = yield self.app_helper.create_conversation(
config=conv_config)
yield self.app_helper.start_conversation(conversation)
self.conversation = yield self.app_helper.get_conversation(
conversation.key)
self.auth_headers = {
'Authorization': ['Basic ' + base64.b64encode('%s:%s' % (
conversation.user_account.key, 'token-1'))],
}
self.client = StreamingClient()
# Mock server to test HTTP posting of inbound messages & events
self.mock_push_server = MockHttpServer(self.handle_request)
yield self.mock_push_server.start()
self.add_cleanup(self.mock_push_server.stop)
self.push_calls = DeferredQueue()
self._setup_wait_for_request()
self.add_cleanup(self._wait_for_requests)
def _setup_wait_for_request(self):
# Hackery to wait for the request to finish
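        # (track_request is patched to count connections as they open, and
        # release_request to feed the queue as each one closes, so
        # _wait_for_requests() can block until every request has finished)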
self._req_state = {
'queue': DeferredQueue(),
'expected': 0,
}
orig_track = StreamingConversationResource.track_request
orig_release = StreamingConversationResource.release_request
def track_wrapper(*args, **kw):
self._req_state['expected'] += 1
return orig_track(*args, **kw)
def release_wrapper(*args, **kw):
return orig_release(*args, **kw).addCallback(
self._req_state['queue'].put)
self.patch(
StreamingConversationResource, 'track_request', track_wrapper)
self.patch(
StreamingConversationResource, 'release_request', release_wrapper)
@inlineCallbacks
def _wait_for_requests(self):
while self._req_state['expected'] > 0:
yield self._req_state['queue'].get()
self._req_state['expected'] -= 1
def handle_request(self, request):
self.push_calls.put(request)
return NOT_DONE_YET
@inlineCallbacks
def pull_message(self, count=1):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
messages = DeferredQueue()
errors = DeferredQueue()
receiver = self.client.stream(
TransportUserMessage, messages.put, errors.put, url,
Headers(self.auth_headers))
received_messages = []
for msg_id in range(count):
yield self.app_helper.make_dispatch_inbound(
'in %s' % (msg_id,), message_id=str(msg_id),
conv=self.conversation)
recv_msg = yield messages.get()
received_messages.append(recv_msg)
receiver.disconnect()
returnValue((receiver, received_messages))
def assert_bad_request(self, response, reason):
self.assertEqual(response.code, http.BAD_REQUEST)
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
data = json.loads(response.delivered_body)
self.assertEqual(data, {
"success": False,
"reason": reason,
})
@inlineCallbacks
def test_proxy_buffering_headers_off(self):
# This is the default, but we patch it anyway to make sure we're
# testing the right thing should the default change.
self.patch(StreamResourceMixin, 'proxy_buffering', False)
receiver, received_messages = yield self.pull_message()
headers = receiver._response.headers
self.assertEqual(headers.getRawHeaders('x-accel-buffering'), ['no'])
@inlineCallbacks
def test_proxy_buffering_headers_on(self):
self.patch(StreamResourceMixin, 'proxy_buffering', True)
receiver, received_messages = yield self.pull_message()
headers = receiver._response.headers
self.assertEqual(headers.getRawHeaders('x-accel-buffering'), ['yes'])
@inlineCallbacks
def test_content_type(self):
receiver, received_messages = yield self.pull_message()
headers = receiver._response.headers
self.assertEqual(
headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
@inlineCallbacks
def test_messages_stream(self):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
messages = DeferredQueue()
errors = DeferredQueue()
receiver = self.client.stream(
TransportUserMessage, messages.put, errors.put, url,
Headers(self.auth_headers))
msg1 = yield self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
msg2 = yield self.app_helper.make_dispatch_inbound(
'in 2', message_id='2', conv=self.conversation)
rm1 = yield messages.get()
rm2 = yield messages.get()
receiver.disconnect()
# Sometimes messages arrive out of order if we're hitting real redis.
rm1, rm2 = sorted([rm1, rm2], key=lambda m: m['message_id'])
self.assertEqual(msg1['message_id'], rm1['message_id'])
self.assertEqual(msg2['message_id'], rm2['message_id'])
        self.assertEqual(errors.pending, [])  # no errors were received
@inlineCallbacks
def test_events_stream(self):
url = '%s/%s/events.json' % (self.url, self.conversation.key)
events = DeferredQueue()
errors = DeferredQueue()
receiver = yield self.client.stream(TransportEvent, events.put,
                                            errors.put, url,
Headers(self.auth_headers))
msg1 = yield self.app_helper.make_stored_outbound(
self.conversation, 'out 1', message_id='1')
ack1 = yield self.app_helper.make_dispatch_ack(
msg1, conv=self.conversation)
msg2 = yield self.app_helper.make_stored_outbound(
self.conversation, 'out 2', message_id='2')
ack2 = yield self.app_helper.make_dispatch_ack(
msg2, conv=self.conversation)
ra1 = yield events.get()
ra2 = yield events.get()
receiver.disconnect()
# Sometimes messages arrive out of order if we're hitting real redis.
if ra1['event_id'] != ack1['event_id']:
ra1, ra2 = ra2, ra1
self.assertEqual(ack1['event_id'], ra1['event_id'])
self.assertEqual(ack2['event_id'], ra2['event_id'])
        self.assertEqual(errors.pending, [])  # no errors were received
@inlineCallbacks
def test_missing_auth(self):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
queue = DeferredQueue()
receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, url)
response = yield receiver.get_response()
self.assertEqual(response.code, http.UNAUTHORIZED)
self.assertEqual(response.headers.getRawHeaders('www-authenticate'), [
'basic realm="Conversation Realm"'])
@inlineCallbacks
def test_invalid_auth(self):
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
queue = DeferredQueue()
headers = Headers({
'Authorization': ['Basic %s' % (base64.b64encode('foo:bar'),)],
})
receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, url, headers)
response = yield receiver.get_response()
self.assertEqual(response.code, http.UNAUTHORIZED)
self.assertEqual(response.headers.getRawHeaders('www-authenticate'), [
'basic realm="Conversation Realm"'])
@inlineCallbacks
def test_send_to(self):
msg = {
'to_addr': '+2345',
'content': 'foo',
'message_id': 'evil_id',
}
# TaggingMiddleware.add_tag_to_msg(msg, self.tag)
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
self.assertEqual(response.code, http.OK)
put_msg = json.loads(response.delivered_body)
[sent_msg] = self.app_helper.get_dispatched_outbound()
        self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
# We do not respect the message_id that's been given.
self.assertNotEqual(sent_msg['message_id'], msg['message_id'])
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['to_addr'], msg['to_addr'])
self.assertEqual(sent_msg['from_addr'], None)
@inlineCallbacks
def test_send_to_within_content_length_limit(self):
self.conversation.config['http_api'].update({
'content_length_limit': 182,
})
yield self.conversation.save()
msg = {
'content': 'foo',
'to_addr': '+1234',
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
self.assertEqual(response.code, http.OK)
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['session_event'], None)
self.assertEqual(sent_msg['to_addr'], '+1234')
self.assertEqual(sent_msg['from_addr'], None)
@inlineCallbacks
def test_send_to_content_too_long(self):
self.conversation.config['http_api'].update({
'content_length_limit': 10,
})
yield self.conversation.save()
msg = {
'content': "This message is longer than 10 characters.",
'to_addr': '+1234',
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(
url, json.dumps(msg), self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Payload content too long: 42 > 10")
@inlineCallbacks
def test_send_to_with_evil_content(self):
msg = {
'content': 0xBAD,
'to_addr': '+1234',
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Invalid or missing value for payload key 'content'")
@inlineCallbacks
def test_send_to_with_evil_to_addr(self):
msg = {
'content': 'good',
'to_addr': 1234,
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Invalid or missing value for payload key 'to_addr'")
@inlineCallbacks
def test_in_reply_to(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
self.assertEqual(response.code, http.OK)
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['session_event'], None)
self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr'])
self.assertEqual(sent_msg['from_addr'], '9292')
@inlineCallbacks
def test_in_reply_to_within_content_length_limit(self):
self.conversation.config['http_api'].update({
'content_length_limit': 182,
})
yield self.conversation.save()
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
self.assertEqual(response.code, http.OK)
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], put_msg['to_addr'])
self.assertEqual(sent_msg['helper_metadata'], {
'go': {
'conversation_key': self.conversation.key,
'conversation_type': 'http_api',
'user_account': self.conversation.user_account.key,
},
})
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['session_event'], None)
self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr'])
self.assertEqual(sent_msg['from_addr'], '9292')
@inlineCallbacks
def test_in_reply_to_content_too_long(self):
self.conversation.config['http_api'].update({
'content_length_limit': 10,
})
yield self.conversation.save()
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': "This message is longer than 10 characters.",
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(
url, json.dumps(msg), self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Payload content too long: 42 > 10")
@inlineCallbacks
def test_in_reply_to_with_evil_content(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 0xBAD,
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response, "Invalid or missing value for payload key 'content'")
@inlineCallbacks
def test_invalid_in_reply_to(self):
msg = {
'content': 'foo',
'in_reply_to': '1', # this doesn't exist
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(response, 'Invalid in_reply_to value')
@inlineCallbacks
def test_invalid_in_reply_to_with_missing_conversation_key(self):
# create a message with no conversation
inbound_msg = self.app_helper.make_inbound('in 1', message_id='msg-1')
vumi_api = self.app_helper.vumi_helper.get_vumi_api()
yield vumi_api.mdb.add_inbound_message(inbound_msg)
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
with LogCatcher(message='Invalid reply to message <Message .*>'
' which has no conversation key') as lc:
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
[error_log] = lc.messages()
self.assert_bad_request(response, "Invalid in_reply_to value")
self.assertTrue(inbound_msg['message_id'] in error_log)
@inlineCallbacks
def test_in_reply_to_with_evil_session_event(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
'session_event': 0xBAD5E55104,
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assert_bad_request(
response,
"Invalid or missing value for payload key 'session_event'")
self.assertEqual(self.app_helper.get_dispatched_outbound(), [])
@inlineCallbacks
def test_in_reply_to_with_evil_message_id(self):
inbound_msg = yield self.app_helper.make_stored_inbound(
self.conversation, 'in 1', message_id='1')
msg = {
'content': 'foo',
'in_reply_to': inbound_msg['message_id'],
'message_id': 'evil_id'
}
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
response = yield http_request_full(url, json.dumps(msg),
self.auth_headers, method='PUT')
self.assertEqual(response.code, http.OK)
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
put_msg = json.loads(response.delivered_body)
[sent_msg] = self.app_helper.get_dispatched_outbound()
# We do not respect the message_id that's been given.
self.assertNotEqual(sent_msg['message_id'], msg['message_id'])
self.assertEqual(sent_msg['message_id'], put_msg['message_id'])
self.assertEqual(sent_msg['to_addr'], inbound_msg['from_addr'])
self.assertEqual(sent_msg['from_addr'], '9292')
@inlineCallbacks
def test_metric_publishing(self):
metric_data = [
("vumi.test.v1", 1234, 'SUM'),
("vumi.test.v2", 3456, 'AVG'),
]
url = '%s/%s/metrics.json' % (self.url, self.conversation.key)
response = yield http_request_full(
url, json.dumps(metric_data), self.auth_headers, method='PUT')
self.assertEqual(response.code, http.OK)
self.assertEqual(
response.headers.getRawHeaders('content-type'),
['application/json; charset=utf-8'])
prefix = "go.campaigns.test-0-user.stores.metric_store"
self.assertEqual(
self.app_helper.get_published_metrics(self.app),
[("%s.vumi.test.v1" % prefix, 1234),
("%s.vumi.test.v2" % prefix, 3456)])
@inlineCallbacks
def test_concurrency_limits(self):
config = yield self.app.get_config(None)
concurrency = config.concurrency_limit
queue = DeferredQueue()
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
max_receivers = [self.client.stream(
TransportUserMessage, queue.put, queue.put, url,
Headers(self.auth_headers)) for _ in range(concurrency)]
for i in range(concurrency):
msg = yield self.app_helper.make_dispatch_inbound(
'in %s' % (i,), message_id=str(i), conv=self.conversation)
received = yield queue.get()
self.assertEqual(msg['message_id'], received['message_id'])
maxed_out_resp = yield http_request_full(
url, method='GET', headers=self.auth_headers)
self.assertEqual(maxed_out_resp.code, 403)
self.assertTrue(
'Too many concurrent connections' in maxed_out_resp.delivered_body)
[r.disconnect() for r in max_receivers]
@inlineCallbacks
def test_disabling_concurrency_limit(self):
conv_resource = StreamingConversationResource(
self.app, self.conversation.key)
# negative concurrency limit disables it
ctxt = ConfigContext(user_account=self.conversation.user_account.key,
concurrency_limit=-1)
config = yield self.app.get_config(msg=None, ctxt=ctxt)
self.assertTrue(
(yield conv_resource.is_allowed(
config, self.conversation.user_account.key)))
@inlineCallbacks
def test_backlog_on_connect(self):
for i in range(10):
yield self.app_helper.make_dispatch_inbound(
'in %s' % (i,), message_id=str(i), conv=self.conversation)
queue = DeferredQueue()
url = '%s/%s/messages.json' % (self.url, self.conversation.key)
receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, url,
Headers(self.auth_headers))
for i in range(10):
received = yield queue.get()
self.assertEqual(received['message_id'], str(i))
receiver.disconnect()
@inlineCallbacks
def test_health_response(self):
health_url = 'http://%s:%s%s' % (
self.addr.host, self.addr.port, self.config['health_path'])
response = yield http_request_full(health_url, method='GET')
self.assertEqual(response.delivered_body, '0')
yield self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
queue = DeferredQueue()
stream_url = '%s/%s/messages.json' % (self.url, self.conversation.key)
stream_receiver = self.client.stream(
TransportUserMessage, queue.put, queue.put, stream_url,
Headers(self.auth_headers))
yield queue.get()
response = yield http_request_full(health_url, method='GET')
self.assertEqual(response.delivered_body, '1')
stream_receiver.disconnect()
response = yield http_request_full(health_url, method='GET')
self.assertEqual(response.delivered_body, '0')
self.assertEqual(self.app.client_manager.clients, {
'sphex.stream.message.%s' % (self.conversation.key,): []
})
@inlineCallbacks
def test_post_inbound_message(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_message_url': self.mock_push_server.url,
})
yield self.conversation.save()
msg_d = self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
req = yield self.push_calls.get()
posted_json_data = req.content.read()
req.finish()
msg = yield msg_d
posted_msg = TransportUserMessage.from_json(posted_json_data)
self.assertEqual(posted_msg['message_id'], msg['message_id'])
@inlineCallbacks
def test_post_inbound_message_201_response(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_message_url': self.mock_push_server.url,
})
yield self.conversation.save()
with LogCatcher(message='Got unexpected response code') as lc:
msg_d = self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
req = yield self.push_calls.get()
req.setResponseCode(201)
req.finish()
yield msg_d
self.assertEqual(lc.messages(), [])
@inlineCallbacks
def test_post_inbound_message_500_response(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_message_url': self.mock_push_server.url,
})
yield self.conversation.save()
with LogCatcher(message='Got unexpected response code') as lc:
msg_d = self.app_helper.make_dispatch_inbound(
'in 1', message_id='1', conv=self.conversation)
req = yield self.push_calls.get()
req.setResponseCode(500)
req.finish()
yield msg_d
[warning_log] = lc.messages()
self.assertTrue(self.mock_push_server.url in warning_log)
self.assertTrue('500' in warning_log)
@inlineCallbacks
def test_post_inbound_event(self):
# Set the URL so stuff is HTTP Posted instead of streamed.
self.conversation.config['http_api'].update({
'push_event_url': self.mock_push_server.url,
})
yield self.conversation.save()
msg = yield self.app_helper.make_stored_outbound(
self.conversation, 'out 1', message_id='1')
event_d = self.app_helper.make_dispatch_ack(
msg, conv=self.conversation)
req = yield self.push_calls.get()
posted_json_data = req.content.read()
req.finish()
ack = yield event_d
self.assertEqual(TransportEvent.from_json(posted_json_data), ack)
@inlineCallbacks
def test_bad_urls(self):
def assert_not_found(url, headers={}):
            d = http_request_full(url, method='GET', headers=headers)
d.addCallback(lambda r: self.assertEqual(r.code, http.NOT_FOUND))
return d
yield assert_not_found(self.url)
yield assert_not_found(self.url + '/')
yield assert_not_found('%s/%s' % (self.url, self.conversation.key),
headers=self.auth_headers)
yield assert_not_found('%s/%s/' % (self.url, self.conversation.key),
headers=self.auth_headers)
yield assert_not_found('%s/%s/foo' % (self.url, self.conversation.key),
headers=self.auth_headers)
@inlineCallbacks
def test_send_message_command(self):
yield self.app_helper.dispatch_command(
'send_message',
user_account_key=self.conversation.user_account.key,
conversation_key=self.conversation.key,
command_data={
u'batch_id': u'batch-id',
u'content': u'foo',
u'to_addr': u'to_addr',
u'msg_options': {
u'helper_metadata': {
u'tag': {
u'tag': [u'longcode', u'default10080']
}
},
u'from_addr': u'default10080',
}
})
[msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(msg.payload['to_addr'], "to_addr")
self.assertEqual(msg.payload['from_addr'], "default10080")
self.assertEqual(msg.payload['content'], "foo")
self.assertEqual(msg.payload['message_type'], "user_message")
self.assertEqual(
msg.payload['helper_metadata']['go']['user_account'],
self.conversation.user_account.key)
self.assertEqual(
msg.payload['helper_metadata']['tag']['tag'],
['longcode', 'default10080'])
@inlineCallbacks
def test_process_command_send_message_in_reply_to(self):
msg = yield self.app_helper.make_stored_inbound(
self.conversation, "foo")
yield self.app_helper.dispatch_command(
'send_message',
user_account_key=self.conversation.user_account.key,
conversation_key=self.conversation.key,
command_data={
u'batch_id': u'batch-id',
u'content': u'foo',
u'to_addr': u'to_addr',
u'msg_options': {
u'helper_metadata': {
u'tag': {
u'tag': [u'longcode', u'default10080']
}
},
u'transport_name': u'smpp_transport',
u'in_reply_to': msg['message_id'],
u'transport_type': u'sms',
u'from_addr': u'default10080',
}
})
[sent_msg] = self.app_helper.get_dispatched_outbound()
self.assertEqual(sent_msg['to_addr'], msg['from_addr'])
self.assertEqual(sent_msg['content'], 'foo')
self.assertEqual(sent_msg['in_reply_to'], msg['message_id'])
| praekelt/vumi-go | go/apps/http_api/tests/test_vumi_app.py | Python | bsd-3-clause | 31,622 |
from flask import request, current_app, url_for
from flask_jsonschema import validate
from .. import db
from ..models import AHBot as Bot
from .decorators import json_response
from . import api
@api.route('/abusehelper', methods=['GET'])
@json_response
def get_abusehelper():
"""Return a list of available abusehelper
**Example request**:
.. sourcecode:: http
GET /api/1.0/abusehelper HTTP/1.1
Host: do.cert.europa.eu
Accept: application/json
**Example response**:
.. sourcecode:: http
HTTP/1.0 200 OK
Content-Type: application/json
{
"abusehelper": [
{
"name": "ShadowServerBot",
"url": "http://sample.com/path.html",
"id": 1
}
]
}
:reqheader Accept: Content type(s) accepted by the client
:resheader Content-Type: this depends on `Accept` header or request
:>json array abusehelper: List of available bots
:>jsonobj integer id: Bot ID
    :>jsonobj string name: Bot name
    :status 200: Bots found, response may be empty
:status 404: Not found
"""
bots = Bot.query.filter().all()
return {'abusehelper': [a.serialize() for a in bots]}
@api.route('/abusehelper/<int:bot_id>', methods=['GET'])
@json_response
def get_bot(bot_id):
"""Get bot from database
**Example request**:
.. sourcecode:: http
GET /api/1.0/abusehelper/1 HTTP/1.1
Host: do.cert.europa.eu
Accept: application/json
**Example response**:
.. sourcecode:: http
HTTP/1.0 200 OK
Content-Type: application/json
{
"name": "ShadowServerBot",
"url": "http://sample.com/path.html",
"id": 1
}
:param bot_id: Bot unique ID
:reqheader Accept: Content type(s) accepted by the client
:resheader Content-Type: this depends on `Accept` header or request
:>json integer id: Bot unique ID
    :>json string name: Bot name
    :status 200: Bot found
:status 404: Resource not found
"""
a = Bot.query.get_or_404(bot_id)
return a.serialize()
@api.route('/abusehelper', methods=['POST', 'PUT'])
@validate('abusehelper', 'add_bot')
@json_response
def add_bot():
"""Add new bot entry
**Example request**:
.. sourcecode:: http
POST /api/1.0/abusehelper HTTP/1.1
Host: do.cert.europa.eu
Accept: application/json
Content-Type: application/json
{
"name": "ShadowServerBot",
"url": "http://sample.com/path.html"
}
**Example response**:
.. sourcecode:: http
HTTP/1.0 201 CREATED
Content-Type: application/json
{
"bot": {
"name": "ShadowServerBot",
"url": "http://sample.com/path.html",
"id": 1
},
'message': "Bot added"
}
:reqheader Accept: Content type(s) accepted by the client
:resheader Content-Type: this depends on `Accept` header or request
    :<json string name: Bot name
    :>jsonobj integer id: Unique ID of new bot
    :>jsonobj string name: Bot name
    :>json string message: Status message
    :status 201: Bot successfully saved
:status 400: Bad request
"""
a = Bot.fromdict(request.json)
db.session.add(a)
db.session.commit()
return {'bot': a.serialize(), 'message': 'Bot added'}, 201, \
{'Location': url_for('api.get_bot', bot_id=a.id)}
@api.route('/abusehelper/<int:bot_id>', methods=['PUT'])
@validate('abusehelper', 'update_bot')
@json_response
def update_bot(bot_id):
return NotImplemented
@api.route('/abusehelper/<int:bot_id>', methods=['DELETE'])
@json_response
def delete_bot(bot_id):
"""Delete bot
**Example request**:
.. sourcecode:: http
DELETE /api/1.0/abusehelper/1 HTTP/1.1
Host: do.cert.europa.eu
Accept: application/json
**Example response**:
.. sourcecode:: http
HTTP/1.0 200 OK
Content-Type: application/json
{
"message": "Bot deleted"
}
:param bot_id: Bot unique ID.
:reqheader Accept: Content type(s) accepted by the client
:resheader Content-Type: this depends on `Accept` header or request
    :>json string message: Action status
:status 200: Bot was deleted
:status 404: Bot was not found
"""
    a = Bot.query.filter_by(id=bot_id).delete()
if not a:
return {'message': 'No such bot'}, 404
db.session.commit()
return {'message': 'Bot deleted'}
@api.route('/abusehelper', methods=['DELETE'])
@json_response
def delete_abusehelper():
"""Clear abusehelper table
**Example request**:
.. sourcecode:: http
DELETE /api/1.0/abusehelper HTTP/1.1
Host: do.cert.europa.eu
Accept: application/json
**Example response**:
.. sourcecode:: http
HTTP/1.0 200 OK
Content-Type: application/json
{
"message": "Bots deleted"
}
:reqheader Accept: Content type(s) accepted by the client
:resheader Content-Type: this depends on `Accept` header or request
    :>json string message: Action status
    :status 200: Bots were deleted
"""
    a = Bot.query.delete()
db.session.commit()
current_app.log.debug('Deleted {} abusehelper'.format(a))
return {'message': 'Bots deleted'}
| certeu/do-portal | app/api/abusehelper.py | Python | bsd-3-clause | 5,478 |
from django.contrib import admin
# Register your models here.
from .models import Photos
admin.site.register(Photos) | dadisigursveinn/VEF-Lokaverkefni | myphotos/admin.py | Python | bsd-3-clause | 118 |
# Copyright (c) 2014, Enthought, Inc.
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only
# under the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
# Thanks for using Enthought open source!
#
# Author: Enthought, Inc.
""" Tests for the BetterZoom Chaco tool """
import unittest
import numpy
from chaco.api import create_line_plot
from chaco.tools.api import BetterZoom
from enable.testing import EnableTestAssistant
class TestBetterZoomTool(EnableTestAssistant, unittest.TestCase):
""" Tests for the BetterZoom Chaco tool """
def setUp(self):
values = numpy.arange(10)
self.plot = create_line_plot((values, values))
self.plot.bounds = [100, 100]
self.plot._window = self.create_mock_window()
self.tool = BetterZoom(component=self.plot)
self.plot.active_tool = self.tool
self.plot.do_layout()
def tearDown(self):
del self.tool
del self.plot
def test_default_position(self):
tool = self.tool
# this doesn't throw an exception
self.send_key(tool, '+')
self.assertEqual(tool.position, (50, 50))
# expected behaviour for a normal zoom in operation
self.assertNotEqual(tool._index_factor, 1.0)
self.assertNotEqual(tool._value_factor, 1.0)
self.assertEqual(len(tool._history), 2)
| tommy-u/chaco | chaco/tools/tests/better_zoom_test_case.py | Python | bsd-3-clause | 1,537 |
import pytest
from py4jdbc.dbapi2 import connect, Connection
from py4jdbc.resultset import ResultSet
from py4jdbc.exceptions.dbapi2 import Error
def test_connect(gateway):
url = "jdbc:derby:memory:testdb;create=true"
conn = connect(url, gateway=gateway)
cur = conn.cursor()
rs = cur.execute("select * from SYS.SYSTABLES")
assert isinstance(rs, ResultSet)
def test_execute(derby):
cur = derby.cursor()
rs = cur.execute("select * from SYS.SYSTABLES")
assert isinstance(rs, ResultSet)
def test_execute_with_params(derby):
derby.autocommit = False
cur = derby.cursor()
cur.execute("create schema x_with_params")
cur.execute("create table x_with_params.cowtest(a int, b char(1))")
# Verify table is empty.
rows = cur.execute("select * from x_with_params.cowtest as r").fetchall()
assert len(rows) == 0
# Insert one with parameter binding..
sql = "insert into x_with_params.cowtest (a, b) values (?, ?)"
cur.execute(sql, (12, "m"))
# Verify there's 1 row.
rows = cur.execute("select * from x_with_params.cowtest as r").fetchall()
assert len(rows) == 1
# Insert a bunch.
params = list(enumerate("thecowsaremooing"))
cur.executemany(sql, params)
rows = cur.execute("select * from x_with_params.cowtest as r").fetchall()
assert len(rows) == len("thecowsaremooing") + 1
derby.rollback()
derby.autocommit = True
def test_fetchone(derby):
cur = derby.cursor()
rs = cur.execute("select * from SYS.SYSTABLES")
assert isinstance(rs.fetchone(), rs.Row)
def test_fetchmany(derby):
'''Assert all rows of result set have the correct class.
'''
cur = derby.cursor()
rs = cur.execute("select * from SYS.SYSTABLES")
assert all({isinstance(row, rs.Row) for row in rs.fetchmany(5)})
def test_fetchManyCount(derby):
derby.autocommit = False
cur = derby.cursor()
cur.execute("create schema x_with_params")
cur.execute("create table x_with_params.cowtest(a int, b char(1))")
sql = "insert into x_with_params.cowtest (a, b) values (?, ?)"
params = list(enumerate("thecowsaremooing"))
cur.executemany(sql, params)
rs = cur.execute("select a from x_with_params.cowtest")
ress = []
while True:
x = rs.fetchmany(3)
ress.append(x)
if len(x) < 3:
break
derby.rollback()
derby.autocommit = True
assert sum(map(len, ress)) == len("thecowsaremooing")
def test_fetchall(derby):
'''Assert all rows of result set have the correct class.
'''
cur = derby.cursor()
rs = cur.execute("select * from SYS.SYSTABLES")
assert all({isinstance(row, rs.Row) for row in rs.fetchall()})
def test_Cursor__iter__(derby):
cur = derby.cursor()
rs = cur.execute("select * from SYS.SYSTABLES")
assert all({isinstance(row, rs.Row) for row in rs})
def test_Cursor__iter__exhausted(derby):
cur = derby.cursor()
rs = cur.execute("select * from SYS.SYSTABLES")
# Exhaust all rows.
list(rs)
assert rs.fetchone() == None
def test_close_and_execute(derby):
cur = derby.cursor()
cur.close()
with pytest.raises(Error):
cur.execute("select * from SYS.SYSTABLES")
def test_close_and_fetchone(derby):
cur = derby.cursor()
cur.execute("select * from SYS.SYSTABLES")
cur.close()
with pytest.raises(Error):
cur.fetchone()
def test_close_twice(derby):
cur = derby.cursor()
cur.close()
with pytest.raises(Error):
cur.close()
| massmutual/py4jdbc | tests/test_Cursor.py | Python | bsd-3-clause | 3,513 |
"""Unit-tests for `tree.visitors`
"""
from py2c import tree
from py2c.tree import visitors
from py2c.tests import Test, data_driven_test
from nose.tools import assert_equal
# TEST:: Add non-node fields
# =============================================================================
# Helper classes
# =============================================================================
class BasicNode(tree.Node):
_fields = []
class BasicNodeReplacement(tree.Node):
_fields = []
class BasicNodeWithListReplacement(tree.Node):
_fields = []
class BasicNodeDeletable(tree.Node):
_fields = []
class ParentNode(tree.Node):
_fields = [
('child', tree.Node, 'OPTIONAL'),
]
class ParentNodeWithChildrenList(tree.Node):
"""Node with list of nodes as field
"""
_fields = [
('child', tree.Node, 'ZERO_OR_MORE'),
]
# -----------------------------------------------------------------------------
# Concrete Visitors used for testing
# -----------------------------------------------------------------------------
class VisitOrderCheckingVisitor(visitors.RecursiveNodeVisitor):
def __init__(self):
super().__init__()
self.visited = []
def generic_visit(self, node):
self.visited.append(node.__class__.__name__)
super().generic_visit(node)
def visit_BasicNodeReplacement(self, node):
self.visited.append("visited Copy!")
class AccessPathCheckingVisitor(visitors.RecursiveNodeVisitor):
def __init__(self):
super().__init__()
self.recorded_access_path = None
def visit_BasicNode(self, node):
self.recorded_access_path = self.access_path[:]
class EmptyTransformer(visitors.RecursiveNodeTransformer):
pass
class VisitOrderCheckingTransformer(visitors.RecursiveNodeTransformer):
def __init__(self):
super().__init__()
self.visited = []
def generic_visit(self, node):
self.visited.append(node.__class__.__name__)
return super().generic_visit(node)
def visit_BasicNodeReplacement(self, node):
self.visited.append("visited Copy!")
return node
class AccessPathCheckingTransformer(visitors.RecursiveNodeTransformer):
def __init__(self):
super().__init__()
self.recorded_access_path = None
def visit_BasicNode(self, node):
self.recorded_access_path = self.access_path[:]
return node
class TransformationCheckingTransformer(visitors.RecursiveNodeTransformer):
def visit_BasicNode(self, node):
return BasicNodeReplacement()
def visit_BasicNodeDeletable(self, node):
return None # Delete this node
def visit_BasicNodeReplacement(self, node):
return self.NONE_DEPUTY # Replace this node with None
def visit_BasicNodeWithListReplacement(self, node):
return [BasicNode(), BasicNodeReplacement()]
# -----------------------------------------------------------------------------
# Tests
# -----------------------------------------------------------------------------
class TestRecursiveASTVisitor(Test):
"""py2c.tree.visitors.RecursiveNodeVisitor
"""
context = globals()
@data_driven_test("visitors-visitor_order.yaml", prefix="visit order of ")
def test_visit_order(self, node, order):
to_visit = self.load(node)
# The main stuff
visitor = VisitOrderCheckingVisitor()
retval = visitor.visit(to_visit)
assert_equal(retval, None)
assert_equal(visitor.visited, order)
@data_driven_test("visitors-access_path.yaml", prefix="access path on visit of ")
def test_access_path(self, node, access):
to_visit = self.load(node)
access_path = self.load(access)
# The main stuff
visitor = AccessPathCheckingVisitor()
retval = visitor.visit(to_visit)
assert_equal(retval, None)
assert_equal(visitor.recorded_access_path, access_path)
class TestRecursiveASTTransformer(Test):
"""py2c.tree.visitors.RecursiveNodeTransformer
"""
context = globals()
@data_driven_test("visitors-visitor_order.yaml", prefix="empty transformer does not transform ")
def test_empty_transformer(self, node, order):
to_visit = self.load(node)
# The main stuff
visitor = EmptyTransformer()
retval = visitor.visit(to_visit)
assert_equal(to_visit, retval)
@data_driven_test("visitors-visitor_order.yaml", prefix="visit order of ")
def test_visit_order(self, node, order):
to_visit = self.load(node)
# The main stuff
visitor = VisitOrderCheckingTransformer()
retval = visitor.visit(to_visit)
assert_equal(to_visit, retval)
assert_equal(visitor.visited, order)
@data_driven_test("visitors-access_path.yaml", prefix="access path on visit of ")
def test_access_path(self, node, access):
to_visit = self.load(node)
access_path = self.load(access)
# The main stuff
visitor = AccessPathCheckingTransformer()
retval = visitor.visit(to_visit)
assert_equal(retval, to_visit)
assert_equal(visitor.recorded_access_path, access_path)
@data_driven_test("visitors-transform.yaml", prefix="transformation of ")
def test_transformation(self, node, expected):
to_visit = self.load(node)
expected_node = self.load(expected)
# The main stuff
visitor = TransformationCheckingTransformer()
retval = visitor.visit(to_visit)
assert_equal(retval, expected_node)
if __name__ == '__main__':
from py2c.tests import runmodule
runmodule()
| pradyunsg/Py2C | py2c/tree/tests/test_visitors.py | Python | bsd-3-clause | 5,659 |
from importlib import import_module
from inspect import getdoc
def attribs(name):
mod = import_module(name)
print name
print 'Has __all__?', hasattr(mod, '__all__')
print 'Has __doc__?', hasattr(mod, '__doc__')
print 'doc: ', getdoc(mod)
if __name__ == '__main__':
attribs('cairo')
attribs('zope')
attribs('A.B.C')
import hacked
class Object(object):
pass
opt = Object()
opt.ignore_errors = False
a, d = hacked.get_all_attr_has_docstr('/home/ali/ws-pydev/apidocfilter/A/B',
'/home/ali/ws-pydev/apidocfilter/A/B/C',
opt)
print(a)
print(d) | baharev/apidocfilter | check.py | Python | bsd-3-clause | 702 |
class Requirement(object):
"""
Requirements are the basis for Dominion. They define
what needs to exist on a host/role, or perhaps what *mustn't* exist.
Requirements are defined on Roles.
"""
    creation_counter = 0
    "Global counter used to record the order in which requirements are created."
def __init__(self, required=True, ensure=None, depends=None, post=None):
self.required = required
self.ensure = ensure or "exists"
self.depends = depends or ()
if self.ensure == "removed":
self.required = False
self.post = post or ()
# Increase the creation counter, and save our local copy.
self.creation_counter = Requirement.creation_counter
Requirement.creation_counter += 1
def __call__(self):
self.apply()
def apply(self):
if self.ensure == "exists" or self.required:
if hasattr(self, 'install'):
return self.install()
if self.ensure == "removed":
if hasattr(self, 'uninstall'):
return self.uninstall()
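# A minimal sketch of how a concrete requirement could build on this base
# class (hypothetical example; real requirements supply their own
# install/uninstall logic):
class _ExamplePackage(Requirement):
    def install(self):
        # runs when ensure == "exists" (or the requirement is required)
        return "installed"
    def uninstall(self):
        # runs when ensure == "removed"
        return "uninstalled"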
| paulcwatts/dominion | dominion/base.py | Python | bsd-3-clause | 1,060 |
import py
try:
from pypy.rpython.test.test_llinterp import interpret
except ImportError:
py.test.skip('Needs PyPy to be on the PYTHONPATH')
from rply import ParserGenerator, Token
from rply.errors import ParserGeneratorWarning
from .base import BaseTests
from .utils import FakeLexer, BoxInt, ParserState
class TestTranslation(BaseTests):
def run(self, func, args):
return interpret(func, args)
def test_basic(self):
pg = ParserGenerator(["NUMBER", "PLUS"])
@pg.production("main : expr")
def main(p):
return p[0]
@pg.production("expr : expr PLUS expr")
def expr_op(p):
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expr : NUMBER")
def expr_num(p):
return BoxInt(int(p[0].getstr()))
with self.assert_warns(ParserGeneratorWarning, "1 shift/reduce conflict"):
parser = pg.build()
def f(n):
return parser.parse(FakeLexer([
Token("NUMBER", str(n)),
Token("PLUS", "+"),
Token("NUMBER", str(n))
])).getint()
assert self.run(f, [12]) == 24
def test_state(self):
pg = ParserGenerator(["NUMBER", "PLUS"], precedence=[
("left", ["PLUS"]),
])
@pg.production("main : expression")
def main(state, p):
state.count += 1
return p[0]
@pg.production("expression : expression PLUS expression")
def expression_plus(state, p):
state.count += 1
return BoxInt(p[0].getint() + p[2].getint())
@pg.production("expression : NUMBER")
def expression_number(state, p):
state.count += 1
return BoxInt(int(p[0].getstr()))
parser = pg.build()
def f():
state = ParserState()
return parser.parse(FakeLexer([
Token("NUMBER", "10"),
Token("PLUS", "+"),
Token("NUMBER", "12"),
Token("PLUS", "+"),
Token("NUMBER", "-2"),
]), state=state).getint() + state.count
assert self.run(f, []) == 26
| solanolabs/rply | tests/test_ztranslation.py | Python | bsd-3-clause | 2,200 |
# -*- coding: utf-8 -*-
#
# zambiaureport documentation build configuration file, created by
# sphinx-quickstart.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = []
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'zambiaureport'
copyright = u'2014, Andre Lesa'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.1'
# The full version, including alpha/beta/rc tags.
release = '0.1'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'zambiaureportdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'zambiaureport.tex', u'zambiaureport Documentation',
u'Andre Lesa', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'zambiaureport', u'zambiaureport Documentation',
[u'Andre Lesa'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'zambiaureport', u'zambiaureport Documentation',
u'Andre Lesa', 'zambiaureport',
'Zambia U-Report reference implementation.','Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| unicef-zambia/zambia-ureport | docs/conf.py | Python | bsd-3-clause | 7,753 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2020, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
'''
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
import logging # isort:skip
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Bokeh imports
from .notebook import run_notebook_hook
from .state import curstate
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
__all__ = (
'output_file',
'output_notebook',
'reset_output',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
def output_file(filename, title="Bokeh Plot", mode=None, root_dir=None):
'''Configure the default output state to generate output saved
to a file when :func:`show` is called.
Does not change the current ``Document`` from ``curdoc()``. File and notebook
output may be active at the same time, so e.g., this does not clear the
effects of ``output_notebook()``.
Args:
filename (str) : a filename for saving the HTML document
title (str, optional) : a title for the HTML document (default: "Bokeh Plot")
mode (str, optional) : how to include BokehJS (default: ``'cdn'``)
One of: ``'inline'``, ``'cdn'``, ``'relative(-dev)'`` or
``'absolute(-dev)'``. See :class:`bokeh.resources.Resources` for more details.
root_dir (str, optional) : root directory to use for 'absolute' resources. (default: None)
This value is ignored for other resource types, e.g. ``INLINE`` or
``CDN``.
Returns:
None
.. note::
Generally, this should be called at the beginning of an interactive
session or the top of a script.
.. warning::
This output file will be overwritten on every save, e.g., each time
show() or save() is invoked.
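    Example:
        A minimal usage sketch (assumes the standard ``bokeh.plotting``
        ``figure``/``show`` entry points; filename and data are
        illustrative):
        .. code-block:: python
            from bokeh.plotting import figure, show
            from bokeh.io import output_file
            output_file("lines.html", title="demo")
            p = figure()
            p.line([1, 2, 3], [2, 4, 3])
            show(p)  # writes lines.html and opens it in a browser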
'''
curstate().output_file(
filename,
title=title,
mode=mode,
root_dir=root_dir
)
def output_notebook(resources=None, verbose=False, hide_banner=False, load_timeout=5000, notebook_type='jupyter'):
''' Configure the default output state to generate output in notebook cells
when :func:`show` is called. Note that, :func:`show` may be called multiple
times in a single cell to display multiple objects in the output cell. The
objects will be displayed in order.
Args:
resources (Resource, optional) :
How and where to load BokehJS from (default: CDN)
verbose (bool, optional) :
whether to display detailed BokehJS banner (default: False)
hide_banner (bool, optional):
whether to hide the Bokeh banner (default: False)
load_timeout (int, optional) :
Timeout in milliseconds when plots assume load timed out (default: 5000)
notebook_type (string, optional):
Notebook type (default: jupyter)
Returns:
None
.. note::
Generally, this should be called at the beginning of an interactive
session or the top of a script.
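    Example:
        A minimal sketch for a Jupyter session (the figure ``p`` is assumed
        to come from ``bokeh.plotting``):
        .. code-block:: python
            from bokeh.io import output_notebook, show
            output_notebook()  # load BokehJS into the notebook
            show(p)            # renders p in the current output cell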
'''
# verify notebook_type first in curstate().output_notebook
curstate().output_notebook(notebook_type)
run_notebook_hook(notebook_type, 'load', resources, verbose, hide_banner, load_timeout)
def reset_output(state=None):
''' Clear the default state of all output modes.
Returns:
None
'''
curstate().reset()
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| ericmjl/bokeh | bokeh/io/output.py | Python | bsd-3-clause | 4,608 |
from django import template
from django.utils.safestring import mark_safe
from mezzanine.conf import settings
from mezzanine_developer_extension.utils import refactor_html
register = template.Library()
# Checking settings.TEMPLATE_STYLE.
# Possible values are:
# - mezzanine_developer_extension.styles.macos
# - mezzanine_developer_extension.styles.ubuntu
# - mezzanine_developer_extension.styles.windows
_prefix = "mezzanine_developer_extension.styles"
try:
if settings.TERMINAL_STYLE not in \
["%s.macos" % _prefix, "%s.ubuntu" % _prefix, "%s.windows" % _prefix]:
# If the user has specified a wrong terminal styling format, we
# raise an exception warning about this.
msg = "Wrong terminal style format. Check the value of TERMINAL_STYLE"\
" in your settings.py file."
raise Exception(msg)
except AttributeError:
msg = "You have not specified a terminal output format. You have to"\
" define the attribute TERMINAL_STYLE in your settings.py"
raise Exception(msg)
@register.filter(name='safe_developer')
def safe_developer(content, style="macos"):
"""
Renders content without cleaning the original.
Replaces the terminal divs for a more complext html layout.
"""
new_content = refactor_html(content, style)
    return mark_safe(new_content)
| educalleja/mezzanine-developer-extension | mezzanine_developer_extension/templatetags/devfilters.py | Python | bsd-3-clause | 1,349 |
from datetime import datetime
from pymongo.connection import Connection
from django.db import models
from eventtracker.conf import settings
def get_mongo_collection():
"Open a connection to MongoDB and return the collection to use."
if settings.RIGHT_MONGODB_HOST:
connection = Connection.paired(
left=(settings.MONGODB_HOST, settings.MONGODB_PORT),
right=(settings.RIGHT_MONGODB_HOST, settings.RIGHT_MONGODB_PORT)
)
else:
connection = Connection(host=settings.MONGODB_HOST, port=settings.MONGODB_PORT)
return connection[settings.MONGODB_DB][settings.MONGODB_COLLECTION]
def save_event(collection, event, timestamp, params):
"Save the event in MongoDB collection"
collection.insert({
'event': event,
'timestamp': datetime.fromtimestamp(timestamp),
'params': params
})
class Event(models.Model):
"Dummy model for development."
timestamp = models.DateTimeField(auto_now_add=True)
event = models.SlugField()
params = models.TextField()
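if __name__ == '__main__':
    # Illustrative smoke test, not part of the original module: assumes a
    # reachable MongoDB configured through eventtracker.conf.settings; the
    # event name and params are made up.
    import time
    collection = get_mongo_collection()
    save_event(collection, 'demo.event', time.time(), {'source': 'manual'})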
| ella/django-event-tracker | eventtracker/models.py | Python | bsd-3-clause | 1,073 |
# encoding: utf-8
import sys
sys.path.insert(0, "../src")
def urlopen(*args, **kwargs):
# Only parse one arg: the url
return Urls[args[0]]
# Provide a simple hashtable to contain the content of the urls and
# provide a mock object similar to what will be returned from the
# real urlopen() function calls
from io import StringIO
from time import time
import re
from nose.tools import with_setup
class MockUrlContent(StringIO):
def __init__(self, content):
super(MockUrlContent, self).__init__(content)
self.headers = {
'last-modified': time()
}
def close(self):
pass
scheme_re = re.compile(r'file:(/+)?')
class MockUrlCache(dict):
def __setitem__(self, name, content):
super(MockUrlCache, self).__setitem__(name, MockUrlContent(content))
def __getitem__(self, name):
if name in self:
return super(MockUrlCache, self).__getitem__(name)
# Strip off 'file:[///]' from url
elif name.startswith('file:'):
try:
name= scheme_re.sub('', name)
return super(MockUrlCache, self).__getitem__(name)
except:
# Fall through
pass
# urlopen raises ValueError if unable to load content (not KeyError)
raise ValueError("{0}: Cannot find file content".format(name))
Urls = MockUrlCache()
def clear_configs():
pass
@with_setup(clear_configs)
def testImportContent():
"Cannot import content from a file"
from xmlconfig import getConfig
Urls.clear()
Urls["file:file.txt"] = "Content embedded in a file"
Urls["config.xml"] = \
u"""<?xml version="1.0" encoding="utf-8"?>
<config>
<constants>
<string key="import" src="file:file.txt"/>
</constants>
</config>
"""
conf=getConfig()
conf.load("config.xml")
assert conf.get("import") == "Content embedded in a file"
@with_setup(clear_configs)
def testImportConfig():
"Cannot import another config file"
from xmlconfig import getConfig
Urls.clear()
Urls["config2.xml"] = \
"""<?xml version="1.0"?>
<config>
<constants>
<string key="key22">This was imported from config2.xml</string>
</constants>
</config>
"""
Urls["config.xml"] = \
u"""<?xml version="1.0" encoding="utf-8"?>
<config>
<constants namespace="import" src="file:config2.xml"/>
<constants>
<string key="imported">%(import:key22)</string>
</constants>
</config>
"""
conf=getConfig()
conf.load("config.xml")
assert conf.get("imported") == "This was imported from config2.xml"
@with_setup(clear_configs)
def testCircularImport():
"Property detect circluar importing"
from xmlconfig import getConfig
Urls.clear()
Urls["config2.xml"] = \
"""<?xml version="1.0"?>
<config>
<constants namespace="circular" src="file:config.xml"/>
<constants>
<string key="key22">This was imported from config2.xml</string>
<string key="foreign">
Namespace changed in %(circular:key4.import)
</string>
</constants>
</config>
"""
Urls["config.xml"] = \
"""<?xml version="1.0"?>
<config>
<constants namespace="import" src="file:config2.xml"/>
<constants>
<section key="key4">
<string key="key5">value2</string>
<string key="import">%(import:key22)</string>
</section>
</constants>
</config>
"""
conf=getConfig()
conf.load("config.xml")
assert conf.get("import:foreign") == \
"Namespace changed in This was imported from config2.xml"
@with_setup(clear_configs)
def testRelativeImport():
"""Transfer leading absolute or relative path to the location of
documents imported"""
from xmlconfig import getConfig
Urls["../config/config2.xml"] = \
"""<?xml version="1.0"?>
<config>
<constants>
<string key="key22">This was imported from config2.xml</string>
</constants>
</config>
"""
Urls["../config/config.xml"] = \
"""<?xml version="1.0" encoding="utf-8"?>
<config>
<constants namespace="import" src="file:config2.xml"/>
<constants>
<string key="imported">%(import:key22)</string>
</constants>
</config>
"""
conf=getConfig()
conf.load("../config/config.xml")
assert conf.get("imported") == "This was imported from config2.xml"
| greezybacon/xmlconfig | test/testImport.py | Python | bsd-3-clause | 4,610 |
# -*- coding: utf-8 -*-
#
# complexity documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
cwd = os.getcwd()
parent = os.path.dirname(cwd)
sys.path.append(parent)
import organigrammi
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'openpa-organigrammi'
copyright = u'2014, Simone Dalla'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = organigrammi.__version__
# The full version, including alpha/beta/rc tags.
release = organigrammi.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'openpa-organigrammidoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'openpa-organigrammi.tex', u'openpa-organigrammi Documentation',
u'Simone Dalla', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'openpa-organigrammi', u'openpa-organigrammi Documentation',
[u'Simone Dalla'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'openpa-organigrammi', u'openpa-organigrammi Documentation',
u'Simone Dalla', 'openpa-organigrammi', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| simodalla/pympa-affarigenerali-OLD | docs/conf.py | Python | bsd-3-clause | 8,210 |
# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.
# This file is released into the public domain. Generated by
# versioneer-0.19 (https://github.com/python-versioneer/python-versioneer)
"""Git implementation of _version.py."""
import errno
import os
import re
import subprocess
import sys
def get_keywords():
"""Get the keywords needed to look up the version information."""
# these strings will be replaced by git during git-archive.
# setup.py/versioneer.py will grep for the variable names, so they must
# each be defined on a line of their own. _version.py will just call
# get_keywords().
git_refnames = "$Format:%d$"
git_full = "$Format:%H$"
git_date = "$Format:%ci$"
keywords = {"refnames": git_refnames, "full": git_full, "date": git_date}
return keywords
class VersioneerConfig:
"""Container for Versioneer configuration parameters."""
def get_config():
"""Create, populate and return the VersioneerConfig() object."""
# these strings are filled in when 'setup.py versioneer' creates
# _version.py
cfg = VersioneerConfig()
cfg.VCS = "git"
cfg.style = "pep440"
cfg.tag_prefix = "v"
cfg.parentdir_prefix = ""
cfg.versionfile_source = "jxl2txt/_version.py"
cfg.verbose = False
return cfg
class NotThisMethod(Exception):
"""Exception raised if a method is not valid for the current scenario."""
LONG_VERSION_PY = {}
HANDLERS = {}
def register_vcs_handler(vcs, method): # decorator
"""Create decorator to mark a method as the handler of a VCS."""
def decorate(f):
"""Store f in HANDLERS[vcs][method]."""
if vcs not in HANDLERS:
HANDLERS[vcs] = {}
HANDLERS[vcs][method] = f
return f
return decorate
def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False,
env=None):
"""Call the given command(s)."""
assert isinstance(commands, list)
p = None
for c in commands:
try:
dispcmd = str([c] + args)
# remember shell=False, so use git.cmd on windows, not just git
p = subprocess.Popen([c] + args, cwd=cwd, env=env,
stdout=subprocess.PIPE,
stderr=(subprocess.PIPE if hide_stderr
else None))
break
except EnvironmentError:
e = sys.exc_info()[1]
if e.errno == errno.ENOENT:
continue
if verbose:
print("unable to run %s" % dispcmd)
print(e)
return None, None
else:
if verbose:
print("unable to find command, tried %s" % (commands,))
return None, None
stdout = p.communicate()[0].strip().decode()
if p.returncode != 0:
if verbose:
print("unable to run %s (error)" % dispcmd)
print("stdout was %s" % stdout)
return None, p.returncode
return stdout, p.returncode
def versions_from_parentdir(parentdir_prefix, root, verbose):
"""Try to determine the version from the parent directory name.
Source tarballs conventionally unpack into a directory that includes both
the project name and a version string. We will also support searching up
two directory levels for an appropriately named parent directory
"""
rootdirs = []
for i in range(3):
dirname = os.path.basename(root)
if dirname.startswith(parentdir_prefix):
return {"version": dirname[len(parentdir_prefix):],
"full-revisionid": None,
"dirty": False, "error": None, "date": None}
else:
rootdirs.append(root)
root = os.path.dirname(root) # up a level
if verbose:
print("Tried directories %s but none started with prefix %s" %
(str(rootdirs), parentdir_prefix))
raise NotThisMethod("rootdir doesn't start with parentdir_prefix")
@register_vcs_handler("git", "get_keywords")
def git_get_keywords(versionfile_abs):
"""Extract version information from the given file."""
# the code embedded in _version.py can just fetch the value of these
# keywords. When used from setup.py, we don't want to import _version.py,
# so we do it with a regexp instead. This function is not used from
# _version.py.
keywords = {}
try:
f = open(versionfile_abs, "r")
for line in f.readlines():
if line.strip().startswith("git_refnames ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["refnames"] = mo.group(1)
if line.strip().startswith("git_full ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["full"] = mo.group(1)
if line.strip().startswith("git_date ="):
mo = re.search(r'=\s*"(.*)"', line)
if mo:
keywords["date"] = mo.group(1)
f.close()
except EnvironmentError:
pass
return keywords
@register_vcs_handler("git", "keywords")
def git_versions_from_keywords(keywords, tag_prefix, verbose):
"""Get version information from git keywords."""
if not keywords:
raise NotThisMethod("no keywords at all, weird")
date = keywords.get("date")
if date is not None:
# Use only the last line. Previous lines may contain GPG signature
# information.
date = date.splitlines()[-1]
# git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant
# datestamp. However we prefer "%ci" (which expands to an "ISO-8601
# -like" string, which we must then edit to make compliant), because
# it's been around since git-1.5.3, and it's too difficult to
# discover which version we're using, or to work around using an
# older one.
date = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
refnames = keywords["refnames"].strip()
if refnames.startswith("$Format"):
if verbose:
print("keywords are unexpanded, not using")
raise NotThisMethod("unexpanded keywords, not a git-archive tarball")
refs = set([r.strip() for r in refnames.strip("()").split(",")])
# starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
# just "foo-1.0". If we see a "tag: " prefix, prefer those.
TAG = "tag: "
tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
if not tags:
# Either we're using git < 1.8.3, or there really are no tags. We use
# a heuristic: assume all version tags have a digit. The old git %d
# expansion behaves like git log --decorate=short and strips out the
# refs/heads/ and refs/tags/ prefixes that would let us distinguish
# between branches and tags. By ignoring refnames without digits, we
# filter out many common branch names like "release" and
# "stabilization", as well as "HEAD" and "master".
tags = set([r for r in refs if re.search(r'\d', r)])
if verbose:
print("discarding '%s', no digits" % ",".join(refs - tags))
if verbose:
print("likely tags: %s" % ",".join(sorted(tags)))
for ref in sorted(tags):
# sorting will prefer e.g. "2.0" over "2.0rc1"
if ref.startswith(tag_prefix):
r = ref[len(tag_prefix):]
if verbose:
print("picking %s" % r)
return {"version": r,
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": None,
"date": date}
# no suitable tags, so version is "0+unknown", but full hex is still there
if verbose:
print("no suitable tags, using unknown + full revision id")
return {"version": "0+unknown",
"full-revisionid": keywords["full"].strip(),
"dirty": False, "error": "no suitable tags", "date": None}
@register_vcs_handler("git", "pieces_from_vcs")
def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command):
"""Get version from 'git describe' in the root of the source tree.
This only gets called if the git-archive 'subst' keywords were *not*
expanded, and _version.py hasn't already been rewritten with a short
version string, meaning we're inside a checked out source tree.
"""
GITS = ["git"]
if sys.platform == "win32":
GITS = ["git.cmd", "git.exe"]
out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root,
hide_stderr=True)
if rc != 0:
if verbose:
print("Directory %s not under git control" % root)
raise NotThisMethod("'git rev-parse --git-dir' returned error")
# if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty]
# if there isn't one, this yields HEX[-dirty] (no NUM)
describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty",
"--always", "--long",
"--match", "%s*" % tag_prefix],
cwd=root)
# --long was added in git-1.5.5
if describe_out is None:
raise NotThisMethod("'git describe' failed")
describe_out = describe_out.strip()
full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
if full_out is None:
raise NotThisMethod("'git rev-parse' failed")
full_out = full_out.strip()
pieces = {}
pieces["long"] = full_out
pieces["short"] = full_out[:7] # maybe improved later
pieces["error"] = None
# parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty]
# TAG might have hyphens.
git_describe = describe_out
# look for -dirty suffix
dirty = git_describe.endswith("-dirty")
pieces["dirty"] = dirty
if dirty:
git_describe = git_describe[:git_describe.rindex("-dirty")]
# now we have TAG-NUM-gHEX or HEX
if "-" in git_describe:
# TAG-NUM-gHEX
mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe)
if not mo:
# unparseable. Maybe git-describe is misbehaving?
pieces["error"] = ("unable to parse git-describe output: '%s'"
% describe_out)
return pieces
# tag
full_tag = mo.group(1)
if not full_tag.startswith(tag_prefix):
if verbose:
fmt = "tag '%s' doesn't start with prefix '%s'"
print(fmt % (full_tag, tag_prefix))
pieces["error"] = ("tag '%s' doesn't start with prefix '%s'"
% (full_tag, tag_prefix))
return pieces
pieces["closest-tag"] = full_tag[len(tag_prefix):]
# distance: number of commits since tag
pieces["distance"] = int(mo.group(2))
# commit: short hex revision ID
pieces["short"] = mo.group(3)
else:
# HEX: no tags
pieces["closest-tag"] = None
count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"],
cwd=root)
pieces["distance"] = int(count_out) # total number of commits
# commit date: see ISO-8601 comment in git_versions_from_keywords()
date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"],
cwd=root)[0].strip()
# Use only the last line. Previous lines may contain GPG signature
# information.
date = date.splitlines()[-1]
pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1)
return pieces
def plus_or_dot(pieces):
"""Return a + if we don't already have one, else return a ."""
if "+" in pieces.get("closest-tag", ""):
return "."
return "+"
def render_pep440(pieces):
"""Build up version string, with post-release "local version identifier".
Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you
get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty
Exceptions:
1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += plus_or_dot(pieces)
rendered += "%d.g%s" % (pieces["distance"], pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
else:
# exception #1
rendered = "0+untagged.%d.g%s" % (pieces["distance"],
pieces["short"])
if pieces["dirty"]:
rendered += ".dirty"
return rendered
def render_pep440_pre(pieces):
"""TAG[.post0.devDISTANCE] -- No -dirty.
Exceptions:
1: no tags. 0.post0.devDISTANCE
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += ".post0.dev%d" % pieces["distance"]
else:
# exception #1
rendered = "0.post0.dev%d" % pieces["distance"]
return rendered
def render_pep440_post(pieces):
"""TAG[.postDISTANCE[.dev0]+gHEX] .
The ".dev0" means dirty. Note that .dev0 sorts backwards
(a dirty tree will appear "older" than the corresponding clean one),
but you shouldn't be releasing software with -dirty anyways.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += plus_or_dot(pieces)
rendered += "g%s" % pieces["short"]
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
rendered += "+g%s" % pieces["short"]
return rendered
def render_pep440_old(pieces):
"""TAG[.postDISTANCE[.dev0]] .
The ".dev0" means dirty.
Exceptions:
1: no tags. 0.postDISTANCE[.dev0]
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"] or pieces["dirty"]:
rendered += ".post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
else:
# exception #1
rendered = "0.post%d" % pieces["distance"]
if pieces["dirty"]:
rendered += ".dev0"
return rendered
def render_git_describe(pieces):
"""TAG[-DISTANCE-gHEX][-dirty].
Like 'git describe --tags --dirty --always'.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
if pieces["distance"]:
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render_git_describe_long(pieces):
"""TAG-DISTANCE-gHEX[-dirty].
Like 'git describe --tags --dirty --always -long'.
The distance/hash is unconditional.
Exceptions:
1: no tags. HEX[-dirty] (note: no 'g' prefix)
"""
if pieces["closest-tag"]:
rendered = pieces["closest-tag"]
rendered += "-%d-g%s" % (pieces["distance"], pieces["short"])
else:
# exception #1
rendered = pieces["short"]
if pieces["dirty"]:
rendered += "-dirty"
return rendered
def render(pieces, style):
"""Render the given version pieces into the requested style."""
if pieces["error"]:
return {"version": "unknown",
"full-revisionid": pieces.get("long"),
"dirty": None,
"error": pieces["error"],
"date": None}
if not style or style == "default":
style = "pep440" # the default
if style == "pep440":
rendered = render_pep440(pieces)
elif style == "pep440-pre":
rendered = render_pep440_pre(pieces)
elif style == "pep440-post":
rendered = render_pep440_post(pieces)
elif style == "pep440-old":
rendered = render_pep440_old(pieces)
elif style == "git-describe":
rendered = render_git_describe(pieces)
elif style == "git-describe-long":
rendered = render_git_describe_long(pieces)
else:
raise ValueError("unknown style '%s'" % style)
return {"version": rendered, "full-revisionid": pieces["long"],
"dirty": pieces["dirty"], "error": None,
"date": pieces.get("date")}
def get_versions():
"""Get version information or return default if unable to do so."""
# I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
# __file__, we can work backwards from there to the root. Some
# py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
# case we can only use expanded keywords.
cfg = get_config()
verbose = cfg.verbose
try:
return git_versions_from_keywords(get_keywords(), cfg.tag_prefix,
verbose)
except NotThisMethod:
pass
try:
root = os.path.realpath(__file__)
# versionfile_source is the relative path from the top of the source
# tree (where the .git directory might live) to this file. Invert
# this to find the root from __file__.
for i in cfg.versionfile_source.split('/'):
root = os.path.dirname(root)
except NameError:
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to find root of source tree",
"date": None}
try:
pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose)
return render(pieces, cfg.style)
except NotThisMethod:
pass
try:
if cfg.parentdir_prefix:
return versions_from_parentdir(cfg.parentdir_prefix, root, verbose)
except NotThisMethod:
pass
return {"version": "0+unknown", "full-revisionid": None,
"dirty": None,
"error": "unable to compute version", "date": None}
| mrahnis/jxl2txt | jxl2txt/_version.py | Python | bsd-3-clause | 18,658 |
from . import Cl, conformalize
layout_orig, blades_orig = Cl(3)
layout, blades, stuff = conformalize(layout_orig)
locals().update(blades)
locals().update(stuff)
# for shorter reprs
layout.__name__ = 'layout'
layout.__module__ = __name__
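# Usage sketch (illustrative): the locals().update calls above inject the
# blade names (e1, e2, ...) and the conformal helpers returned by
# conformalize() in `stuff`, such as `up`/`down`:
# >>> from clifford.g3c import e1, e2, up, down
# >>> X = up(e1 + 2*e2)   # embed a G3 point in the conformal model
# >>> down(X)             # project it back to G3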
| arsenovic/clifford | clifford/g3c.py | Python | bsd-3-clause | 240 |
"""
Unit tests to ensure that we can call reset_traits/delete on a
property trait (regression tests for Github issue #67).
"""
from traits import _py2to3
from traits.api import Any, HasTraits, Int, Property, TraitError
from traits.testing.unittest_tools import unittest
class E(HasTraits):
a = Property(Any)
b = Property(Int)
class TestPropertyDelete(unittest.TestCase):
def test_property_delete(self):
e = E()
with self.assertRaises(TraitError):
del e.a
with self.assertRaises(TraitError):
del e.b
def test_property_reset_traits(self):
e = E()
unresetable = e.reset_traits()
_py2to3.assertCountEqual(self, unresetable, ['a', 'b'])
| burnpanck/traits | traits/tests/test_property_delete.py | Python | bsd-3-clause | 730 |
from django import forms
from ncdjango.interfaces.arcgis.form_fields import SrField
class PointForm(forms.Form):
x = forms.FloatField()
y = forms.FloatField()
    projection = SrField()
| consbio/ncdjango | ncdjango/interfaces/data/forms.py | Python | bsd-3-clause | 195 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUAvatar(NURESTObject):
""" Represents a Avatar in the VSD
Notes:
Avatar
"""
__rest_name__ = "avatar"
__resource_name__ = "avatars"
## Constants
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a Avatar instance
Notes:
You can specify all parameters while calling this methods.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> avatar = NUAvatar(id=u'xxxx-xxx-xxx-xxx', name=u'Avatar')
>>> avatar = NUAvatar(data=my_dict)
"""
super(NUAvatar, self).__init__()
# Read/Write Attributes
self._last_updated_by = None
self._last_updated_date = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._owner = None
self._external_id = None
self._type = None
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
self.expose_attribute(local_name="type", remote_name="type", attribute_type=str, is_required=False, is_unique=False)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum number of Metadata objects is returned, as set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a maximum number of Metadata objects is returned, as set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
@property
def type(self):
""" Get type value.
Notes:
The image type
"""
return self._type
@type.setter
def type(self, value):
""" Set type value.
Notes:
The image type
"""
self._type = value
| nuagenetworks/vspk-python | vspk/v6/nuavatar.py | Python | bsd-3-clause | 9,831 |
# -*- coding: utf-8 -*-
"""
Display currently playing song from Google Play Music Desktop Player.
Configuration parameters:
cache_timeout: how often we refresh this module in seconds (default 5)
format: specify the items and ordering of the data in the status bar.
        These are a 1:1 match to gpmdp-remote's options (default is '♫ {info}').
Format of status string placeholders:
See `gpmdp-remote help`. Simply surround the items you want displayed (i.e. `album`)
with curly braces (i.e. `{album}`) and place as-desired in the format string.
{info} Print info about now playing song
{title} Print current song title
{artist} Print current song artist
{album} Print current song album
{album_art} Print current song album art URL
{time_current} Print current song time in milliseconds
{time_total} Print total song time in milliseconds
{status} Print whether GPMDP is paused or playing
{current} Print now playing song in "artist - song" format
{help} Print this help message
Requires:
gpmdp: http://www.googleplaymusicdesktopplayer.com/
gpmdp-remote: https://github.com/iandrewt/gpmdp-remote
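Example configuration (illustrative; any placeholder listed above works
inside `format`):
    gpmdp {
        format = "♫ {artist} - {title}"
    }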
@author Aaron Fields https://twitter.com/spirotot
@license BSD
"""
from time import time
from subprocess import check_output
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 5
format = u'♫ {info}'
@staticmethod
def _run_cmd(cmd):
return check_output(['gpmdp-remote', cmd]).decode('utf-8').strip()
def gpmdp(self, i3s_output_list, i3s_config):
if self._run_cmd('status') == 'Paused':
result = ''
else:
cmds = ['info', 'title', 'artist', 'album', 'status', 'current',
'time_total', 'time_current', 'album_art']
data = {}
for cmd in cmds:
if '{%s}' % cmd in self.format:
data[cmd] = self._run_cmd(cmd)
result = self.format.format(**data)
response = {
'cached_until': time() + self.cache_timeout,
'full_text': result
}
return response
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| Spirotot/py3status | py3status/modules/gpmdp.py | Python | bsd-3-clause | 2,411 |
#!/usr/bin/python
import os
# With the addition of Keystone, to use an openstack cloud you should
# authenticate against keystone, which returns a **Token** and **Service
# Catalog**. The catalog contains the endpoint for all services the
# user/tenant has access to - including nova, glance, keystone, swift.
#
# *NOTE*: Using the 2.0 *auth api* does not mean that compute api is 2.0. We
# will use the 1.1 *compute api*
os.environ['OS_AUTH_URL'] = "https://keystone.rc.nectar.org.au:5000/v2.0/"
# With the addition of Keystone we have standardized on the term **tenant**
# as the entity that owns the resources.
os.environ['OS_TENANT_ID'] = "123456789012345678901234567890"
os.environ['OS_TENANT_NAME'] = "tenant_name"
# In addition to the owning entity (tenant), openstack stores the entity
# performing the action as the **user**.
os.environ['OS_USERNAME'] = "joe.bloggs@uni.edu.au"
# With Keystone you pass the keystone password.
os.environ['OS_PASSWORD'] = "????????????????????"
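if __name__ == '__main__':
    # Quick sanity check (illustrative only): confirm the credentials are
    # exported before handing off to an OpenStack client.
    for key in ('OS_AUTH_URL', 'OS_TENANT_ID', 'OS_TENANT_NAME', 'OS_USERNAME'):
        print("%s=%s" % (key, os.environ.get(key)))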
| wettenhj/mytardis-swift-uploader | openrc.py | Python | bsd-3-clause | 994 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from .local import Local # noqa
from .production import Production # noqa
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app
| HEG-Arc/Appagoo | appagoo/config/__init__.py | Python | bsd-3-clause | 288 |
# Possible discounts:
# - Node (administer inline with nodes)
# - Bulk amounts on nodes
# - User
# - Group of users
# - Order (this is more-or-less a voucher)
# - Shipping costs
# Possible amounts:
# - Percentage
# - Fixed amount
# Flag indicating if a discount can be combined with other discounts.
# Boolean "offer" to include in list of offers. Default to true if discount is at node level.
# Save all applied discounts when ordering in a ManyToMany relationship with Order.
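# A minimal sketch of how the notes above could translate into a model.
# Everything below is illustrative: field names and choices are assumptions,
# not a finished schema, and the Node/User/Order relations are omitted.
from django.db import models
class Discount(models.Model):
    PERCENTAGE, FIXED = 'pct', 'fix'
    AMOUNT_TYPES = [(PERCENTAGE, 'Percentage'), (FIXED, 'Fixed amount')]
    amount_type = models.CharField(max_length=3, choices=AMOUNT_TYPES)
    amount = models.DecimalField(max_digits=10, decimal_places=2)
    # Flag indicating if this discount can be combined with other discounts.
    combinable = models.BooleanField(default=False)
    # Include in the list of offers; sensible default for node-level discounts.
    offer = models.BooleanField(default=True)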
| bhell/jimi | jimi/jimi/price/models/discount.py | Python | bsd-3-clause | 478 |
import matplotlib.pyplot as plt
import numpy as np
import scalpplot
from scalpplot import plot_scalp
from positions import POS_10_5
from scipy import signal
def plot_timeseries(frames, time=None, offset=None, color='k', linestyle='-'):
frames = np.asarray(frames)
    if offset is None:
offset = np.max(np.std(frames, axis=0)) * 3
    if time is None:
time = np.arange(frames.shape[0])
plt.plot(time, frames - np.mean(frames, axis=0) +
np.arange(frames.shape[1]) * offset, color=color, ls=linestyle)
def plot_scalpgrid(scalps, sensors, locs=POS_10_5, width=None,
clim=None, cmap=None, titles=None):
'''
Plots a grid with scalpplots. Scalps contains the different scalps in the
rows, sensors contains the names for the columns of scalps, locs is a dict
that maps the sensor-names to locations.
Width determines the width of the grid that contains the plots. Cmap selects
a colormap, for example plt.cm.RdBu_r is very useful for AUC-ROC plots.
  Clim is a list containing the minimum and maximum value mapped to a color.
Titles is an optional list with titles for each subplot.
Returns a list with subplots for further manipulation.
'''
scalps = np.asarray(scalps)
assert scalps.ndim == 2
nscalps = scalps.shape[0]
subplots = []
if not width:
width = int(min(8, np.ceil(np.sqrt(nscalps))))
height = int(np.ceil(nscalps/float(width)))
if not clim:
clim = [np.min(scalps), np.max(scalps)]
plt.clf()
for i in range(nscalps):
subplots.append(plt.subplot(height, width, i + 1))
plot_scalp(scalps[i], sensors, locs, clim=clim, cmap=cmap)
if titles:
plt.title(titles[i])
# plot colorbar next to last scalp
bb = plt.gca().get_position()
plt.colorbar(cax=plt.axes([bb.xmax + bb.width/10, bb.ymin, bb.width/10,
bb.height]), ticks=np.linspace(clim[0], clim[1], 5).round(2))
return subplots
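# --- Usage sketch (not part of the original module) ---
# Illustrates plot_scalpgrid() with random data; the sensor names below
# are assumed to exist in POS_10_5.
if __name__ == '__main__':
  demo_scalps = np.random.rand(4, 4)
  demo_sensors = ['Fz', 'Cz', 'Pz', 'Oz']
  plot_scalpgrid(demo_scalps, demo_sensors, width=2,
    titles=['scalp %d' % i for i in range(4)])
  plt.show()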
| breuderink/psychic | psychic/plots.py | Python | bsd-3-clause | 1,878 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('setlist', '0012_remove_show_leg'),
]
operations = [
migrations.CreateModel(
name='Show2',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('venue', models.ForeignKey(to='setlist.Venue', to_field='id')),
('tour', models.ForeignKey(to='setlist.Tour', to_field='id')),
('date', models.DateField(db_index=True)),
('setlist', models.TextField(default=b'', blank=True)),
('notes', models.TextField(default=b'', blank=True)),
('source', models.TextField(default=b'', blank=True)),
],
options={
},
bases=(models.Model,),
),
]
| tylereaves/26md | setlist/migrations/0013_show2.py | Python | bsd-3-clause | 970 |
#!/usr/bin/env python
import sys
from os.path import *
import os
from pyflann import *
from copy import copy
from numpy import *
from numpy.random import *
import unittest
class Test_PyFLANN_nn(unittest.TestCase):
def setUp(self):
self.nn = FLANN(log_level="warning")
################################################################################
# The typical
def test_nn_2d_10pt(self):
self.__nd_random_test_autotune(2, 2)
def test_nn_autotune_2d_1000pt(self):
self.__nd_random_test_autotune(2, 1000)
def test_nn_autotune_100d_1000pt(self):
self.__nd_random_test_autotune(100, 1000)
def test_nn_autotune_500d_100pt(self):
self.__nd_random_test_autotune(500, 100)
#
# ##########################################################################################
# # Stress it should handle
#
def test_nn_stress_1d_1pt_kmeans_autotune(self):
self.__nd_random_test_autotune(1, 1)
def __ensure_list(self,arg):
if type(arg)!=list:
return [arg]
else:
return arg
def __nd_random_test_autotune(self, dim, N, num_neighbors = 1, **kwargs):
"""
        Generate random dataset and query points, compute the exact
        neighbors with a linear scan, then check that the autotuned
        index reaches each requested target precision.
"""
seed(0)
x = rand(N, dim)
xq = rand(N, dim)
perm = permutation(N)
# compute ground truth nearest neighbors
gt_idx, gt_dist = self.nn.nn(x,xq,
algorithm='linear',
num_neighbors=num_neighbors)
for tp in [0.70, 0.80, 0.90]:
nidx,ndist = self.nn.nn(x, xq,
algorithm='autotuned',
sample_fraction=1.0,
num_neighbors = num_neighbors,
target_precision = tp, checks=-2, **kwargs)
correctness = 0.0
for i in xrange(N):
l1 = self.__ensure_list(nidx[i])
l2 = self.__ensure_list(gt_idx[i])
correctness += float(len(set(l1).intersection(l2)))/num_neighbors
correctness /= N
self.assert_(correctness >= tp*0.9,
'failed #1: targ_prec=%f, N=%d,correctness=%f' % (tp, N, correctness))
if __name__ == '__main__':
unittest.main()
| piskvorky/flann | test/test_nn_autotune.py | Python | bsd-3-clause | 2,411 |
# -*- coding: utf-8 -*-
from collections import OrderedDict
import locale
from optparse import make_option
from verify.management.commands import VerifyBaseCommand
from verify.models import *
from verify.politici_models import *
from django.db.models import Q, Count
__author__ = 'guglielmo'
class Command(VerifyBaseCommand):
"""
    Report of overall gender statistics, at the national level,
    for all bodies of all institutions.
    It can be limited to one or more institutions by passing a list of institution_id values.
"""
args = '<institution_id institution_id ...>'
help = "Check that all locations have only male components (list locations with female components)."
option_list = VerifyBaseCommand.option_list
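    # Example invocation (hypothetical institution ids):
    #   python manage.py generi_in_istituzioni 1 2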
def execute_verification(self, *args, **options):
self.csv_headers = ["ISTITUZIONE", "INCARICO", "N_DONNE", "N_UOMINI", "N_TOTALI", "PERC_DONNE", "PERC_UOMINI"]
institutions = OpInstitution.objects.using('politici').all()
if args:
institutions = institutions.filter(id__in=args)
self.logger.info(
"Verification {0} launched with institutions limited to {1}".format(
self.__class__.__module__, ",".join(institutions.values_list('id', flat=True))
)
)
else:
self.logger.info(
"Verification {0} launched for all institutions".format(
self.__class__.__module__
)
)
self.ok_locs = []
self.ko_locs = []
for institution in institutions:
charge_types_ids = OpInstitutionCharge.objects.using('politici').\
filter(date_end__isnull=True,
content__deleted_at__isnull=True).\
filter(institution=institution).\
values_list('charge_type', flat=True).\
distinct()
charge_types = OpChargeType.objects.using('politici').\
filter(id__in=charge_types_ids)
for charge_type in charge_types:
self.logger.info(
"Counting {0} in {1}".format(
charge_type.name, institution.name
)
)
qs = OpInstitutionCharge.objects.using('politici').\
filter(date_end__isnull=True,
content__deleted_at__isnull=True).\
filter(institution=institution,
charge_type=charge_type)
n_tot = qs.count()
n_fem = qs.filter(politician__sex__iexact='f').count()
n_mal = n_tot - n_fem
merged = [institution.name, charge_type.name, n_fem, n_mal, n_tot,]
merged.append(locale.format("%.2f",100. * n_fem / float(n_tot) ))
merged.append(locale.format("%.2f",100. * n_mal / float(n_tot) ))
self.ko_locs.append(merged)
outcome = Verification.OUTCOME.failed
self.logger.info(
"Report for {0} institutions generated.".format(
len(self.ko_locs)
)
)
return outcome
| openpolis/op-verify | project/verify/management/commands/generi_in_istituzioni.py | Python | bsd-3-clause | 3,219 |
#!/usr/bin/env python
from distutils.core import setup
setup(name='django-modeltranslation',
version='0.4.0-alpha1',
description='Translates Django models using a registration approach.',
long_description='The modeltranslation application can be used to '
'translate dynamic content of existing models to an '
'arbitrary number of languages without having to '
'change the original model classes. It uses a '
'registration approach (comparable to Django\'s admin '
'app) to be able to add translations to existing or '
'new projects and is fully integrated into the Django '
'admin backend.',
author='Peter Eschler',
author_email='p.eschler@nmy.de',
maintainer='Dirk Eschler',
maintainer_email='d.eschler@nmy.de',
url='http://code.google.com/p/django-modeltranslation/',
packages=['modeltranslation', 'modeltranslation.management',
'modeltranslation.management.commands'],
package_data={'modeltranslation': ['static/modeltranslation/css/*.css',
'static/modeltranslation/js/*.js']},
include_package_data = True,
requires=['django(>=1.0)'],
download_url='http://django-modeltranslation.googlecode.com/files/django-modeltranslation-0.4.0-alpha1.tar.gz',
classifiers=['Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License'],
license='New BSD')
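# --- Illustrative sketch of the registration approach described in
# long_description above; the model and option names are hypothetical:
#
#   # translation.py in your app
#   from modeltranslation.translator import translator, TranslationOptions
#   from news.models import News
#
#   class NewsTranslationOptions(TranslationOptions):
#       fields = ('title', 'text')
#
#   translator.register(News, NewsTranslationOptions)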
| google-code-export/django-modeltranslation | setup.py | Python | bsd-3-clause | 1,631 |
#!/usr/bin/python
# Copyright (c) 2009, Purdue University
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright notice, this
# list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# Neither the name of the Purdue University nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for Credential cache library."""
__copyright__ = 'Copyright (C) 2009, Purdue University'
__license__ = 'BSD'
__version__ = '#TRUNK#'
import unittest
import os
import roster_core
from roster_server import credentials
CONFIG_FILE = 'test_data/roster.conf' # Example in test_data
SCHEMA_FILE = '../roster-core/data/database_schema.sql'
DATA_FILE = 'test_data/test_data.sql'
class TestCredentialsLibrary(unittest.TestCase):
def setUp(self):
self.config_instance = roster_core.Config(file_name=CONFIG_FILE)
self.cred_instance = credentials.CredCache(self.config_instance,
u'sharrell')
db_instance = self.config_instance.GetDb()
db_instance.CreateRosterDatabase()
data = open(DATA_FILE, 'r').read()
db_instance.StartTransaction()
db_instance.cursor.execute(data)
db_instance.EndTransaction()
db_instance.close()
self.core_instance = roster_core.Core(u'sharrell', self.config_instance)
def is_valid_uuid (self, uuid):
"""
TAKEN FROM THE BLUEZ MODULE
is_valid_uuid (uuid) -> bool
returns True if uuid is a valid 128-bit UUID.
valid UUIDs are always strings taking one of the following forms:
XXXX
XXXXXXXX
XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX
where each X is a hexadecimal digit (case insensitive)
"""
try:
if len (uuid) == 4:
if int (uuid, 16) < 0: return False
elif len (uuid) == 8:
if int (uuid, 16) < 0: return False
elif len (uuid) == 36:
pieces = uuid.split ("-")
if len (pieces) != 5 or \
len (pieces[0]) != 8 or \
len (pieces[1]) != 4 or \
len (pieces[2]) != 4 or \
len (pieces[3]) != 4 or \
len (pieces[4]) != 12:
return False
[ int (p, 16) for p in pieces ]
else:
return False
except ValueError:
return False
except TypeError:
return False
return True
def testCredentials(self):
self.assertTrue(self.cred_instance.Authenticate(u'sharrell', 'test'))
cred_string = self.cred_instance.GetCredentials(u'sharrell', 'test',
self.core_instance)
self.assertEqual(self.cred_instance.CheckCredential(cred_string,
u'sharrell',
self.core_instance),
u'')
self.assertEqual(self.cred_instance.CheckCredential(u'test', u'sharrell',
self.core_instance),
None)
if( __name__ == '__main__' ):
unittest.main()
| stephenlienharrell/roster-dns-management | test/credentials_test.py | Python | bsd-3-clause | 4,275 |
#!/usr/bin/env python
import sys
import hyperdex.client
from hyperdex.client import LessEqual, GreaterEqual, Range, Regex, LengthEquals, LengthLessEqual, LengthGreaterEqual
c = hyperdex.client.Client(sys.argv[1], int(sys.argv[2]))
def to_objectset(xs):
return set([frozenset(x.items()) for x in xs])
assert c.put('kv', 'k', {}) == True
assert c.get('kv', 'k') == {'v': {}}
assert c.put('kv', 'k', {'v': {1: 3.14, 2: 0.25, 3: 1.0}}) == True
assert c.get('kv', 'k') == {'v': {1: 3.14, 2: 0.25, 3: 1.0}}
assert c.put('kv', 'k', {'v': {}}) == True
assert c.get('kv', 'k') == {'v': {}}
| hyc/HyperDex | test/python/DataTypeMapIntFloat.py | Python | bsd-3-clause | 585 |
"""
Room Typeclasses for the TutorialWorld.
This defines special types of Rooms available in the tutorial. To keep
everything in one place we define them together with the custom
commands needed to control them. Those commands could also have been
in a separate module (e.g. if they could have been re-used elsewhere.)
"""
from __future__ import print_function
import random
from evennia import TICKER_HANDLER
from evennia import CmdSet, Command, DefaultRoom
from evennia import utils, create_object, search_object
from evennia import syscmdkeys, default_cmds
from evennia.contrib.tutorial_world.objects import LightSource
# the system error-handling module is defined in the settings. We load the
# given setting here using utils.object_from_module. This way we can use
# it regardless of if we change settings later.
from django.conf import settings
_SEARCH_AT_RESULT = utils.object_from_module(settings.SEARCH_AT_RESULT)
# -------------------------------------------------------------
#
# Tutorial room - parent room class
#
# This room is the parent of all rooms in the tutorial.
# It defines a tutorial command on itself (available to
# all those who are in a tutorial room).
#
# -------------------------------------------------------------
#
# Special command available in all tutorial rooms
class CmdTutorial(Command):
"""
Get help during the tutorial
Usage:
tutorial [obj]
This command allows you to get behind-the-scenes info
about an object or the current location.
"""
key = "tutorial"
aliases = ["tut"]
locks = "cmd:all()"
help_category = "TutorialWorld"
def func(self):
"""
All we do is to scan the current location for an Attribute
called `tutorial_info` and display that.
"""
caller = self.caller
if not self.args:
target = self.obj # this is the room the command is defined on
else:
target = caller.search(self.args.strip())
if not target:
return
helptext = target.db.tutorial_info
if helptext:
caller.msg("|G%s|n" % helptext)
else:
caller.msg("|RSorry, there is no tutorial help available here.|n")
# for the @detail command we inherit from MuxCommand, since
# we want to make use of MuxCommand's pre-parsing of '=' in the
# argument.
class CmdTutorialSetDetail(default_cmds.MuxCommand):
"""
sets a detail on a room
Usage:
@detail <key> = <description>
@detail <key>;<alias>;... = description
Example:
@detail walls = The walls are covered in ...
@detail castle;ruin;tower = The distant ruin ...
This sets a "detail" on the object this command is defined on
(TutorialRoom for this tutorial). This detail can be accessed with
the TutorialRoomLook command sitting on TutorialRoom objects (details
are set as a simple dictionary on the room). This is a Builder command.
We custom parse the key for the ;-separator in order to create
multiple aliases to the detail all at once.
"""
key = "@detail"
locks = "cmd:perm(Builder)"
help_category = "TutorialWorld"
def func(self):
"""
All this does is to check if the object has
the set_detail method and uses it.
"""
if not self.args or not self.rhs:
self.caller.msg("Usage: @detail key = description")
return
if not hasattr(self.obj, "set_detail"):
self.caller.msg("Details cannot be set on %s." % self.obj)
return
for key in self.lhs.split(";"):
# loop over all aliases, if any (if not, this will just be
# the one key to loop over)
self.obj.set_detail(key, self.rhs)
self.caller.msg("Detail set: '%s': '%s'" % (self.lhs, self.rhs))
class CmdTutorialLook(default_cmds.CmdLook):
"""
looks at the room and on details
Usage:
look <obj>
look <room detail>
look *<account>
Observes your location, details at your location or objects
in your vicinity.
Tutorial: This is a child of the default Look command, that also
allows us to look at "details" in the room. These details are
things to examine and offers some extra description without
actually having to be actual database objects. It uses the
return_detail() hook on TutorialRooms for this.
"""
# we don't need to specify key/locks etc, this is already
# set by the parent.
help_category = "TutorialWorld"
def func(self):
"""
Handle the looking. This is a copy of the default look
code except for adding in the details.
"""
caller = self.caller
args = self.args
if args:
# we use quiet=True to turn off automatic error reporting.
# This tells search that we want to handle error messages
# ourself. This also means the search function will always
# return a list (with 0, 1 or more elements) rather than
# result/None.
looking_at_obj = caller.search(args,
# note: excludes room/room aliases
candidates=caller.location.contents + caller.contents,
use_nicks=True, quiet=True)
if len(looking_at_obj) != 1:
# no target found or more than one target found (multimatch)
# look for a detail that may match
detail = self.obj.return_detail(args)
if detail:
self.caller.msg(detail)
return
else:
# no detail found, delegate our result to the normal
# error message handler.
_SEARCH_AT_RESULT(None, caller, args, looking_at_obj)
return
else:
# we found a match, extract it from the list and carry on
# normally with the look handling.
looking_at_obj = looking_at_obj[0]
else:
looking_at_obj = caller.location
if not looking_at_obj:
caller.msg("You have no location to look at!")
return
if not hasattr(looking_at_obj, 'return_appearance'):
# this is likely due to us having an account instead
looking_at_obj = looking_at_obj.character
if not looking_at_obj.access(caller, "view"):
caller.msg("Could not find '%s'." % args)
return
# get object's appearance
caller.msg(looking_at_obj.return_appearance(caller))
# the object's at_desc() method.
looking_at_obj.at_desc(looker=caller)
return
class TutorialRoomCmdSet(CmdSet):
"""
Implements the simple tutorial cmdset. This will overload the look
command in the default CharacterCmdSet since it has a higher
priority (ChracterCmdSet has prio 0)
"""
key = "tutorial_cmdset"
priority = 1
def at_cmdset_creation(self):
"""add the tutorial-room commands"""
self.add(CmdTutorial())
self.add(CmdTutorialSetDetail())
self.add(CmdTutorialLook())
class TutorialRoom(DefaultRoom):
"""
This is the base room type for all rooms in the tutorial world.
It defines a cmdset on itself for reading tutorial info about the location.
"""
def at_object_creation(self):
"""Called when room is first created"""
self.db.tutorial_info = "This is a tutorial room. It allows you to use the 'tutorial' command."
self.cmdset.add_default(TutorialRoomCmdSet)
def at_object_receive(self, new_arrival, source_location):
"""
When an object enter a tutorial room we tell other objects in
the room about it by trying to call a hook on them. The Mob object
uses this to cheaply get notified of enemies without having
to constantly scan for them.
Args:
new_arrival (Object): the object that just entered this room.
source_location (Object): the previous location of new_arrival.
"""
if new_arrival.has_account and not new_arrival.is_superuser:
# this is a character
for obj in self.contents_get(exclude=new_arrival):
if hasattr(obj, "at_new_arrival"):
obj.at_new_arrival(new_arrival)
def return_detail(self, detailkey):
"""
This looks for an Attribute "obj_details" and possibly
returns the value of it.
Args:
detailkey (str): The detail being looked at. This is
case-insensitive.
"""
details = self.db.details
if details:
return details.get(detailkey.lower(), None)
def set_detail(self, detailkey, description):
"""
This sets a new detail, using an Attribute "details".
Args:
detailkey (str): The detail identifier to add (for
aliases you need to add multiple keys to the
same description). Case-insensitive.
description (str): The text to return when looking
at the given detailkey.
"""
if self.db.details:
self.db.details[detailkey.lower()] = description
else:
self.db.details = {detailkey.lower(): description}
# -------------------------------------------------------------
#
# Weather room - room with a ticker
#
# -------------------------------------------------------------
# These are rainy weather strings
WEATHER_STRINGS = (
"The rain coming down from the iron-grey sky intensifies.",
"A gust of wind throws the rain right in your face. Despite your cloak you shiver.",
"The rainfall eases a bit and the sky momentarily brightens.",
"For a moment it looks like the rain is slowing, then it begins anew with renewed force.",
"The rain pummels you with large, heavy drops. You hear the rumble of thunder in the distance.",
"The wind is picking up, howling around you, throwing water droplets in your face. It's cold.",
"Bright fingers of lightning flash over the sky, moments later followed by a deafening rumble.",
"It rains so hard you can hardly see your hand in front of you. You'll soon be drenched to the bone.",
"Lightning strikes in several thundering bolts, striking the trees in the forest to your west.",
"You hear the distant howl of what sounds like some sort of dog or wolf.",
"Large clouds rush across the sky, throwing their load of rain over the world.")
class WeatherRoom(TutorialRoom):
"""
This should probably better be called a rainy room...
This sets up an outdoor room typeclass. At irregular intervals,
the effects of weather will show in the room. Outdoor rooms should
inherit from this.
"""
def at_object_creation(self):
"""
Called when object is first created.
We set up a ticker to update this room regularly.
Note that we could in principle also use a Script to manage
the ticking of the room; the TickerHandler works fine for
simple things like this though.
"""
super(WeatherRoom, self).at_object_creation()
# subscribe ourselves to a ticker to repeatedly call the hook
# "update_weather" on this object. The interval is randomized
# so as to not have all weather rooms update at the same time.
self.db.interval = random.randint(50, 70)
TICKER_HANDLER.add(interval=self.db.interval, callback=self.update_weather, idstring="tutorial")
# this is parsed by the 'tutorial' command on TutorialRooms.
self.db.tutorial_info = \
"This room has a Script running that has it echo a weather-related message at irregular intervals."
def update_weather(self, *args, **kwargs):
"""
Called by the tickerhandler at regular intervals. Even so, we
only update 20% of the time, picking a random weather message
when we do. The tickerhandler requires that this hook accepts
any arguments and keyword arguments (hence the *args, **kwargs
even though we don't actually use them in this example)
"""
if random.random() < 0.2:
# only update 20 % of the time
self.msg_contents("|w%s|n" % random.choice(WEATHER_STRINGS))
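    def at_object_delete(self):
        """
        Sketch (not part of the original tutorial): the ticker added in
        at_object_creation() can be unsubscribed with a matching remove()
        call using the same interval/callback/idstring triple, e.g. when
        the room is deleted.
        """
        TICKER_HANDLER.remove(interval=self.db.interval,
                              callback=self.update_weather,
                              idstring="tutorial")
        return True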
SUPERUSER_WARNING = "\nWARNING: You are playing as a superuser ({name}). Use the {quell} command to\n" \
"play without superuser privileges (many functions and puzzles ignore the \n" \
"presence of a superuser, making this mode useful for exploring things behind \n" \
"the scenes later).\n" \
# ------------------------------------------------------------
#
# Intro Room - unique room
#
# This room marks the start of the tutorial. It sets up properties on
# the player char that is needed for the tutorial.
#
# -------------------------------------------------------------
class IntroRoom(TutorialRoom):
"""
Intro room
properties to customize:
char_health - integer > 0 (default 20)
"""
def at_object_creation(self):
"""
Called when the room is first created.
"""
super(IntroRoom, self).at_object_creation()
self.db.tutorial_info = "The first room of the tutorial. " \
"This assigns the health Attribute to "\
"the account."
def at_object_receive(self, character, source_location):
"""
Assign properties on characters
"""
# setup character for the tutorial
health = self.db.char_health or 20
if character.has_account:
character.db.health = health
character.db.health_max = health
if character.is_superuser:
string = "-" * 78 + SUPERUSER_WARNING + "-" * 78
character.msg("|r%s|n" % string.format(name=character.key, quell="|w@quell|r"))
# -------------------------------------------------------------
#
# Bridge - unique room
#
# Defines a special west-eastward "bridge"-room, a large room that takes
# several steps to cross. It is complete with custom commands and a
# chance of falling off the bridge. This room has no regular exits,
# instead the exitings are handled by custom commands set on the account
# upon first entering the room.
#
# Since one can enter the bridge room from both ends, it is
# divided into five steps:
# westroom <- 0 1 2 3 4 -> eastroom
#
# -------------------------------------------------------------
class CmdEast(Command):
"""
Go eastwards across the bridge.
Tutorial info:
This command relies on the caller having two Attributes
(assigned by the room when entering):
- east_exit: a unique name or dbref to the room to go to
when exiting east.
- west_exit: a unique name or dbref to the room to go to
when exiting west.
The room must also have the following Attributes
        - tutorial_bridge_position: the current position
          on the bridge, 0 - 4.
"""
key = "east"
aliases = ["e"]
locks = "cmd:all()"
help_category = "TutorialWorld"
def func(self):
"""move one step eastwards"""
caller = self.caller
bridge_step = min(5, caller.db.tutorial_bridge_position + 1)
if bridge_step > 4:
# we have reached the far east end of the bridge.
# Move to the east room.
eexit = search_object(self.obj.db.east_exit)
if eexit:
caller.move_to(eexit[0])
else:
caller.msg("No east exit was found for this room. Contact an admin.")
return
caller.db.tutorial_bridge_position = bridge_step
# since we are really in one room, we have to notify others
# in the room when we move.
caller.location.msg_contents("%s steps eastwards across the bridge." % caller.name, exclude=caller)
caller.execute_cmd("look")
# go back across the bridge
class CmdWest(Command):
"""
Go westwards across the bridge.
Tutorial info:
This command relies on the caller having two Attributes
(assigned by the room when entering):
- east_exit: a unique name or dbref to the room to go to
when exiting east.
- west_exit: a unique name or dbref to the room to go to
when exiting west.
The room must also have the following property:
        - tutorial_bridge_position: the current position
          on the bridge, 0 - 4.
"""
key = "west"
aliases = ["w"]
locks = "cmd:all()"
help_category = "TutorialWorld"
def func(self):
"""move one step westwards"""
caller = self.caller
bridge_step = max(-1, caller.db.tutorial_bridge_position - 1)
if bridge_step < 0:
# we have reached the far west end of the bridge.
# Move to the west room.
wexit = search_object(self.obj.db.west_exit)
if wexit:
caller.move_to(wexit[0])
else:
caller.msg("No west exit was found for this room. Contact an admin.")
return
caller.db.tutorial_bridge_position = bridge_step
# since we are really in one room, we have to notify others
# in the room when we move.
caller.location.msg_contents("%s steps westwards across the bridge." % caller.name, exclude=caller)
caller.execute_cmd("look")
BRIDGE_POS_MESSAGES = ("You are standing |wvery close to the bridge's western foundation|n."
" If you go west you will be back on solid ground ...",
"The bridge slopes precariously where it extends eastwards"
" towards the lowest point - the center point of the hang bridge.",
"You are |whalfways|n out on the unstable bridge.",
"The bridge slopes precariously where it extends westwards"
" towards the lowest point - the center point of the hang bridge.",
"You are standing |wvery close to the bridge's eastern foundation|n."
" If you go east you will be back on solid ground ...")
BRIDGE_MOODS = ("The bridge sways in the wind.", "The hanging bridge creaks dangerously.",
"You clasp the ropes firmly as the bridge sways and creaks under you.",
"From the castle you hear a distant howling sound, like that of a large dog or other beast.",
"The bridge creaks under your feet. Those planks does not seem very sturdy.",
"Far below you the ocean roars and throws its waves against the cliff,"
" as if trying its best to reach you.",
"Parts of the bridge come loose behind you, falling into the chasm far below!",
"A gust of wind causes the bridge to sway precariously.",
"Under your feet a plank comes loose, tumbling down. For a moment you dangle over the abyss ...",
"The section of rope you hold onto crumble in your hands,"
" parts of it breaking apart. You sway trying to regain balance.")
FALL_MESSAGE = "Suddenly the plank you stand on gives way under your feet! You fall!" \
"\nYou try to grab hold of an adjoining plank, but all you manage to do is to " \
"divert your fall westwards, towards the cliff face. This is going to hurt ... " \
"\n ... The world goes dark ...\n\n"
class CmdLookBridge(Command):
"""
looks around at the bridge.
Tutorial info:
This command assumes that the room has an Attribute
"fall_exit", a unique name or dbref to the place they end upp
if they fall off the bridge.
"""
key = 'look'
aliases = ["l"]
locks = "cmd:all()"
help_category = "TutorialWorld"
def func(self):
"""Looking around, including a chance to fall."""
caller = self.caller
bridge_position = self.caller.db.tutorial_bridge_position
# this command is defined on the room, so we get it through self.obj
location = self.obj
# randomize the look-echo
message = "|c%s|n\n%s\n%s" % (location.key,
BRIDGE_POS_MESSAGES[bridge_position],
random.choice(BRIDGE_MOODS))
chars = [obj for obj in self.obj.contents_get(exclude=caller) if obj.has_account]
if chars:
# we create the You see: message manually here
message += "\n You see: %s" % ", ".join("|c%s|n" % char.key for char in chars)
self.caller.msg(message)
# there is a chance that we fall if we are on the western or central
# part of the bridge.
if bridge_position < 3 and random.random() < 0.05 and not self.caller.is_superuser:
            # we fall 5% of the time.
fall_exit = search_object(self.obj.db.fall_exit)
if fall_exit:
self.caller.msg("|r%s|n" % FALL_MESSAGE)
self.caller.move_to(fall_exit[0], quiet=True)
# inform others on the bridge
self.obj.msg_contents("A plank gives way under %s's feet and "
"they fall from the bridge!" % self.caller.key)
# custom help command
class CmdBridgeHelp(Command):
"""
Overwritten help command while on the bridge.
"""
key = "help"
aliases = ["h", "?"]
locks = "cmd:all()"
help_category = "Tutorial world"
def func(self):
"""Implements the command."""
string = "You are trying hard not to fall off the bridge ..." \
"\n\nWhat you can do is trying to cross the bridge |weast|n" \
" or try to get back to the mainland |wwest|n)."
self.caller.msg(string)
class BridgeCmdSet(CmdSet):
"""This groups the bridge commands. We will store it on the room."""
key = "Bridge commands"
priority = 1 # this gives it precedence over the normal look/help commands.
def at_cmdset_creation(self):
"""Called at first cmdset creation"""
self.add(CmdTutorial())
self.add(CmdEast())
self.add(CmdWest())
self.add(CmdLookBridge())
self.add(CmdBridgeHelp())
BRIDGE_WEATHER = (
"The rain intensifies, making the planks of the bridge even more slippery.",
"A gust of wind throws the rain right in your face.",
"The rainfall eases a bit and the sky momentarily brightens.",
"The bridge shakes under the thunder of a closeby thunder strike.",
"The rain pummels you with large, heavy drops. You hear the distinct howl of a large hound in the distance.",
"The wind is picking up, howling around you and causing the bridge to sway from side to side.",
"Some sort of large bird sweeps by overhead, giving off an eery screech. Soon it has disappeared in the gloom.",
"The bridge sways from side to side in the wind.",
"Below you a particularly large wave crashes into the rocks.",
"From the ruin you hear a distant, otherwordly howl. Or maybe it was just the wind.")
class BridgeRoom(WeatherRoom):
"""
The bridge room implements an unsafe bridge. It also enters the player into
a state where they get new commands so as to try to cross the bridge.
We want this to result in the account getting a special set of
commands related to crossing the bridge. The result is that it
will take several steps to cross it, despite it being represented
by only a single room.
We divide the bridge into steps:
self.db.west_exit - - | - - self.db.east_exit
0 1 2 3 4
The position is handled by a variable stored on the character
when entering and giving special move commands will
increase/decrease the counter until the bridge is crossed.
    We also have self.db.fall_exit, which points to a gathering
location to end up if we happen to fall off the bridge (used by
the CmdLookBridge command).
"""
def at_object_creation(self):
"""Setups the room"""
# this will start the weather room's ticker and tell
# it to call update_weather regularly.
super(BridgeRoom, self).at_object_creation()
# this identifies the exits from the room (should be the command
# needed to leave through that exit). These are defaults, but you
# could of course also change them after the room has been created.
self.db.west_exit = "cliff"
self.db.east_exit = "gate"
self.db.fall_exit = "cliffledge"
# add the cmdset on the room.
self.cmdset.add_default(BridgeCmdSet)
# since the default Character's at_look() will access the room's
# return_description (this skips the cmdset) when
# first entering it, we need to explicitly turn off the room
# as a normal view target - once inside, our own look will
# handle all return messages.
self.locks.add("view:false()")
def update_weather(self, *args, **kwargs):
"""
This is called at irregular intervals and makes the passage
over the bridge a little more interesting.
"""
        if random.random() < 0.8:
# send a message most of the time
self.msg_contents("|w%s|n" % random.choice(BRIDGE_WEATHER))
def at_object_receive(self, character, source_location):
"""
This hook is called by the engine whenever the player is moved
into this room.
"""
if character.has_account:
# we only run this if the entered object is indeed a player object.
# check so our east/west exits are correctly defined.
wexit = search_object(self.db.west_exit)
eexit = search_object(self.db.east_exit)
fexit = search_object(self.db.fall_exit)
if not (wexit and eexit and fexit):
character.msg("The bridge's exits are not properly configured. "
"Contact an admin. Forcing west-end placement.")
character.db.tutorial_bridge_position = 0
return
if source_location == eexit[0]:
# we assume we enter from the same room we will exit to
character.db.tutorial_bridge_position = 4
else:
# if not from the east, then from the west!
character.db.tutorial_bridge_position = 0
character.execute_cmd("look")
def at_object_leave(self, character, target_location):
"""
This is triggered when the player leaves the bridge room.
"""
if character.has_account:
# clean up the position attribute
del character.db.tutorial_bridge_position
# -------------------------------------------------------------------------------
#
# Dark Room - a room with states
#
# This room limits the movements of its denizens unless they carry an active
# LightSource object (LightSource is defined in
# tutorialworld.objects.LightSource)
#
# -------------------------------------------------------------------------------
DARK_MESSAGES = ("It is pitch black. You are likely to be eaten by a grue.",
"It's pitch black. You fumble around but cannot find anything.",
"You don't see a thing. You feel around, managing to bump your fingers hard against something. Ouch!",
"You don't see a thing! Blindly grasping the air around you, you find nothing.",
"It's totally dark here. You almost stumble over some un-evenness in the ground.",
"You are completely blind. For a moment you think you hear someone breathing nearby ... "
"\n ... surely you must be mistaken.",
"Blind, you think you find some sort of object on the ground, but it turns out to be just a stone.",
"Blind, you bump into a wall. The wall seems to be covered with some sort of vegetation,"
" but its too damp to burn.",
"You can't see anything, but the air is damp. It feels like you are far underground.")
ALREADY_LIGHTSOURCE = "You don't want to stumble around in blindness anymore. You already " \
"found what you need. Let's get light already!"
FOUND_LIGHTSOURCE = "Your fingers bump against a splinter of wood in a corner." \
" It smells of resin and seems dry enough to burn! " \
"You pick it up, holding it firmly. Now you just need to" \
" |wlight|n it using the flint and steel you carry with you."
class CmdLookDark(Command):
"""
Look around in darkness
Usage:
look
Look around in the darkness, trying
to find something.
"""
key = "look"
aliases = ["l", 'feel', 'search', 'feel around', 'fiddle']
locks = "cmd:all()"
help_category = "TutorialWorld"
def func(self):
"""
Implement the command.
This works both as a look and a search command; there is a
random chance of eventually finding a light source.
"""
caller = self.caller
if random.random() < 0.8:
# we don't find anything
caller.msg(random.choice(DARK_MESSAGES))
else:
# we could have found something!
if any(obj for obj in caller.contents if utils.inherits_from(obj, LightSource)):
# we already carry a LightSource object.
caller.msg(ALREADY_LIGHTSOURCE)
else:
# don't have a light source, create a new one.
create_object(LightSource, key="splinter", location=caller)
caller.msg(FOUND_LIGHTSOURCE)
class CmdDarkHelp(Command):
"""
Help command for the dark state.
"""
key = "help"
locks = "cmd:all()"
help_category = "TutorialWorld"
def func(self):
"""
        Replace the help command with a not-so-useful one
"""
string = "Can't help you until you find some light! Try looking/feeling around for something to burn. " \
"You shouldn't give up even if you don't find anything right away."
self.caller.msg(string)
class CmdDarkNoMatch(Command):
"""
This is a system command. Commands with special keys are used to
    override special situations in the game. The CMD_NOMATCH is used
    when the given command is not found in the current command set (it
    replaces Evennia's default behavior of offering command
suggestions)
"""
key = syscmdkeys.CMD_NOMATCH
locks = "cmd:all()"
def func(self):
"""Implements the command."""
self.caller.msg("Until you find some light, there's not much you can do. Try feeling around.")
class DarkCmdSet(CmdSet):
"""
Groups the commands of the dark room together. We also import the
default say command here so that players can still talk in the
darkness.
We give the cmdset the mergetype "Replace" to make sure it
completely replaces whichever command set it is merged onto
(usually the default cmdset)
"""
key = "darkroom_cmdset"
mergetype = "Replace"
priority = 2
def at_cmdset_creation(self):
"""populate the cmdset."""
self.add(CmdTutorial())
self.add(CmdLookDark())
self.add(CmdDarkHelp())
self.add(CmdDarkNoMatch())
self.add(default_cmds.CmdSay)
class DarkRoom(TutorialRoom):
"""
A dark room. This tries to start the DarkState script on all
objects entering. The script is responsible for making sure it is
valid (that is, that there is no light source shining in the room).
The is_lit Attribute is used to define if the room is currently lit
or not, so as to properly echo state changes.
Since this room (in the tutorial) is meant as a sort of catch-all,
we also make sure to heal characters ending up here, since they
may have been beaten up by the ghostly apparition at this point.
"""
def at_object_creation(self):
"""
Called when object is first created.
"""
super(DarkRoom, self).at_object_creation()
self.db.tutorial_info = "This is a room with custom command sets on itself."
# the room starts dark.
self.db.is_lit = False
self.cmdset.add(DarkCmdSet, permanent=True)
def at_init(self):
"""
Called when room is first recached (such as after a reload)
"""
self.check_light_state()
def _carries_light(self, obj):
"""
Checks if the given object carries anything that gives light.
Note that we do NOT look for a specific LightSource typeclass,
but for the Attribute is_giving_light - this makes it easy to
later add other types of light-giving items. We also accept
if there is a light-giving object in the room overall (like if
a splinter was dropped in the room)
"""
return obj.is_superuser or obj.db.is_giving_light or any(o for o in obj.contents if o.db.is_giving_light)
def _heal(self, character):
"""
Heal a character.
"""
health = character.db.health_max or 20
character.db.health = health
def check_light_state(self, exclude=None):
"""
This method checks if there are any light sources in the room.
        If there aren't any, it makes sure to add the dark cmdset to all
characters in the room. It is called whenever characters enter
the room and also by the Light sources when they turn on.
Args:
exclude (Object): An object to not include in the light check.
"""
if any(self._carries_light(obj) for obj in self.contents if obj != exclude):
self.locks.add("view:all()")
self.cmdset.remove(DarkCmdSet)
self.db.is_lit = True
for char in (obj for obj in self.contents if obj.has_account):
# this won't do anything if it is already removed
char.msg("The room is lit up.")
else:
            # no one is carrying light - darken the room
self.db.is_lit = False
self.locks.add("view:false()")
self.cmdset.add(DarkCmdSet, permanent=True)
for char in (obj for obj in self.contents if obj.has_account):
if char.is_superuser:
char.msg("You are Superuser, so you are not affected by the dark state.")
else:
# put players in darkness
char.msg("The room is completely dark.")
def at_object_receive(self, obj, source_location):
"""
Called when an object enters the room.
"""
if obj.has_account:
# a puppeted object, that is, a Character
self._heal(obj)
# in case the new guy carries light with them
self.check_light_state()
def at_object_leave(self, obj, target_location):
"""
In case people leave with the light, we make sure to clear the
DarkCmdSet if necessary. This also works if they are
teleported away.
"""
# since this hook is called while the object is still in the room,
# we exclude it from the light check, to ignore any light sources
# it may be carrying.
self.check_light_state(exclude=obj)
# -------------------------------------------------------------
#
# Teleport room - puzzles solution
#
# This is a sort of puzzle room that requires a certain
# attribute on the entering character to be the same as
# an attribute of the room. If not, the character will
# be teleported away to a target location. This is used
# by the Obelisk - grave chamber puzzle, where one must
# have looked at the obelisk to get an attribute set on
# oneself, and then pick the grave chamber with the
# matching imagery for this attribute.
#
# -------------------------------------------------------------
class TeleportRoom(TutorialRoom):
"""
Teleporter - puzzle room.
Important attributes (set at creation):
puzzle_key - which attr to look for on character
puzzle_value - what char.db.puzzle_key must be set to
     success_teleport_to - where to teleport in case of success
success_teleport_msg - message to echo while teleporting to success
failure_teleport_to - where to teleport to in case of failure
failure_teleport_msg - message to echo while teleporting to failure
"""
def at_object_creation(self):
"""Called at first creation"""
super(TeleportRoom, self).at_object_creation()
# what character.db.puzzle_clue must be set to, to avoid teleportation.
self.db.puzzle_value = 1
# target of successful teleportation. Can be a dbref or a
# unique room name.
self.db.success_teleport_msg = "You are successful!"
self.db.success_teleport_to = "treasure room"
# the target of the failure teleportation.
self.db.failure_teleport_msg = "You fail!"
self.db.failure_teleport_to = "dark cell"
def at_object_receive(self, character, source_location):
"""
This hook is called by the engine whenever the player is moved into
this room.
"""
if not character.has_account:
# only act on player characters.
return
# determine if the puzzle is a success or not
is_success = str(character.db.puzzle_clue) == str(self.db.puzzle_value)
teleport_to = self.db.success_teleport_to if is_success else self.db.failure_teleport_to
# note that this returns a list
results = search_object(teleport_to)
if not results or len(results) > 1:
# we cannot move anywhere since no valid target was found.
character.msg("no valid teleport target for %s was found." % teleport_to)
return
if character.is_superuser:
# superusers don't get teleported
character.msg("Superuser block: You would have been teleported to %s." % results[0])
return
# perform the teleport
if is_success:
character.msg(self.db.success_teleport_msg)
else:
character.msg(self.db.failure_teleport_msg)
# teleport quietly to the new place
character.move_to(results[0], quiet=True, move_hooks=False)
# we have to call this manually since we turn off move_hooks
# - this is necessary to make the target dark room aware of an
# already carried light.
results[0].at_object_receive(character, self)
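# A builder-side sketch (not from the original file) of wiring this puzzle
# up in code; the key and attribute values below are hypothetical examples:
#
#   from evennia import create_object
#   room = create_object(TeleportRoom, key="grave chamber")
#   room.db.puzzle_value = 2           # compared against char.db.puzzle_clue
#   room.db.success_teleport_to = "treasure room"
#   room.db.failure_teleport_to = "dark cell"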
# -------------------------------------------------------------
#
# Outro room - unique exit room
#
# Cleans up the character from all tutorial-related properties.
#
# -------------------------------------------------------------
class OutroRoom(TutorialRoom):
"""
Outro room.
Called when exiting the tutorial, cleans the
character of tutorial-related attributes.
"""
def at_object_creation(self):
"""
Called when the room is first created.
"""
super(OutroRoom, self).at_object_creation()
self.db.tutorial_info = "The last room of the tutorial. " \
"This cleans up all temporary Attributes " \
"the tutorial may have assigned to the "\
"character."
def at_object_receive(self, character, source_location):
"""
Do cleanup.
"""
if character.has_account:
del character.db.health_max
del character.db.health
del character.db.last_climbed
del character.db.puzzle_clue
del character.db.combat_parry_mode
del character.db.tutorial_bridge_position
for obj in character.contents:
if obj.typeclass_path.startswith("evennia.contrib.tutorial_world"):
obj.delete()
character.tags.clear(category="tutorial_world")
| feend78/evennia | evennia/contrib/tutorial_world/rooms.py | Python | bsd-3-clause | 40,655 |
"""
Vision-specific analysis functions.
$Id: featureresponses.py 7714 2008-01-24 16:42:21Z antolikjan $
"""
__version__='$Revision: 7714 $'
from math import fmod,floor,pi,sin,cos,sqrt
import numpy
from numpy.oldnumeric import Float
from numpy import zeros, array, size, empty, object_
#import scipy
try:
import pylab
except ImportError:
print "Warning: Could not import matplotlib; pylab plots will not work."
import param
import topo
from topo.base.cf import CFSheet
from topo.base.sheetview import SheetView
from topo.misc.filepath import normalize_path
from topo.misc.numbergenerator import UniformRandom
from topo.plotting.plotgroup import create_plotgroup, plotgroups
from topo.command.analysis import measure_sine_pref
max_value = 0
global_index = ()
def _complexity_rec(x,y,index,depth,fm):
"""
    Recursive helper function for complexity()
"""
global max_value
global global_index
if depth<size(fm.features):
for i in range(size(fm.features[depth].values)):
_complexity_rec(x,y,index + (i,),depth+1,fm)
else:
if max_value < fm.full_matrix[index][x][y]:
global_index = index
max_value = fm.full_matrix[index][x][y]
def complexity(full_matrix):
global global_index
global max_value
"""This function expects as an input a object of type FullMatrix which contains
responses of all neurons in a sheet to stimuly with different varying parameter values.
One of these parameters (features) has to be phase. In such case it computes the classic
modulation ratio (see Hawken et al. for definition) for each neuron and returns them as a matrix.
"""
rows,cols = full_matrix.matrix_shape
complexity = zeros(full_matrix.matrix_shape)
complex_matrix = zeros(full_matrix.matrix_shape,object_)
fftmeasure = zeros(full_matrix.matrix_shape,Float)
i = 0
for f in full_matrix.features:
if f.name == "phase":
phase_index = i
break
i=i+1
sum = 0.0
res = 0.0
average = 0.0
for x in range(rows):
for y in range(cols):
            complex_matrix[x,y] = []
max_value=-0.01
global_index = ()
_complexity_rec(x,y,(),0,full_matrix)
#compute the sum of the responses over phases given the found index of highest response
iindex = array(global_index)
sum = 0.0
for i in range(size(full_matrix.features[phase_index].values)):
iindex[phase_index] = i
sum = sum + full_matrix.full_matrix[tuple(iindex.tolist())][x][y]
#average
average = sum / float(size(full_matrix.features[phase_index].values))
res = 0.0
#compute the sum of absolute values of the responses minus average
for i in range(size(full_matrix.features[phase_index].values)):
iindex[phase_index] = i
res = res + abs(full_matrix.full_matrix[tuple(iindex.tolist())][x][y] - average)
complex_matrix[x,y] = complex_matrix[x,y] + [full_matrix.full_matrix[tuple(iindex.tolist())][x][y]]
#this is taking away the DC component
#complex_matrix[x,y] -= numpy.min(complex_matrix[x,y])
if x==15 and y==15:
pylab.figure()
pylab.plot(complex_matrix[x,y])
if x==26 and y==26:
pylab.figure()
pylab.plot(complex_matrix[x,y])
#complexity[x,y] = res / (2*sum)
fft = numpy.fft.fft(complex_matrix[x,y]+complex_matrix[x,y]+complex_matrix[x,y]+complex_matrix[x,y],2048)
first_har = 2048/len(complex_matrix[0,0])
if abs(fft[0]) != 0:
fftmeasure[x,y] = 2 *abs(fft[first_har]) /abs(fft[0])
else:
fftmeasure[x,y] = 0
return fftmeasure
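# Worked example (illustrative) of the measure above: for a response
# r(phase) = 1 + cos(phase) sampled at N phases, the FFT has |F0| = N and
# first-harmonic magnitude |F1| = N/2, so 2*|F1|/|F0| = 1 -- a strongly
# phase-modulated, simple-cell-like response. A phase-invariant,
# complex-cell-like response has |F1| ~= 0 and the measure approaches 0.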
def compute_ACDC_orientation_tuning_curves(full_matrix,curve_label,sheet):
""" This function allows and alternative computation of orientation tuning curve where
for each given orientation the response is computed as a maximum of AC or DC component
across the phases instead of the maximum used as a standard in Topographica"""
# this method assumes that only single frequency has been used
i = 0
for f in full_matrix.features:
if f.name == "phase":
phase_index = i
if f.name == "orientation":
orientation_index = i
if f.name == "frequency":
frequency_index = i
i=i+1
print sheet.curve_dict
if not sheet.curve_dict.has_key("orientationACDC"):
sheet.curve_dict["orientationACDC"]={}
sheet.curve_dict["orientationACDC"][curve_label]={}
rows,cols = full_matrix.matrix_shape
for o in xrange(size(full_matrix.features[orientation_index].values)):
s_w = zeros(full_matrix.matrix_shape)
for x in range(rows):
for y in range(cols):
or_response=[]
for p in xrange(size(full_matrix.features[phase_index].values)):
index = [0,0,0]
index[phase_index] = p
index[orientation_index] = o
index[frequency_index] = 0
or_response.append(full_matrix.full_matrix[tuple(index)][x][y])
fft = numpy.fft.fft(or_response+or_response+or_response+or_response,2048)
first_har = 2048/len(or_response)
s_w[x][y] = numpy.maximum(2 *abs(fft[first_har]),abs(fft[0]))
s = SheetView((s_w,sheet.bounds), sheet.name , sheet.precedence, topo.sim.time(),sheet.row_precedence)
sheet.curve_dict["orientationACDC"][curve_label].update({full_matrix.features[orientation_index].values[o]:s})
def phase_preference_scatter_plot(sheet_name,diameter=0.39):
r = UniformRandom(seed=1023)
preference_map = topo.sim[sheet_name].sheet_views['PhasePreference']
offset_magnitude = 0.03
datax = []
datay = []
(v,bb) = preference_map.view()
for z in zeros(66):
x = (r() - 0.5)*2*diameter
y = (r() - 0.5)*2*diameter
rand = r()
xoff = sin(rand*2*pi)*offset_magnitude
yoff = cos(rand*2*pi)*offset_magnitude
xx = max(min(x+xoff,diameter),-diameter)
yy = max(min(y+yoff,diameter),-diameter)
x = max(min(x,diameter),-diameter)
y = max(min(y,diameter),-diameter)
[xc1,yc1] = topo.sim[sheet_name].sheet2matrixidx(xx,yy)
[xc2,yc2] = topo.sim[sheet_name].sheet2matrixidx(x,y)
if((xc1==xc2) & (yc1==yc2)): continue
datax = datax + [v[xc1,yc1]]
datay = datay + [v[xc2,yc2]]
for i in range(0,len(datax)):
datax[i] = datax[i] * 360
datay[i] = datay[i] * 360
if(datay[i] > datax[i] + 180): datay[i]= datay[i]- 360
if((datax[i] > 180) & (datay[i]> 180)): datax[i] = datax[i] - 360; datay[i] = datay[i] - 360
if((datax[i] > 180) & (datay[i] < (datax[i]-180))): datax[i] = datax[i] - 360; #datay[i] = datay[i] - 360
f = pylab.figure()
ax = f.add_subplot(111, aspect='equal')
pylab.plot(datax,datay,'ro')
pylab.plot([0,360],[-180,180])
pylab.plot([-180,180],[0,360])
pylab.plot([-180,-180],[360,360])
ax.axis([-180,360,-180,360])
pylab.xticks([-180,0,180,360], [-180,0,180,360])
pylab.yticks([-180,0,180,360], [-180,0,180,360])
pylab.grid()
pylab.savefig(normalize_path(str(topo.sim.timestr()) + sheet_name + "_scatter.png"))
###############################################################################
# JABALERT: Should we move this plot and command to analysis.py or
# pylabplots.py, where all the rest are?
#
# In any case, it requires generalization; it should not be hardcoded
# to any particular map name, and should just do the right thing for
# most networks for which it makes sense. E.g. it already measures
# the ComplexSelectivity for all measured_sheets, but then
# plot_modulation_ratio only accepts two with specific names.
# plot_modulation_ratio should just plot whatever it is given, and
# then analyze_complexity can simply pass in whatever was measured,
# with the user controlling what is measured using the measure_map
# attribute of each Sheet. That way the complexity of any sheet could
# be measured, which is what we want.
#
# Specific changes needed:
# - Make plot_modulation_ratio accept a list of sheets and
# plot their individual modulation ratios and combined ratio.
# - Remove complex_sheet_name argument, which is no longer needed
# - Make sure it still works fine even if V1Simple doesn't exist;
# as this is just for an optional scatter plot, it's fine to skip
# it.
# - Preferably remove the filename argument by default, so that
# plots will show up in the GUI
def analyze_complexity(full_matrix,simple_sheet_name,complex_sheet_name,filename=None):
"""
Compute modulation ratio for each neuron, to distinguish complex from simple cells.
Uses full_matrix data obtained from measure_or_pref().
If there is a sheet named as specified in simple_sheet_name,
also plots its phase preference as a scatter plot.
"""
import topo
measured_sheets = [s for s in topo.sim.objects(CFSheet).values()
if hasattr(s,'measure_maps') and s.measure_maps]
for sheet in measured_sheets:
        # Divide by two to get into 0-1 scale - that means simple/complex boundary is now at 0.5
complx = array(complexity(full_matrix[sheet]))/2.0
# Should this be renamed to ModulationRatio?
sheet.sheet_views['ComplexSelectivity']=SheetView((complx,sheet.bounds), sheet.name , sheet.precedence, topo.sim.time(),sheet.row_precedence)
import topo.command.pylabplots
topo.command.pylabplots.plot_modulation_ratio(full_matrix,simple_sheet_name=simple_sheet_name,complex_sheet_name=complex_sheet_name,filename=filename)
# Avoid error if no simple sheet exists
try:
phase_preference_scatter_plot(simple_sheet_name,diameter=0.24999)
except AttributeError:
print "Skipping phase preference scatter plot; could not analyze region %s." \
% simple_sheet_name
class measure_and_analyze_complexity(measure_sine_pref):
"""Macro for measuring orientation preference and then analyzing its complexity."""
def __call__(self,**params):
fm = super(measure_and_analyze_complexity,self).__call__(**params)
#from topo.command.analysis import measure_or_pref
#fm = measure_or_pref()
analyze_complexity(fm,simple_sheet_name="V1Simple",complex_sheet_name="V1Complex",filename="ModulationRatio")
pg= create_plotgroup(name='Orientation Preference and Complexity',category="Preference Maps",
doc='Measure preference for sine grating orientation.',
pre_plot_hooks=[measure_and_analyze_complexity.instance()])
pg.add_plot('Orientation Preference',[('Hue','OrientationPreference')])
pg.add_plot('Orientation Preference&Selectivity',[('Hue','OrientationPreference'),
('Confidence','OrientationSelectivity')])
pg.add_plot('Orientation Selectivity',[('Strength','OrientationSelectivity')])
pg.add_plot('Modulation Ratio',[('Strength','ComplexSelectivity')])
pg.add_plot('Phase Preference',[('Hue','PhasePreference')])
pg.add_static_image('Color Key','command/or_key_white_vert_small.png')
| jesuscript/topo-mpi | topo/analysis/vision.py | Python | bsd-3-clause | 11,628 |
# # product
import logging
from django.contrib import messages
from django.contrib.auth.decorators import user_passes_test
from django.urls import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render
from dojo.utils import add_breadcrumb
from dojo.forms import ToolTypeForm
from dojo.models import Tool_Type
logger = logging.getLogger(__name__)
@user_passes_test(lambda u: u.is_staff)
def new_tool_type(request):
if request.method == 'POST':
tform = ToolTypeForm(request.POST, instance=Tool_Type())
if tform.is_valid():
tform.save()
messages.add_message(request,
messages.SUCCESS,
'Tool Type Configuration Successfully Created.',
extra_tags='alert-success')
return HttpResponseRedirect(reverse('tool_type', ))
else:
tform = ToolTypeForm()
add_breadcrumb(title="New Tool Type Configuration", top_level=False, request=request)
return render(request, 'dojo/new_tool_type.html',
{'tform': tform})
@user_passes_test(lambda u: u.is_staff)
def edit_tool_type(request, ttid):
tool_type = Tool_Type.objects.get(pk=ttid)
if request.method == 'POST':
tform = ToolTypeForm(request.POST, instance=tool_type)
if tform.is_valid():
tform.save()
messages.add_message(request,
messages.SUCCESS,
'Tool Type Configuration Successfully Updated.',
extra_tags='alert-success')
            return HttpResponseRedirect(reverse('tool_type'))
else:
tform = ToolTypeForm(instance=tool_type)
add_breadcrumb(title="Edit Tool Type Configuration", top_level=False, request=request)
return render(request,
'dojo/edit_tool_type.html',
{
'tform': tform,
})
@user_passes_test(lambda u: u.is_staff)
def tool_type(request):
confs = Tool_Type.objects.all().order_by('name')
add_breadcrumb(title="Tool Type List", top_level=not len(request.GET), request=request)
return render(request,
'dojo/tool_type.html',
{'confs': confs,
})
| rackerlabs/django-DefectDojo | dojo/tool_type/views.py | Python | bsd-3-clause | 2,344 |
# -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant le paramètre 'voir' de la commande 'chemin'."""
from primaires.format.fonctions import oui_ou_non
from primaires.interpreteur.masque.parametre import Parametre
from primaires.pnj.chemin import FLAGS
class PrmVoir(Parametre):
"""Commande 'chemin voir'.
"""
def __init__(self):
"""Constructeur du paramètre"""
Parametre.__init__(self, "voir", "view")
self.schema = "<cle>"
self.aide_courte = "affiche le détail d'un chemin"
self.aide_longue = \
"Cette commande permet d'obtenir plus d'informations sur " \
"un chemin (ses flags actifs, ses salles et sorties...)."
def ajouter(self):
"""Méthode appelée lors de l'ajout de la commande à l'interpréteur"""
cle = self.noeud.get_masque("cle")
cle.proprietes["regex"] = r"'[a-z0-9_:]{3,}'"
def interpreter(self, personnage, dic_masques):
"""Interprétation du paramètre"""
cle = dic_masques["cle"].cle
if cle not in importeur.pnj.chemins:
personnage << "|err|Ce chemin n'existe pas.|ff|"
return
chemin = importeur.pnj.chemins[cle]
msg = "Détail sur le chemin {} :".format(chemin.cle)
msg += "\n Flags :"
for nom_flag in FLAGS.keys():
msg += "\n {}".format(nom_flag.capitalize())
msg += " : " + oui_ou_non(chemin.a_flag(nom_flag))
msg += "\n Salles du chemin :"
if len(chemin.salles) == 0:
msg += "\n Aucune"
else:
for salle, direction in chemin.salles.items():
msg += "\n " + salle.ident.ljust(20) + " "
msg += direction.ljust(10)
if salle in chemin.salles_retour and \
chemin.salles_retour[salle]:
msg += " (retour " + chemin.salles_retour[salle] + ")"
personnage << msg
| vlegoff/tsunami | src/primaires/pnj/commandes/chemin/voir.py | Python | bsd-3-clause | 3,490 |
# This is where the tests go.
| praekelt/ummeli | ummeli/providers/tests.py | Python | bsd-3-clause | 29 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" This package contains the qibuild actions. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
| aldebaran/qibuild | python/qibuild/actions/__init__.py | Python | bsd-3-clause | 365 |
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is configman
#
# The Initial Developer of the Original Code is
# Mozilla Foundation
# Portions created by the Initial Developer are Copyright (C) 2011
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# K Lars Lohn, lars@mozilla.com
# Peter Bengtsson, peterbe@mozilla.com
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
import sys
import re
import datetime
import types
import inspect
import collections
import json
from required_config import RequiredConfig
from namespace import Namespace
from .datetime_util import datetime_from_ISO_string as datetime_converter
from .datetime_util import date_from_ISO_string as date_converter
import datetime_util
#------------------------------------------------------------------------------
def option_value_str(an_option):
"""return an instance of Option's value as a string.
The option instance doesn't actually have to be from the Option class. All
it requires is that the passed option instance has a ``value`` attribute.
"""
if an_option.value is None:
return ''
try:
converter = to_string_converters[type(an_option.value)]
s = converter(an_option.value)
except KeyError:
if not isinstance(an_option.value, basestring):
s = unicode(an_option.value)
else:
s = an_option.value
if an_option.from_string_converter in converters_requiring_quotes:
s = "'''%s'''" % s
return s
#------------------------------------------------------------------------------
def str_dict_keys(a_dict):
"""return a modified dict where all the keys that are anything but str get
converted to str.
E.g.
>>> result = str_dict_keys({u'name': u'Peter', u'age': 99, 1: 2})
>>> # can't compare whole dicts in doctests
>>> result['name']
u'Peter'
>>> result['age']
99
>>> result[1]
2
The reason for this is that in Python <= 2.6.4 doing
``MyClass(**{u'name': u'Peter'})`` would raise a TypeError
Note that only unicode types are converted to str types.
The reason for that is you might have a class that looks like this::
class Option(object):
def __init__(self, foo=None, bar=None, **kwargs):
...
And it's being used like this::
Option(**{u'foo':1, u'bar':2, 3:4})
Then you don't want to change that {3:4} part which becomes part of
`**kwargs` inside the __init__ method.
Using integers as parameter keys is a silly example but the point is that
due to the python 2.6.4 bug only unicode keys are converted to str.
"""
new_dict = {}
for key in a_dict:
if isinstance(key, unicode):
new_dict[str(key)] = a_dict[key]
else:
new_dict[key] = a_dict[key]
return new_dict
#------------------------------------------------------------------------------
def io_converter(input_str):
""" a conversion function for to select stdout, stderr or open a file for
writing"""
if type(input_str) is str:
input_str_lower = input_str.lower()
if input_str_lower == 'stdout':
return sys.stdout
if input_str_lower == 'stderr':
return sys.stderr
return open(input_str, "w")
return input_str
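# Illustrative behaviour of io_converter (added note, not in the original
# source): 'stdout' and 'stderr' are matched case-insensitively and return
# the corresponding sys streams; any other string is opened as a file for
# writing, and non-string inputs are passed through unchanged.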
#------------------------------------------------------------------------------
def timedelta_converter(input_str):
"""a conversion function for time deltas"""
if isinstance(input_str, basestring):
days, hours, minutes, seconds = 0, 0, 0, 0
details = input_str.split(':')
if len(details) >= 4:
days = int(details[-4])
if len(details) >= 3:
hours = int(details[-3])
if len(details) >= 2:
minutes = int(details[-2])
if len(details) >= 1:
seconds = int(details[-1])
return datetime.timedelta(days=days,
hours=hours,
minutes=minutes,
seconds=seconds)
raise ValueError(input_str)
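# Illustrative examples for timedelta_converter (added, not in the original
# source); the accepted format is 'dd:hh:mm:ss' with leading fields optional:
#     timedelta_converter('1:2:3:4')  ->  timedelta(days=1, hours=2,
#                                                   minutes=3, seconds=4)
#     timedelta_converter('90')       ->  timedelta(seconds=90)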
#------------------------------------------------------------------------------
def boolean_converter(input_str):
""" a conversion function for boolean
"""
return input_str.lower() in ("true", "t", "1", "y", "yes")
#------------------------------------------------------------------------------
import __builtin__
_all_named_builtins = dir(__builtin__)
def class_converter(input_str):
""" a conversion that will import a module and class name
"""
if not input_str:
return None
if '.' not in input_str and input_str in _all_named_builtins:
return eval(input_str)
parts = [x.strip() for x in input_str.split('.') if x.strip()]
try:
# first try as a complete module
package = __import__(input_str)
except ImportError:
# it must be a class from a module
if len(parts) == 1:
# since it has only one part, it must be a class from __main__
parts = ('__main__', input_str)
package = __import__('.'.join(parts[:-1]), globals(), locals(), [])
obj = package
for name in parts[1:]:
obj = getattr(obj, name)
return obj
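# Illustrative examples for class_converter (added for clarity, not in the
# original source):
#     class_converter('int')                      -> the builtin int type
#     class_converter('collections.OrderedDict')  -> the OrderedDict class
# The first form resolves through the builtin-name shortcut; the second
# imports the module and walks the dotted attribute path.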
#------------------------------------------------------------------------------
def classes_in_namespaces_converter(template_for_namespace="cls%d",
name_of_class_option='cls',
instantiate_classes=False):
"""take a comma delimited list of class names, convert each class name
into an actual class as an option within a numbered namespace. This
    function creates a closure over a new function. That new function, in
    turn, creates a class derived from RequiredConfig. The inner function,
    'class_list_converter', populates the InnerClassList with a Namespace for
    each of the classes in the class list. In addition, it puts each class
    itself into the subordinate Namespace. The requirement discovery mechanism
    of configman then reads the InnerClassList's required config, pulling in
    the namespaces and associated classes within.
For example, if we have a class list like this: "Alpha, Beta", then this
converter will add the following Namespaces and options to the
configuration:
"cls0" - the subordinate Namespace for Alpha
"cls0.cls" - the option containing the class Alpha itself
"cls1" - the subordinate Namespace for Beta
"cls1.cls" - the option containing the class Beta itself
    Optionally, the 'class_list_converter' inner function can imbue the
InnerClassList's subordinate namespaces with aggregates that will
instantiate classes from the class list. This is a convenience to the
programmer who would otherwise have to know ahead of time what the
namespace names were so that the classes could be instantiated within the
context of the correct namespace. Remember the user could completely
change the list of classes at run time, so prediction could be difficult.
"cls0" - the subordinate Namespace for Alpha
"cls0.cls" - the option containing the class Alpha itself
"cls0.cls_instance" - an instance of the class Alpha
"cls1" - the subordinate Namespace for Beta
"cls1.cls" - the option containing the class Beta itself
"cls1.cls_instance" - an instance of the class Beta
parameters:
template_for_namespace - a template for the names of the namespaces
that will contain the classes and their
associated required config options. The
namespaces will be numbered sequentially. By
default, they will be "cls1", "cls2", etc.
class_option_name - the name to be used for the class option within
the nested namespace. By default, it will choose:
"cls1.cls", "cls2.cls", etc.
instantiate_classes - a boolean to determine if there should be an
aggregator added to each namespace that
instantiates each class. If True, then each
Namespace will contain elements for the class, as
well as an aggregator that will instantiate the
class.
"""
#--------------------------------------------------------------------------
def class_list_converter(class_list_str):
"""This function becomes the actual converter used by configman to
take a string and convert it into the nested sequence of Namespaces,
one for each class in the list. It does this by creating a proxy
class stuffed with its own 'required_config' that's dynamically
generated."""
if isinstance(class_list_str, basestring):
class_list = [x.strip() for x in class_list_str.split(',')]
else:
raise TypeError('must be derivative of a basestring')
#======================================================================
class InnerClassList(RequiredConfig):
"""This nested class is a proxy list for the classes. It collects
all the config requirements for the listed classes and places them
each into their own Namespace.
"""
# we're dynamically creating a class here. The following block of
# code is actually adding class level attributes to this new class
required_config = Namespace() # 1st requirement for configman
subordinate_namespace_names = [] # to help the programmer know
# what Namespaces we added
namespace_template = template_for_namespace # save the template
# for future reference
class_option_name = name_of_class_option # save the class's option
# name for the future
# for each class in the class list
for namespace_index, a_class in enumerate(class_list):
# figure out the Namespace name
namespace_name = template_for_namespace % namespace_index
subordinate_namespace_names.append(namespace_name)
# create the new Namespace
required_config[namespace_name] = Namespace()
# add the option for the class itself
required_config[namespace_name].add_option(
name_of_class_option,
#doc=a_class.__doc__ # not helpful if too verbose
default=a_class,
from_string_converter=class_converter
)
if instantiate_classes:
# add an aggregator to instantiate the class
required_config[namespace_name].add_aggregation(
"%s_instance" % name_of_class_option,
lambda c, lc, a: lc[name_of_class_option](lc))
@classmethod
def to_str(cls):
"""this method takes this inner class object and turns it back
into the original string of classnames. This is used
primarily as for the output of the 'help' option"""
return ', '.join(
py_obj_to_str(v[name_of_class_option].value)
for v in cls.get_required_config().values()
if isinstance(v, Namespace))
return InnerClassList # result of class_list_converter
return class_list_converter # result of classes_in_namespaces_converter
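# Minimal usage sketch for the converter above (added; the class names are
# arbitrary examples):
#     converter = classes_in_namespaces_converter()
#     InnerClassList = converter('collections.OrderedDict, decimal.Decimal')
# InnerClassList.get_required_config() then exposes the namespaces 'cls0' and
# 'cls1', each holding a 'cls' option for the corresponding class.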
#------------------------------------------------------------------------------
def regex_converter(input_str):
return re.compile(input_str)
compiled_regexp_type = type(re.compile(r'x'))
#------------------------------------------------------------------------------
from_string_converters = {
int: int,
float: float,
str: str,
unicode: unicode,
bool: boolean_converter,
dict: json.loads,
datetime.datetime: datetime_converter,
datetime.date: date_converter,
datetime.timedelta: timedelta_converter,
type: class_converter,
types.FunctionType: class_converter,
compiled_regexp_type: regex_converter,
}
#------------------------------------------------------------------------------
def py_obj_to_str(a_thing):
if a_thing is None:
return ''
if inspect.ismodule(a_thing):
return a_thing.__name__
if a_thing.__module__ == '__builtin__':
return a_thing.__name__
if a_thing.__module__ == "__main__":
return a_thing.__name__
if hasattr(a_thing, 'to_str'):
return a_thing.to_str()
return "%s.%s" % (a_thing.__module__, a_thing.__name__)
#------------------------------------------------------------------------------
def list_to_str(a_list):
return ', '.join(to_string_converters[type(x)](x) for x in a_list)
#------------------------------------------------------------------------------
to_string_converters = {
int: str,
float: str,
str: str,
unicode: unicode,
list: list_to_str,
tuple: list_to_str,
bool: lambda x: 'True' if x else 'False',
dict: json.dumps,
datetime.datetime: datetime_util.datetime_to_ISO_string,
datetime.date: datetime_util.date_to_ISO_string,
datetime.timedelta: datetime_util.timedelta_to_str,
type: py_obj_to_str,
types.ModuleType: py_obj_to_str,
types.FunctionType: py_obj_to_str,
compiled_regexp_type: lambda x: x.pattern,
}
#------------------------------------------------------------------------------
#converters_requiring_quotes = [eval, eval_to_regex_converter]
converters_requiring_quotes = [eval, regex_converter]
| AdrianGaudebert/configman | configman/converters.py | Python | bsd-3-clause | 15,400 |
###
# Copyright (c) 2005, Jeremiah Fincher
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions, and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the author of this software nor the name of
# contributors to this software may be used to endorse or promote products
# derived from this software without specific prior written consent.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
###
from supybot.test import *
class MyChannelLoggerTestCase(PluginTestCase):
plugins = ('MyChannelLogger',)
# vim:set shiftwidth=4 softtabstop=4 expandtab textwidth=79:
| octete/octete-supybot-plugins | MyChannelLogger/test.py | Python | bsd-3-clause | 1,757 |
#!/usr/bin/env python
import sys
def inv(s):
if s[0] == '-':
return s[1:]
elif s[0] == '+':
return '-' + s[1:]
else: # plain number
return '-' + s
if len(sys.argv) != 1:
print 'Usage:', sys.argv[0]
sys.exit(1)
for line in sys.stdin:
linesplit = line.strip().split()
if len(linesplit) == 3:
assert(linesplit[0] == 'p')
print('p ' + inv(linesplit[2]) + ' ' + linesplit[1])
elif len(linesplit) == 5:
assert(linesplit[0] == 's')
print('s ' + \
inv(linesplit[2]) + ' ' + linesplit[1] + ' ' + \
inv(linesplit[4]) + ' ' + linesplit[3] )
elif len(linesplit) == 0:
print
| hlzz/dotfiles | graphics/cgal/Segment_Delaunay_graph_Linf_2/developer_scripts/lsprotate90.py | Python | bsd-3-clause | 636 |
def test_default(cookies):
"""
Checks if default configuration is working
"""
result = cookies.bake()
assert result.exit_code == 0
assert result.project.isdir()
assert result.exception is None
| vchaptsev/cookiecutter-django-vue | tests/test_generation.py | Python | bsd-3-clause | 222 |
#!/usr/bin/env python
# -*- mode: python; sh-basic-offset: 4; indent-tabs-mode: nil; coding: utf-8 -*-
# vim: tabstop=4 softtabstop=4 expandtab shiftwidth=4 fileencoding=utf-8
#
# Shell command
# Copyright 2010, Jeremy Grosser <synack@digg.com>
import argparse
import os
import sys
import clusto
from clusto import script_helper
class Console(script_helper.Script):
'''
Use clusto's hardware port mappings to console to a remote server
using the serial console.
'''
def __init__(self):
script_helper.Script.__init__(self)
def _add_arguments(self, parser):
user = os.environ.get('USER')
parser.add_argument('--user', '-u', default=user,
            help='SSH user (you can also set this in clusto.conf '
            'in console.user: --user > clusto.conf:console.user > "%s")' % user)
parser.add_argument('server', nargs=1,
help='Object to console to (IP or name)')
def add_subparser(self, subparsers):
parser = self._setup_subparser(subparsers)
self._add_arguments(parser)
def run(self, args):
try:
server = clusto.get(args.server[0])
if not server:
                raise LookupError('Object "%s" does not exist' % args.server[0])
except Exception as e:
self.debug(e)
            self.error('No object like "%s" was found' % args.server[0])
return 1
server = server[0]
if not hasattr(server, 'console'):
self.error('The object %s lacks a console method' % server.name)
return 2
user = os.environ.get('USER')
if args.user:
self.debug('Grabbing user from parameter')
user = args.user
else:
self.debug('Grabbing user from config file or default')
user = self.get_conf('console.user', user)
self.debug('User is "%s"' % user)
return(server.console(ssh_user=user))
def main():
console, args = script_helper.init_arguments(Console)
return(console.run(args))
if __name__ == '__main__':
sys.exit(main())
| sanyaade-mobiledev/clusto | src/clusto/commands/console.py | Python | bsd-3-clause | 2,107 |
from unittest import TestCase
from django.core.management import call_command
class SendAiPicsStatsTestCase(TestCase):
def test_run_command(self):
call_command('send_ai_pics_stats')
| KlubJagiellonski/pola-backend | pola/tests/commands/test_send_ai_pics_stats.py | Python | bsd-3-clause | 197 |
# Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
import switcheroo
root = LinuxArmFSSwitcheroo(
mem_class=DDR3_1600_x64,
cpu_classes=(AtomicSimpleCPU, AtomicSimpleCPU)
).create_root()
# Setup a custom test method that uses the switcheroo tester that
# switches between CPU models.
run_test = switcheroo.run_test
| prodromou87/gem5 | tests/configs/realview-switcheroo-atomic.py | Python | bsd-3-clause | 2,428 |
# -*- coding: utf-8 -*-
"""
Display a fortune-telling, swimming fish.
Wanda has no use what-so-ever. It only takes up disk space and compilation time,
and if loaded, it also takes up precious bar space, memory, and cpu cycles.
Anybody found using it should be promptly sent for a psychiatric evaluation.
Configuration parameters:
cache_timeout: refresh interval for this module (default 0)
format: display format for this module
(default '{nomotion}[{fortune} ]{wanda}{motion}')
fortune_timeout: refresh interval for fortune (default 60)
Format placeholders:
{fortune} one of many aphorisms or vague prophecies
{wanda} name of one of the most commonly kept freshwater aquarium fish
{motion} biologically propelled motion through a liquid medium
{nomotion} opposite behavior of motion to prevent modules from shifting
Optional:
fortune-mod: the fortune cookie program from bsd games
Examples:
```
# disable motions when not in use
wanda_the_fish {
format = '[\?if=fortune {nomotion}][{fortune} ]'
format += '{wanda}[\?if=fortune {motion}]'
}
# no updates, no motions, yes fortunes, you click
wanda_the_fish {
format = '[{fortune} ]{wanda}'
cache_timeout = -1
}
# wanda moves, fortunes stays
wanda_the_fish {
format = '[{fortune} ]{nomotion}{wanda}{motion}'
}
# wanda is swimming too fast, slow down wanda
wanda_the_fish {
cache_timeout = 2
}
```
@author lasers
SAMPLE OUTPUT
[
{'full_text': 'innovate, v.: To annoy people.'},
{'full_text': ' <', 'color': '#ffa500'},
{'full_text': '\xba', 'color': '#add8e6'},
{'full_text': ',', 'color': '#ff8c00'},
{'full_text': '))', 'color': '#ffa500'},
{'full_text': '))>< ', 'color': '#ff8c00'},
]
idle
[
{'full_text': ' <', 'color': '#ffa500'},
{'full_text': '\xba', 'color': '#add8e6'},
{'full_text': ',', 'color': '#ff8c00'},
{'full_text': '))', 'color': '#ffa500'},
{'full_text': '))>3', 'color': '#ff8c00'},
]
py3status
[
{'full_text': 'py3status is so cool!'},
{'full_text': ' <', 'color': '#ffa500'},
{'full_text': '\xba', 'color': '#add8e6'},
{'full_text': ',', 'color': '#ff8c00'},
{'full_text': '))', 'color': '#ffa500'},
{'full_text': '))>< ', 'color': '#ff8c00'},
]
"""
from time import time
class Py3status:
"""
"""
# available configuration parameters
cache_timeout = 0
format = "{nomotion}[{fortune} ]{wanda}{motion}"
fortune_timeout = 60
def post_config_hook(self):
body = (
"[\?color=orange&show <"
"[\?color=lightblue&show º]"
"[\?color=darkorange&show ,]))"
"[\?color=darkorange&show ))>%s]]"
)
wanda = [body % fin for fin in ("<", ">", "<", "3")]
self.wanda = [self.py3.safe_format(x) for x in wanda]
self.wanda_length = len(self.wanda)
self.index = 0
self.fortune_command = ["fortune", "-as"]
self.fortune = self.py3.storage_get("fortune") or None
self.toggled = self.py3.storage_get("toggled") or False
self.motions = {"motion": " ", "nomotion": ""}
# deal with {new,old} timeout between storage
fortune_timeout = self.py3.storage_get("fortune_timeout")
timeout = None
if self.fortune_timeout != fortune_timeout:
timeout = time() + self.fortune_timeout
self.time = (
timeout or self.py3.storage_get("time") or (time() + self.fortune_timeout)
)
def _set_fortune(self, state=None, new=False):
if not self.fortune_command:
return
if new:
try:
fortune_data = self.py3.command_output(self.fortune_command)
except self.py3.CommandError:
self.fortune = ""
self.fortune_command = None
else:
self.fortune = " ".join(fortune_data.split())
self.time = time() + self.fortune_timeout
elif state is None:
if self.toggled and time() >= self.time:
self._set_fortune(new=True)
else:
self.toggled = state
if state:
self._set_fortune(new=True)
else:
self.fortune = None
def _set_motion(self):
for k in self.motions:
self.motions[k] = "" if self.motions[k] else " "
def _set_wanda(self):
self.index += 1
if self.index >= self.wanda_length:
self.index = 0
def wanda_the_fish(self):
self._set_fortune()
self._set_motion()
self._set_wanda()
return {
"cached_until": self.py3.time_in(self.cache_timeout),
"full_text": self.py3.safe_format(
self.format,
{
"fortune": self.fortune,
"motion": self.motions["motion"],
"nomotion": self.motions["nomotion"],
"wanda": self.wanda[self.index],
},
),
}
def kill(self):
self.py3.storage_set("toggled", self.toggled)
self.py3.storage_set("fortune", self.fortune)
self.py3.storage_set("fortune_timeout", self.fortune_timeout)
self.py3.storage_set("time", self.time)
def on_click(self, event):
if not self.fortune_command:
return
self._set_fortune(not self.toggled)
if __name__ == "__main__":
"""
Run module in test mode.
"""
from py3status.module_test import module_test
module_test(Py3status)
| Andrwe/py3status | py3status/modules/wanda_the_fish.py | Python | bsd-3-clause | 5,569 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2018-10-05 14:25
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [("elections", "0049_move_status")]
operations = [
migrations.RemoveField(model_name="election", name="rejection_reason"),
migrations.RemoveField(model_name="election", name="suggested_status"),
migrations.RemoveField(model_name="election", name="suggestion_reason"),
]
| DemocracyClub/EveryElection | every_election/apps/elections/migrations/0050_auto_20181005_1425.py | Python | bsd-3-clause | 512 |
from __future__ import print_function
import re
import logging
logging.basicConfig(level=logging.INFO)
class Executor(object):
def __init__(self, op_map):
processed = {}
for pattern, f in op_map.iteritems():
s = self._build_pattern_groups(pattern.lower())
processed[re.compile(s)] = f
self.operations = processed
def execute(self, context, op):
s = "%04x" % op
for pattern, f in self.operations.iteritems():
m = pattern.match(s)
if m:
return f(context, *[int(v, base=16) for v in m.groups()])
assert False, s
def _build_pattern_groups(self, pattern):
s = pattern.replace('?', '.')
for id in ['x', 'y', 'z']:
m = re.search('%s+' % id, s)
if m:
s = s[:m.start()] + ('(.{%s})' % (m.end() - m.start())) + s[m.end():]
return '^' + s + '$'
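# Illustrative note on the dispatch above (added comment, not original code):
# _build_pattern_groups turns a pattern such as 'dxyz' into the regular
# expression '^d(.{1})(.{1})(.{1})$'. execute() renders an opcode like
# 0xD125 as the string 'd125', so the match yields the hex groups
# ('1', '2', '5'), which are passed to the handler as the integers (1, 2, 5).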
def set_mem_v0_vx(context, x):
    for i in range(x + 1):  # FX55 stores registers V0 through VX inclusive
context.memory.write_byte(context.index_reg + i, context.v[i])
context.pc += 2
def fill_v0_vx(context, x):
for i in range(x+1):
context.v[i] = context.memory.get_byte(context.index_reg + i)
context.pc += 2
def set_bcd_vx(context, x):
val = int(context.v[x])
context.memory.write_byte(context.index_reg, val / 100)
context.memory.write_byte(context.index_reg + 1, val % 100 / 10)
context.memory.write_byte(context.index_reg + 2, val % 100 % 10)
context.pc += 2
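# Illustrative note (added): for V[X] == 234, set_bcd_vx writes the digits
# 2, 3 and 4 to memory at I, I+1 and I+2 (hundreds, tens and ones).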
def set_i_font(context, x):
context.index_reg = context.memory.get_font_address(context.v[x])
context.pc += 2
def add_reg_ind(context, x):
context.index_reg += context.v[x]
context.pc += 2
def set_delay_timer(context, x):
context.delay_timer = context.v[x]
context.pc += 2
def set_sound_timer(context, x):
context.sound_timer = context.v[x]
context.pc += 2
def set_vx_key_pressed(context, x):
context.v[x] = context.keypad.wait_for_keypress()
context.pc += 2
def set_vx_delay_timer(context, x):
context.v[x] = context.delay_timer
context.pc += 2
def skip_key_vx(context, x, result=True):
if context.keypad.is_keypressed(context.v[x]) == result:
context.pc += 2
context.pc += 2
def draw_sprite(context, x, y, n):
sprite = []
for cb in range(n):
sprite.append(context.memory.get_byte(context.index_reg + cb))
collision = context.screen.draw(context.v[x], context.v[y], sprite)
context.v[15] = collision
context.pc += 2
def jump_nnn_v0(context, nnn):
context.pc = context.v[0] + nnn
def set_vx_rand(context, x, nn):
import random
context.v[x] = random.randint(0, 0xFF) & nn
context.pc += 2
def jump_noteq(context, x, y):
if context.v[x] != context.v[y]:
context.pc += 2
context.pc += 2
def shift_vy_left(context, x, y):
    context.v[15] = context.v[y] >> 7  # V[F] gets the most significant bit of V[Y]
    context.v[x] = (context.v[y] << 1) & 0xFF
context.pc += 2
def shift_right(context, x, y):
context.v[15] = context.v[y] & 0x1
context.v[x] = context.v[y] >> 1
context.pc += 2
def sub_vx_vy_vf(context, x, y):
    logging.info('Setting V[X] = V[Y] - V[X], V[F] = 1 if V[Y] > V[X]')
    # Opcode 8XY7: V[X] = V[Y] - V[X], with V[F] set when no borrow occurs
    context.v[15] = 1 if context.v[y] > context.v[x] else 0
    context.v[x] = (context.v[y] - context.v[x]) % 256
    context.pc += 2
def add_vx_vy(context, x, y):
logging.info('Setting V[X] = V[X] + V[Y]')
val = context.v[x] + context.v[y]
context.v[15] = 1 if val > 255 else 0
context.v[x] = val % 256
context.pc += 2
def sub_vx_vy(context, x, y):
logging.info('Setting V[X] = V[X] - V[Y]')
val = context.v[x] - context.v[y]
context.v[15] = 1 if val < 0 else 0
context.v[x] = val % 256
context.pc += 2
def set_vx_or_vy(context, x, y):
logging.info('Setting V[X] = V[X] | V[Y]')
context.v[x] = context.v[x] | context.v[y]
context.pc += 2
def set_vx_xor_vy(context, x, y):
logging.info('Setting V[X] = V[X] ^ V[Y]')
context.v[x] = context.v[x] ^ context.v[y]
context.pc += 2
def set_vx_and_vy(context, x, y):
logging.info('Setting V[X] = V[X] & V[Y]')
context.v[x] = context.v[x] & context.v[y]
context.pc += 2
def set_vx_vy(context, x, y):
logging.info('Setting V[X] = V[Y]')
context.v[x] = context.v[y]
context.pc += 2
def add_reg(context, x, nnn):
logging.info('Adding NNN to V[X]')
context.v[x] = (context.v[x] + nnn) % 256
context.pc += 2
def set_i(context, nnn):
logging.info('Setting NNN to index_reg')
context.index_reg = nnn
context.pc += 2
def pop_stack(context):
logging.info('Returning from a subroutine')
context.pc = context.stack.pop()
def call_rca1082(context, address): #TODO
print("operation not implemented yet:", address)
    context.pc += 2  # every CHIP-8 instruction is two bytes wide
def clear(context):
logging.info('Clearing screen')
context.screen.clear()
context.pc += 2
def jump(context, address):
logging.info('Jump at 0x%2x address' % address)
context.pc = address
def call(context, address):
logging.info('Calling subroutine at 0x%2x address' % address)
context.pc += 2
context.stack.append(context.pc)
context.pc = address
def skip_equal(context, x, nnn, ifeq=True):
logging.info('Skip if V[X] === NNN is %s' % ifeq)
if (context.v[x] == nnn) == ifeq:
context.pc += 2
context.pc += 2
def skip_eq_reg(context, x, y):
logging.info('Skip if V[X] === V[Y]')
if context.v[x] == context.v[y]:
context.pc += 2
context.pc += 2
def set_reg(context, x, nnn):
logging.info('Set NNN to cpu reg V[x]')
context.v[x] = nnn
context.pc += 2
op_map = {
'0?E0': clear,
'0?EE': pop_stack,
'0XXX': call_rca1082,
'1XXX': jump,
'2XXX': call,
'3XYY': skip_equal,
'4XYY': lambda context, x, nn: skip_equal(context, x, nn, ifeq = False),
'5XY0': skip_eq_reg,
'6XYY': set_reg,
'7XYY': add_reg,
'8XY0': set_vx_vy,
'8XY1': set_vx_or_vy,
'8XY2': set_vx_and_vy,
'8XY3': set_vx_xor_vy,
'8XY4': add_vx_vy,
'8XY5': sub_vx_vy,
'8XY6': shift_right,
'8XY7': sub_vx_vy_vf,
'8XYE': shift_vy_left,
'9XY0': jump_noteq,
'AXXX': set_i,
'BXXX': jump_nnn_v0,
'CXYY': set_vx_rand,
'DXYZ': draw_sprite,
    'EX9E': skip_key_vx,  # skip if the key in V[X] is pressed
    'EXA1': lambda context, x: skip_key_vx(context, x, result=False),  # skip if it is not
'FX07': set_vx_delay_timer,
'FX0A': set_vx_key_pressed,
'FX15': set_delay_timer,
'FX18': set_sound_timer,
'FX1E': add_reg_ind,
'FX29': set_i_font,
'FX33': set_bcd_vx,
'FX55': set_mem_v0_vx,
'FX65': fill_v0_vx
}
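# Minimal usage sketch (added; 'context' is hypothetical and must provide the
# attributes the handlers touch, e.g. pc, v, memory, screen, keypad):
#
#     executor = Executor(op_map)
#     executor.execute(context, 0x00E0)   # matches '0?E0' and calls clear()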
| martindimondo/PyC8 | chip8/operations.py | Python | bsd-3-clause | 6,673 |
# -*- coding: utf-8 -*-
"""
Created on Thu Jan 03 10:16:39 2013
@author: Grahesh
"""
import pandas
from qstkutil import DataAccess as da
import numpy as np
import math
import copy
import qstkutil.qsdateutil as du
import datetime as dt
import qstkutil.DataAccess as da
import qstkutil.tsutil as tsu
import qstkstudy.EventProfiler as ep
"""
Accepts a list of symbols along with start and end date
Returns the Event Matrix which is a pandas Datamatrix
Event matrix has the following structure :
|IBM |GOOG|XOM |MSFT| GS | JP |
(d1)|nan |nan | 1 |nan |nan | 1 |
(d2)|nan | 1 |nan |nan |nan |nan |
(d3)| 1 |nan | 1 |nan | 1 |nan |
(d4)|nan | 1 |nan | 1 |nan |nan |
...................................
...................................
Also, d1 = start date
nan = no information about any event.
1 = status bit(positively confirms the event occurence)
"""
# Get the data from the data store
storename = "NSEData" # get data from our daily prices source
# Available field names: open, close, high, low, close, actual_close, volume
closefield = "close"
volumefield = "volume"
window = 10
def getHalfYearEndDates(timestamps):
newTS=[]
tempYear=timestamps[0].year
flag=1
for x in range(0, len(timestamps)-1):
if(timestamps[x].year==tempYear):
if(timestamps[x].month==4 and flag==1):
newTS.append(timestamps[x-1])
flag=0
if(timestamps[x].month==10):
newTS.append(timestamps[x-1])
tempYear=timestamps[x].year+1
flag=1
return newTS
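# Illustrative note (added): for daily timestamps covering e.g. 2011, the
# function above returns the trading days just before the first April day and
# the first October day, which serve as the half-year-end events below.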
def findEvents(symbols, startday,endday, marketSymbol,verbose=False):
# Reading the Data for the list of Symbols.
timeofday=dt.timedelta(hours=16)
timestamps = du.getNSEdays(startday,endday,timeofday)
endOfHalfYear=getHalfYearEndDates(timestamps)
dataobj = da.DataAccess('NSEData')
if verbose:
print __name__ + " reading data"
# Reading the Data
close = dataobj.get_data(timestamps, symbols, closefield)
# Completing the Data - Removing the NaN values from the Matrix
close = (close.fillna(method='ffill')).fillna(method='backfill')
# Calculating Daily Returns for the Market
tsu.returnize0(close.values)
# Calculating the Returns of the Stock Relative to the Market
    # So if a stock went up 5% and the market rose 3%, the return relative to the market is 2%
mktneutDM = close - close[marketSymbol]
np_eventmat = copy.deepcopy(mktneutDM)
for sym in symbols:
for time in timestamps:
np_eventmat[sym][time]=np.NAN
if verbose:
print __name__ + " finding events"
# Generating the Event Matrix
# Event described is : Analyzing half year events for given stocks.
for symbol in symbols:
for i in endOfHalfYear:
            np_eventmat[symbol][i] = 1.0  # marking the event with the status bit
return np_eventmat
#################################################
################ MAIN CODE ######################
#################################################
symbols = np.loadtxt('NSE500port.csv',dtype='S13',comments='#', skiprows=1)
# You might get a message about some files being missing, don't worry about it.
#symbols =['SPY','BFRE','ATCS','RSERF','GDNEF','LAST','ATTUF','JBFCF','CYVA','SPF','XPO','EHECF','TEMO','AOLS','CSNT','REMI','GLRP','AIFLY','BEE','DJRT','CHSTF','AICAF']
#symbols=['NSE','3MINDIA.NS','AARTIIND.NS','ABAN.NS','ABB.NS','ABGSHIP.NS','ABIRLANUV.NS','ACC.NS','ADANIENT.NS','ADANIPORT.NS','ADANIPOWE.NS','ADVANTA.NS','ALLCARGO.NS','AIAENG.NS','AIL.NS','AZKOINDIA.NS']
startday = dt.datetime(2011,1,1)
endday = dt.datetime(2012,1,1)
eventMatrix = findEvents(symbols,startday,endday,marketSymbol='NSE500',verbose=True)
eventMatrix.to_csv('eventmatrix.csv', sep=',')
eventProfiler = ep.EventProfiler(eventMatrix,startday,endday,lookback_days=20,lookforward_days=20,verbose=True)
eventProfiler.study(filename="HalfYearEventStudy.jpg",plotErrorBars=True,plotMarketNeutral=True,plotEvents=False,marketSymbol='NSE500')
| grahesh/Stock-Market-Event-Analysis | Examples/Event Analysis/Half-Yearly End/Half_Year_End_Analysis.py | Python | bsd-3-clause | 4,522 |
#!/usr/bin/python
# Copyright (c) 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
A chain with four possible intermediates with different notBefore and notAfter
dates, for testing path bulding prioritization.
"""
import sys
sys.path += ['../..']
import gencerts
DATE_A = '150101120000Z'
DATE_B = '150102120000Z'
DATE_C = '180101120000Z'
DATE_D = '180102120000Z'
root = gencerts.create_self_signed_root_certificate('Root')
root.set_validity_range(DATE_A, DATE_D)
int_ac = gencerts.create_intermediate_certificate('Intermediate', root)
int_ac.set_validity_range(DATE_A, DATE_C)
int_ad = gencerts.create_intermediate_certificate('Intermediate', root)
int_ad.set_validity_range(DATE_A, DATE_D)
int_ad.set_key(int_ac.get_key())
int_bc = gencerts.create_intermediate_certificate('Intermediate', root)
int_bc.set_validity_range(DATE_B, DATE_C)
int_bc.set_key(int_ac.get_key())
int_bd = gencerts.create_intermediate_certificate('Intermediate', root)
int_bd.set_validity_range(DATE_B, DATE_D)
int_bd.set_key(int_ac.get_key())
target = gencerts.create_end_entity_certificate('Target', int_ac)
target.set_validity_range(DATE_A, DATE_D)
gencerts.write_chain('The root', [root], out_pem='root.pem')
gencerts.write_chain('Intermediate with validity range A..C',
[int_ac], out_pem='int_ac.pem')
gencerts.write_chain('Intermediate with validity range A..D',
[int_ad], out_pem='int_ad.pem')
gencerts.write_chain('Intermediate with validity range B..C',
[int_bc], out_pem='int_bc.pem')
gencerts.write_chain('Intermediate with validity range B..D',
[int_bd], out_pem='int_bd.pem')
gencerts.write_chain('The target', [target], out_pem='target.pem')
| endlessm/chromium-browser | net/data/path_builder_unittest/validity_date_prioritization/generate-certs.py | Python | bsd-3-clause | 1,833 |
# Copyright (c) 2017 David Sorokin <david.sorokin@gmail.com>
#
# Licensed under BSD3. See the LICENSE.txt file in the root of this distribution.
from simulation.aivika.modeler.model import *
from simulation.aivika.modeler.port import *
from simulation.aivika.modeler.stream import *
from simulation.aivika.modeler.data_type import *
from simulation.aivika.modeler.pdf import *
def uniform_random_stream(transact_type, min_delay, max_delay):
"""Return a new stream of transacts with random delays distributed uniformly."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomUniformStream ' + str(min_delay) + ' ' + str(max_delay)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
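# Illustrative usage sketch (added; 'transact_type' is assumed to be a
# transact type already registered with a model, which happens outside this
# module):
#
#     s = uniform_random_stream(transact_type, 3.0, 7.0)
#
# The returned StreamPort emits transacts whose inter-arrival delays are
# drawn uniformly from the interval [3.0, 7.0] of model time units.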
def uniform_int_random_stream(transact_type, min_delay, max_delay):
"""Return a new stream of transacts with integer random delays distributed uniformly."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomUniformIntStream ' + str(min_delay) + ' ' + str(max_delay)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def triangular_random_stream(transact_type, min_delay, median_delay, max_delay):
"""Return a new stream of transacts with random delays having the triangular distribution."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomTriangularStream ' + str(min_delay) + ' ' + str(median_delay) + ' ' + str(max_delay)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def normal_random_stream(transact_type, mean_delay, delay_deviation):
"""Return a new stream of transacts with random delays having the normal distribution."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomNormalStream ' + str(mean_delay) + ' ' + str(delay_deviation)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def lognormal_random_stream(transact_type, normal_mean_delay, normal_delay_deviation):
"""Return a new stream of transacts with random delays having the lognormal distribution.
The numerical parameters are related to the normal distribution that
this distribution is derived from.
"""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomLogNormalStream ' + str(normal_mean_delay) + ' ' + str(normal_delay_deviation)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def exponential_random_stream(transact_type, mean_delay):
"""Return a new stream of transacts with random delays having the exponential distribution with the specified mean (a reciprocal of the rate)."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomExponentialStream ' + str(mean_delay)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def erlang_random_stream(transact_type, scale, shape):
"""Return a new stream of transacts with random delays having the Erlang distribution with the specified scale (a reciprocal of the rate) and shape parameters."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomErlangStream ' + str(scale) + ' ' + str(shape)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def poisson_random_stream(transact_type, mean_delay):
"""Return a new stream of transacts with random delays having the Poisson distribution with the specified mean."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomPoissonStream ' + str(mean_delay)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def binomial_random_stream(transact_type, probability, trials):
"""Return a new stream of transacts with random delays having the binomial distribution with the specified probability and trials."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomBinomialStream ' + str(probability) + ' ' + str(trials)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def gamma_random_stream(transact_type, shape, scale):
"""Return a new stream of transacts with random delays having the Gamma distribution by the specified shape and scale."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomGammaStream ' + str(shape) + ' ' + str(scale)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def beta_random_stream(transact_type, alpha, beta):
"""Return a new stream of transacts with random delays having the Beta distribution by the specified shape parameters (alpha and beta)."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomBetaStream ' + str(alpha) + ' ' + str(beta)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def weibull_random_stream(transact_type, shape, scale):
"""Return a new stream of transacts with random delays having the Weibull distribution by the specified shape and scale."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomWeibullStream ' + str(shape) + ' ' + str(scale)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
def discrete_random_stream(transact_type, pdf):
"""Return a new stream of transacts with random delays having the discrete distribution by the specified probability density function."""
expect_transact_type(transact_type)
model = transact_type.get_model()
code = 'return $ mapStream (\\a -> ' + transact_type.coerce_arrival('a') + ') $ '
code += 'randomDiscreteStream ' + encode_pdf(pdf)
y = StreamPort(model, transact_type.get_data_type())
y.bind_to_input()
y.write(code)
return y
| dsorokin/aivika-modeler | simulation/aivika/modeler/stream_random.py | Python | bsd-3-clause | 7,405 |
# apis_v1/documentation_source/voter_star_on_save_doc.py
# Brought to you by We Vote. Be good.
# -*- coding: UTF-8 -*-
def voter_star_on_save_doc_template_values(url_root):
"""
Show documentation about voterStarOnSave
"""
required_query_parameter_list = [
{
'name': 'api_key',
'value': 'string (from post, cookie, or get (in that order))', # boolean, integer, long, string
'description': 'The unique key provided to any organization using the WeVoteServer APIs',
},
{
'name': 'voter_device_id',
'value': 'string', # boolean, integer, long, string
'description': 'An 88 character unique identifier linked to a voter record on the server',
},
{
'name': 'kind_of_ballot_item',
'value': 'string', # boolean, integer, long, string
'description': 'What is the type of ballot item for which we are saving the \'on\' status? '
'(kind_of_ballot_item is either "OFFICE", "CANDIDATE", "POLITICIAN" or "MEASURE")',
},
{
'name': 'ballot_item_id',
'value': 'integer', # boolean, integer, long, string
'description': 'The unique internal identifier for this ballot_item '
'(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
'If it exists, ballot_item_id is used instead of ballot_item_we_vote_id)',
},
{
'name': 'ballot_item_we_vote_id',
'value': 'string', # boolean, integer, long, string
'description': 'The unique identifier for this ballot_item across all networks '
'(either ballot_item_id OR ballot_item_we_vote_id required -- not both. '
'NOTE: In the future we might support other identifiers used in the industry.',
},
]
optional_query_parameter_list = [
]
potential_status_codes_list = [
{
'code': 'VALID_VOTER_DEVICE_ID_MISSING',
'description': 'Cannot proceed. A valid voter_device_id parameter was not included.',
},
{
'code': 'VALID_VOTER_ID_MISSING',
'description': 'Cannot proceed. Missing voter_id while trying to save.',
},
{
'code': 'STAR_ON_OFFICE CREATE/UPDATE ITEM_STARRED',
'description': '',
},
{
'code': 'STAR_ON_CANDIDATE CREATE/UPDATE ITEM_STARRED',
'description': '',
},
{
'code': 'STAR_ON_MEASURE CREATE/UPDATE ITEM_STARRED',
'description': '',
},
]
try_now_link_variables_dict = {
'kind_of_ballot_item': 'CANDIDATE',
'ballot_item_id': '5655',
}
api_response = '{\n' \
' "status": string (description of what happened),\n' \
' "success": boolean (did the save happen?),\n' \
' "ballot_item_id": integer,\n' \
' "ballot_item_we_vote_id": string,\n' \
' "kind_of_ballot_item": string (CANDIDATE, MEASURE),\n' \
'}'
template_values = {
'api_name': 'voterStarOnSave',
'api_slug': 'voterStarOnSave',
'api_introduction':
"Save or create private 'star on' state for the current voter for a measure, an office or candidate.",
'try_now_link': 'apis_v1:voterStarOnSaveView',
'try_now_link_variables_dict': try_now_link_variables_dict,
'url_root': url_root,
'get_or_post': 'GET',
'required_query_parameter_list': required_query_parameter_list,
'optional_query_parameter_list': optional_query_parameter_list,
'api_response': api_response,
'api_response_notes':
"",
'potential_status_codes_list': potential_status_codes_list,
}
return template_values
| wevote/WebAppPublic | apis_v1/documentation_source/voter_star_on_save_doc.py | Python | bsd-3-clause | 4,125 |
import numpy as np
from nose.tools import (assert_true, assert_false, assert_equal,
assert_almost_equal)
from numpy.testing import (assert_array_equal, assert_array_almost_equal,
assert_)
from dipy.sims.voxel import (_check_directions, SingleTensor, MultiTensor,
multi_tensor_odf, all_tensor_evecs, add_noise,
single_tensor, sticks_and_ball, multi_tensor_dki,
kurtosis_element, DKI_signal)
from dipy.core.geometry import (vec2vec_rotmat, sphere2cart)
from dipy.data import get_data, get_sphere
from dipy.core.gradients import gradient_table
from dipy.io.gradients import read_bvals_bvecs
fimg, fbvals, fbvecs = get_data('small_64D')
bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
gtab = gradient_table(bvals, bvecs)
# 2 shells for techniques that requires multishell data
bvals_2s = np.concatenate((bvals, bvals * 2), axis=0)
bvecs_2s = np.concatenate((bvecs, bvecs), axis=0)
gtab_2s = gradient_table(bvals_2s, bvecs_2s)
def diff2eigenvectors(dx, dy, dz):
""" numerical derivatives 2 eigenvectors
"""
u = np.array([dx, dy, dz])
u = u / np.linalg.norm(u)
R = vec2vec_rotmat(basis[:, 0], u)
eig0 = u
eig1 = np.dot(R, basis[:, 1])
eig2 = np.dot(R, basis[:, 2])
eigs = np.zeros((3, 3))
eigs[:, 0] = eig0
eigs[:, 1] = eig1
eigs[:, 2] = eig2
return eigs, R
def test_check_directions():
# Testing spherical angles for two principal coordinate axis
angles = [(0, 0)] # axis z
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[0, 0, 1]])
    angles = [(0, 90)] # axis z again (phi can be anything if theta is zero)
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[0, 0, 1]])
angles = [(90, 0)] # axis x
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[1, 0, 0]])
# Testing if directions are already given in cartesian coordinates
angles = [(0, 0, 1)]
sticks = _check_directions(angles)
assert_array_almost_equal(sticks, [[0, 0, 1]])
# Testing more than one direction simultaneously
angles = np.array([[90, 0], [30, 0]])
sticks = _check_directions(angles)
ref_vec = [np.sin(np.pi*30/180), 0, np.cos(np.pi*30/180)]
assert_array_almost_equal(sticks, [[1, 0, 0], ref_vec])
# Testing directions not aligned to planes x = 0, y = 0, or z = 0
the1 = 0
phi1 = 90
the2 = 30
phi2 = 45
angles = np.array([(the1, phi1), (the2, phi2)])
sticks = _check_directions(angles)
ref_vec1 = (np.sin(np.pi*the1/180) * np.cos(np.pi*phi1/180),
np.sin(np.pi*the1/180) * np.sin(np.pi*phi1/180),
np.cos(np.pi*the1/180))
ref_vec2 = (np.sin(np.pi*the2/180) * np.cos(np.pi*phi2/180),
np.sin(np.pi*the2/180) * np.sin(np.pi*phi2/180),
np.cos(np.pi*the2/180))
assert_array_almost_equal(sticks, [ref_vec1, ref_vec2])
def test_sticks_and_ball():
d = 0.0015
S, sticks = sticks_and_ball(gtab, d=d, S0=1, angles=[(0, 0), ],
fractions=[100], snr=None)
assert_array_equal(sticks, [[0, 0, 1]])
S_st = SingleTensor(gtab, 1, evals=[d, 0, 0], evecs=[[0, 0, 0],
[0, 0, 0],
[1, 0, 0]])
assert_array_almost_equal(S, S_st)
def test_single_tensor():
evals = np.array([1.4, .35, .35]) * 10 ** (-3)
evecs = np.eye(3)
S = SingleTensor(gtab, 100, evals, evecs, snr=None)
assert_array_almost_equal(S[gtab.b0s_mask], 100)
assert_(np.mean(S[~gtab.b0s_mask]) < 100)
from dipy.reconst.dti import TensorModel
m = TensorModel(gtab)
t = m.fit(S)
assert_array_almost_equal(t.fa, 0.707, decimal=3)
def test_multi_tensor():
sphere = get_sphere('symmetric724')
vertices = sphere.vertices
mevals = np.array(([0.0015, 0.0003, 0.0003],
[0.0015, 0.0003, 0.0003]))
e0 = np.array([np.sqrt(2) / 2., np.sqrt(2) / 2., 0])
e1 = np.array([0, np.sqrt(2) / 2., np.sqrt(2) / 2.])
mevecs = [all_tensor_evecs(e0), all_tensor_evecs(e1)]
# odf = multi_tensor_odf(vertices, [0.5, 0.5], mevals, mevecs)
# assert_(odf.shape == (len(vertices),))
# assert_(np.all(odf <= 1) & np.all(odf >= 0))
fimg, fbvals, fbvecs = get_data('small_101D')
bvals, bvecs = read_bvals_bvecs(fbvals, fbvecs)
gtab = gradient_table(bvals, bvecs)
s1 = single_tensor(gtab, 100, mevals[0], mevecs[0], snr=None)
s2 = single_tensor(gtab, 100, mevals[1], mevecs[1], snr=None)
Ssingle = 0.5*s1 + 0.5*s2
S, sticks = MultiTensor(gtab, mevals, S0=100, angles=[(90, 45), (45, 90)],
fractions=[50, 50], snr=None)
assert_array_almost_equal(S, Ssingle)
def test_snr():
np.random.seed(1978)
s = single_tensor(gtab)
# For reasonably large SNR, var(signal) ~= sigma**2, where sigma = 1/SNR
for snr in [5, 10, 20]:
sigma = 1.0 / snr
for j in range(1000):
s_noise = add_noise(s, snr, 1, noise_type='rician')
assert_array_almost_equal(np.var(s_noise - s), sigma ** 2, decimal=2)
def test_all_tensor_evecs():
e0 = np.array([1/np.sqrt(2), 1/np.sqrt(2), 0])
desired = np.array([[1/np.sqrt(2), 1/np.sqrt(2), 0],
[-1/np.sqrt(2), 1/np.sqrt(2), 0],
[0, 0, 1]]).T
assert_array_almost_equal(all_tensor_evecs(e0), desired)
def test_kurtosis_elements():
""" Testing symmetry of the elements of the KT
    As a 4th order tensor, KT has 81 elements. However, due to diffusion
symmetry the KT is fully characterized by 15 independent elements. This
test checks for this property.
"""
# two fiber not aligned to planes x = 0, y = 0, or z = 0
mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087],
[0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
angles = [(80, 10), (80, 10), (20, 30), (20, 30)]
fie = 0.49 # intra axonal water fraction
frac = [fie * 50, (1-fie) * 50, fie * 50, (1-fie) * 50]
sticks = _check_directions(angles)
mD = np.zeros((len(frac), 3, 3))
for i in range(len(frac)):
R = all_tensor_evecs(sticks[i])
mD[i] = np.dot(np.dot(R, np.diag(mevals[i])), R.T)
# compute global DT
D = np.zeros((3, 3))
for i in range(len(frac)):
D = D + frac[i]*mD[i]
# compute voxel's MD
MD = (D[0][0] + D[1][1] + D[2][2]) / 3
# Reference dictionary with the 15 independent elements.
# Note: The product of the indices (i+1) * (j+1) * (k+1) * (l+1) of an
# element equals the product for another element if and only if the two
# elements are related by symmetry. Thus the index product is used as the
# key of the reference dictionary.
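# For example, elements (0, 0, 1, 1) and (0, 1, 0, 1) are symmetry
# equivalents of one another, and both produce the key
# 1*1*2*2 = 1*2*1*2 = 4, so they map to the same entry kt_ref[4] below.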
kt_ref = {1: kurtosis_element(mD, frac, 0, 0, 0, 0),
16: kurtosis_element(mD, frac, 1, 1, 1, 1),
81: kurtosis_element(mD, frac, 2, 2, 2, 2),
2: kurtosis_element(mD, frac, 0, 0, 0, 1),
3: kurtosis_element(mD, frac, 0, 0, 0, 2),
8: kurtosis_element(mD, frac, 0, 1, 1, 1),
24: kurtosis_element(mD, frac, 1, 1, 1, 2),
27: kurtosis_element(mD, frac, 0, 2, 2, 2),
54: kurtosis_element(mD, frac, 1, 2, 2, 2),
4: kurtosis_element(mD, frac, 0, 0, 1, 1),
9: kurtosis_element(mD, frac, 0, 0, 2, 2),
36: kurtosis_element(mD, frac, 1, 1, 2, 2),
6: kurtosis_element(mD, frac, 0, 0, 1, 2),
12: kurtosis_element(mD, frac, 0, 1, 1, 2),
18: kurtosis_element(mD, frac, 0, 1, 2, 2)}
# Testing all 81 possible elements
xyz = [0, 1, 2]
for i in xyz:
for j in xyz:
for k in xyz:
for l in xyz:
key = (i+1) * (j+1) * (k+1) * (l+1)
assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l),
kt_ref[key])
# Testing optional function inputs
assert_almost_equal(kurtosis_element(mD, frac, i, k, j, l),
kurtosis_element(mD, frac, i, k, j, l,
D, MD))
def test_DKI_simulations_aligned_fibers():
"""
Testing DKI simulations when aligning the same fiber to different axes.
If biological parameters don't change, kt[0] of a fiber aligned to axis x
has to be equal to kt[1] of a fiber aligned to the axis y and equal to
kt[2] of a fiber aligned to axis z. The same is applicable for dt
"""
# Defining parameters based on Neto Henriques et al., 2015. NeuroImage 111
mevals = np.array([[0.00099, 0, 0], # Intra-cellular
[0.00226, 0.00087, 0.00087]]) # Extra-cellular
frac = [49, 51] # Compartment volume fraction
# axis x
angles = [(90, 0), (90, 0)]
signal_fx, dt_fx, kt_fx = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac)
# axis y
angles = [(90, 90), (90, 90)]
signal_fy, dt_fy, kt_fy = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac)
# axis z
angles = [(0, 0), (0, 0)]
signal_fz, dt_fz, kt_fz = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac)
assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]],
[kt_fy[1], kt_fy[0], kt_fy[2]])
assert_array_equal([kt_fx[0], kt_fx[1], kt_fx[2]],
[kt_fz[2], kt_fz[0], kt_fz[1]])
assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]],
[dt_fy[2], dt_fy[0], dt_fy[5]])
assert_array_equal([dt_fx[0], dt_fx[2], dt_fx[5]],
[dt_fz[5], dt_fz[0], dt_fz[2]])
# testing S signal along axis x, y and z
bvals = np.array([0, 0, 0, 1000, 1000, 1000, 2000, 2000, 2000])
bvecs = np.asarray([[1, 0, 0], [0, 1, 0], [0, 0, 1],
[1, 0, 0], [0, 1, 0], [0, 0, 1],
[1, 0, 0], [0, 1, 0], [0, 0, 1]])
gtab_axis = gradient_table(bvals, bvecs)
# axis x
S_fx = DKI_signal(gtab_axis, dt_fx, kt_fx, S0=100)
assert_array_almost_equal(S_fx[0:3], [100, 100, 100]) # test S for b=0
# axis y
S_fy = DKI_signal(gtab_axis, dt_fy, kt_fy, S0=100)
assert_array_almost_equal(S_fy[0:3], [100, 100, 100]) # test S for b=0
# axis z
S_fz = DKI_signal(gtab_axis, dt_fz, kt_fz, S0=100)
assert_array_almost_equal(S_fz[0:3], [100, 100, 100]) # test S for b=0
# test S for b = 1000
assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]],
[S_fy[4], S_fy[3], S_fy[5]])
assert_array_almost_equal([S_fx[3], S_fx[4], S_fx[5]],
[S_fz[5], S_fz[3], S_fz[4]])
# test S for b = 2000
assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]],
[S_fy[7], S_fy[6], S_fy[8]])
assert_array_almost_equal([S_fx[6], S_fx[7], S_fx[8]],
[S_fz[8], S_fz[6], S_fz[7]])
def test_DKI_crossing_fibers_simulations():
""" Testing DKI simulations of a crossing fiber
"""
# two fibers not aligned to the planes x = 0, y = 0, or z = 0
mevals = np.array([[0.00099, 0, 0], [0.00226, 0.00087, 0.00087],
[0.00099, 0, 0], [0.00226, 0.00087, 0.00087]])
angles = [(80, 10), (80, 10), (20, 30), (20, 30)]
fie = 0.49
frac = [fie*50, (1 - fie)*50, fie*50, (1 - fie)*50]
signal, dt, kt = multi_tensor_dki(gtab_2s, mevals, angles=angles,
fractions=frac, snr=None)
# in these simulations, dt and kt cannot have zero elements
for i in range(len(dt)):
assert dt[i] != 0
for i in range(len(kt)):
assert kt[i] != 0
# test S, dt and kt relative to the expected values computed from another
# DKI package - UDKI (Neto Henriques et al., 2015)
dt_ref = [1.0576161e-3, 0.1292542e-3, 0.4786179e-3,
0.2667081e-3, 0.1136643e-3, 0.9888660e-3]
kt_ref = [2.3529944, 0.8226448, 2.3011221, 0.2017312, -0.0437535,
0.0404011, 0.0355281, 0.2449859, 0.2157668, 0.3495910,
0.0413366, 0.3461519, -0.0537046, 0.0133414, -0.017441]
assert_array_almost_equal(dt, dt_ref)
assert_array_almost_equal(kt, kt_ref)
assert_array_almost_equal(signal,
DKI_signal(gtab_2s, dt_ref, kt_ref, S0=100,
snr=None),
decimal=5)
if __name__ == "__main__":
test_multi_tensor()
| oesteban/dipy | dipy/sims/tests/test_voxel.py | Python | bsd-3-clause | 12,904 |
# License: BSD 3 clause <https://opensource.org/licenses/BSD-3-Clause>
# Copyright (c) 2016, Fabricio Vargas Matos <fabriciovargasmatos@gmail.com>
# All rights reserved.
'''
Tune the 3 most promising algorithms and compare them.
'''
# Load libraries
import os
import time
import pandas
import numpy
import matplotlib.pyplot as plt
from pandas.tools.plotting import scatter_matrix
from pandas import DataFrame
from sklearn.preprocessing import StandardScaler
from sklearn.preprocessing import MinMaxScaler
from sklearn import cross_validation
from sklearn.metrics import classification_report
from sklearn.metrics import confusion_matrix
from sklearn.metrics import accuracy_score
from sklearn.pipeline import Pipeline
from sklearn.grid_search import GridSearchCV
from sklearn.linear_model import LogisticRegression
from sklearn.tree import DecisionTreeClassifier
from sklearn.neighbors import KNeighborsClassifier
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.naive_bayes import GaussianNB
from sklearn.svm import SVC
from sklearn.ensemble import AdaBoostClassifier
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.datasets import load_digits
from sklearn.model_selection import GridSearchCV
from sklearn.decomposition import PCA, NMF
from sklearn.feature_selection import SelectKBest, chi2
import lib.eda1 as eda1
import lib.eda3 as eda3
#constants
N_DIGITS = 3
NUM_FOLDS = 10
RAND_SEED = 7
SCORING = 'accuracy'
VALIDATION_SIZE = 0.20
N_JOBS = 6
#global variables
start = time.clock()
imageidx = 1
createImages = True
results = []
names = []
params = []
bestResults = []
# RandomForestClassifier
def tuneRF(X_train, Y_train, outputPath):
global results, names, params, bestResults
print 'tune RF (Random Forest Classifier)'
pipeline = Pipeline([('PCA', PCA()),('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),('Scaler', StandardScaler())])
scaler = pipeline.fit(X_train)
rescaledX = scaler.transform(X_train)
# tune parameters
# http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.RandomForestClassifier.html
#n_estimators_values = [5, 10, 100, 1000, 3000]
n_estimators_values = [1000]
max_features_values = [0.1, 'auto', 'sqrt', 'log2', None] # (float)0.1=>10%
criterion_values = ['gini', 'entropy']
param_grid = dict(n_estimators=n_estimators_values, max_features=max_features_values, criterion=criterion_values)
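# Note: the grid above has 1 x 5 x 2 = 10 parameter combinations, so this
# search fits NUM_FOLDS * 10 = 100 models in total.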
model = RandomForestClassifier()
kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model, param_grid=param_grid, scoring=SCORING, cv=kfold)
grid_result = grid.fit(rescaledX, Y_train)
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
best_idx = grid_result.best_index_
# TODO: check whether 'mean_test_score' is really what I want here
cv_results = grid_result.cv_results_['mean_test_score']
results.append(cv_results)
grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
first = True
for param, mean_score, scores in grid_scores:
if first:
bestResults.append({'name':'RF', 'mean':scores.mean(), 'std':scores.std(), 'params':param})
first = False
print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))
# ExtraTreesClassifier
def tuneET(X_train, Y_train, outputPath):
global results, names, params, bestResults
print 'tune ET (Extra Trees Classifier)'
pipeline = Pipeline([('PCA', PCA()),('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),('Scaler', StandardScaler())])
scaler = pipeline.fit(X_train)
rescaledX = scaler.transform(X_train)
# tune parameters
# http://scikit-learn.org/stable/modules/generated/sklearn.ensemble.ExtraTreesClassifier.html
#n_estimators_values = [5, 10, 100, 1000, 3000]
n_estimators_values = [1000]
max_features_values = [0.1, 'auto', 'sqrt', 'log2', None] # (float)0.1=>10%
criterion_values = ['gini', 'entropy']
param_grid = dict(n_estimators=n_estimators_values, max_features=max_features_values, criterion=criterion_values)
model = ExtraTreesClassifier()
kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model, param_grid=param_grid, scoring=SCORING, cv=kfold)
grid_result = grid.fit(rescaledX, Y_train)
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
best_idx = grid_result.best_index_
# TODO: check whether 'mean_test_score' is really what I want here
cv_results = grid_result.cv_results_['mean_test_score']
results.append(cv_results)
grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
first = True
for param, mean_score, scores in grid_scores:
if first:
bestResults.append({'name':'ET', 'mean':scores.mean(), 'std':scores.std(), 'params':param})
first = False
print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))
# Tune scaled SVM
def tuneSVM(X_train, Y_train, outputPath):
global results, names, params, bestResults
print 'tune SVM (Support Vector Machines Classifier)'
pipeline = Pipeline([('PCA', PCA()),('MinMaxScaler', MinMaxScaler(feature_range=(0, 1))),('Scaler', StandardScaler())])
scaler = pipeline.fit(X_train)
rescaledX = scaler.transform(X_train)
#c_values = [0.1, 1.0, 100.0, 10000.0, 100000.0]
c_values = [10000.0, 100000.0]
kernel_values = ['linear', 'poly', 'rbf', 'sigmoid']
param_grid = dict(C=c_values, kernel=kernel_values)
model = SVC()
kfold = cross_validation.KFold(n=len(X_train), n_folds=NUM_FOLDS, random_state=RAND_SEED)
grid = GridSearchCV(n_jobs=N_JOBS, verbose=10, estimator=model, param_grid=param_grid, scoring=SCORING, cv=kfold)
grid_result = grid.fit(rescaledX, Y_train)
print("Best: %f using %s" % (grid_result.best_score_, grid_result.best_params_))
best_idx = grid_result.best_index_
# TODO: check whether 'mean_test_score' is really what I want here
cv_results = grid_result.cv_results_['mean_test_score']
results.append(cv_results)
grid_scores = sorted(grid_result.grid_scores_, key=lambda x: x[2].mean(), reverse=True)
first = True
for param, mean_score, scores in grid_scores:
if first:
bestResults.append({'name':'SVM', 'mean':scores.mean(), 'std':scores.std(), 'params':param})
first = False
print("%f (%f) with: %r" % (scores.mean(), scores.std(), param))
def drawTunedAlgorithmsComparison(results, names, outputPath):
global imageidx
print '\n === Tuned Algorithms Comparison ===\n'
#print bestResults
for x in bestResults:
print x
# Compare Algorithms
if (createImages):
fig = plt.figure()
fig.suptitle('Final Tuned-Algorithms Comparison')
ax = fig.add_subplot(111)
plt.boxplot(results)
ax.set_xticklabels(names)
#plt.show()
plt.savefig(outputPath + str(imageidx).zfill(N_DIGITS) + '-Tuned-Algorithm-Comparison.png')
imageidx += 1
plt.close('all')
def set_createImages(value):
global createImages
createImages = value
# ===================================================
# ================== main function ==================
# ===================================================
def run(inputFilePath, outputPath, createImagesFlag, dropColumns):
global start
print '####################################################################'
print '############### Running Exploratory Data Analysis #4 ###############'
print '####################################################################'
print ''
set_createImages(createImagesFlag)
start = time.clock()
eda1.reset_imageidx()
eda1.set_createImages(createImagesFlag)
if not os.path.exists(outputPath):
os.makedirs(outputPath)
# Load dataset
dataframe = eda1.loadDataframe(inputFilePath)
# drop out 'not fair' features
dataframe = eda1.dataCleansing(dataframe, dropColumns)
#Split-out train/validation dataset
X_train, X_validation, Y_train, Y_validation = eda1.splitoutValidationDataset(dataframe)
'''
# tune each algorithm
try:
tuneRF(X_train, Y_train, outputPath)
except Exception as e:
print "ERROR: couldn't tune RF"
print "Message: %s" % str(e)
try:
tuneET(X_train, Y_train, outputPath)
except Exception as e:
print "ERROR: couldn't tune ET"
print "Message: %s" % str(e)
'''
try:
tuneSVM(X_train, Y_train, outputPath)
except Exception as e:
print "ERROR: couldn't tune SVM"
print "Message: %s" % str(e)
# print the results comparing the algorithms with the best tuning for each one
drawTunedAlgorithmsComparison(results, names, outputPath)
print '\n<<< THE END - Running Exploratory Data Analysis #4 >>>'
#RF - Best: 0.853451 using {'max_features': 'log2', 'n_estimators': 1000, 'criterion': 'gini'}
#ET - Best: 0.855320 using {'max_features': None, 'n_estimators': 1000, 'criterion': 'gini'}
| FabricioMatos/ifes-dropout-machine-learning | lib/eda4.py | Python | bsd-3-clause | 9,696 |
import numpy as np
from scipy.linalg import norm
from .base import AppearanceLucasKanade
class SimultaneousForwardAdditive(AppearanceLucasKanade):
@property
def algorithm(self):
return 'Simultaneous-FA'
def _fit(self, lk_fitting, max_iters=20, project=True):
# Initial error > eps
error = self.eps + 1
image = lk_fitting.image
lk_fitting.weights = []
n_iters = 0
# Number of shape weights
n_params = self.transform.n_parameters
# Initial appearance weights
if project:
# Obtained weights by projection
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
weights = self.appearance_model.project(IWxp)
# Reset template
self.template = self.appearance_model.instance(weights)
else:
# Set all weights to 0 (yielding the mean)
weights = np.zeros(self.appearance_model.n_active_components)
lk_fitting.weights.append(weights)
# Compute appearance model Jacobian wrt weights
appearance_jacobian = self.appearance_model._jacobian.T
# Forward Additive Algorithm
while n_iters < max_iters and error > self.eps:
# Compute warped image with current weights
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
# Compute warp Jacobian
dW_dp = self.transform.jacobian(
self.template.mask.true_indices)
# Compute steepest descent images, VI_dW_dp
J = self.residual.steepest_descent_images(
image, dW_dp, forward=(self.template, self.transform,
self.interpolator))
# Concatenate VI_dW_dp with appearance model Jacobian
self._J = np.hstack((J, appearance_jacobian))
# Compute Hessian and inverse
self._H = self.residual.calculate_hessian(self._J)
# Compute steepest descent parameter updates
sd_delta_p = self.residual.steepest_descent_update(
self._J, self.template, IWxp)
# Compute gradient descent parameter updates
delta_p = np.real(self._calculate_delta_p(sd_delta_p))
# Update warp weights
parameters = self.transform.as_vector() + delta_p[:n_params]
self.transform.from_vector_inplace(parameters)
lk_fitting.parameters.append(parameters)
# Update appearance weights
weights -= delta_p[n_params:]
self.template = self.appearance_model.instance(weights)
lk_fitting.weights.append(weights)
# Test convergence
error = np.abs(norm(delta_p))
n_iters += 1
lk_fitting.fitted = True
return lk_fitting
class SimultaneousForwardCompositional(AppearanceLucasKanade):
@property
def algorithm(self):
return 'Simultaneous-FC'
def _set_up(self):
# Compute warp Jacobian
self._dW_dp = self.transform.jacobian(
self.template.mask.true_indices)
def _fit(self, lk_fitting, max_iters=20, project=True):
# Initial error > eps
error = self.eps + 1
image = lk_fitting.image
lk_fitting.weights = []
n_iters = 0
# Number of shape weights
n_params = self.transform.n_parameters
# Initial appearance weights
if project:
# Obtained weights by projection
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
weights = self.appearance_model.project(IWxp)
# Reset template
self.template = self.appearance_model.instance(weights)
else:
# Set all weights to 0 (yielding the mean)
weights = np.zeros(self.appearance_model.n_active_components)
lk_fitting.weights.append(weights)
# Compute appearance model Jacobian wrt weights
appearance_jacobian = self.appearance_model._jacobian.T
# Forward Compositional Algorithm
while n_iters < max_iters and error > self.eps:
# Compute warped image with current weights
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
# Compute steepest descent images, VI_dW_dp
J = self.residual.steepest_descent_images(IWxp, self._dW_dp)
# Concatenate VI_dW_dp with appearance model Jacobian
self._J = np.hstack((J, appearance_jacobian))
# Compute Hessian and inverse
self._H = self.residual.calculate_hessian(self._J)
# Compute steepest descent parameter updates
sd_delta_p = self.residual.steepest_descent_update(
self._J, self.template, IWxp)
# Compute gradient descent parameter updates
delta_p = np.real(self._calculate_delta_p(sd_delta_p))
# Update warp weights
self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
lk_fitting.parameters.append(self.transform.as_vector())
# Update appearance weights
weights -= delta_p[n_params:]
self.template = self.appearance_model.instance(weights)
lk_fitting.weights.append(weights)
# Test convergence
error = np.abs(norm(delta_p))
n_iters += 1
lk_fitting.fitted = True
return lk_fitting
class SimultaneousInverseCompositional(AppearanceLucasKanade):
@property
def algorithm(self):
return 'Simultaneous-IA'
def _set_up(self):
# Compute the Jacobian of the warp
self._dW_dp = self.transform.jacobian(
self.appearance_model.mean.mask.true_indices)
def _fit(self, lk_fitting, max_iters=20, project=True):
# Initial error > eps
error = self.eps + 1
image = lk_fitting.image
lk_fitting.weights = []
n_iters = 0
# Number of shape weights
n_params = self.transform.n_parameters
# Initial appearance weights
if project:
# Obtained weights by projection
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
weights = self.appearance_model.project(IWxp)
# Reset template
self.template = self.appearance_model.instance(weights)
else:
# Set all weights to 0 (yielding the mean)
weights = np.zeros(self.appearance_model.n_active_components)
lk_fitting.weights.append(weights)
# Compute appearance model Jacobian wrt weights
appearance_jacobian = -self.appearance_model._jacobian.T
# Baker-Matthews, Inverse Compositional Algorithm
while n_iters < max_iters and error > self.eps:
# Compute warped image with current weights
IWxp = image.warp_to(self.template.mask, self.transform,
interpolator=self.interpolator)
# Compute steepest descent images, VT_dW_dp
J = self.residual.steepest_descent_images(self.template,
self._dW_dp)
# Concatenate VI_dW_dp with appearance model Jacobian
self._J = np.hstack((J, appearance_jacobian))
# Compute Hessian and inverse
self._H = self.residual.calculate_hessian(self._J)
# Compute steepest descent parameter updates
sd_delta_p = self.residual.steepest_descent_update(
self._J, IWxp, self.template)
# Compute gradient descent parameter updates
delta_p = -np.real(self._calculate_delta_p(sd_delta_p))
# Update warp weights
self.transform.compose_after_from_vector_inplace(delta_p[:n_params])
lk_fitting.parameters.append(self.transform.as_vector())
# Update appearance weights
weights -= delta_p[n_params:]
self.template = self.appearance_model.instance(weights)
lk_fitting.weights.append(weights)
# Test convergence
error = np.abs(norm(delta_p))
n_iters += 1
lk_fitting.fitted = True
return lk_fitting
| jabooth/menpo-archive | menpo/fit/lucaskanade/appearance/simultaneous.py | Python | bsd-3-clause | 8,583 |
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Provides fakes for several of Telemetry's internal objects.
These allow code like story_runner and Benchmark to be run and tested
without compiling or starting a browser. Class names prepended with an
underscore are intended to be implementation details, and should not
be subclassed; however, some, like _FakeBrowser, have public APIs that
may need to be called in tests.
"""
from telemetry.internal.backends.chrome_inspector import websocket
from telemetry.internal.browser import browser_options
from telemetry.internal.platform import system_info
from telemetry.page import shared_page_state
from telemetry.util import image_util
from telemetry.testing.internal import fake_gpu_info
from types import ModuleType
# Classes and functions which are intended to be part of the public
# fakes API.
class FakePlatform(object):
def __init__(self):
self._network_controller = None
self._tracing_controller = None
self._has_battor = False
self._os_name = 'FakeOS'
self._device_type_name = 'abc'
self._is_svelte = False
self._is_aosp = True
@property
def is_host_platform(self):
raise NotImplementedError
@property
def network_controller(self):
if self._network_controller is None:
self._network_controller = _FakeNetworkController()
return self._network_controller
@property
def tracing_controller(self):
if self._tracing_controller is None:
self._tracing_controller = _FakeTracingController()
return self._tracing_controller
def Initialize(self):
pass
def CanMonitorThermalThrottling(self):
return False
def IsThermallyThrottled(self):
return False
def HasBeenThermallyThrottled(self):
return False
def GetArchName(self):
raise NotImplementedError
def SetOSName(self, name):
self._os_name = name
def GetOSName(self):
return self._os_name
def GetOSVersionName(self):
raise NotImplementedError
def GetOSVersionDetailString(self):
raise NotImplementedError
def StopAllLocalServers(self):
pass
def WaitForBatteryTemperature(self, _):
pass
def HasBattOrConnected(self):
return self._has_battor
def SetBattOrDetected(self, b):
assert isinstance(b, bool)
self._has_battor = b
# TODO(rnephew): Investigate moving from setters to @property.
def SetDeviceTypeName(self, name):
self._device_type_name = name
def GetDeviceTypeName(self):
return self._device_type_name
def SetIsSvelte(self, b):
assert isinstance(b, bool)
self._is_svelte = b
def IsSvelte(self):
if self._os_name != 'android':
raise NotImplementedError
return self._is_svelte
def SetIsAosp(self, b):
assert isinstance(b, bool)
self._is_aosp = b
def IsAosp(self):
return self._is_aosp and self._os_name == 'android'
class FakeLinuxPlatform(FakePlatform):
def __init__(self):
super(FakeLinuxPlatform, self).__init__()
self.screenshot_png_data = None
self.http_server_directories = []
self.http_server = FakeHTTPServer()
@property
def is_host_platform(self):
return True
def GetDeviceTypeName(self):
return 'Desktop'
def GetArchName(self):
return 'x86_64'
def GetOSName(self):
return 'linux'
def GetOSVersionName(self):
return 'trusty'
def GetOSVersionDetailString(self):
return ''
def CanTakeScreenshot(self):
return bool(self.screenshot_png_data)
def TakeScreenshot(self, file_path):
if not self.CanTakeScreenshot():
raise NotImplementedError
img = image_util.FromBase64Png(self.screenshot_png_data)
image_util.WritePngFile(img, file_path)
return True
def SetHTTPServerDirectories(self, paths):
self.http_server_directories.append(paths)
class FakeHTTPServer(object):
def UrlOf(self, url):
del url # unused
return 'file:///foo'
class FakePossibleBrowser(object):
def __init__(self, execute_on_startup=None,
execute_after_browser_creation=None):
self._returned_browser = _FakeBrowser(FakeLinuxPlatform())
self.browser_type = 'linux'
self.supports_tab_control = False
self.is_remote = False
self.execute_on_startup = execute_on_startup
self.execute_after_browser_creation = execute_after_browser_creation
@property
def returned_browser(self):
"""The browser object that will be returned through later API calls."""
return self._returned_browser
def Create(self, finder_options):
if self.execute_on_startup is not None:
self.execute_on_startup()
del finder_options # unused
if self.execute_after_browser_creation is not None:
self.execute_after_browser_creation(self._returned_browser)
return self.returned_browser
@property
def platform(self):
"""The platform object from the returned browser.
To change this or set it up, change the returned browser's
platform.
"""
return self.returned_browser.platform
def IsRemote(self):
return self.is_remote
def SetCredentialsPath(self, _):
pass
class FakeSharedPageState(shared_page_state.SharedPageState):
def __init__(self, test, finder_options, story_set):
super(FakeSharedPageState, self).__init__(test, finder_options, story_set)
def _GetPossibleBrowser(self, test, finder_options):
p = FakePossibleBrowser()
self.ConfigurePossibleBrowser(p)
return p
def ConfigurePossibleBrowser(self, possible_browser):
"""Override this to configure the PossibleBrowser.
Can make changes to the browser's configuration here via e.g.:
possible_browser.returned_browser.returned_system_info = ...
"""
pass
def DidRunStory(self, results):
# TODO(kbr): add a test which throws an exception from DidRunStory
# to verify the fix from https://crrev.com/86984d5fc56ce00e7b37ebe .
super(FakeSharedPageState, self).DidRunStory(results)
class FakeSystemInfo(system_info.SystemInfo):
def __init__(self, model_name='', gpu_dict=None, command_line=''):
if gpu_dict == None:
gpu_dict = fake_gpu_info.FAKE_GPU_INFO
super(FakeSystemInfo, self).__init__(model_name, gpu_dict, command_line)
class _FakeBrowserFinderOptions(browser_options.BrowserFinderOptions):
def __init__(self, execute_on_startup=None,
execute_after_browser_creation=None, *args, **kwargs):
browser_options.BrowserFinderOptions.__init__(self, *args, **kwargs)
self.fake_possible_browser = \
FakePossibleBrowser(
execute_on_startup=execute_on_startup,
execute_after_browser_creation=execute_after_browser_creation)
def CreateBrowserFinderOptions(browser_type=None, execute_on_startup=None,
execute_after_browser_creation=None):
"""Creates fake browser finder options for discovering a browser."""
return _FakeBrowserFinderOptions(
browser_type=browser_type,
execute_on_startup=execute_on_startup,
execute_after_browser_creation=execute_after_browser_creation)
# Internal classes. Note that end users may still need to both call
# and mock out methods of these classes, but they should not be
# subclassed.
class _FakeBrowser(object):
def __init__(self, platform):
self._tabs = _FakeTabList(self)
# Fake the creation of the first tab.
self._tabs.New()
self._returned_system_info = FakeSystemInfo()
self._platform = platform
self._browser_type = 'release'
self._is_crashed = False
@property
def platform(self):
return self._platform
@platform.setter
def platform(self, incoming):
"""Allows overriding of the fake browser's platform object."""
assert isinstance(incoming, FakePlatform)
self._platform = incoming
@property
def returned_system_info(self):
"""The object which will be returned from calls to GetSystemInfo."""
return self._returned_system_info
@returned_system_info.setter
def returned_system_info(self, incoming):
"""Allows overriding of the returned SystemInfo object.
Incoming argument must be an instance of FakeSystemInfo."""
assert isinstance(incoming, FakeSystemInfo)
self._returned_system_info = incoming
@property
def browser_type(self):
"""The browser_type this browser claims to be ('debug', 'release', etc.)"""
return self._browser_type
@browser_type.setter
def browser_type(self, incoming):
"""Allows setting of the browser_type."""
self._browser_type = incoming
@property
def credentials(self):
return _FakeCredentials()
def Close(self):
self._is_crashed = False
@property
def supports_system_info(self):
return True
def GetSystemInfo(self):
return self.returned_system_info
@property
def supports_tab_control(self):
return True
@property
def tabs(self):
return self._tabs
def DumpStateUponFailure(self):
pass
class _FakeCredentials(object):
def WarnIfMissingCredentials(self, _):
pass
class _FakeTracingController(object):
def __init__(self):
self._is_tracing = False
def StartTracing(self, tracing_config, timeout=10):
self._is_tracing = True
del tracing_config
del timeout
def StopTracing(self):
self._is_tracing = False
@property
def is_tracing_running(self):
return self._is_tracing
def ClearStateIfNeeded(self):
pass
def IsChromeTracingSupported(self):
return True
class _FakeNetworkController(object):
def __init__(self):
self.wpr_mode = None
self.extra_wpr_args = None
self.is_initialized = False
self.is_open = False
self.use_live_traffic = None
def InitializeIfNeeded(self, use_live_traffic=False):
self.use_live_traffic = use_live_traffic
def UpdateTrafficSettings(self, round_trip_latency_ms=None,
download_bandwidth_kbps=None, upload_bandwidth_kbps=None):
pass
def Open(self, wpr_mode, extra_wpr_args, use_wpr_go=False):
del use_wpr_go # Unused.
self.wpr_mode = wpr_mode
self.extra_wpr_args = extra_wpr_args
self.is_open = True
def Close(self):
self.wpr_mode = None
self.extra_wpr_args = None
self.is_initialized = False
self.is_open = False
def StartReplay(self, archive_path, make_javascript_deterministic=False):
del make_javascript_deterministic # Unused.
assert self.is_open
self.is_initialized = archive_path is not None
def StopReplay(self):
self.is_initialized = False
class _FakeTab(object):
def __init__(self, browser, tab_id):
self._browser = browser
self._tab_id = str(tab_id)
self._collect_garbage_count = 0
self.test_png = None
@property
def collect_garbage_count(self):
return self._collect_garbage_count
@property
def id(self):
return self._tab_id
@property
def browser(self):
return self._browser
def WaitForDocumentReadyStateToBeComplete(self, timeout=0):
pass
def Navigate(self, url, script_to_evaluate_on_commit=None,
timeout=0):
del script_to_evaluate_on_commit, timeout # unused
if url == 'chrome://crash':
self.browser._is_crashed = True
raise Exception
def WaitForDocumentReadyStateToBeInteractiveOrBetter(self, timeout=0):
pass
def WaitForFrameToBeDisplayed(self, timeout=0):
pass
def IsAlive(self):
return True
def CloseConnections(self):
pass
def CollectGarbage(self):
self._collect_garbage_count += 1
def Close(self):
pass
@property
def screenshot_supported(self):
return self.test_png is not None
def Screenshot(self):
assert self.screenshot_supported, 'Screenshot is not supported'
return image_util.FromBase64Png(self.test_png)
class _FakeTabList(object):
_current_tab_id = 0
def __init__(self, browser):
self._tabs = []
self._browser = browser
def New(self, timeout=300):
del timeout # unused
type(self)._current_tab_id += 1
t = _FakeTab(self._browser, type(self)._current_tab_id)
self._tabs.append(t)
return t
def __iter__(self):
return self._tabs.__iter__()
def __len__(self):
return len(self._tabs)
def __getitem__(self, index):
if self._tabs[index].browser._is_crashed:
raise Exception
else:
return self._tabs[index]
def GetTabById(self, identifier):
"""The identifier of a tab can be accessed with tab.id."""
for tab in self._tabs:
if tab.id == identifier:
return tab
return None
class FakeInspectorWebsocket(object):
"""A fake InspectorWebsocket.
A fake that allows tests to send pregenerated data. Normal
InspectorWebsockets allow for any number of domain handlers. This fake only
allows up to 1 domain handler, and assumes that the domain of the response
always matches that of the handler.
"""
_NOTIFICATION_EVENT = 1
_NOTIFICATION_CALLBACK = 2
def __init__(self, mock_timer):
self._mock_timer = mock_timer
self._notifications = []
self._response_handlers = {}
self._pending_callbacks = {}
self._handler = None
def RegisterDomain(self, _, handler):
self._handler = handler
def AddEvent(self, method, params, time):
if self._notifications:
assert self._notifications[-1][1] < time, (
'Current response is scheduled earlier than previous response.')
response = {'method': method, 'params': params}
self._notifications.append((response, time, self._NOTIFICATION_EVENT))
def AddAsyncResponse(self, method, result, time):
if self._notifications:
assert self._notifications[-1][1] < time, (
'Current response is scheduled earlier than previous response.')
response = {'method': method, 'result': result}
self._notifications.append((response, time, self._NOTIFICATION_CALLBACK))
def AddResponseHandler(self, method, handler):
self._response_handlers[method] = handler
def SyncRequest(self, request, *args, **kwargs):
del args, kwargs # unused
handler = self._response_handlers[request['method']]
return handler(request) if handler else None
def AsyncRequest(self, request, callback):
self._pending_callbacks.setdefault(request['method'], []).append(callback)
def SendAndIgnoreResponse(self, request):
pass
def Connect(self, _):
pass
def DispatchNotifications(self, timeout):
current_time = self._mock_timer.time()
if not self._notifications:
self._mock_timer.SetTime(current_time + timeout + 1)
raise websocket.WebSocketTimeoutException()
response, time, kind = self._notifications[0]
if time - current_time > timeout:
self._mock_timer.SetTime(current_time + timeout + 1)
raise websocket.WebSocketTimeoutException()
self._notifications.pop(0)
self._mock_timer.SetTime(time + 1)
if kind == self._NOTIFICATION_EVENT:
self._handler(response)
elif kind == self._NOTIFICATION_CALLBACK:
callback = self._pending_callbacks.get(response['method']).pop(0)
callback(response)
else:
raise Exception('Unexpected response type')
class FakeTimer(object):
""" A fake timer to fake out the timing for a module.
Args:
module: module to fake out the time
"""
def __init__(self, module=None):
self._elapsed_time = 0
self._module = module
self._actual_time = None
if module:
assert isinstance(module, ModuleType)
self._actual_time = module.time
self._module.time = self
def sleep(self, time):
self._elapsed_time += time
def time(self):
return self._elapsed_time
def SetTime(self, time):
self._elapsed_time = time
def __del__(self):
self.Restore()
def Restore(self):
if self._module:
self._module.time = self._actual_time
self._module = None
self._actual_time = None
| benschmaus/catapult | telemetry/telemetry/testing/fakes/__init__.py | Python | bsd-3-clause | 15,827 |
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Quality Control based on fuzzy logic.
"""
import logging
import numpy as np
from .core import QCCheckVar
from .gradient import gradient
from .spike import spike
from .woa_normbias import woa_normbias
from cotede.fuzzy import fuzzy_uncertainty
module_logger = logging.getLogger(__name__)
def fuzzylogic(features, cfg, require="all"):
"""
FIXME: Decide whether to return 0 or to assert. In qc.py, all QC tests
are applied inside a try block, so if this one fails it flags 0s.
"""
require = cfg.get("require", require)
if (require == "all") and not np.all([f in features for f in cfg["features"]]):
module_logger.warning(
"Not all features ({}) required by fuzzy logic are available".format(
list(cfg["features"].keys())
)
)
raise KeyError
uncertainty = fuzzy_uncertainty(
data=features, features=cfg["features"], output=cfg["output"], require=require
)
return uncertainty
class FuzzyLogic(QCCheckVar):
def set_features(self):
self.features = {}
for v in [f for f in self.cfg["features"] if f not in self.features]:
if v == "woa_bias":
woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
self.features[v] = woa_comparison["woa_bias"]
elif v == "woa_normbias":
woa_comparison = woa_normbias(self.data, self.varname, self.attrs)
self.features[v] = woa_comparison["woa_normbias"]
elif v == "spike":
self.features[v] = spike(self.data[self.varname])
elif v == "gradient":
self.features[v] = gradient(self.data[self.varname])
self.features["fuzzylogic"] = fuzzylogic(self.features, self.cfg)
def test(self):
self.flags = {}
cfg = self.cfg
flag = np.zeros(np.shape(self.data[self.varname]), dtype="i1")
uncertainty = self.features["fuzzylogic"]
# FIXME: As it is now, it will have no zero flag value. Think about cases
# where some values in a profile would not be estimated, hence flag=0
# np.nonzero is needed because uncertainty is now a masked array, so
# that masked feature values are handled correctly.
flag[np.nonzero(uncertainty <= 0.29)] = 1
flag[np.nonzero((uncertainty > 0.29) & (uncertainty <= 0.34))] = 2
flag[np.nonzero((uncertainty > 0.34) & (uncertainty <= 0.72))] = 3
flag[np.nonzero(uncertainty > 0.72)] = 4
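# (The flag values follow the usual IOC convention: 1 = good,
# 2 = probably good, 3 = probably bad, 4 = bad.)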
self.flags["fuzzylogic"] = flag
| castelao/CoTeDe | cotede/qctests/fuzzylogic.py | Python | bsd-3-clause | 2,680 |
from setuptools import setup, find_packages
setup(name='gelato.models',
version='0.1.2',
description='Gelato models',
namespace_packages=['gelato'],
long_description='',
author='',
author_email='',
license='',
url='',
include_package_data=True,
packages=find_packages(exclude=['tests']),
install_requires=['django', 'tower'])
| washort/gelato.models | setup.py | Python | bsd-3-clause | 394 |
import sys
import warnings
try:
from itertools import izip as zip
except ImportError:
pass
from itertools import product
import numpy as np
from .. import util
from ..dimension import dimension_name
from ..element import Element
from ..ndmapping import NdMapping, item_check, sorted_context
from .interface import DataError, Interface
from .pandas import PandasInterface
from .util import finite_range
class cuDFInterface(PandasInterface):
"""
The cuDFInterface allows Dataset objects to wrap a cuDF
DataFrame object. Using cuDF allows working with columnar
data on a GPU. Most operations leave the data in GPU memory,
however to plot the data it has to be loaded into memory.
The cuDFInterface covers almost the complete API exposed
by the PandasInterface with two notable exceptions:
1) Aggregation and groupby do not have a consistent sort order
(see https://github.com/rapidsai/cudf/issues/4237)
2) Not all functions can be easily applied to a cuDF so
some functions applied with aggregate and reduce will not work.
"""
datatype = 'cuDF'
types = ()
@classmethod
def loaded(cls):
return 'cudf' in sys.modules
@classmethod
def applies(cls, obj):
if not cls.loaded():
return False
import cudf
return isinstance(obj, (cudf.DataFrame, cudf.Series))
@classmethod
def init(cls, eltype, data, kdims, vdims):
import cudf
import pandas as pd
element_params = eltype.param.objects()
kdim_param = element_params['kdims']
vdim_param = element_params['vdims']
if isinstance(data, (cudf.Series, pd.Series)):
data = data.to_frame()
if not isinstance(data, cudf.DataFrame):
data, _, _ = PandasInterface.init(eltype, data, kdims, vdims)
data = cudf.from_pandas(data)
columns = list(data.columns)
ncols = len(columns)
index_names = [data.index.name]
if index_names == [None]:
index_names = ['index']
if eltype._auto_indexable_1d and ncols == 1 and kdims is None:
kdims = list(index_names)
if isinstance(kdim_param.bounds[1], int):
ndim = min([kdim_param.bounds[1], len(kdim_param.default)])
else:
ndim = None
nvdim = vdim_param.bounds[1] if isinstance(vdim_param.bounds[1], int) else None
if kdims and vdims is None:
vdims = [c for c in columns if c not in kdims]
elif vdims and kdims is None:
kdims = [c for c in columns if c not in vdims][:ndim]
elif kdims is None:
kdims = list(columns[:ndim])
if vdims is None:
vdims = [d for d in columns[ndim:((ndim+nvdim) if nvdim else None)]
if d not in kdims]
elif kdims == [] and vdims is None:
vdims = list(columns[:nvdim if nvdim else None])
# Handle reset of index if kdims reference index by name
for kd in kdims:
kd = dimension_name(kd)
if kd in columns:
continue
if any(kd == ('index' if name is None else name)
for name in index_names):
data = data.reset_index()
break
if any(isinstance(d, (np.int64, int)) for d in kdims+vdims):
raise DataError("cudf DataFrame column names used as dimensions "
"must be strings not integers.", cls)
if kdims:
kdim = dimension_name(kdims[0])
if eltype._auto_indexable_1d and ncols == 1 and kdim not in columns:
data = data.copy()
data.insert(0, kdim, np.arange(len(data)))
for d in kdims+vdims:
d = dimension_name(d)
if len([c for c in columns if c == d]) > 1:
raise DataError('Dimensions may not reference duplicated DataFrame '
'columns (found duplicate %r columns). If you want to plot '
'a column against itself simply declare two dimensions '
'with the same name. '% d, cls)
return data, {'kdims':kdims, 'vdims':vdims}, {}
@classmethod
def range(cls, dataset, dimension):
dimension = dataset.get_dimension(dimension, strict=True)
column = dataset.data[dimension.name]
if dimension.nodata is not None:
column = cls.replace_value(column, dimension.nodata)
if column.dtype.kind == 'O':
return np.NaN, np.NaN
else:
return finite_range(column, column.min(), column.max())
@classmethod
def values(cls, dataset, dim, expanded=True, flat=True, compute=True,
keep_index=False):
dim = dataset.get_dimension(dim, strict=True)
data = dataset.data[dim.name]
if not expanded:
data = data.unique()
return data.values_host if compute else data.values
elif keep_index:
return data
elif compute:
return data.values_host
try:
return data.values
except Exception:
return data.values_host
@classmethod
def groupby(cls, dataset, dimensions, container_type, group_type, **kwargs):
# Get dimensions information
dimensions = [dataset.get_dimension(d).name for d in dimensions]
kdims = [kdim for kdim in dataset.kdims if kdim not in dimensions]
# Update the kwargs appropriately for Element group types
group_kwargs = {}
group_type = dict if group_type == 'raw' else group_type
if issubclass(group_type, Element):
group_kwargs.update(util.get_param_values(dataset))
group_kwargs['kdims'] = kdims
group_kwargs.update(kwargs)
# Propagate dataset
group_kwargs['dataset'] = dataset.dataset
# Find all the keys along supplied dimensions
keys = product(*(dataset.data[d].unique().values_host for d in dimensions))
# Iterate over the unique entries applying selection masks
grouped_data = []
for unique_key in util.unique_iterator(keys):
group_data = dataset.select(**dict(zip(dimensions, unique_key)))
if not len(group_data):
continue
group_data = group_type(group_data, **group_kwargs)
grouped_data.append((unique_key, group_data))
if issubclass(container_type, NdMapping):
with item_check(False), sorted_context(False):
kdims = [dataset.get_dimension(d) for d in dimensions]
return container_type(grouped_data, kdims=kdims)
else:
return container_type(grouped_data)
@classmethod
def select_mask(cls, dataset, selection):
"""
Given a Dataset object and a dictionary with dimension keys and
selection keys (i.e. tuple ranges, slices, sets, lists, or literals)
return a boolean mask over the rows in the Dataset object that
have been selected.
"""
mask = None
for dim, sel in selection.items():
if isinstance(sel, tuple):
sel = slice(*sel)
arr = cls.values(dataset, dim, keep_index=True)
if util.isdatetime(arr) and util.pd:
try:
sel = util.parse_datetime_selection(sel)
except:
pass
new_masks = []
if isinstance(sel, slice):
with warnings.catch_warnings():
warnings.filterwarnings('ignore', r'invalid value encountered')
if sel.start is not None:
new_masks.append(sel.start <= arr)
if sel.stop is not None:
new_masks.append(arr < sel.stop)
if not new_masks:
continue
new_mask = new_masks[0]
for imask in new_masks[1:]:
new_mask &= imask
elif isinstance(sel, (set, list)):
for v in sel:
new_masks.append(arr==v)
if not new_masks:
continue
new_mask = new_masks[0]
for imask in new_masks[1:]:
new_mask |= imask
elif callable(sel):
new_mask = sel(arr)
else:
new_mask = arr == sel
if mask is None:
mask = new_mask
else:
mask &= new_mask
return mask
@classmethod
def select(cls, dataset, selection_mask=None, **selection):
df = dataset.data
if selection_mask is None:
selection_mask = cls.select_mask(dataset, selection)
indexed = cls.indexed(dataset, selection)
if selection_mask is not None:
df = df.loc[selection_mask]
if indexed and len(df) == 1 and len(dataset.vdims) == 1:
return df[dataset.vdims[0].name].iloc[0]
return df
@classmethod
def concat_fn(cls, dataframes, **kwargs):
import cudf
return cudf.concat(dataframes, **kwargs)
@classmethod
def add_dimension(cls, dataset, dimension, dim_pos, values, vdim):
data = dataset.data.copy()
if dimension.name not in data:
data[dimension.name] = values
return data
@classmethod
def aggregate(cls, dataset, dimensions, function, **kwargs):
data = dataset.data
cols = [d.name for d in dataset.kdims if d in dimensions]
vdims = dataset.dimensions('value', label='name')
reindexed = data[cols+vdims]
agg = function.__name__
if len(dimensions):
agg_map = {'amin': 'min', 'amax': 'max'}
agg = agg_map.get(agg, agg)
grouped = reindexed.groupby(cols, sort=False)
if not hasattr(grouped, agg):
raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
df = getattr(grouped, agg)().reset_index()
else:
agg_map = {'amin': 'min', 'amax': 'max', 'size': 'count'}
agg = agg_map.get(agg, agg)
if not hasattr(reindexed, agg):
raise ValueError('%s aggregation is not supported on cudf DataFrame.' % agg)
agg = getattr(reindexed, agg)()
data = dict(((col, [v]) for col, v in zip(agg.index.values_host, agg.to_array())))
df = util.pd.DataFrame(data, columns=list(agg.index.values_host))
dropped = []
for vd in vdims:
if vd not in df.columns:
dropped.append(vd)
return df, dropped
@classmethod
def iloc(cls, dataset, index):
import cudf
rows, cols = index
scalar = False
columns = list(dataset.data.columns)
if isinstance(cols, slice):
cols = [d.name for d in dataset.dimensions()][cols]
elif np.isscalar(cols):
scalar = np.isscalar(rows)
cols = [dataset.get_dimension(cols).name]
else:
cols = [dataset.get_dimension(d).name for d in index[1]]
col_index = [columns.index(c) for c in cols]
if np.isscalar(rows):
rows = [rows]
if scalar:
return dataset.data[cols[0]].iloc[rows[0]]
result = dataset.data.iloc[rows, col_index]
# cuDF does not handle single rows and cols indexing correctly
# as of cudf=0.10.0 so we have to convert Series back to DataFrame
if isinstance(result, cudf.Series):
if len(cols) == 1:
result = result.to_frame(cols[0])
else:
result = result.to_frame().T
return result
@classmethod
def sort(cls, dataset, by=[], reverse=False):
cols = [dataset.get_dimension(d, strict=True).name for d in by]
return dataset.data.sort_values(by=cols, ascending=not reverse)
@classmethod
def dframe(cls, dataset, dimensions):
if dimensions:
return dataset.data[dimensions].to_pandas()
else:
return dataset.data.to_pandas()
Interface.register(cuDFInterface)
| ioam/holoviews | holoviews/core/data/cudf.py | Python | bsd-3-clause | 12,346 |
# Copyright (c) 2006-2009 The Trustees of Indiana University.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# - Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# - Neither the Indiana University nor the names of its contributors may be used
# to endorse or promote products derived from this software without specific
# prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from corepy.spre.spe import Instruction, DispatchInstruction, Register
from spu_insts import *
__doc__="""
ISA for the Cell Broadband Engine's SPU.
"""
class lqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':452}
cycles = (1, 6, 0)
class stqx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':324}
cycles = (1, 6, 0)
class cbx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':468}
cycles = (1, 4, 0)
class chx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':469}
cycles = (1, 4, 0)
class cwx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':470}
cycles = (1, 4, 0)
class cdx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':471}
cycles = (1, 4, 0)
class ah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':200}
cycles = (0, 2, 0)
class a(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':192}
cycles = (0, 2, 0)
class sfh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':72}
cycles = (0, 2, 0)
class sf(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':64}
cycles = (0, 2, 0)
class addx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':832}
cycles = (0, 2, 0)
class cg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':194}
cycles = (0, 2, 0)
class cgx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':834}
cycles = (0, 2, 0)
class sfx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':833}
cycles = (0, 2, 0)
class bg(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':66}
cycles = (0, 2, 0)
class bgx(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':835}
cycles = (0, 2, 0)
class mpy(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':964}
cycles = (0, 7, 0)
class mpyu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':972}
cycles = (0, 7, 0)
class mpyh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':965}
cycles = (0, 7, 0)
class mpys(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':967}
cycles = (0, 7, 0)
class mpyhh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':966}
cycles = (0, 7, 0)
class mpyhha(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':838}
cycles = (0, 7, 0)
class mpyhhu(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':974}
cycles = (0, 7, 0)
class mpyhhau(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':846}
cycles = (0, 7, 0)
class clz(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':677}
cycles = (0, 2, 0)
class cntb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':692}
cycles = (0, 4, 0)
class fsmb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':438}
cycles = (1, 4, 0)
class fsmh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':437}
cycles = (1, 4, 0)
class fsm(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':436}
cycles = (1, 4, 0)
class gbb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':434}
cycles = (1, 4, 0)
class gbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':433}
cycles = (1, 4, 0)
class gb(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':432}
cycles = (1, 4, 0)
class avgb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':211}
cycles = (0, 4, 0)
class absdb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':83}
cycles = (0, 4, 0)
class sumb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':595}
cycles = (0, 4, 0)
class xsbh(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':694}
cycles = (0, 2, 0)
class xshw(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':686}
cycles = (0, 2, 0)
class xswd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':678}
cycles = (0, 2, 0)
class and_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class andc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':705}
cycles = (0, 2, 0)
class or_(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':65}
cycles = (0, 2, 0)
class orc(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':713}
cycles = (0, 2, 0)
class orx(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':496}
cycles = (1, 4, 0)
class xor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':577}
cycles = (0, 2, 0)
class nand(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':201}
cycles = (0, 2, 0)
class nor(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':73}
cycles = (0, 2, 0)
class eqv(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':585}
cycles = (0, 2, 0)
class shlh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':95}
cycles = (0, 4, 0)
class shl(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':91}
cycles = (0, 4, 0)
class shlqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':475}
cycles = (1, 4, 0)
class shlqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':479}
cycles = (1, 4, 0)
class shlqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':463}
cycles = (1, 4, 0)
class roth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':92}
cycles = (0, 4, 0)
class rot(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':88}
cycles = (0, 4, 0)
class rotqby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':476}
cycles = (1, 4, 0)
class rotqbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':460}
cycles = (1, 4, 0)
class rotqbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':472}
cycles = (1, 4, 0)
class rothm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':93}
cycles = (0, 4, 0)
class rotm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':89}
cycles = (0, 4, 0)
class rotqmby(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':477}
cycles = (1, 4, 0)
class rotqmbybi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':461}
cycles = (1, 4, 0)
class rotqmbi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':473}
cycles = (1, 4, 0)
class rotmah(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':94}
cycles = (0, 4, 0)
class rotma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':90}
cycles = (0, 4, 0)
class heq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':984}
cycles = (0, 2, 0)
class hgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':600}
cycles = (0, 2, 0)
class hlgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':728}
cycles = (0, 2, 0)
class ceqb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':976}
cycles = (0, 2, 0)
class ceqh(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':968}
cycles = (0, 2, 0)
class ceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':960}
cycles = (0, 2, 0)
class cgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':592}
cycles = (0, 2, 0)
class cgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':584}
cycles = (0, 2, 0)
class cgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':576}
cycles = (0, 2, 0)
class clgtb(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':720}
cycles = (0, 2, 0)
class clgth(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':712}
cycles = (0, 2, 0)
class clgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':704}
cycles = (0, 2, 0)
class bi(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':424}
cycles = (1, 4, 0)
class iret(Instruction):
machine_inst = OPCD_A_D_E
params = {'OPCD':426}
cycles = (1, 4, 0)
class bisled(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':427}
cycles = (1, 4, 0)
class bisl(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':425}
cycles = (1, 4, 0)
class biz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':296}
cycles = (1, 4, 0)
class binz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':297}
cycles = (1, 4, 0)
class bihz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':294}
cycles = (1, 4, 0)
class bihnz(Instruction):
machine_inst = OPCD_A_T_D_E
params = {'OPCD':299}
cycles = (1, 4, 0)
# TODO - can we check that if P is set then RO is zero as required?
class hbr(DispatchInstruction):
cycles = (1, 15, 0)
dispatch = (
(OPCD_RO_A_P, {'OPCD':428}),
(OPCD_LBL9_A_P, {'OPCD':428}))
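    # The two-entry dispatch tuple lists alternative encodings for the same
    # opcode; corepy's dispatch machinery is assumed to select the first form
    # whose operands match, so hbr can take either a register/offset pair
    # (OPCD_RO_A_P) or a label (OPCD_LBL9_A_P).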
class fa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':708}
cycles = (0, 6, 0)
class dfa(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':716}
cycles = (0, 13, 6)
class fs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':709}
cycles = (0, 6, 0)
class dfs(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':717}
cycles = (0, 13, 6)
class fm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':710}
cycles = (0, 6, 0)
class dfm(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':718}
cycles = (0, 13, 6)
class dfma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':860}
cycles = (0, 13, 6)
class dfnms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':862}
cycles = (0, 13, 6)
class dfms(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':861}
cycles = (0, 13, 6)
class dfnma(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':863}
cycles = (0, 13, 6)
class frest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':440}
cycles = (1, 4, 0)
class frsqest(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':441}
cycles = (1, 4, 0)
class fi(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':980}
cycles = (0, 7, 0)
class frds(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':953}
cycles = (0, 13, 6)
class fesd(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':952}
cycles = (0, 13, 6)
class fceq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':962}
cycles = (0, 2, 0)
class fcmeq(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':970}
cycles = (0, 2, 0)
class fcgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':706}
cycles = (0, 2, 0)
class fcmgt(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':714}
cycles = (0, 2, 0)
class fscrwr(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':954}
cycles = (0, 7, 0)
class fscrrd(Instruction):
machine_inst = OPCD_T
params = {'OPCD':920}
cycles = (0, 13, 6)
class stop(Instruction):
machine_inst = OPCD_STOP_SIG
params = {'OPCD':0}
cycles = (1, 4, 0)
class stopd(Instruction):
machine_inst = OPCD_B_A_T
params = {'OPCD':320}
cycles = (1, 4, 0)
class lnop(Instruction):
machine_inst = OPCD
params = {'OPCD':1}
cycles = (1, 0, 0)
class nop(Instruction):
machine_inst = OPCD_T
params = {'OPCD':513}
cycles = (0, 0, 0)
class sync(Instruction):
machine_inst = OPCD_CF
params = {'OPCD':2}
cycles = (1, 4, 0)
class dsync(Instruction):
machine_inst = OPCD
params = {'OPCD':3}
cycles = (1, 4, 0)
class mfspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':12}
cycles = (1, 6, 0)
class mtspr(Instruction):
machine_inst = OPCD_SA_T
params = {'OPCD':268}
cycles = (1, 6, 0)
class rdch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':13}
cycles = (1, 6, 0)
class rchcnt(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':15}
cycles = (1, 6, 0)
class wrch(Instruction):
machine_inst = OPCD_A_T
params = {'OPCD':269}
cycles = (1, 6, 0)
class mpya(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':12}
cycles = (0, 7, 0)
class selb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':8}
cycles = (0, 2, 0)
class shufb(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':11}
cycles = (1, 4, 0)
class fma(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':14}
cycles = (0, 6, 0)
class fnms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':13}
cycles = (0, 6, 0)
class fms(Instruction):
machine_inst = OPCD_T_B_A_C
params = {'OPCD':15}
cycles = (0, 6, 0)
class cbd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':500}
cycles = (1, 4, 0)
class chd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':501}
cycles = (1, 4, 0)
class cwd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':502}
cycles = (1, 4, 0)
class cdd(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':503}
cycles = (1, 4, 0)
class shlhi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':127}
cycles = (0, 4, 0)
class shli(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':123}
cycles = (0, 4, 0)
class shlqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':507}
cycles = (1, 4, 0)
class shlqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':511}
cycles = (1, 4, 0)
class rothi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':124}
cycles = (0, 4, 0)
class roti(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':120}
cycles = (0, 4, 0)
class rotqbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':508}
cycles = (1, 4, 0)
class rotqbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':504}
cycles = (1, 4, 0)
class rothmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':125}
cycles = (0, 4, 0)
class rotmi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':121}
cycles = (0, 4, 0)
class rotqmbyi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':509}
cycles = (1, 4, 0)
class rotqmbii(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':505}
cycles = (1, 4, 0)
class rotmahi(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':126}
cycles = (0, 4, 0)
class rotmai(Instruction):
machine_inst = OPCD_I7_A_T
params = {'OPCD':122}
cycles = (0, 4, 0)
class csflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':474}
cycles = (0, 7, 0)
class cflts(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':472}
cycles = (0, 7, 0)
class cuflt(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':475}
cycles = (0, 7, 0)
class cfltu(Instruction):
machine_inst = OPCD_I8_A_T
params = {'OPCD':473}
cycles = (0, 7, 0)
class lqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':52}
cycles = (1, 6, 0)
class stqd(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':36}
cycles = (1, 6, 0)
class ahi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':29}
cycles = (0, 2, 0)
class ai(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':28}
cycles = (0, 2, 0)
class sfhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':13}
cycles = (0, 2, 0)
class sfi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':12}
cycles = (0, 2, 0)
class mpyi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':116}
cycles = (0, 7, 0)
class mpyui(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':117}
cycles = (0, 7, 0)
class andbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':22}
cycles = (0, 2, 0)
class andhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':21}
cycles = (0, 2, 0)
class andi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':20}
cycles = (0, 2, 0)
class orbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':6}
cycles = (0, 2, 0)
class orhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':5}
cycles = (0, 2, 0)
class ori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':4}
cycles = (0, 2, 0)
class xorbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':70}
cycles = (0, 2, 0)
class xorhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':69}
cycles = (0, 2, 0)
class xori(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':68}
cycles = (0, 2, 0)
class heqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':127}
cycles = (0, 2, 0)
class hgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':79}
cycles = (0, 2, 0)
class hlgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':95}
cycles = (0, 2, 0)
class ceqbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':126}
cycles = (0, 2, 0)
class ceqhi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':125}
cycles = (0, 2, 0)
class ceqi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':124}
cycles = (0, 2, 0)
class cgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':78}
cycles = (0, 2, 0)
class cgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':77}
cycles = (0, 2, 0)
class cgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':76}
cycles = (0, 2, 0)
class clgtbi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':94}
cycles = (0, 2, 0)
class clgthi(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':93}
cycles = (0, 2, 0)
class clgti(Instruction):
machine_inst = OPCD_I10_A_T
params = {'OPCD':92}
cycles = (0, 2, 0)
class lqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':97}
cycles = (1, 6, 0)
class lqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':103}
cycles = (1, 6, 0)
class stqa(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':65}
cycles = (1, 6, 0)
class stqr(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':71}
cycles = (1, 6, 0)
class ilh(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':131}
cycles = (0, 2, 0)
class ilhu(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':130}
cycles = (0, 2, 0)
class il(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':129}
cycles = (0, 2, 0)
class iohl(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':193}
cycles = (0, 2, 0)
class fsmbi(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':101}
cycles = (1, 4, 0)
class br(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':100}),
(OPCD_LBL16, {'OPCD':100}))
# TODO - how can I do absolute branches?
class bra(Instruction):
machine_inst = OPCD_I16
params = {'OPCD':96}
cycles = (1, 4, 0)
# TODO - I16 has two zero bits appended, do I handle this correctly?
# What is the correct way, anyway?
class brsl(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':102}),
(OPCD_LBL16_T, {'OPCD':102}))
class brasl(Instruction):
machine_inst = OPCD_I16_T
params = {'OPCD':98}
cycles = (1, 4, 0)
class brnz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':66}),
(OPCD_LBL16_T, {'OPCD':66}))
class brz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16_T, {'OPCD':64}),
(OPCD_LBL16_T, {'OPCD':64}))
class brhnz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':70}),
(OPCD_LBL16, {'OPCD':70}))
class brhz(DispatchInstruction):
cycles = (1, 4, 0)
dispatch = (
(OPCD_I16, {'OPCD':68}),
(OPCD_LBL16, {'OPCD':68}))
class hbra(Instruction):
machine_inst = OPCD_LBL9_I16
params = {'OPCD':8}
cycles = (1, 15, 0)
class hbrr(DispatchInstruction):
cycles = (1, 15, 0)
dispatch = (
(OPCD_ROA_I16, {'OPCD':9}),
(OPCD_LBL9_LBL16, {'OPCD':9}))
class ila(Instruction):
machine_inst = OPCD_I18_T
params = {'OPCD':33}
cycles = (0, 2, 0)
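# --- Illustrative usage (a minimal sketch, kept as comments) ---
# The classes above only bind SPU mnemonics to opcodes, operand layouts and
# pipeline timings; assembling and running them goes through corepy's
# synthetic-programming layer. The InstructionStream/Processor names below
# follow corepy's examples and are assumptions that may differ by version:
#
#   import corepy.arch.spu.isa as spu
#   import corepy.arch.spu.platform as env
#
#   code = env.InstructionStream()
#   code.add(spu.il(code.gp_return, 12))                  # load immediate 12
#   code.add(spu.ai(code.gp_return, code.gp_return, 30))  # add immediate 30
#
#   proc = env.Processor()
#   print proc.execute(code)                              # expect 42 on SPU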
| matthiaskramm/corepy | corepy/arch/spu/isa/spu_isa.py | Python | bsd-3-clause | 22,294 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2015, Alcatel-Lucent Inc, 2017 Nokia
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from .fetchers import NUPermissionsFetcher
from .fetchers import NUMetadatasFetcher
from .fetchers import NUGlobalMetadatasFetcher
from bambou import NURESTObject
class NUVMResync(NURESTObject):
""" Represents a VMResync in the VSD
Notes:
Provide information about the state of a VM resync request.
"""
__rest_name__ = "resync"
__resource_name__ = "resync"
## Constants
CONST_STATUS_IN_PROGRESS = "IN_PROGRESS"
CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL"
CONST_STATUS_SUCCESS = "SUCCESS"
CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE"
def __init__(self, **kwargs):
""" Initializes a VMResync instance
Notes:
                You can specify all parameters while calling this method.
A special argument named `data` will enable you to load the
object from a Python dictionary
Examples:
>>> vmresync = NUVMResync(id=u'xxxx-xxx-xxx-xxx', name=u'VMResync')
>>> vmresync = NUVMResync(data=my_dict)
"""
super(NUVMResync, self).__init__()
# Read/Write Attributes
self._last_request_timestamp = None
self._last_time_resync_initiated = None
self._last_updated_by = None
self._last_updated_date = None
self._embedded_metadata = None
self._entity_scope = None
self._creation_date = None
self._status = None
self._owner = None
self._external_id = None
self.expose_attribute(local_name="last_request_timestamp", remote_name="lastRequestTimestamp", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_time_resync_initiated", remote_name="lastTimeResyncInitiated", attribute_type=int, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="last_updated_date", remote_name="lastUpdatedDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="embedded_metadata", remote_name="embeddedMetadata", attribute_type=list, is_required=False, is_unique=False)
self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL'])
self.expose_attribute(local_name="creation_date", remote_name="creationDate", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="status", remote_name="status", attribute_type=str, is_required=False, is_unique=False, choices=[u'IN_PROGRESS', u'SUCCESS'])
self.expose_attribute(local_name="owner", remote_name="owner", attribute_type=str, is_required=False, is_unique=False)
self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True)
# Fetchers
self.permissions = NUPermissionsFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child")
self._compute_args(**kwargs)
# Properties
@property
def last_request_timestamp(self):
""" Get last_request_timestamp value.
Notes:
Time of the last timestamp received
This attribute is named `lastRequestTimestamp` in VSD API.
"""
return self._last_request_timestamp
@last_request_timestamp.setter
def last_request_timestamp(self, value):
""" Set last_request_timestamp value.
Notes:
Time of the last timestamp received
This attribute is named `lastRequestTimestamp` in VSD API.
"""
self._last_request_timestamp = value
@property
def last_time_resync_initiated(self):
""" Get last_time_resync_initiated value.
Notes:
Time that the resync was initiated
This attribute is named `lastTimeResyncInitiated` in VSD API.
"""
return self._last_time_resync_initiated
@last_time_resync_initiated.setter
def last_time_resync_initiated(self, value):
""" Set last_time_resync_initiated value.
Notes:
Time that the resync was initiated
This attribute is named `lastTimeResyncInitiated` in VSD API.
"""
self._last_time_resync_initiated = value
@property
def last_updated_by(self):
""" Get last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
return self._last_updated_by
@last_updated_by.setter
def last_updated_by(self, value):
""" Set last_updated_by value.
Notes:
ID of the user who last updated the object.
This attribute is named `lastUpdatedBy` in VSD API.
"""
self._last_updated_by = value
@property
def last_updated_date(self):
""" Get last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
return self._last_updated_date
@last_updated_date.setter
def last_updated_date(self, value):
""" Set last_updated_date value.
Notes:
Time stamp when this object was last updated.
This attribute is named `lastUpdatedDate` in VSD API.
"""
self._last_updated_date = value
@property
def embedded_metadata(self):
""" Get embedded_metadata value.
Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a limited number of Metadata objects is returned, based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
return self._embedded_metadata
@embedded_metadata.setter
def embedded_metadata(self, value):
""" Set embedded_metadata value.
Notes:
                Metadata objects associated with this entity. This will contain a list of Metadata objects if the API request is made using the special flag to enable the embedded Metadata feature. Only a limited number of Metadata objects is returned, based on the value set in the system configuration.
This attribute is named `embeddedMetadata` in VSD API.
"""
self._embedded_metadata = value
@property
def entity_scope(self):
""" Get entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
return self._entity_scope
@entity_scope.setter
def entity_scope(self, value):
""" Set entity_scope value.
Notes:
Specify if scope of entity is Data center or Enterprise level
This attribute is named `entityScope` in VSD API.
"""
self._entity_scope = value
@property
def creation_date(self):
""" Get creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
return self._creation_date
@creation_date.setter
def creation_date(self, value):
""" Set creation_date value.
Notes:
Time stamp when this object was created.
This attribute is named `creationDate` in VSD API.
"""
self._creation_date = value
@property
def status(self):
""" Get status value.
Notes:
Status of the resync
"""
return self._status
@status.setter
def status(self, value):
""" Set status value.
Notes:
Status of the resync
"""
self._status = value
@property
def owner(self):
""" Get owner value.
Notes:
Identifies the user that has created this object.
"""
return self._owner
@owner.setter
def owner(self, value):
""" Set owner value.
Notes:
Identifies the user that has created this object.
"""
self._owner = value
@property
def external_id(self):
""" Get external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
return self._external_id
@external_id.setter
def external_id(self, value):
""" Set external_id value.
Notes:
External object ID. Used for integration with third party systems
This attribute is named `externalID` in VSD API.
"""
self._external_id = value
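# --- Illustrative usage (a minimal sketch, kept as comments) ---
# Reading a resync status through an authenticated bambou session; the id
# below is a placeholder and fetch() is assumed from bambou's NURESTObject:
#
#   resync = NUVMResync(id=u'xxxx-xxx-xxx-xxx')
#   resync.fetch()
#   if resync.status == NUVMResync.CONST_STATUS_IN_PROGRESS:
#       print(resync.last_time_resync_initiated)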
| nuagenetworks/vspk-python | vspk/v6/nuvmresync.py | Python | bsd-3-clause | 11,928 |
from mock import patch
from nose.tools import eq_
from helper import TestCase
import appvalidator.submain as submain
class TestSubmainPackage(TestCase):
@patch("appvalidator.submain.test_inner_package",
lambda x, z: "success")
def test_package_pass(self):
"Tests the test_package function with simple data"
self.setup_err()
name = "tests/resources/submain/install_rdf.xpi"
with open(name) as pack:
result = submain.test_package(self.err, pack, name)
self.assert_silent()
eq_(result, "success")
@patch("appvalidator.submain.test_inner_package",
lambda x, z: "success")
def test_package_corrupt(self):
"Tests the test_package function fails with a non-zip"
self.setup_err()
name = "tests/resources/junk.xpi"
with open(name) as pack:
result = submain.test_package(self.err, pack, name)
self.assert_failed()
    def test_package_corrupt_file(self):
        "Tests that the test_package function fails with a corrupt file"
self.setup_err()
name = "tests/resources/corrupt.xpi"
result = submain.test_package(self.err, name, name)
self.assert_failed(with_errors=True, with_warnings=True)
| mattbasta/perfalator | tests/test_submain_package.py | Python | bsd-3-clause | 1,268 |
# Copyright (C) 2010 CENATIC: Centro Nacional de Referencia de
# Aplicacion de las TIC basadas en Fuentes Abiertas, Spain.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# Neither the name of the CENATIC nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# You may contact the copyright holder at: Fundacion CENATIC, Edificio
# de Servicios Sociales: C/ Vistahermosa, 1, 3ra planta, 06200
# Almendralejo (Badajoz), Spain
from DBSlayer import Query
def get_type_name (type_id):
l = get_type (type_id)
if not l:
return None
return l['name']
def get_type (type_id):
q = "SELECT id, type "\
"FROM asset_types WHERE id=%(type_id)s;" % locals()
query = Query(q)
if len(query) != 1:
return None
ret = {'id': type_id,
'name': query['type'][0]}
return ret
def get_types ():
q = "SELECT id, type "\
"FROM asset_types;" % locals()
query = Query(q)
if not len(query):
return None
ret = []
for x in query:
d={'id': query[x]['id'],
'name': query[x]['type']}
ret.append(d)
return ret
def test ():
import sys
try:
type_id = sys.argv[1]
except IndexError:
print 'Required test parameters: type_id'
sys.exit(1)
print 'Types:', get_types()
print 'type_id %s, type_name %s' % (type_id, get_type_name(type_id))
print get_type(type_id),
if __name__ == '__main__':
test()
| helix84/activae | src/Type.py | Python | bsd-3-clause | 2,833 |
# -*- coding: utf-8 -*-
'''
Production Configurations
- Use djangosecure
- Use mailgun to send emails
- Use redis
'''
from __future__ import absolute_import, unicode_literals
from django.utils import six
from .common import * # noqa
# SECRET CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/settings/#secret-key
# Raises ImproperlyConfigured exception if DJANGO_SECRET_KEY not in os.environ
SECRET_KEY = env("DJANGO_SECRET_KEY")
# This ensures that Django will be able to detect a secure connection
# properly on Heroku.
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
# django-secure
# ------------------------------------------------------------------------------
INSTALLED_APPS += ("djangosecure", )
MIDDLEWARE_CLASSES = (
# Make sure djangosecure.middleware.SecurityMiddleware is listed first
'djangosecure.middleware.SecurityMiddleware',
) + MIDDLEWARE_CLASSES
# set this to 60 seconds and then to 518400 when you can prove it works
SECURE_HSTS_SECONDS = 60
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool("DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
SECURE_FRAME_DENY = env.bool("DJANGO_SECURE_FRAME_DENY", default=True)
SECURE_CONTENT_TYPE_NOSNIFF = env.bool("DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
SECURE_BROWSER_XSS_FILTER = True
SESSION_COOKIE_SECURE = False
SESSION_COOKIE_HTTPONLY = True
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# SITE CONFIGURATION
# ------------------------------------------------------------------------------
# Hosts/domain names that are valid for this site
# See https://docs.djangoproject.com/en/1.6/ref/settings/#allowed-hosts
ALLOWED_HOSTS = ["*"]
# END SITE CONFIGURATION
# EMAIL
# ------------------------------------------------------------------------------
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL',
default='{{cookiecutter.project_name}} <noreply@{{cookiecutter.domain_name}}>')
EMAIL_BACKEND = 'django_mailgun.MailgunBackend'
MAILGUN_ACCESS_KEY = env('DJANGO_MAILGUN_API_KEY')
MAILGUN_SERVER_NAME = env('DJANGO_MAILGUN_SERVER_NAME')
EMAIL_SUBJECT_PREFIX = env("DJANGO_EMAIL_SUBJECT_PREFIX", default='[{{cookiecutter.project_name}}] ')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL)
# TEMPLATE CONFIGURATION
# ------------------------------------------------------------------------------
# See: https://docs.djangoproject.com/en/dev/ref/templates/api/#django.template.loaders.cached.Loader
TEMPLATES[0]['OPTIONS']['loaders'] = [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
]
# CACHE CONFIGURATION
# ------------------------------------------------------------------------------
CACHES = {
'default': {
'BACKEND': 'redis_cache.RedisCache',
'LOCATION': [
'redis:6379',
],
'OPTIONS': {
'DB': 1,
'PARSER_CLASS': 'redis.connection.HiredisParser',
'CONNECTION_POOL_CLASS': 'redis.BlockingConnectionPool',
'CONNECTION_POOL_CLASS_KWARGS': {
'max_connections': 50,
'timeout': 20,
},
'MAX_CONNECTIONS': 1000,
'PICKLE_VERSION': -1,
},
},
}
# ASSET CONFIGURATION
# ------------------------------------------------------------------------------
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
STATIC_ROOT = '/static'
MEDIA_ROOT = '/media'
STATICFILES_DIRS = (
unicode(APPS_DIR.path("static")),
)
{% if cookiecutter.use_celery %}
# CELERY BROKER CONFIGURATION
# ------------------------------------------------------------------------------
BROKER_URL = "amqp://guest:guest@rabbitmq:5672//"
{% endif %}
{% if cookiecutter.use_sentry %}
# SENTRY CONFIGURATION
# ------------------------------------------------------------------------------
RAVEN_CONFIG = {
'dsn': env("SENTRY_URL"),
}
INSTALLED_APPS = INSTALLED_APPS + (
'raven.contrib.django.raven_compat',
)
{% endif %}
# Your production stuff: Below this line define 3rd party library settings
| jayfk/cookiecutter-django-docker | {{cookiecutter.repo_name}}/config/settings/production.py | Python | bsd-3-clause | 4,238 |
# Copyright (c) 2013 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Package containing the different outputs.
Each output type is defined inside a module.
"""
| v-legoff/croissant | croissant/output/__init__.py | Python | bsd-3-clause | 1,636 |
# Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from collections import namedtuple
from blinkpy.common.net.results_fetcher import TestResultsFetcher
BuilderStep = namedtuple('BuilderStep', ['build', 'step_name'])
# TODO(qyearsley): To be consistent with other fake ("mock") classes, this
# could be changed so it's not a subclass of TestResultsFetcher.
class MockTestResultsFetcher(TestResultsFetcher):
def __init__(self):
super(MockTestResultsFetcher, self).__init__()
self._canned_results = {}
self._canned_retry_summary_json = {}
self._webdriver_results = {}
self.fetched_builds = []
self.fetched_webdriver_builds = []
self._layout_test_step_name = 'blink_web_tests (with patch)'
def set_results(self, build, results, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self._canned_results[step] = results
def fetch_results(self, build, full=False, step_name=None):
step_name = step_name or self.get_layout_test_step_name(build)
step = BuilderStep(build=build, step_name=step_name)
self.fetched_builds.append(step)
return self._canned_results.get(step)
def set_results_to_resultdb(self, build, results):
self._canned_results[build.build_id] = results
def fetch_results_from_resultdb(self, host, builds, predicate):
rv = []
for build in builds:
results = self._canned_results.get(build.build_id)
if results:
rv.extend(results)
return rv
def set_webdriver_test_results(self, build, m, results):
self._webdriver_results[(build, m)] = results
def fetch_webdriver_test_results(self, build, m):
self.fetched_webdriver_builds.append((build, m))
return self._webdriver_results.get((build, m))
def set_retry_sumary_json(self, build, content):
self._canned_retry_summary_json[build] = content
def fetch_retry_summary_json(self, build):
return self._canned_retry_summary_json.get(build)
def set_layout_test_step_name(self, name):
self._layout_test_step_name = name
def get_layout_test_step_name(self, build):
return self._layout_test_step_name
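# --- Illustrative usage (a minimal sketch, kept as comments) ---
# How a test might drive the mock; ``build`` stands for a blinkpy Build
# value (builder name plus build number), assumed here rather than imported:
#
#   fetcher = MockTestResultsFetcher()
#   fetcher.set_results(build, canned_results)
#   assert fetcher.fetch_results(build) is canned_results
#   assert fetcher.fetched_builds[0].build == build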
| ric2b/Vivaldi-browser | chromium/third_party/blink/tools/blinkpy/common/net/results_fetcher_mock.py | Python | bsd-3-clause | 3,794 |
# -*- coding: utf-8 -*-
import access
import util
@auth.requires_login()
def index():
"""Produces a list of the feedback obtained for a given venue,
or for all venues."""
venue_id = request.args(0)
if venue_id == 'all':
q = (db.submission.user == get_user_email())
else:
q = ((db.submission.user == get_user_email())
& (db.submission.venue_id == venue_id))
db.submission.id.represent = lambda x, r: A(T('View'), _class='btn', _href=URL('submission', 'view_own_submission', args=['v', r.id]))
db.submission.id.label = T('Submission')
db.submission.id.readable = True
db.submission.venue_id.readable = True
grid = SQLFORM.grid(q,
fields=[db.submission.id, db.submission.venue_id,
db.submission.date_created, db.submission.date_updated, ],
csv=False, details=False, create=False, editable=False, deletable=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid)
@auth.requires_login()
def view_feedback():
"""Shows detailed feedback for a user in a venue.
This controller accepts various types of arguments:
* 's', submission_id
* 'u', venue_id, username
* 'v', venue_id (in which case, shows own submission to that venue)
"""
if len(request.args) == 0:
redirect(URL('default', 'index'))
if request.args(0) == 's':
# submission_id
n_args = 2
subm = db.submission(request.args(1)) or redirect(URL('default', 'index'))
c = db.venue(subm.venue_id) or redirect(URL('default', 'index'))
username = subm.user
elif request.args(0) == 'v':
# venue_id
n_args = 2
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = get_user_email()
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
else:
# venue_id, username
n_args = 3
c = db.venue(request.args(1)) or redirect(URL('default', 'index'))
username = request.args(2) or redirect(URL('default', 'index'))
subm = db((db.submission.user == username) & (db.submission.venue_id == c.id)).select().first()
# Checks permissions.
props = db(db.user_properties.user == get_user_email()).select().first()
    if props is None:
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
is_author = (username == get_user_email())
can_view_feedback = access.can_view_feedback(c, props) or is_author
if (not can_view_feedback):
session.flash = T('Not authorized.')
redirect(URL('default', 'index'))
if not (access.can_view_feedback(c, props) or datetime.utcnow() > c.rate_close_date):
session.flash = T('The ratings are not yet available.')
redirect(URL('feedback', 'index', args=['all']))
# Produces the link to edit the feedback.
edit_feedback_link = None
if subm is not None and access.can_observe(c, props):
edit_feedback_link = A(T('Edit feedback'), _class='btn',
_href=URL('submission', 'edit_feedback', args=[subm.id]))
# Produces the download link.
download_link = None
if subm is not None and c.allow_file_upload and subm.content is not None:
if is_author:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_author', args=[subm.id, subm.content]))
else:
download_link = A(T('Download'), _class='btn',
_href=URL('submission', 'download_manager', args=[subm.id, subm.content]))
venue_link = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
# Submission link.
subm_link = None
if subm is not None and c.allow_link_submission:
subm_link = A(subm.link, _href=subm.link)
# Submission content and feedback.
subm_comment = None
subm_feedback = None
if subm is not None:
raw_subm_comment = keystore_read(subm.comment)
if raw_subm_comment is not None and len(raw_subm_comment) > 0:
subm_comment = MARKMIN(keystore_read(subm.comment))
raw_feedback = keystore_read(subm.feedback)
if raw_feedback is not None and len(raw_feedback) > 0:
subm_feedback = MARKMIN(raw_feedback)
# Display settings.
db.submission.percentile.readable = True
db.submission.comment.readable = True
db.submission.feedback.readable = True
if access.can_observe(c, props):
db.submission.quality.readable = True
db.submission.error.readable = True
# Reads the grade information.
submission_grade = submission_percentile = None
review_grade = review_percentile = user_reputation = None
final_grade = final_percentile = None
assigned_grade = None
if c.grades_released:
grade_info = db((db.grades.user == username) & (db.grades.venue_id == c.id)).select().first()
if grade_info is not None:
submission_grade = represent_quality(grade_info.submission_grade, None)
submission_percentile = represent_percentage(grade_info.submission_percentile, None)
review_grade = represent_quality_10(grade_info.accuracy, None)
review_percentile = represent_percentage(grade_info.accuracy_percentile, None)
user_reputation = represent_01_as_percentage(grade_info.reputation, None)
final_grade = represent_quality(grade_info.grade, None)
final_percentile = represent_percentage(grade_info.percentile, None)
assigned_grade = represent_quality(grade_info.assigned_grade, None)
# Makes a grid of comments.
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.rejected.readable = True
db.task.helpfulness.readable = db.task.helpfulness.writable = True
# Prevent editing the comments; the only thing editable should be the "is bogus" field.
db.task.comments.writable = False
db.task.comments.readable = True
ranking_link = None
if access.can_observe(c, props):
db.task.user.readable = True
db.task.completed_date.readable = True
links = [
dict(header=T('Review details'), body= lambda r:
A(T('View'), _class='btn', _href=URL('ranking', 'view_comparison', args=[r.id]))),
]
details = False
if subm is not None:
ranking_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_submission', args=[subm.id]))
reviews_link = A(T('details'), _href=URL('ranking', 'view_comparisons_given_user', args=[username, c.id]))
db.task.user.represent = lambda v, r: A(v, _href=URL('ranking', 'view_comparisons_given_user',
args=[v, c.id], user_signature=True))
else:
user_reputation = None
links = [
dict(header=T('Review feedback'), body = lambda r:
A(T('Give feedback'), _class='btn',
_href=URL('feedback', 'reply_to_review', args=[r.id], user_signature=True))),
]
details = False
ranking_link = None
reviews_link = None
if subm is not None:
q = ((db.task.submission_id == subm.id) & (db.task.is_completed == True))
# q = (db.task.submission_id == subm.id)
else:
q = (db.task.id == -1)
grid = SQLFORM.grid(q,
fields=[db.task.id, db.task.user, db.task.rejected, db.task.comments, db.task.helpfulness, ],
details = details,
csv=False, create=False, editable=False, deletable=False, searchable=False,
links=links,
args=request.args[:n_args],
maxtextlength=24,
)
return dict(subm=subm, download_link=download_link, subm_link=subm_link, username=username,
subm_comment=subm_comment, subm_feedback=subm_feedback,
edit_feedback_link=edit_feedback_link,
is_admin=is_user_admin(),
submission_grade=submission_grade, submission_percentile=submission_percentile,
review_grade=review_grade, review_percentile=review_percentile,
user_reputation=user_reputation,
final_grade=final_grade, final_percentile=final_percentile,
assigned_grade=assigned_grade,
venue_link=venue_link, grid=grid, ranking_link=ranking_link,
reviews_link=reviews_link)
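# Illustrative URL forms accepted by view_feedback (ids and usernames are
# placeholders):
#   .../feedback/view_feedback/s/<submission_id>        -> by submission
#   .../feedback/view_feedback/v/<venue_id>             -> own submission
#   .../feedback/view_feedback/u/<venue_id>/<username>  -> another user's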
@auth.requires_signature()
def reply_to_review():
t = db.task(request.args(0)) or redirect(URL('default', 'index'))
db.task.submission_name.readable = False
db.task.assigned_date.readable = False
db.task.completed_date.readable = False
db.task.comments.readable = False
db.task.helpfulness.readable = db.task.helpfulness.writable = True
db.task.feedback.readable = db.task.feedback.writable = True
form = SQLFORM(db.task, record=t)
form.vars.feedback = keystore_read(t.feedback)
if form.process(onvalidation=validate_review_feedback(t)).accepted:
session.flash = T('Updated.')
redirect(URL('feedback', 'view_feedback', args=['s', t.submission_id]))
link_to_submission = A(T('View submission'), _href=URL('submission', 'view_own_submission', args=['v', t.submission_id]))
review_comments = MARKMIN(keystore_read(t.comments))
return dict(form=form, link_to_submission=link_to_submission, review_comments=review_comments)
def validate_review_feedback(t):
def f(form):
if not form.errors:
feedback_id = keystore_update(t.feedback, form.vars.feedback)
form.vars.feedback = feedback_id
return f
@auth.requires_login()
def view_my_reviews():
"""This controller displays the reviews a user has written for a venue, along with
the feedback they received."""
c = db.venue(request.args(0)) or redirect(URL('rating', 'review_index'))
link_to_venue = A(c.name, _href=URL('venues', 'view_venue', args=[c.id]))
link_to_eval = A(T('My evaluation in this venue'), _class='btn',
_href=URL('feedback', 'view_feedback', args=['v', c.id]))
q = ((db.task.user == get_user_email()) & (db.task.venue_id == c.id))
db.task.rejected.readable = True
db.task.helpfulness.readable = True
db.task.comments.readable = True
db.task.feedback.readable = True
# To prevent chopping
db.task.submission_name.represent = represent_text_field
grid = SQLFORM.grid(q,
fields=[db.task.submission_name, db.task.rejected, db.task.helpfulness],
details=True,
editable=False, deletable=False, create=False, searchable=False,
csv=False,
args=request.args[:1],
maxtextlength=24,
)
return dict(grid=grid, link_to_venue=link_to_venue, link_to_eval=link_to_eval)
| lucadealfaro/crowdranker | controllers/feedback.py | Python | bsd-3-clause | 10,966 |
from __future__ import print_function
import shutil
import os, sys
import time
import logging
from .loaders import PythonLoader, YAMLLoader
from .bundle import get_all_bundle_files
from .exceptions import BuildError
from .updater import TimestampUpdater
from .merge import MemoryHunk
from .version import get_manifest
from .cache import FilesystemCache
from .utils import set, StringIO
__all__ = ('CommandError', 'CommandLineEnvironment', 'main')
# logging has WARNING as default level, for the CLI we want INFO. Set this
# as early as possible, so that user customizations will not be overwritten.
logging.getLogger('webassets.script').setLevel(logging.INFO)
class CommandError(Exception):
pass
class Command(object):
"""Base-class for a command used by :class:`CommandLineEnvironment`.
Each command being a class opens up certain possibilities with respect to
subclassing and customizing the default CLI.
"""
def __init__(self, cmd_env):
self.cmd = cmd_env
def __getattr__(self, name):
# Make stuff from cmd environment easier to access
return getattr(self.cmd, name)
def __call__(self, *args, **kwargs):
raise NotImplementedError()
class BuildCommand(Command):
def __call__(self, bundles=None, output=None, directory=None, no_cache=None,
manifest=None, production=None):
"""Build assets.
``bundles``
A list of bundle names. If given, only this list of bundles
should be built.
``output``
List of (bundle, filename) 2-tuples. If given, only these
bundles will be built, using the custom output filenames.
Cannot be used with ``bundles``.
``directory``
Custom output directory to use for the bundles. The original
basenames defined in the bundle ``output`` attribute will be
used. If the ``output`` of the bundles are pointing to different
directories, they will be offset by their common prefix.
Cannot be used with ``output``.
``no_cache``
If set, a cache (if one is configured) will not be used.
``manifest``
If set, the given manifest instance will be used, instead of
any that might have been configured in the Environment. The value
passed will be resolved through ``get_manifest()``. If this fails,
a file-based manifest will be used using the given value as the
filename.
``production``
If set to ``True``, then :attr:`Environment.debug`` will forcibly
be disabled (set to ``False``) during the build.
"""
# Validate arguments
if bundles and output:
raise CommandError(
'When specifying explicit output filenames you must '
'do so for all bundles you want to build.')
if directory and output:
raise CommandError('A custom output directory cannot be '
'combined with explicit output filenames '
'for individual bundles.')
if production:
# TODO: Reset again (refactor commands to be classes)
self.environment.debug = False
# TODO: Oh how nice it would be to use the future options stack.
if manifest is not None:
try:
manifest = get_manifest(manifest, env=self.environment)
except ValueError:
manifest = get_manifest(
# abspath() is important, or this will be considered
# relative to Environment.directory.
"file:%s" % os.path.abspath(manifest),
env=self.environment)
self.environment.manifest = manifest
# Use output as a dict.
if output:
output = dict(output)
# Validate bundle names
bundle_names = bundles if bundles else (output.keys() if output else [])
for name in bundle_names:
if not name in self.environment:
raise CommandError(
                    'I do not know a bundle named "%s".' % name)
# Make a list of bundles to build, and the filename to write to.
if bundle_names:
# TODO: It's not ok to use an internal property here.
bundles = [(n,b) for n, b in self.environment._named_bundles.items()
if n in bundle_names]
else:
# Includes unnamed bundles as well.
bundles = [(None, b) for b in self.environment]
# Determine common prefix for use with ``directory`` option.
if directory:
prefix = os.path.commonprefix(
[os.path.normpath(b.resolve_output())
for _, b in bundles if b.output])
# dirname() gives the right value for a single file.
prefix = os.path.dirname(prefix)
to_build = []
for name, bundle in bundles:
# TODO: We really should support this. This error here
# is just in place of a less understandable error that would
# otherwise occur.
if bundle.is_container and directory:
raise CommandError(
'A custom output directory cannot currently be '
'used with container bundles.')
# Determine which filename to use, if not the default.
overwrite_filename = None
if output:
overwrite_filename = output[name]
elif directory:
offset = os.path.normpath(
bundle.resolve_output())[len(prefix)+1:]
overwrite_filename = os.path.join(directory, offset)
to_build.append((bundle, overwrite_filename, name,))
# Build.
built = []
for bundle, overwrite_filename, name in to_build:
if name:
                # A name is not necessarily available if the bundle was
                # registered without one.
self.log.info("Building bundle: %s (to %s)" % (
name, overwrite_filename or bundle.output))
else:
self.log.info("Building bundle: %s" % bundle.output)
try:
if not overwrite_filename:
with bundle.bind(self.environment):
bundle.build(force=True, disable_cache=no_cache)
else:
# TODO: Rethink how we deal with container bundles here.
# As it currently stands, we write all child bundles
# to the target output, merged (which is also why we
# create and force writing to a StringIO instead of just
# using the ``Hunk`` objects that build() would return
# anyway.
output = StringIO()
with bundle.bind(self.environment):
bundle.build(force=True, output=output,
disable_cache=no_cache)
if directory:
# Only auto-create directories in this mode.
output_dir = os.path.dirname(overwrite_filename)
if not os.path.exists(output_dir):
os.makedirs(output_dir)
MemoryHunk(output.getvalue()).save(overwrite_filename)
built.append(bundle)
except BuildError as e:
self.log.error("Failed, error was: %s" % e)
if len(built):
self.event_handlers['post_build']()
if len(built) != len(to_build):
return 2
class WatchCommand(Command):
def __call__(self, loop=None):
"""Watch assets for changes.
``loop``
A callback, taking no arguments, to be called once every loop
iteration. Can be useful to integrate the command with other code.
            If not specified, the loop will call ``time.sleep()``.
"""
# TODO: This should probably also restart when the code changes.
mtimes = {}
try:
# Before starting to watch for changes, also recognize changes
# made while we did not run, and apply those immediately.
for bundle in self.environment:
print('Bringing up to date: %s' % bundle.output)
bundle.build(force=False)
self.log.info("Watching %d bundles for changes..." %
len(self.environment))
while True:
changed_bundles = self.check_for_changes(mtimes)
built = []
for bundle in changed_bundles:
print("Building bundle: %s ..." % bundle.output, end=' ')
sys.stdout.flush()
try:
bundle.build(force=True)
built.append(bundle)
except BuildError as e:
print("")
print("Failed: %s" % e)
else:
print("done")
if len(built):
self.event_handlers['post_build']()
do_end = loop() if loop else time.sleep(0.1)
if do_end:
break
except KeyboardInterrupt:
pass
def check_for_changes(self, mtimes):
# Do not update original mtimes dict right away, so that we detect
# all bundle changes if a file is in multiple bundles.
_new_mtimes = mtimes.copy()
changed_bundles = set()
# TODO: An optimization was lost here, skipping a bundle once
# a single file has been found to have changed. Bring back.
for filename, bundles_to_update in self.yield_files_to_watch():
stat = os.stat(filename)
mtime = stat.st_mtime
if sys.platform == "win32":
mtime -= stat.st_ctime
if mtimes.get(filename, mtime) != mtime:
if callable(bundles_to_update):
# Hook for when file has changed
try:
bundles_to_update = bundles_to_update()
except EnvironmentError:
# EnvironmentError is what the hooks is allowed to
# raise for a temporary problem, like an invalid config
import traceback
traceback.print_exc()
# Don't update anything, wait for another change
bundles_to_update = set()
if bundles_to_update is True:
# Indicates all bundles should be rebuilt for the change
bundles_to_update = set(self.environment)
changed_bundles |= bundles_to_update
_new_mtimes[filename] = mtime
_new_mtimes[filename] = mtime
mtimes.update(_new_mtimes)
return changed_bundles
def yield_files_to_watch(self):
for bundle in self.environment:
for filename in get_all_bundle_files(bundle):
yield filename, set([bundle])
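# --- Illustrative usage (a minimal sketch, kept as comments) ---
# Integrating the watcher with other code via the ``loop`` callback; a
# truthy return value ends the loop. ``cli`` stands for a
# CommandLineEnvironment instance and the helpers are hypothetical:
#
#   def loop():
#       do_other_work()           # hypothetical periodic task
#       time.sleep(0.2)
#       return stop_requested()   # truthy -> stop watching
#
#   cli.watch(loop=loop)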
class CleanCommand(Command):
def __call__(self):
"""Delete generated assets.
"""
self.log.info('Cleaning generated assets...')
for bundle in self.environment:
if not bundle.output:
continue
file_path = bundle.resolve_output(self.environment)
if os.path.exists(file_path):
os.unlink(file_path)
self.log.info("Deleted asset: %s" % bundle.output)
if isinstance(self.environment.cache, FilesystemCache):
shutil.rmtree(self.environment.cache.directory)
class CheckCommand(Command):
def __call__(self):
"""Check to see if assets need to be rebuilt.
A non-zero exit status will be returned if any of the input files are
newer (based on mtime) than their output file. This is intended to be
used in pre-commit hooks.
"""
needsupdate = False
updater = self.environment.updater
if not updater:
self.log.debug('no updater configured, using TimestampUpdater')
updater = TimestampUpdater()
for bundle in self.environment:
self.log.info('Checking asset: %s', bundle.output)
if updater.needs_rebuild(bundle, self.environment):
self.log.info(' needs update')
needsupdate = True
if needsupdate:
sys.exit(-1)
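# --- Illustrative usage (a minimal sketch, kept as comments) ---
# Per the CheckCommand docstring, a pre-commit hook can gate commits on
# stale assets. This assumes the ``webassets`` console script that run()
# below provides, and a hypothetical config file name:
#
#   #!/bin/sh
#   webassets -c assets.yaml check || {
#       echo "assets out of date; run: webassets -c assets.yaml build"
#       exit 1
#   }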
class CommandLineEnvironment(object):
"""Implements the core functionality for a command line frontend to
``webassets``, abstracted in a way to allow frameworks to integrate the
functionality into their own tools, for example, as a Django management
command, or a command for ``Flask-Script``.
"""
def __init__(self, env, log, post_build=None, commands=None):
self.environment = env
self.log = log
self.event_handlers = dict(post_build=lambda: True)
if callable(post_build):
self.event_handlers['post_build'] = post_build
# Instantiate each command
command_def = self.DefaultCommands.copy()
command_def.update(commands or {})
self.commands = {}
for name, construct in command_def.items():
if not construct:
continue
if not isinstance(construct, (list, tuple)):
construct = [construct, (), {}]
self.commands[name] = construct[0](
self, *construct[1], **construct[2])
def __getattr__(self, item):
# Allow method-like access to commands.
if item in self.commands:
return self.commands[item]
raise AttributeError(item)
def invoke(self, command, args):
"""Invoke ``command``, or throw a CommandError.
This is essentially a simple validation mechanism. Feel free
to call the individual command methods manually.
"""
try:
function = self.commands[command]
except KeyError as e:
raise CommandError('unknown command: %s' % e)
else:
return function(**args)
# List of commands installed
DefaultCommands = {
'build': BuildCommand,
'watch': WatchCommand,
'clean': CleanCommand,
'check': CheckCommand
}
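# --- Illustrative usage (a minimal sketch, kept as comments) ---
# Driving commands programmatically; ``my_env`` is a placeholder for a
# configured webassets Environment:
#
#   import logging
#   cli = CommandLineEnvironment(my_env, logging.getLogger('webassets.script'))
#   cli.build()                                      # build every bundle
#   cli.invoke('build', dict(bundles=['js_all']))    # build one bundle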
class GenericArgparseImplementation(object):
"""Generic command line utility to interact with an webassets environment.
This is effectively a reference implementation of a command line utility
based on the ``CommandLineEnvironment`` class. Implementers may find it
    feasible to simply base their own command line utility on this, rather than
implementing something custom on top of ``CommandLineEnvironment``. In
fact, if that is possible, you are encouraged to do so for greater
consistency across implementations.
"""
class WatchCommand(WatchCommand):
"""Extended watch command that also looks at the config file itself."""
def __init__(self, cmd_env, argparse_ns):
WatchCommand.__init__(self, cmd_env)
self.ns = argparse_ns
def yield_files_to_watch(self):
for result in WatchCommand.yield_files_to_watch(self):
yield result
# If the config changes, rebuild all bundles
if getattr(self.ns, 'config', None):
yield self.ns.config, self.reload_config
def reload_config(self):
try:
self.cmd.environment = YAMLLoader(self.ns.config).load_environment()
except Exception as e:
raise EnvironmentError(e)
return True
def __init__(self, env=None, log=None, prog=None, no_global_options=False):
try:
import argparse
except ImportError:
raise RuntimeError(
'The webassets command line now requires the '
'"argparse" library on Python versions <= 2.6.')
else:
self.argparse = argparse
self.env = env
self.log = log
self._construct_parser(prog, no_global_options)
def _construct_parser(self, prog=None, no_global_options=False):
self.parser = parser = self.argparse.ArgumentParser(
description="Manage assets.",
prog=prog)
if not no_global_options:
# Start with the base arguments that are valid for any command.
# XXX: Add those to the subparser?
parser.add_argument("-v", dest="verbose", action="store_true",
help="be verbose")
parser.add_argument("-q", action="store_true", dest="quiet",
help="be quiet")
if self.env is None:
loadenv = parser.add_mutually_exclusive_group()
loadenv.add_argument("-c", "--config", dest="config",
help="read environment from a YAML file")
loadenv.add_argument("-m", "--module", dest="module",
help="read environment from a Python module")
# Add subparsers.
subparsers = parser.add_subparsers(dest='command')
for command in CommandLineEnvironment.DefaultCommands.keys():
command_parser = subparsers.add_parser(command)
maker = getattr(self, 'make_%s_parser' % command, False)
if maker:
maker(command_parser)
@staticmethod
def make_build_parser(parser):
parser.add_argument(
'bundles', nargs='*', metavar='BUNDLE',
help='Optional bundle names to process. If none are '
'specified, then all known bundles will be built.')
parser.add_argument(
'--output', '-o', nargs=2, action='append',
metavar=('BUNDLE', 'FILE'),
help='Build the given bundle, and use a custom output '
'file. Can be given multiple times.')
parser.add_argument(
'--directory', '-d',
help='Write built files to this directory, using the '
'basename defined by the bundle. Will offset '
'the original bundle output paths on their common '
'prefix. Cannot be used with --output.')
parser.add_argument(
'--no-cache', action='store_true',
help='Do not use a cache that might be configured.')
parser.add_argument(
'--manifest',
help='Write a manifest to the given file. Also supports '
'the id:arg format, if you want to use a different '
'manifest implementation.')
parser.add_argument(
'--production', action='store_true',
help='Forcibly turn off debug mode for the build. This '
'only has an effect if debug is set to "merge".')
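# Example invocations for the options above (hypothetical bundle names;
# the ``webassets`` executable name and ``myapp.assets`` module are
# assumptions for illustration):
#
#     webassets -m myapp.assets build
#     webassets -m myapp.assets build css_all js_all
#     webassets -m myapp.assets build --manifest manifest.json --production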
def _setup_logging(self, ns):
if self.log:
log = self.log
else:
log = logging.getLogger('webassets.script')
if not log.handlers:
# In theory, this could run multiple times (e.g. tests)
handler = logging.StreamHandler()
log.addHandler(handler)
# Note that setting the level filter at the handler level is
# better than at the logger level, since this is "our" handler;
# we create it for the purpose of providing a default output.
# The logger itself may be modified by the user.
handler.setLevel(logging.DEBUG if ns.verbose else (
logging.WARNING if ns.quiet else logging.INFO))
return log
def _setup_assets_env(self, ns, log):
env = self.env
if env is None:
assert not (ns.module and ns.config)
if ns.module:
env = PythonLoader(ns.module).load_environment()
if ns.config:
env = YAMLLoader(ns.config).load_environment()
return env
def _setup_cmd_env(self, assets_env, log, ns):
return CommandLineEnvironment(assets_env, log, commands={
'watch': (GenericArgparseImplementation.WatchCommand, (ns,), {})
})
def _prepare_command_args(self, ns):
# Prepare a dict of arguments cleaned of values that are not
# command-specific, and which the command method would not accept.
args = vars(ns).copy()
for action in self.parser._actions:
dest = action.dest
if dest in args:
del args[dest]
return args
def run_with_ns(self, ns):
log = self._setup_logging(ns)
env = self._setup_assets_env(ns, log)
if env is None:
raise CommandError(
"Error: No environment given or found. Maybe use -m?")
cmd = self._setup_cmd_env(env, log, ns)
# Run the selected command
args = self._prepare_command_args(ns)
return cmd.invoke(ns.command, args)
def run_with_argv(self, argv):
try:
ns = self.parser.parse_args(argv)
except SystemExit as e:
# We do not want the main() function to exit the program.
# See run() instead.
return e.args[0]
return self.run_with_ns(ns)
def main(self, argv):
"""Parse the given command line.
The command line is expected to NOT include what would be sys.argv[0].
"""
try:
return self.run_with_argv(argv)
except CommandError as e:
print(e)
return 1
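# Return-value sketch: on success this typically returns a falsy value
# (``run`` below maps it to exit code 0); a printed CommandError yields 1,
# and argparse-level exits surface as their SystemExit code via
# run_with_argv.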
def main(argv, env=None):
"""Execute the generic version of the command line interface.
You only need to work directly with ``GenericArgparseImplementation`` if
you desire to customize things.
If no environment is given, additional arguments will be supported to allow
the user to specify/construct the environment on the command line.
"""
return GenericArgparseImplementation(env).main(argv)
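# A minimal embedding sketch: build all bundles of a YAML-defined
# environment without going through a console entry point
# ('assets.yml' is an assumed filename for illustration):
#
#     from webassets.loaders import YAMLLoader
#     env = YAMLLoader('assets.yml').load_environment()
#     main(['build'], env=env)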
def run():
"""Runs the command line interface via ``main``, then exits the process
with a proper return code."""
sys.exit(main(sys.argv[1:]) or 0)
if __name__ == '__main__':
run()
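# Packaging note (sketch): ``run`` is what a setuptools console_scripts
# entry point would reference, e.g.
#
#     entry_points={'console_scripts': ['webassets = webassets.script:run']}
#
# The entry-point name shown is an assumption for illustration.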
| gi0baro/weppy-assets | weppy_assets/webassets/script.py | Python | bsd-3-clause | 22,478 |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
from google.appengine.api.api_base_pb import *
import google.appengine.api.api_base_pb
from google.appengine.datastore.action_pb import *
import google.appengine.datastore.action_pb
from google.appengine.datastore.entity_pb import *
import google.appengine.datastore.entity_pb
from google.appengine.datastore.snapshot_pb import *
import google.appengine.datastore.snapshot_pb
class InternalHeader(ProtocolBuffer.ProtocolMessage):
has_requesting_app_id_ = 0
requesting_app_id_ = ""
has_requesting_project_id_ = 0
requesting_project_id_ = ""
has_requesting_version_id_ = 0
requesting_version_id_ = ""
has_api_settings_ = 0
api_settings_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def requesting_app_id(self): return self.requesting_app_id_
def set_requesting_app_id(self, x):
self.has_requesting_app_id_ = 1
self.requesting_app_id_ = x
def clear_requesting_app_id(self):
if self.has_requesting_app_id_:
self.has_requesting_app_id_ = 0
self.requesting_app_id_ = ""
def has_requesting_app_id(self): return self.has_requesting_app_id_
def requesting_project_id(self): return self.requesting_project_id_
def set_requesting_project_id(self, x):
self.has_requesting_project_id_ = 1
self.requesting_project_id_ = x
def clear_requesting_project_id(self):
if self.has_requesting_project_id_:
self.has_requesting_project_id_ = 0
self.requesting_project_id_ = ""
def has_requesting_project_id(self): return self.has_requesting_project_id_
def requesting_version_id(self): return self.requesting_version_id_
def set_requesting_version_id(self, x):
self.has_requesting_version_id_ = 1
self.requesting_version_id_ = x
def clear_requesting_version_id(self):
if self.has_requesting_version_id_:
self.has_requesting_version_id_ = 0
self.requesting_version_id_ = ""
def has_requesting_version_id(self): return self.has_requesting_version_id_
def api_settings(self): return self.api_settings_
def set_api_settings(self, x):
self.has_api_settings_ = 1
self.api_settings_ = x
def clear_api_settings(self):
if self.has_api_settings_:
self.has_api_settings_ = 0
self.api_settings_ = ""
def has_api_settings(self): return self.has_api_settings_
def MergeFrom(self, x):
assert x is not self
if (x.has_requesting_app_id()): self.set_requesting_app_id(x.requesting_app_id())
if (x.has_requesting_project_id()): self.set_requesting_project_id(x.requesting_project_id())
if (x.has_requesting_version_id()): self.set_requesting_version_id(x.requesting_version_id())
if (x.has_api_settings()): self.set_api_settings(x.api_settings())
def Equals(self, x):
if x is self: return 1
if self.has_requesting_app_id_ != x.has_requesting_app_id_: return 0
if self.has_requesting_app_id_ and self.requesting_app_id_ != x.requesting_app_id_: return 0
if self.has_requesting_project_id_ != x.has_requesting_project_id_: return 0
if self.has_requesting_project_id_ and self.requesting_project_id_ != x.requesting_project_id_: return 0
if self.has_requesting_version_id_ != x.has_requesting_version_id_: return 0
if self.has_requesting_version_id_ and self.requesting_version_id_ != x.requesting_version_id_: return 0
if self.has_api_settings_ != x.has_api_settings_: return 0
if self.has_api_settings_ and self.api_settings_ != x.api_settings_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_))
if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_))
if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_))
if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_requesting_app_id_): n += 1 + self.lengthString(len(self.requesting_app_id_))
if (self.has_requesting_project_id_): n += 1 + self.lengthString(len(self.requesting_project_id_))
if (self.has_requesting_version_id_): n += 1 + self.lengthString(len(self.requesting_version_id_))
if (self.has_api_settings_): n += 1 + self.lengthString(len(self.api_settings_))
return n
def Clear(self):
self.clear_requesting_app_id()
self.clear_requesting_project_id()
self.clear_requesting_version_id()
self.clear_api_settings()
def OutputUnchecked(self, out):
if (self.has_requesting_app_id_):
out.putVarInt32(18)
out.putPrefixedString(self.requesting_app_id_)
if (self.has_api_settings_):
out.putVarInt32(26)
out.putPrefixedString(self.api_settings_)
if (self.has_requesting_project_id_):
out.putVarInt32(34)
out.putPrefixedString(self.requesting_project_id_)
if (self.has_requesting_version_id_):
out.putVarInt32(42)
out.putPrefixedString(self.requesting_version_id_)
def OutputPartial(self, out):
if (self.has_requesting_app_id_):
out.putVarInt32(18)
out.putPrefixedString(self.requesting_app_id_)
if (self.has_api_settings_):
out.putVarInt32(26)
out.putPrefixedString(self.api_settings_)
if (self.has_requesting_project_id_):
out.putVarInt32(34)
out.putPrefixedString(self.requesting_project_id_)
if (self.has_requesting_version_id_):
out.putVarInt32(42)
out.putPrefixedString(self.requesting_version_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
self.set_requesting_app_id(d.getPrefixedString())
continue
if tt == 26:
self.set_api_settings(d.getPrefixedString())
continue
if tt == 34:
self.set_requesting_project_id(d.getPrefixedString())
continue
if tt == 42:
self.set_requesting_version_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_requesting_app_id_: res+=prefix+("requesting_app_id: %s\n" % self.DebugFormatString(self.requesting_app_id_))
if self.has_requesting_project_id_: res+=prefix+("requesting_project_id: %s\n" % self.DebugFormatString(self.requesting_project_id_))
if self.has_requesting_version_id_: res+=prefix+("requesting_version_id: %s\n" % self.DebugFormatString(self.requesting_version_id_))
if self.has_api_settings_: res+=prefix+("api_settings: %s\n" % self.DebugFormatString(self.api_settings_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
krequesting_app_id = 2
krequesting_project_id = 4
krequesting_version_id = 5
kapi_settings = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "requesting_app_id",
3: "api_settings",
4: "requesting_project_id",
5: "requesting_version_id",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.InternalHeader'
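# Usage sketch for the generated accessor pattern (applies to every
# message class below; a hand-written illustration, not generated code):
#
#     hdr = InternalHeader()
#     hdr.set_requesting_app_id('my-app')  # sets the value and has_ flag
#     assert hdr.has_requesting_app_id()
#     data = hdr.Encode()                  # wire-format bytes
#     copy = InternalHeader(data)          # __init__ calls MergeFromString
#     hdr.clear_requesting_app_id()        # resets to the default ""
#
# ``Encode`` is assumed to come from the ProtocolBuffer.ProtocolMessage
# base class; 'my-app' is a placeholder value.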
class Transaction(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_handle_ = 0
handle_ = 0
has_app_ = 0
app_ = ""
has_mark_changes_ = 0
mark_changes_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def handle(self): return self.handle_
def set_handle(self, x):
self.has_handle_ = 1
self.handle_ = x
def clear_handle(self):
if self.has_handle_:
self.has_handle_ = 0
self.handle_ = 0
def has_handle(self): return self.has_handle_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def mark_changes(self): return self.mark_changes_
def set_mark_changes(self, x):
self.has_mark_changes_ = 1
self.mark_changes_ = x
def clear_mark_changes(self):
if self.has_mark_changes_:
self.has_mark_changes_ = 0
self.mark_changes_ = 0
def has_mark_changes(self): return self.has_mark_changes_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_handle()): self.set_handle(x.handle())
if (x.has_app()): self.set_app(x.app())
if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_handle_ != x.has_handle_: return 0
if self.has_handle_ and self.handle_ != x.handle_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_mark_changes_ != x.has_mark_changes_: return 0
if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_handle_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: handle not set.')
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(len(self.app_))
if (self.has_mark_changes_): n += 2
return n + 10
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_handle_):
n += 9
if (self.has_app_):
n += 1
n += self.lengthString(len(self.app_))
if (self.has_mark_changes_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_handle()
self.clear_app()
self.clear_mark_changes()
def OutputUnchecked(self, out):
out.putVarInt32(9)
out.put64(self.handle_)
out.putVarInt32(18)
out.putPrefixedString(self.app_)
if (self.has_mark_changes_):
out.putVarInt32(24)
out.putBoolean(self.mark_changes_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_handle_):
out.putVarInt32(9)
out.put64(self.handle_)
if (self.has_app_):
out.putVarInt32(18)
out.putPrefixedString(self.app_)
if (self.has_mark_changes_):
out.putVarInt32(24)
out.putBoolean(self.mark_changes_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 9:
self.set_handle(d.get64())
continue
if tt == 18:
self.set_app(d.getPrefixedString())
continue
if tt == 24:
self.set_mark_changes(d.getBoolean())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_handle_: res+=prefix+("handle: %s\n" % self.DebugFormatFixed64(self.handle_))
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 4
khandle = 1
kapp = 2
kmark_changes = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "handle",
2: "app",
3: "mark_changes",
4: "header",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.DOUBLE,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Transaction'
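# Sketch: Transaction declares ``handle`` and ``app`` as required, so
# OutputUnchecked writes both unconditionally, while ByteSizePartial and
# OutputPartial tolerate missing required fields (useful while a message
# is still being assembled). Values below are placeholders:
#
#     txn = Transaction()
#     txn.set_handle(12345)
#     txn.set_app('my-app')
#     assert txn.IsInitialized()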
class Query_Filter(ProtocolBuffer.ProtocolMessage):
LESS_THAN = 1
LESS_THAN_OR_EQUAL = 2
GREATER_THAN = 3
GREATER_THAN_OR_EQUAL = 4
EQUAL = 5
IN = 6
EXISTS = 7
_Operator_NAMES = {
1: "LESS_THAN",
2: "LESS_THAN_OR_EQUAL",
3: "GREATER_THAN",
4: "GREATER_THAN_OR_EQUAL",
5: "EQUAL",
6: "IN",
7: "EXISTS",
}
def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
Operator_Name = classmethod(Operator_Name)
has_op_ = 0
op_ = 0
def __init__(self, contents=None):
self.property_ = []
if contents is not None: self.MergeFromString(contents)
def op(self): return self.op_
def set_op(self, x):
self.has_op_ = 1
self.op_ = x
def clear_op(self):
if self.has_op_:
self.has_op_ = 0
self.op_ = 0
def has_op(self): return self.has_op_
def property_size(self): return len(self.property_)
def property_list(self): return self.property_
def property(self, i):
return self.property_[i]
def mutable_property(self, i):
return self.property_[i]
def add_property(self):
x = Property()
self.property_.append(x)
return x
def clear_property(self):
self.property_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_op()): self.set_op(x.op())
for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
def Equals(self, x):
if x is self: return 1
if self.has_op_ != x.has_op_: return 0
if self.has_op_ and self.op_ != x.op_: return 0
if len(self.property_) != len(x.property_): return 0
for e1, e2 in zip(self.property_, x.property_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_op_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: op not set.')
for p in self.property_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.op_)
n += 1 * len(self.property_)
for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_op_):
n += 1
n += self.lengthVarInt64(self.op_)
n += 1 * len(self.property_)
for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_op()
self.clear_property()
def OutputUnchecked(self, out):
out.putVarInt32(48)
out.putVarInt32(self.op_)
for i in xrange(len(self.property_)):
out.putVarInt32(114)
out.putVarInt32(self.property_[i].ByteSize())
self.property_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_op_):
out.putVarInt32(48)
out.putVarInt32(self.op_)
for i in xrange(len(self.property_)):
out.putVarInt32(114)
out.putVarInt32(self.property_[i].ByteSizePartial())
self.property_[i].OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 36: break
if tt == 48:
self.set_op(d.getVarInt32())
continue
if tt == 114:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_property().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
cnt=0
for e in self.property_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("property%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
class Query_Order(ProtocolBuffer.ProtocolMessage):
ASCENDING = 1
DESCENDING = 2
_Direction_NAMES = {
1: "ASCENDING",
2: "DESCENDING",
}
def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
Direction_Name = classmethod(Direction_Name)
has_property_ = 0
property_ = ""
has_direction_ = 0
direction_ = 1
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def property(self): return self.property_
def set_property(self, x):
self.has_property_ = 1
self.property_ = x
def clear_property(self):
if self.has_property_:
self.has_property_ = 0
self.property_ = ""
def has_property(self): return self.has_property_
def direction(self): return self.direction_
def set_direction(self, x):
self.has_direction_ = 1
self.direction_ = x
def clear_direction(self):
if self.has_direction_:
self.has_direction_ = 0
self.direction_ = 1
def has_direction(self): return self.has_direction_
def MergeFrom(self, x):
assert x is not self
if (x.has_property()): self.set_property(x.property())
if (x.has_direction()): self.set_direction(x.direction())
def Equals(self, x):
if x is self: return 1
if self.has_property_ != x.has_property_: return 0
if self.has_property_ and self.property_ != x.property_: return 0
if self.has_direction_ != x.has_direction_: return 0
if self.has_direction_ and self.direction_ != x.direction_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_property_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: property not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.property_))
if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_property_):
n += 1
n += self.lengthString(len(self.property_))
if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
return n
def Clear(self):
self.clear_property()
self.clear_direction()
def OutputUnchecked(self, out):
out.putVarInt32(82)
out.putPrefixedString(self.property_)
if (self.has_direction_):
out.putVarInt32(88)
out.putVarInt32(self.direction_)
def OutputPartial(self, out):
if (self.has_property_):
out.putVarInt32(82)
out.putPrefixedString(self.property_)
if (self.has_direction_):
out.putVarInt32(88)
out.putVarInt32(self.direction_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 76: break
if tt == 82:
self.set_property(d.getPrefixedString())
continue
if tt == 88:
self.set_direction(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
return res
class Query(ProtocolBuffer.ProtocolMessage):
ORDER_FIRST = 1
ANCESTOR_FIRST = 2
FILTER_FIRST = 3
_Hint_NAMES = {
1: "ORDER_FIRST",
2: "ANCESTOR_FIRST",
3: "FILTER_FIRST",
}
def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
Hint_Name = classmethod(Hint_Name)
has_header_ = 0
header_ = None
has_app_ = 0
app_ = ""
has_name_space_ = 0
name_space_ = ""
has_kind_ = 0
kind_ = ""
has_ancestor_ = 0
ancestor_ = None
has_search_query_ = 0
search_query_ = ""
has_hint_ = 0
hint_ = 0
has_count_ = 0
count_ = 0
has_offset_ = 0
offset_ = 0
has_limit_ = 0
limit_ = 0
has_compiled_cursor_ = 0
compiled_cursor_ = None
has_end_compiled_cursor_ = 0
end_compiled_cursor_ = None
has_require_perfect_plan_ = 0
require_perfect_plan_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_transaction_ = 0
transaction_ = None
has_compile_ = 0
compile_ = 0
has_failover_ms_ = 0
failover_ms_ = 0
has_strong_ = 0
strong_ = 0
has_distinct_ = 0
distinct_ = 0
has_min_safe_time_seconds_ = 0
min_safe_time_seconds_ = 0
has_persist_offset_ = 0
persist_offset_ = 1
def __init__(self, contents=None):
self.filter_ = []
self.order_ = []
self.composite_index_ = []
self.property_name_ = []
self.group_by_property_name_ = []
self.safe_replica_name_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def name_space(self): return self.name_space_
def set_name_space(self, x):
self.has_name_space_ = 1
self.name_space_ = x
def clear_name_space(self):
if self.has_name_space_:
self.has_name_space_ = 0
self.name_space_ = ""
def has_name_space(self): return self.has_name_space_
def kind(self): return self.kind_
def set_kind(self, x):
self.has_kind_ = 1
self.kind_ = x
def clear_kind(self):
if self.has_kind_:
self.has_kind_ = 0
self.kind_ = ""
def has_kind(self): return self.has_kind_
def ancestor(self):
if self.ancestor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.ancestor_ is None: self.ancestor_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.ancestor_
def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
def clear_ancestor(self):
if self.has_ancestor_:
self.has_ancestor_ = 0;
if self.ancestor_ is not None: self.ancestor_.Clear()
def has_ancestor(self): return self.has_ancestor_
def filter_size(self): return len(self.filter_)
def filter_list(self): return self.filter_
def filter(self, i):
return self.filter_[i]
def mutable_filter(self, i):
return self.filter_[i]
def add_filter(self):
x = Query_Filter()
self.filter_.append(x)
return x
def clear_filter(self):
self.filter_ = []
def search_query(self): return self.search_query_
def set_search_query(self, x):
self.has_search_query_ = 1
self.search_query_ = x
def clear_search_query(self):
if self.has_search_query_:
self.has_search_query_ = 0
self.search_query_ = ""
def has_search_query(self): return self.has_search_query_
def order_size(self): return len(self.order_)
def order_list(self): return self.order_
def order(self, i):
return self.order_[i]
def mutable_order(self, i):
return self.order_[i]
def add_order(self):
x = Query_Order()
self.order_.append(x)
return x
def clear_order(self):
self.order_ = []
def hint(self): return self.hint_
def set_hint(self, x):
self.has_hint_ = 1
self.hint_ = x
def clear_hint(self):
if self.has_hint_:
self.has_hint_ = 0
self.hint_ = 0
def has_hint(self): return self.has_hint_
def count(self): return self.count_
def set_count(self, x):
self.has_count_ = 1
self.count_ = x
def clear_count(self):
if self.has_count_:
self.has_count_ = 0
self.count_ = 0
def has_count(self): return self.has_count_
def offset(self): return self.offset_
def set_offset(self, x):
self.has_offset_ = 1
self.offset_ = x
def clear_offset(self):
if self.has_offset_:
self.has_offset_ = 0
self.offset_ = 0
def has_offset(self): return self.has_offset_
def limit(self): return self.limit_
def set_limit(self, x):
self.has_limit_ = 1
self.limit_ = x
def clear_limit(self):
if self.has_limit_:
self.has_limit_ = 0
self.limit_ = 0
def has_limit(self): return self.has_limit_
def compiled_cursor(self):
if self.compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.compiled_cursor_
def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
def clear_compiled_cursor(self):
if self.has_compiled_cursor_:
self.has_compiled_cursor_ = 0;
if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
def has_compiled_cursor(self): return self.has_compiled_cursor_
def end_compiled_cursor(self):
if self.end_compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.end_compiled_cursor_ is None: self.end_compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.end_compiled_cursor_
def mutable_end_compiled_cursor(self): self.has_end_compiled_cursor_ = 1; return self.end_compiled_cursor()
def clear_end_compiled_cursor(self):
if self.has_end_compiled_cursor_:
self.has_end_compiled_cursor_ = 0;
if self.end_compiled_cursor_ is not None: self.end_compiled_cursor_.Clear()
def has_end_compiled_cursor(self): return self.has_end_compiled_cursor_
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def require_perfect_plan(self): return self.require_perfect_plan_
def set_require_perfect_plan(self, x):
self.has_require_perfect_plan_ = 1
self.require_perfect_plan_ = x
def clear_require_perfect_plan(self):
if self.has_require_perfect_plan_:
self.has_require_perfect_plan_ = 0
self.require_perfect_plan_ = 0
def has_require_perfect_plan(self): return self.has_require_perfect_plan_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def compile(self): return self.compile_
def set_compile(self, x):
self.has_compile_ = 1
self.compile_ = x
def clear_compile(self):
if self.has_compile_:
self.has_compile_ = 0
self.compile_ = 0
def has_compile(self): return self.has_compile_
def failover_ms(self): return self.failover_ms_
def set_failover_ms(self, x):
self.has_failover_ms_ = 1
self.failover_ms_ = x
def clear_failover_ms(self):
if self.has_failover_ms_:
self.has_failover_ms_ = 0
self.failover_ms_ = 0
def has_failover_ms(self): return self.has_failover_ms_
def strong(self): return self.strong_
def set_strong(self, x):
self.has_strong_ = 1
self.strong_ = x
def clear_strong(self):
if self.has_strong_:
self.has_strong_ = 0
self.strong_ = 0
def has_strong(self): return self.has_strong_
def property_name_size(self): return len(self.property_name_)
def property_name_list(self): return self.property_name_
def property_name(self, i):
return self.property_name_[i]
def set_property_name(self, i, x):
self.property_name_[i] = x
def add_property_name(self, x):
self.property_name_.append(x)
def clear_property_name(self):
self.property_name_ = []
def group_by_property_name_size(self): return len(self.group_by_property_name_)
def group_by_property_name_list(self): return self.group_by_property_name_
def group_by_property_name(self, i):
return self.group_by_property_name_[i]
def set_group_by_property_name(self, i, x):
self.group_by_property_name_[i] = x
def add_group_by_property_name(self, x):
self.group_by_property_name_.append(x)
def clear_group_by_property_name(self):
self.group_by_property_name_ = []
def distinct(self): return self.distinct_
def set_distinct(self, x):
self.has_distinct_ = 1
self.distinct_ = x
def clear_distinct(self):
if self.has_distinct_:
self.has_distinct_ = 0
self.distinct_ = 0
def has_distinct(self): return self.has_distinct_
def min_safe_time_seconds(self): return self.min_safe_time_seconds_
def set_min_safe_time_seconds(self, x):
self.has_min_safe_time_seconds_ = 1
self.min_safe_time_seconds_ = x
def clear_min_safe_time_seconds(self):
if self.has_min_safe_time_seconds_:
self.has_min_safe_time_seconds_ = 0
self.min_safe_time_seconds_ = 0
def has_min_safe_time_seconds(self): return self.has_min_safe_time_seconds_
def safe_replica_name_size(self): return len(self.safe_replica_name_)
def safe_replica_name_list(self): return self.safe_replica_name_
def safe_replica_name(self, i):
return self.safe_replica_name_[i]
def set_safe_replica_name(self, i, x):
self.safe_replica_name_[i] = x
def add_safe_replica_name(self, x):
self.safe_replica_name_.append(x)
def clear_safe_replica_name(self):
self.safe_replica_name_ = []
def persist_offset(self): return self.persist_offset_
def set_persist_offset(self, x):
self.has_persist_offset_ = 1
self.persist_offset_ = x
def clear_persist_offset(self):
if self.has_persist_offset_:
self.has_persist_offset_ = 0
self.persist_offset_ = 1
def has_persist_offset(self): return self.has_persist_offset_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_app()): self.set_app(x.app())
if (x.has_name_space()): self.set_name_space(x.name_space())
if (x.has_kind()): self.set_kind(x.kind())
if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
if (x.has_search_query()): self.set_search_query(x.search_query())
for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
if (x.has_hint()): self.set_hint(x.hint())
if (x.has_count()): self.set_count(x.count())
if (x.has_offset()): self.set_offset(x.offset())
if (x.has_limit()): self.set_limit(x.limit())
if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
if (x.has_end_compiled_cursor()): self.mutable_end_compiled_cursor().MergeFrom(x.end_compiled_cursor())
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())
if (x.has_keys_only()): self.set_keys_only(x.keys_only())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_compile()): self.set_compile(x.compile())
if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
if (x.has_strong()): self.set_strong(x.strong())
for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
for i in xrange(x.group_by_property_name_size()): self.add_group_by_property_name(x.group_by_property_name(i))
if (x.has_distinct()): self.set_distinct(x.distinct())
if (x.has_min_safe_time_seconds()): self.set_min_safe_time_seconds(x.min_safe_time_seconds())
for i in xrange(x.safe_replica_name_size()): self.add_safe_replica_name(x.safe_replica_name(i))
if (x.has_persist_offset()): self.set_persist_offset(x.persist_offset())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_name_space_ != x.has_name_space_: return 0
if self.has_name_space_ and self.name_space_ != x.name_space_: return 0
if self.has_kind_ != x.has_kind_: return 0
if self.has_kind_ and self.kind_ != x.kind_: return 0
if self.has_ancestor_ != x.has_ancestor_: return 0
if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
if len(self.filter_) != len(x.filter_): return 0
for e1, e2 in zip(self.filter_, x.filter_):
if e1 != e2: return 0
if self.has_search_query_ != x.has_search_query_: return 0
if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
if len(self.order_) != len(x.order_): return 0
for e1, e2 in zip(self.order_, x.order_):
if e1 != e2: return 0
if self.has_hint_ != x.has_hint_: return 0
if self.has_hint_ and self.hint_ != x.hint_: return 0
if self.has_count_ != x.has_count_: return 0
if self.has_count_ and self.count_ != x.count_: return 0
if self.has_offset_ != x.has_offset_: return 0
if self.has_offset_ and self.offset_ != x.offset_: return 0
if self.has_limit_ != x.has_limit_: return 0
if self.has_limit_ and self.limit_ != x.limit_: return 0
if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
if self.has_end_compiled_cursor_ != x.has_end_compiled_cursor_: return 0
if self.has_end_compiled_cursor_ and self.end_compiled_cursor_ != x.end_compiled_cursor_: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_compile_ != x.has_compile_: return 0
if self.has_compile_ and self.compile_ != x.compile_: return 0
if self.has_failover_ms_ != x.has_failover_ms_: return 0
if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
if self.has_strong_ != x.has_strong_: return 0
if self.has_strong_ and self.strong_ != x.strong_: return 0
if len(self.property_name_) != len(x.property_name_): return 0
for e1, e2 in zip(self.property_name_, x.property_name_):
if e1 != e2: return 0
if len(self.group_by_property_name_) != len(x.group_by_property_name_): return 0
for e1, e2 in zip(self.group_by_property_name_, x.group_by_property_name_):
if e1 != e2: return 0
if self.has_distinct_ != x.has_distinct_: return 0
if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
if self.has_min_safe_time_seconds_ != x.has_min_safe_time_seconds_: return 0
if self.has_min_safe_time_seconds_ and self.min_safe_time_seconds_ != x.min_safe_time_seconds_: return 0
if len(self.safe_replica_name_) != len(x.safe_replica_name_): return 0
for e1, e2 in zip(self.safe_replica_name_, x.safe_replica_name_):
if e1 != e2: return 0
if self.has_persist_offset_ != x.has_persist_offset_: return 0
if self.has_persist_offset_ and self.persist_offset_ != x.persist_offset_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
for p in self.filter_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.order_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
if (self.has_end_compiled_cursor_ and not self.end_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(len(self.app_))
if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
n += 2 * len(self.filter_)
for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
n += 2 * len(self.order_)
for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSize())
if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSize())
n += 2 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_require_perfect_plan_): n += 3
if (self.has_keys_only_): n += 3
if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSize())
if (self.has_compile_): n += 3
if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 3
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
n += 2 * len(self.group_by_property_name_)
for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
if (self.has_distinct_): n += 3
if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
n += 2 * len(self.safe_replica_name_)
for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
if (self.has_persist_offset_): n += 3
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 2 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_app_):
n += 1
n += self.lengthString(len(self.app_))
if (self.has_name_space_): n += 2 + self.lengthString(len(self.name_space_))
if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
n += 2 * len(self.filter_)
for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSizePartial()
if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
n += 2 * len(self.order_)
for i in xrange(len(self.order_)): n += self.order_[i].ByteSizePartial()
if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
if (self.has_count_): n += 2 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
if (self.has_compiled_cursor_): n += 2 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
if (self.has_end_compiled_cursor_): n += 2 + self.lengthString(self.end_compiled_cursor_.ByteSizePartial())
n += 2 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_require_perfect_plan_): n += 3
if (self.has_keys_only_): n += 3
if (self.has_transaction_): n += 2 + self.lengthString(self.transaction_.ByteSizePartial())
if (self.has_compile_): n += 3
if (self.has_failover_ms_): n += 2 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 3
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
n += 2 * len(self.group_by_property_name_)
for i in xrange(len(self.group_by_property_name_)): n += self.lengthString(len(self.group_by_property_name_[i]))
if (self.has_distinct_): n += 3
if (self.has_min_safe_time_seconds_): n += 2 + self.lengthVarInt64(self.min_safe_time_seconds_)
n += 2 * len(self.safe_replica_name_)
for i in xrange(len(self.safe_replica_name_)): n += self.lengthString(len(self.safe_replica_name_[i]))
if (self.has_persist_offset_): n += 3
return n
def Clear(self):
self.clear_header()
self.clear_app()
self.clear_name_space()
self.clear_kind()
self.clear_ancestor()
self.clear_filter()
self.clear_search_query()
self.clear_order()
self.clear_hint()
self.clear_count()
self.clear_offset()
self.clear_limit()
self.clear_compiled_cursor()
self.clear_end_compiled_cursor()
self.clear_composite_index()
self.clear_require_perfect_plan()
self.clear_keys_only()
self.clear_transaction()
self.clear_compile()
self.clear_failover_ms()
self.clear_strong()
self.clear_property_name()
self.clear_group_by_property_name()
self.clear_distinct()
self.clear_min_safe_time_seconds()
self.clear_safe_replica_name()
self.clear_persist_offset()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_kind_):
out.putVarInt32(26)
out.putPrefixedString(self.kind_)
for i in xrange(len(self.filter_)):
out.putVarInt32(35)
self.filter_[i].OutputUnchecked(out)
out.putVarInt32(36)
if (self.has_search_query_):
out.putVarInt32(66)
out.putPrefixedString(self.search_query_)
for i in xrange(len(self.order_)):
out.putVarInt32(75)
self.order_[i].OutputUnchecked(out)
out.putVarInt32(76)
if (self.has_offset_):
out.putVarInt32(96)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(128)
out.putVarInt32(self.limit_)
if (self.has_ancestor_):
out.putVarInt32(138)
out.putVarInt32(self.ancestor_.ByteSize())
self.ancestor_.OutputUnchecked(out)
if (self.has_hint_):
out.putVarInt32(144)
out.putVarInt32(self.hint_)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(154)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_require_perfect_plan_):
out.putVarInt32(160)
out.putBoolean(self.require_perfect_plan_)
if (self.has_keys_only_):
out.putVarInt32(168)
out.putBoolean(self.keys_only_)
if (self.has_transaction_):
out.putVarInt32(178)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
if (self.has_count_):
out.putVarInt32(184)
out.putVarInt32(self.count_)
if (self.has_distinct_):
out.putVarInt32(192)
out.putBoolean(self.distinct_)
if (self.has_compile_):
out.putVarInt32(200)
out.putBoolean(self.compile_)
if (self.has_failover_ms_):
out.putVarInt32(208)
out.putVarInt64(self.failover_ms_)
if (self.has_name_space_):
out.putVarInt32(234)
out.putPrefixedString(self.name_space_)
if (self.has_compiled_cursor_):
out.putVarInt32(242)
out.putVarInt32(self.compiled_cursor_.ByteSize())
self.compiled_cursor_.OutputUnchecked(out)
if (self.has_end_compiled_cursor_):
out.putVarInt32(250)
out.putVarInt32(self.end_compiled_cursor_.ByteSize())
self.end_compiled_cursor_.OutputUnchecked(out)
if (self.has_strong_):
out.putVarInt32(256)
out.putBoolean(self.strong_)
for i in xrange(len(self.property_name_)):
out.putVarInt32(266)
out.putPrefixedString(self.property_name_[i])
for i in xrange(len(self.group_by_property_name_)):
out.putVarInt32(274)
out.putPrefixedString(self.group_by_property_name_[i])
if (self.has_min_safe_time_seconds_):
out.putVarInt32(280)
out.putVarInt64(self.min_safe_time_seconds_)
for i in xrange(len(self.safe_replica_name_)):
out.putVarInt32(290)
out.putPrefixedString(self.safe_replica_name_[i])
if (self.has_persist_offset_):
out.putVarInt32(296)
out.putBoolean(self.persist_offset_)
if (self.has_header_):
out.putVarInt32(314)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_app_):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_kind_):
out.putVarInt32(26)
out.putPrefixedString(self.kind_)
for i in xrange(len(self.filter_)):
out.putVarInt32(35)
self.filter_[i].OutputPartial(out)
out.putVarInt32(36)
if (self.has_search_query_):
out.putVarInt32(66)
out.putPrefixedString(self.search_query_)
for i in xrange(len(self.order_)):
out.putVarInt32(75)
self.order_[i].OutputPartial(out)
out.putVarInt32(76)
if (self.has_offset_):
out.putVarInt32(96)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(128)
out.putVarInt32(self.limit_)
if (self.has_ancestor_):
out.putVarInt32(138)
out.putVarInt32(self.ancestor_.ByteSizePartial())
self.ancestor_.OutputPartial(out)
if (self.has_hint_):
out.putVarInt32(144)
out.putVarInt32(self.hint_)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(154)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
if (self.has_require_perfect_plan_):
out.putVarInt32(160)
out.putBoolean(self.require_perfect_plan_)
if (self.has_keys_only_):
out.putVarInt32(168)
out.putBoolean(self.keys_only_)
if (self.has_transaction_):
out.putVarInt32(178)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
if (self.has_count_):
out.putVarInt32(184)
out.putVarInt32(self.count_)
if (self.has_distinct_):
out.putVarInt32(192)
out.putBoolean(self.distinct_)
if (self.has_compile_):
out.putVarInt32(200)
out.putBoolean(self.compile_)
if (self.has_failover_ms_):
out.putVarInt32(208)
out.putVarInt64(self.failover_ms_)
if (self.has_name_space_):
out.putVarInt32(234)
out.putPrefixedString(self.name_space_)
if (self.has_compiled_cursor_):
out.putVarInt32(242)
out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
self.compiled_cursor_.OutputPartial(out)
if (self.has_end_compiled_cursor_):
out.putVarInt32(250)
out.putVarInt32(self.end_compiled_cursor_.ByteSizePartial())
self.end_compiled_cursor_.OutputPartial(out)
if (self.has_strong_):
out.putVarInt32(256)
out.putBoolean(self.strong_)
for i in xrange(len(self.property_name_)):
out.putVarInt32(266)
out.putPrefixedString(self.property_name_[i])
for i in xrange(len(self.group_by_property_name_)):
out.putVarInt32(274)
out.putPrefixedString(self.group_by_property_name_[i])
if (self.has_min_safe_time_seconds_):
out.putVarInt32(280)
out.putVarInt64(self.min_safe_time_seconds_)
for i in xrange(len(self.safe_replica_name_)):
out.putVarInt32(290)
out.putPrefixedString(self.safe_replica_name_[i])
if (self.has_persist_offset_):
out.putVarInt32(296)
out.putBoolean(self.persist_offset_)
if (self.has_header_):
out.putVarInt32(314)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app(d.getPrefixedString())
continue
if tt == 26:
self.set_kind(d.getPrefixedString())
continue
if tt == 35:
self.add_filter().TryMerge(d)
continue
if tt == 66:
self.set_search_query(d.getPrefixedString())
continue
if tt == 75:
self.add_order().TryMerge(d)
continue
if tt == 96:
self.set_offset(d.getVarInt32())
continue
if tt == 128:
self.set_limit(d.getVarInt32())
continue
if tt == 138:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_ancestor().TryMerge(tmp)
continue
if tt == 144:
self.set_hint(d.getVarInt32())
continue
if tt == 154:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if tt == 160:
self.set_require_perfect_plan(d.getBoolean())
continue
if tt == 168:
self.set_keys_only(d.getBoolean())
continue
if tt == 178:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 184:
self.set_count(d.getVarInt32())
continue
if tt == 192:
self.set_distinct(d.getBoolean())
continue
if tt == 200:
self.set_compile(d.getBoolean())
continue
if tt == 208:
self.set_failover_ms(d.getVarInt64())
continue
if tt == 234:
self.set_name_space(d.getPrefixedString())
continue
if tt == 242:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_compiled_cursor().TryMerge(tmp)
continue
if tt == 250:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_end_compiled_cursor().TryMerge(tmp)
continue
if tt == 256:
self.set_strong(d.getBoolean())
continue
if tt == 266:
self.add_property_name(d.getPrefixedString())
continue
if tt == 274:
self.add_group_by_property_name(d.getPrefixedString())
continue
if tt == 280:
self.set_min_safe_time_seconds(d.getVarInt64())
continue
if tt == 290:
self.add_safe_replica_name(d.getPrefixedString())
continue
if tt == 296:
self.set_persist_offset(d.getBoolean())
continue
if tt == 314:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
if self.has_name_space_: res+=prefix+("name_space: %s\n" % self.DebugFormatString(self.name_space_))
if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
if self.has_ancestor_:
res+=prefix+"ancestor <\n"
res+=self.ancestor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.filter_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Filter%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
cnt=0
for e in self.order_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Order%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
if self.has_compiled_cursor_:
res+=prefix+"compiled_cursor <\n"
res+=self.compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_end_compiled_cursor_:
res+=prefix+"end_compiled_cursor <\n"
res+=self.end_compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
cnt=0
for e in self.property_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
cnt=0
for e in self.group_by_property_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("group_by_property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
if self.has_min_safe_time_seconds_: res+=prefix+("min_safe_time_seconds: %s\n" % self.DebugFormatInt64(self.min_safe_time_seconds_))
cnt=0
for e in self.safe_replica_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("safe_replica_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_persist_offset_: res+=prefix+("persist_offset: %s\n" % self.DebugFormatBool(self.persist_offset_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 39
kapp = 1
kname_space = 29
kkind = 3
kancestor = 17
kFilterGroup = 4
kFilterop = 6
kFilterproperty = 14
ksearch_query = 8
kOrderGroup = 9
kOrderproperty = 10
kOrderdirection = 11
khint = 18
kcount = 23
koffset = 12
klimit = 16
kcompiled_cursor = 30
kend_compiled_cursor = 31
kcomposite_index = 19
krequire_perfect_plan = 20
kkeys_only = 21
ktransaction = 22
kcompile = 25
kfailover_ms = 26
kstrong = 32
kproperty_name = 33
kgroup_by_property_name = 34
kdistinct = 24
kmin_safe_time_seconds = 35
ksafe_replica_name = 36
kpersist_offset = 37
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app",
3: "kind",
4: "Filter",
6: "op",
8: "search_query",
9: "Order",
10: "property",
11: "direction",
12: "offset",
14: "property",
16: "limit",
17: "ancestor",
18: "hint",
19: "composite_index",
20: "require_perfect_plan",
21: "keys_only",
22: "transaction",
23: "count",
24: "distinct",
25: "compile",
26: "failover_ms",
29: "name_space",
30: "compiled_cursor",
31: "end_compiled_cursor",
32: "strong",
33: "property_name",
34: "group_by_property_name",
35: "min_safe_time_seconds",
36: "safe_replica_name",
37: "persist_offset",
39: "header",
}, 39)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STARTGROUP,
6: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STARTGROUP,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.NUMERIC,
14: ProtocolBuffer.Encoder.STRING,
16: ProtocolBuffer.Encoder.NUMERIC,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.NUMERIC,
19: ProtocolBuffer.Encoder.STRING,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.NUMERIC,
22: ProtocolBuffer.Encoder.STRING,
23: ProtocolBuffer.Encoder.NUMERIC,
24: ProtocolBuffer.Encoder.NUMERIC,
25: ProtocolBuffer.Encoder.NUMERIC,
26: ProtocolBuffer.Encoder.NUMERIC,
29: ProtocolBuffer.Encoder.STRING,
30: ProtocolBuffer.Encoder.STRING,
31: ProtocolBuffer.Encoder.STRING,
32: ProtocolBuffer.Encoder.NUMERIC,
33: ProtocolBuffer.Encoder.STRING,
34: ProtocolBuffer.Encoder.STRING,
35: ProtocolBuffer.Encoder.NUMERIC,
36: ProtocolBuffer.Encoder.STRING,
37: ProtocolBuffer.Encoder.NUMERIC,
39: ProtocolBuffer.Encoder.STRING,
}, 39, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Query'
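# The classes below describe compiled query plans.  CompiledQuery_PrimaryScan
# is the group holding the main index scan: the index to read, the start/end
# keys with their inclusivity flags, optional postfix values that further
# constrain the range, and an end_unapplied_log_timestamp_us bound (presumably
# a replication catch-up limit; interpretation from the field name).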
class CompiledQuery_PrimaryScan(ProtocolBuffer.ProtocolMessage):
has_index_name_ = 0
index_name_ = ""
has_start_key_ = 0
start_key_ = ""
has_start_inclusive_ = 0
start_inclusive_ = 0
has_end_key_ = 0
end_key_ = ""
has_end_inclusive_ = 0
end_inclusive_ = 0
has_end_unapplied_log_timestamp_us_ = 0
end_unapplied_log_timestamp_us_ = 0
def __init__(self, contents=None):
self.start_postfix_value_ = []
self.end_postfix_value_ = []
if contents is not None: self.MergeFromString(contents)
def index_name(self): return self.index_name_
def set_index_name(self, x):
self.has_index_name_ = 1
self.index_name_ = x
def clear_index_name(self):
if self.has_index_name_:
self.has_index_name_ = 0
self.index_name_ = ""
def has_index_name(self): return self.has_index_name_
def start_key(self): return self.start_key_
def set_start_key(self, x):
self.has_start_key_ = 1
self.start_key_ = x
def clear_start_key(self):
if self.has_start_key_:
self.has_start_key_ = 0
self.start_key_ = ""
def has_start_key(self): return self.has_start_key_
def start_inclusive(self): return self.start_inclusive_
def set_start_inclusive(self, x):
self.has_start_inclusive_ = 1
self.start_inclusive_ = x
def clear_start_inclusive(self):
if self.has_start_inclusive_:
self.has_start_inclusive_ = 0
self.start_inclusive_ = 0
def has_start_inclusive(self): return self.has_start_inclusive_
def end_key(self): return self.end_key_
def set_end_key(self, x):
self.has_end_key_ = 1
self.end_key_ = x
def clear_end_key(self):
if self.has_end_key_:
self.has_end_key_ = 0
self.end_key_ = ""
def has_end_key(self): return self.has_end_key_
def end_inclusive(self): return self.end_inclusive_
def set_end_inclusive(self, x):
self.has_end_inclusive_ = 1
self.end_inclusive_ = x
def clear_end_inclusive(self):
if self.has_end_inclusive_:
self.has_end_inclusive_ = 0
self.end_inclusive_ = 0
def has_end_inclusive(self): return self.has_end_inclusive_
def start_postfix_value_size(self): return len(self.start_postfix_value_)
def start_postfix_value_list(self): return self.start_postfix_value_
def start_postfix_value(self, i):
return self.start_postfix_value_[i]
def set_start_postfix_value(self, i, x):
self.start_postfix_value_[i] = x
def add_start_postfix_value(self, x):
self.start_postfix_value_.append(x)
def clear_start_postfix_value(self):
self.start_postfix_value_ = []
def end_postfix_value_size(self): return len(self.end_postfix_value_)
def end_postfix_value_list(self): return self.end_postfix_value_
def end_postfix_value(self, i):
return self.end_postfix_value_[i]
def set_end_postfix_value(self, i, x):
self.end_postfix_value_[i] = x
def add_end_postfix_value(self, x):
self.end_postfix_value_.append(x)
def clear_end_postfix_value(self):
self.end_postfix_value_ = []
def end_unapplied_log_timestamp_us(self): return self.end_unapplied_log_timestamp_us_
def set_end_unapplied_log_timestamp_us(self, x):
self.has_end_unapplied_log_timestamp_us_ = 1
self.end_unapplied_log_timestamp_us_ = x
def clear_end_unapplied_log_timestamp_us(self):
if self.has_end_unapplied_log_timestamp_us_:
self.has_end_unapplied_log_timestamp_us_ = 0
self.end_unapplied_log_timestamp_us_ = 0
def has_end_unapplied_log_timestamp_us(self): return self.has_end_unapplied_log_timestamp_us_
def MergeFrom(self, x):
assert x is not self
if (x.has_index_name()): self.set_index_name(x.index_name())
if (x.has_start_key()): self.set_start_key(x.start_key())
if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
if (x.has_end_key()): self.set_end_key(x.end_key())
if (x.has_end_inclusive()): self.set_end_inclusive(x.end_inclusive())
for i in xrange(x.start_postfix_value_size()): self.add_start_postfix_value(x.start_postfix_value(i))
for i in xrange(x.end_postfix_value_size()): self.add_end_postfix_value(x.end_postfix_value(i))
if (x.has_end_unapplied_log_timestamp_us()): self.set_end_unapplied_log_timestamp_us(x.end_unapplied_log_timestamp_us())
def Equals(self, x):
if x is self: return 1
if self.has_index_name_ != x.has_index_name_: return 0
if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
if self.has_start_key_ != x.has_start_key_: return 0
if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
if self.has_end_key_ != x.has_end_key_: return 0
if self.has_end_key_ and self.end_key_ != x.end_key_: return 0
if self.has_end_inclusive_ != x.has_end_inclusive_: return 0
if self.has_end_inclusive_ and self.end_inclusive_ != x.end_inclusive_: return 0
if len(self.start_postfix_value_) != len(x.start_postfix_value_): return 0
for e1, e2 in zip(self.start_postfix_value_, x.start_postfix_value_):
if e1 != e2: return 0
if len(self.end_postfix_value_) != len(x.end_postfix_value_): return 0
for e1, e2 in zip(self.end_postfix_value_, x.end_postfix_value_):
if e1 != e2: return 0
if self.has_end_unapplied_log_timestamp_us_ != x.has_end_unapplied_log_timestamp_us_: return 0
if self.has_end_unapplied_log_timestamp_us_ and self.end_unapplied_log_timestamp_us_ != x.end_unapplied_log_timestamp_us_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
if (self.has_start_inclusive_): n += 2
if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
if (self.has_end_inclusive_): n += 2
n += 2 * len(self.start_postfix_value_)
for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
n += 2 * len(self.end_postfix_value_)
for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_index_name_): n += 1 + self.lengthString(len(self.index_name_))
if (self.has_start_key_): n += 1 + self.lengthString(len(self.start_key_))
if (self.has_start_inclusive_): n += 2
if (self.has_end_key_): n += 1 + self.lengthString(len(self.end_key_))
if (self.has_end_inclusive_): n += 2
n += 2 * len(self.start_postfix_value_)
for i in xrange(len(self.start_postfix_value_)): n += self.lengthString(len(self.start_postfix_value_[i]))
n += 2 * len(self.end_postfix_value_)
for i in xrange(len(self.end_postfix_value_)): n += self.lengthString(len(self.end_postfix_value_[i]))
if (self.has_end_unapplied_log_timestamp_us_): n += 2 + self.lengthVarInt64(self.end_unapplied_log_timestamp_us_)
return n
def Clear(self):
self.clear_index_name()
self.clear_start_key()
self.clear_start_inclusive()
self.clear_end_key()
self.clear_end_inclusive()
self.clear_start_postfix_value()
self.clear_end_postfix_value()
self.clear_end_unapplied_log_timestamp_us()
def OutputUnchecked(self, out):
if (self.has_index_name_):
out.putVarInt32(18)
out.putPrefixedString(self.index_name_)
if (self.has_start_key_):
out.putVarInt32(26)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(32)
out.putBoolean(self.start_inclusive_)
if (self.has_end_key_):
out.putVarInt32(42)
out.putPrefixedString(self.end_key_)
if (self.has_end_inclusive_):
out.putVarInt32(48)
out.putBoolean(self.end_inclusive_)
if (self.has_end_unapplied_log_timestamp_us_):
out.putVarInt32(152)
out.putVarInt64(self.end_unapplied_log_timestamp_us_)
for i in xrange(len(self.start_postfix_value_)):
out.putVarInt32(178)
out.putPrefixedString(self.start_postfix_value_[i])
for i in xrange(len(self.end_postfix_value_)):
out.putVarInt32(186)
out.putPrefixedString(self.end_postfix_value_[i])
def OutputPartial(self, out):
if (self.has_index_name_):
out.putVarInt32(18)
out.putPrefixedString(self.index_name_)
if (self.has_start_key_):
out.putVarInt32(26)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(32)
out.putBoolean(self.start_inclusive_)
if (self.has_end_key_):
out.putVarInt32(42)
out.putPrefixedString(self.end_key_)
if (self.has_end_inclusive_):
out.putVarInt32(48)
out.putBoolean(self.end_inclusive_)
if (self.has_end_unapplied_log_timestamp_us_):
out.putVarInt32(152)
out.putVarInt64(self.end_unapplied_log_timestamp_us_)
for i in xrange(len(self.start_postfix_value_)):
out.putVarInt32(178)
out.putPrefixedString(self.start_postfix_value_[i])
for i in xrange(len(self.end_postfix_value_)):
out.putVarInt32(186)
out.putPrefixedString(self.end_postfix_value_[i])
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
self.set_index_name(d.getPrefixedString())
continue
if tt == 26:
self.set_start_key(d.getPrefixedString())
continue
if tt == 32:
self.set_start_inclusive(d.getBoolean())
continue
if tt == 42:
self.set_end_key(d.getPrefixedString())
continue
if tt == 48:
self.set_end_inclusive(d.getBoolean())
continue
if tt == 152:
self.set_end_unapplied_log_timestamp_us(d.getVarInt64())
continue
if tt == 178:
self.add_start_postfix_value(d.getPrefixedString())
continue
if tt == 186:
self.add_end_postfix_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
if self.has_end_key_: res+=prefix+("end_key: %s\n" % self.DebugFormatString(self.end_key_))
if self.has_end_inclusive_: res+=prefix+("end_inclusive: %s\n" % self.DebugFormatBool(self.end_inclusive_))
cnt=0
for e in self.start_postfix_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("start_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
cnt=0
for e in self.end_postfix_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("end_postfix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_end_unapplied_log_timestamp_us_: res+=prefix+("end_unapplied_log_timestamp_us: %s\n" % self.DebugFormatInt64(self.end_unapplied_log_timestamp_us_))
return res
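# CompiledQuery_MergeJoinScan describes one additional index scan that is
# merge-joined with the primary scan: index_name is required, prefix_value
# lists the index-row prefix every result must share, and value_prefix
# appears to mark the last prefix value as a prefix match (interpretation
# from the field name).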
class CompiledQuery_MergeJoinScan(ProtocolBuffer.ProtocolMessage):
has_index_name_ = 0
index_name_ = ""
has_value_prefix_ = 0
value_prefix_ = 0
def __init__(self, contents=None):
self.prefix_value_ = []
if contents is not None: self.MergeFromString(contents)
def index_name(self): return self.index_name_
def set_index_name(self, x):
self.has_index_name_ = 1
self.index_name_ = x
def clear_index_name(self):
if self.has_index_name_:
self.has_index_name_ = 0
self.index_name_ = ""
def has_index_name(self): return self.has_index_name_
def prefix_value_size(self): return len(self.prefix_value_)
def prefix_value_list(self): return self.prefix_value_
def prefix_value(self, i):
return self.prefix_value_[i]
def set_prefix_value(self, i, x):
self.prefix_value_[i] = x
def add_prefix_value(self, x):
self.prefix_value_.append(x)
def clear_prefix_value(self):
self.prefix_value_ = []
def value_prefix(self): return self.value_prefix_
def set_value_prefix(self, x):
self.has_value_prefix_ = 1
self.value_prefix_ = x
def clear_value_prefix(self):
if self.has_value_prefix_:
self.has_value_prefix_ = 0
self.value_prefix_ = 0
def has_value_prefix(self): return self.has_value_prefix_
def MergeFrom(self, x):
assert x is not self
if (x.has_index_name()): self.set_index_name(x.index_name())
for i in xrange(x.prefix_value_size()): self.add_prefix_value(x.prefix_value(i))
if (x.has_value_prefix()): self.set_value_prefix(x.value_prefix())
def Equals(self, x):
if x is self: return 1
if self.has_index_name_ != x.has_index_name_: return 0
if self.has_index_name_ and self.index_name_ != x.index_name_: return 0
if len(self.prefix_value_) != len(x.prefix_value_): return 0
for e1, e2 in zip(self.prefix_value_, x.prefix_value_):
if e1 != e2: return 0
if self.has_value_prefix_ != x.has_value_prefix_: return 0
if self.has_value_prefix_ and self.value_prefix_ != x.value_prefix_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_index_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: index_name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.index_name_))
n += 1 * len(self.prefix_value_)
for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
if (self.has_value_prefix_): n += 3
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_index_name_):
n += 1
n += self.lengthString(len(self.index_name_))
n += 1 * len(self.prefix_value_)
for i in xrange(len(self.prefix_value_)): n += self.lengthString(len(self.prefix_value_[i]))
if (self.has_value_prefix_): n += 3
return n
def Clear(self):
self.clear_index_name()
self.clear_prefix_value()
self.clear_value_prefix()
def OutputUnchecked(self, out):
out.putVarInt32(66)
out.putPrefixedString(self.index_name_)
for i in xrange(len(self.prefix_value_)):
out.putVarInt32(74)
out.putPrefixedString(self.prefix_value_[i])
if (self.has_value_prefix_):
out.putVarInt32(160)
out.putBoolean(self.value_prefix_)
def OutputPartial(self, out):
if (self.has_index_name_):
out.putVarInt32(66)
out.putPrefixedString(self.index_name_)
for i in xrange(len(self.prefix_value_)):
out.putVarInt32(74)
out.putPrefixedString(self.prefix_value_[i])
if (self.has_value_prefix_):
out.putVarInt32(160)
out.putBoolean(self.value_prefix_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 60: break
if tt == 66:
self.set_index_name(d.getPrefixedString())
continue
if tt == 74:
self.add_prefix_value(d.getPrefixedString())
continue
if tt == 160:
self.set_value_prefix(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_index_name_: res+=prefix+("index_name: %s\n" % self.DebugFormatString(self.index_name_))
cnt=0
for e in self.prefix_value_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("prefix_value%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_value_prefix_: res+=prefix+("value_prefix: %s\n" % self.DebugFormatBool(self.value_prefix_))
return res
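# CompiledQuery_EntityFilter carries post-scan filters applied to fetched
# entities: a distinct flag, an optional kind restriction, and an optional
# ancestor Reference (lazily allocated under lazy_init_lock_).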
class CompiledQuery_EntityFilter(ProtocolBuffer.ProtocolMessage):
has_distinct_ = 0
distinct_ = 0
has_kind_ = 0
kind_ = ""
has_ancestor_ = 0
ancestor_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def distinct(self): return self.distinct_
def set_distinct(self, x):
self.has_distinct_ = 1
self.distinct_ = x
def clear_distinct(self):
if self.has_distinct_:
self.has_distinct_ = 0
self.distinct_ = 0
def has_distinct(self): return self.has_distinct_
def kind(self): return self.kind_
def set_kind(self, x):
self.has_kind_ = 1
self.kind_ = x
def clear_kind(self):
if self.has_kind_:
self.has_kind_ = 0
self.kind_ = ""
def has_kind(self): return self.has_kind_
def ancestor(self):
if self.ancestor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.ancestor_ is None: self.ancestor_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.ancestor_
def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()
def clear_ancestor(self):
if self.has_ancestor_:
self.has_ancestor_ = 0;
if self.ancestor_ is not None: self.ancestor_.Clear()
def has_ancestor(self): return self.has_ancestor_
def MergeFrom(self, x):
assert x is not self
if (x.has_distinct()): self.set_distinct(x.distinct())
if (x.has_kind()): self.set_kind(x.kind())
if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
def Equals(self, x):
if x is self: return 1
if self.has_distinct_ != x.has_distinct_: return 0
if self.has_distinct_ and self.distinct_ != x.distinct_: return 0
if self.has_kind_ != x.has_kind_: return 0
if self.has_kind_ and self.kind_ != x.kind_: return 0
if self.has_ancestor_ != x.has_ancestor_: return 0
if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_distinct_): n += 2
if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_distinct_): n += 2
if (self.has_kind_): n += 2 + self.lengthString(len(self.kind_))
if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSizePartial())
return n
def Clear(self):
self.clear_distinct()
self.clear_kind()
self.clear_ancestor()
def OutputUnchecked(self, out):
if (self.has_distinct_):
out.putVarInt32(112)
out.putBoolean(self.distinct_)
if (self.has_kind_):
out.putVarInt32(138)
out.putPrefixedString(self.kind_)
if (self.has_ancestor_):
out.putVarInt32(146)
out.putVarInt32(self.ancestor_.ByteSize())
self.ancestor_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_distinct_):
out.putVarInt32(112)
out.putBoolean(self.distinct_)
if (self.has_kind_):
out.putVarInt32(138)
out.putPrefixedString(self.kind_)
if (self.has_ancestor_):
out.putVarInt32(146)
out.putVarInt32(self.ancestor_.ByteSizePartial())
self.ancestor_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 108: break
if tt == 112:
self.set_distinct(d.getBoolean())
continue
if tt == 138:
self.set_kind(d.getPrefixedString())
continue
if tt == 146:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_ancestor().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_distinct_: res+=prefix+("distinct: %s\n" % self.DebugFormatBool(self.distinct_))
if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
if self.has_ancestor_:
res+=prefix+"ancestor <\n"
res+=self.ancestor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
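# CompiledQuery is the full compiled plan: a required PrimaryScan group and a
# required keys_only flag, plus any number of MergeJoinScan groups, an
# optional index definition, offset/limit, projected property names, a
# distinct-infix size, an EntityFilter group and a plan label.
#
# Minimal usage sketch using only the generated accessors below ('idx' is a
# hypothetical index name):
#
#   cq = CompiledQuery()
#   cq.mutable_primaryscan().set_index_name('idx')
#   cq.set_keys_only(0)
#   assert cq.IsInitialized()   # both required fields are now set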
class CompiledQuery(ProtocolBuffer.ProtocolMessage):
has_primaryscan_ = 0
has_index_def_ = 0
index_def_ = None
has_offset_ = 0
offset_ = 0
has_limit_ = 0
limit_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_distinct_infix_size_ = 0
distinct_infix_size_ = 0
has_entityfilter_ = 0
entityfilter_ = None
has_plan_label_ = 0
plan_label_ = ""
def __init__(self, contents=None):
self.primaryscan_ = CompiledQuery_PrimaryScan()
self.mergejoinscan_ = []
self.property_name_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def primaryscan(self): return self.primaryscan_
def mutable_primaryscan(self): self.has_primaryscan_ = 1; return self.primaryscan_
def clear_primaryscan(self):self.has_primaryscan_ = 0; self.primaryscan_.Clear()
def has_primaryscan(self): return self.has_primaryscan_
def mergejoinscan_size(self): return len(self.mergejoinscan_)
def mergejoinscan_list(self): return self.mergejoinscan_
def mergejoinscan(self, i):
return self.mergejoinscan_[i]
def mutable_mergejoinscan(self, i):
return self.mergejoinscan_[i]
def add_mergejoinscan(self):
x = CompiledQuery_MergeJoinScan()
self.mergejoinscan_.append(x)
return x
def clear_mergejoinscan(self):
self.mergejoinscan_ = []
def index_def(self):
if self.index_def_ is None:
self.lazy_init_lock_.acquire()
try:
if self.index_def_ is None: self.index_def_ = Index()
finally:
self.lazy_init_lock_.release()
return self.index_def_
def mutable_index_def(self): self.has_index_def_ = 1; return self.index_def()
def clear_index_def(self):
if self.has_index_def_:
self.has_index_def_ = 0;
if self.index_def_ is not None: self.index_def_.Clear()
def has_index_def(self): return self.has_index_def_
def offset(self): return self.offset_
def set_offset(self, x):
self.has_offset_ = 1
self.offset_ = x
def clear_offset(self):
if self.has_offset_:
self.has_offset_ = 0
self.offset_ = 0
def has_offset(self): return self.has_offset_
def limit(self): return self.limit_
def set_limit(self, x):
self.has_limit_ = 1
self.limit_ = x
def clear_limit(self):
if self.has_limit_:
self.has_limit_ = 0
self.limit_ = 0
def has_limit(self): return self.has_limit_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def property_name_size(self): return len(self.property_name_)
def property_name_list(self): return self.property_name_
def property_name(self, i):
return self.property_name_[i]
def set_property_name(self, i, x):
self.property_name_[i] = x
def add_property_name(self, x):
self.property_name_.append(x)
def clear_property_name(self):
self.property_name_ = []
def distinct_infix_size(self): return self.distinct_infix_size_
def set_distinct_infix_size(self, x):
self.has_distinct_infix_size_ = 1
self.distinct_infix_size_ = x
def clear_distinct_infix_size(self):
if self.has_distinct_infix_size_:
self.has_distinct_infix_size_ = 0
self.distinct_infix_size_ = 0
def has_distinct_infix_size(self): return self.has_distinct_infix_size_
def entityfilter(self):
if self.entityfilter_ is None:
self.lazy_init_lock_.acquire()
try:
if self.entityfilter_ is None: self.entityfilter_ = CompiledQuery_EntityFilter()
finally:
self.lazy_init_lock_.release()
return self.entityfilter_
def mutable_entityfilter(self): self.has_entityfilter_ = 1; return self.entityfilter()
def clear_entityfilter(self):
if self.has_entityfilter_:
self.has_entityfilter_ = 0;
if self.entityfilter_ is not None: self.entityfilter_.Clear()
def has_entityfilter(self): return self.has_entityfilter_
def plan_label(self): return self.plan_label_
def set_plan_label(self, x):
self.has_plan_label_ = 1
self.plan_label_ = x
def clear_plan_label(self):
if self.has_plan_label_:
self.has_plan_label_ = 0
self.plan_label_ = ""
def has_plan_label(self): return self.has_plan_label_
def MergeFrom(self, x):
assert x is not self
if (x.has_primaryscan()): self.mutable_primaryscan().MergeFrom(x.primaryscan())
for i in xrange(x.mergejoinscan_size()): self.add_mergejoinscan().CopyFrom(x.mergejoinscan(i))
if (x.has_index_def()): self.mutable_index_def().MergeFrom(x.index_def())
if (x.has_offset()): self.set_offset(x.offset())
if (x.has_limit()): self.set_limit(x.limit())
if (x.has_keys_only()): self.set_keys_only(x.keys_only())
for i in xrange(x.property_name_size()): self.add_property_name(x.property_name(i))
if (x.has_distinct_infix_size()): self.set_distinct_infix_size(x.distinct_infix_size())
if (x.has_entityfilter()): self.mutable_entityfilter().MergeFrom(x.entityfilter())
if (x.has_plan_label()): self.set_plan_label(x.plan_label())
def Equals(self, x):
if x is self: return 1
if self.has_primaryscan_ != x.has_primaryscan_: return 0
if self.has_primaryscan_ and self.primaryscan_ != x.primaryscan_: return 0
if len(self.mergejoinscan_) != len(x.mergejoinscan_): return 0
for e1, e2 in zip(self.mergejoinscan_, x.mergejoinscan_):
if e1 != e2: return 0
if self.has_index_def_ != x.has_index_def_: return 0
if self.has_index_def_ and self.index_def_ != x.index_def_: return 0
if self.has_offset_ != x.has_offset_: return 0
if self.has_offset_ and self.offset_ != x.offset_: return 0
if self.has_limit_ != x.has_limit_: return 0
if self.has_limit_ and self.limit_ != x.limit_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if len(self.property_name_) != len(x.property_name_): return 0
for e1, e2 in zip(self.property_name_, x.property_name_):
if e1 != e2: return 0
if self.has_distinct_infix_size_ != x.has_distinct_infix_size_: return 0
if self.has_distinct_infix_size_ and self.distinct_infix_size_ != x.distinct_infix_size_: return 0
if self.has_entityfilter_ != x.has_entityfilter_: return 0
if self.has_entityfilter_ and self.entityfilter_ != x.entityfilter_: return 0
if self.has_plan_label_ != x.has_plan_label_: return 0
if self.has_plan_label_ and self.plan_label_ != x.plan_label_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_primaryscan_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: primaryscan not set.')
elif not self.primaryscan_.IsInitialized(debug_strs): initialized = 0
for p in self.mergejoinscan_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_index_def_ and not self.index_def_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_keys_only_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: keys_only not set.')
if (self.has_entityfilter_ and not self.entityfilter_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.primaryscan_.ByteSize()
n += 2 * len(self.mergejoinscan_)
for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSize()
if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSize())
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSize()
if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_))
return n + 4
def ByteSizePartial(self):
n = 0
if (self.has_primaryscan_):
n += 2
n += self.primaryscan_.ByteSizePartial()
n += 2 * len(self.mergejoinscan_)
for i in xrange(len(self.mergejoinscan_)): n += self.mergejoinscan_[i].ByteSizePartial()
if (self.has_index_def_): n += 2 + self.lengthString(self.index_def_.ByteSizePartial())
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_limit_): n += 1 + self.lengthVarInt64(self.limit_)
if (self.has_keys_only_):
n += 2
n += 2 * len(self.property_name_)
for i in xrange(len(self.property_name_)): n += self.lengthString(len(self.property_name_[i]))
if (self.has_distinct_infix_size_): n += 2 + self.lengthVarInt64(self.distinct_infix_size_)
if (self.has_entityfilter_): n += 2 + self.entityfilter_.ByteSizePartial()
if (self.has_plan_label_): n += 2 + self.lengthString(len(self.plan_label_))
return n
def Clear(self):
self.clear_primaryscan()
self.clear_mergejoinscan()
self.clear_index_def()
self.clear_offset()
self.clear_limit()
self.clear_keys_only()
self.clear_property_name()
self.clear_distinct_infix_size()
self.clear_entityfilter()
self.clear_plan_label()
def OutputUnchecked(self, out):
out.putVarInt32(11)
self.primaryscan_.OutputUnchecked(out)
out.putVarInt32(12)
for i in xrange(len(self.mergejoinscan_)):
out.putVarInt32(59)
self.mergejoinscan_[i].OutputUnchecked(out)
out.putVarInt32(60)
if (self.has_offset_):
out.putVarInt32(80)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(88)
out.putVarInt32(self.limit_)
out.putVarInt32(96)
out.putBoolean(self.keys_only_)
if (self.has_entityfilter_):
out.putVarInt32(107)
self.entityfilter_.OutputUnchecked(out)
out.putVarInt32(108)
if (self.has_index_def_):
out.putVarInt32(170)
out.putVarInt32(self.index_def_.ByteSize())
self.index_def_.OutputUnchecked(out)
for i in xrange(len(self.property_name_)):
out.putVarInt32(194)
out.putPrefixedString(self.property_name_[i])
if (self.has_distinct_infix_size_):
out.putVarInt32(200)
out.putVarInt32(self.distinct_infix_size_)
if (self.has_plan_label_):
out.putVarInt32(210)
out.putPrefixedString(self.plan_label_)
def OutputPartial(self, out):
if (self.has_primaryscan_):
out.putVarInt32(11)
self.primaryscan_.OutputPartial(out)
out.putVarInt32(12)
for i in xrange(len(self.mergejoinscan_)):
out.putVarInt32(59)
self.mergejoinscan_[i].OutputPartial(out)
out.putVarInt32(60)
if (self.has_offset_):
out.putVarInt32(80)
out.putVarInt32(self.offset_)
if (self.has_limit_):
out.putVarInt32(88)
out.putVarInt32(self.limit_)
if (self.has_keys_only_):
out.putVarInt32(96)
out.putBoolean(self.keys_only_)
if (self.has_entityfilter_):
out.putVarInt32(107)
self.entityfilter_.OutputPartial(out)
out.putVarInt32(108)
if (self.has_index_def_):
out.putVarInt32(170)
out.putVarInt32(self.index_def_.ByteSizePartial())
self.index_def_.OutputPartial(out)
for i in xrange(len(self.property_name_)):
out.putVarInt32(194)
out.putPrefixedString(self.property_name_[i])
if (self.has_distinct_infix_size_):
out.putVarInt32(200)
out.putVarInt32(self.distinct_infix_size_)
if (self.has_plan_label_):
out.putVarInt32(210)
out.putPrefixedString(self.plan_label_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.mutable_primaryscan().TryMerge(d)
continue
if tt == 59:
self.add_mergejoinscan().TryMerge(d)
continue
if tt == 80:
self.set_offset(d.getVarInt32())
continue
if tt == 88:
self.set_limit(d.getVarInt32())
continue
if tt == 96:
self.set_keys_only(d.getBoolean())
continue
if tt == 107:
self.mutable_entityfilter().TryMerge(d)
continue
if tt == 170:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_index_def().TryMerge(tmp)
continue
if tt == 194:
self.add_property_name(d.getPrefixedString())
continue
if tt == 200:
self.set_distinct_infix_size(d.getVarInt32())
continue
if tt == 210:
self.set_plan_label(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_primaryscan_:
res+=prefix+"PrimaryScan {\n"
res+=self.primaryscan_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt=0
for e in self.mergejoinscan_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("MergeJoinScan%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_index_def_:
res+=prefix+"index_def <\n"
res+=self.index_def_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
cnt=0
for e in self.property_name_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("property_name%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_distinct_infix_size_: res+=prefix+("distinct_infix_size: %s\n" % self.DebugFormatInt32(self.distinct_infix_size_))
if self.has_entityfilter_:
res+=prefix+"EntityFilter {\n"
res+=self.entityfilter_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_plan_label_: res+=prefix+("plan_label: %s\n" % self.DebugFormatString(self.plan_label_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kPrimaryScanGroup = 1
kPrimaryScanindex_name = 2
kPrimaryScanstart_key = 3
kPrimaryScanstart_inclusive = 4
kPrimaryScanend_key = 5
kPrimaryScanend_inclusive = 6
kPrimaryScanstart_postfix_value = 22
kPrimaryScanend_postfix_value = 23
kPrimaryScanend_unapplied_log_timestamp_us = 19
kMergeJoinScanGroup = 7
kMergeJoinScanindex_name = 8
kMergeJoinScanprefix_value = 9
kMergeJoinScanvalue_prefix = 20
kindex_def = 21
koffset = 10
klimit = 11
kkeys_only = 12
kproperty_name = 24
kdistinct_infix_size = 25
kEntityFilterGroup = 13
kEntityFilterdistinct = 14
kEntityFilterkind = 17
kEntityFilterancestor = 18
kplan_label = 26
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "PrimaryScan",
2: "index_name",
3: "start_key",
4: "start_inclusive",
5: "end_key",
6: "end_inclusive",
7: "MergeJoinScan",
8: "index_name",
9: "prefix_value",
10: "offset",
11: "limit",
12: "keys_only",
13: "EntityFilter",
14: "distinct",
17: "kind",
18: "ancestor",
19: "end_unapplied_log_timestamp_us",
20: "value_prefix",
21: "index_def",
22: "start_postfix_value",
23: "end_postfix_value",
24: "property_name",
25: "distinct_infix_size",
26: "plan_label",
}, 26)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.STARTGROUP,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.NUMERIC,
13: ProtocolBuffer.Encoder.STARTGROUP,
14: ProtocolBuffer.Encoder.NUMERIC,
17: ProtocolBuffer.Encoder.STRING,
18: ProtocolBuffer.Encoder.STRING,
19: ProtocolBuffer.Encoder.NUMERIC,
20: ProtocolBuffer.Encoder.NUMERIC,
21: ProtocolBuffer.Encoder.STRING,
22: ProtocolBuffer.Encoder.STRING,
23: ProtocolBuffer.Encoder.STRING,
24: ProtocolBuffer.Encoder.STRING,
25: ProtocolBuffer.Encoder.NUMERIC,
26: ProtocolBuffer.Encoder.STRING,
}, 26, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledQuery'
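# CompiledCursor_PositionIndexValue pairs an optional property name with a
# required PropertyValue; repeated instances spell out the index row at which
# a cursor is positioned.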
class CompiledCursor_PositionIndexValue(ProtocolBuffer.ProtocolMessage):
has_property_ = 0
property_ = ""
has_value_ = 0
def __init__(self, contents=None):
self.value_ = PropertyValue()
if contents is not None: self.MergeFromString(contents)
def property(self): return self.property_
def set_property(self, x):
self.has_property_ = 1
self.property_ = x
def clear_property(self):
if self.has_property_:
self.has_property_ = 0
self.property_ = ""
def has_property(self): return self.has_property_
def value(self): return self.value_
def mutable_value(self): self.has_value_ = 1; return self.value_
def clear_value(self):self.has_value_ = 0; self.value_.Clear()
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_property()): self.set_property(x.property())
if (x.has_value()): self.mutable_value().MergeFrom(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_property_ != x.has_property_: return 0
if self.has_property_ and self.property_ != x.property_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
elif not self.value_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
n += self.lengthString(self.value_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_property_): n += 2 + self.lengthString(len(self.property_))
if (self.has_value_):
n += 2
n += self.lengthString(self.value_.ByteSizePartial())
return n
def Clear(self):
self.clear_property()
self.clear_value()
def OutputUnchecked(self, out):
if (self.has_property_):
out.putVarInt32(242)
out.putPrefixedString(self.property_)
out.putVarInt32(250)
out.putVarInt32(self.value_.ByteSize())
self.value_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_property_):
out.putVarInt32(242)
out.putPrefixedString(self.property_)
if (self.has_value_):
out.putVarInt32(250)
out.putVarInt32(self.value_.ByteSizePartial())
self.value_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 236: break
if tt == 242:
self.set_property(d.getPrefixedString())
continue
if tt == 250:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_value().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
if self.has_value_:
res+=prefix+"value <\n"
res+=self.value_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
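# CompiledCursor_Position pins a cursor location either by an encoded
# start_key or by IndexValue groups plus an entity-key Reference;
# start_inclusive defaults to true, and before_ascending records
# direction-relative placement (interpretation from the field names).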
class CompiledCursor_Position(ProtocolBuffer.ProtocolMessage):
has_start_key_ = 0
start_key_ = ""
has_key_ = 0
key_ = None
has_start_inclusive_ = 0
start_inclusive_ = 1
has_before_ascending_ = 0
before_ascending_ = 0
def __init__(self, contents=None):
self.indexvalue_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def start_key(self): return self.start_key_
def set_start_key(self, x):
self.has_start_key_ = 1
self.start_key_ = x
def clear_start_key(self):
if self.has_start_key_:
self.has_start_key_ = 0
self.start_key_ = ""
def has_start_key(self): return self.has_start_key_
def indexvalue_size(self): return len(self.indexvalue_)
def indexvalue_list(self): return self.indexvalue_
def indexvalue(self, i):
return self.indexvalue_[i]
def mutable_indexvalue(self, i):
return self.indexvalue_[i]
def add_indexvalue(self):
x = CompiledCursor_PositionIndexValue()
self.indexvalue_.append(x)
return x
def clear_indexvalue(self):
self.indexvalue_ = []
def key(self):
if self.key_ is None:
self.lazy_init_lock_.acquire()
try:
if self.key_ is None: self.key_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.key_
def mutable_key(self): self.has_key_ = 1; return self.key()
def clear_key(self):
if self.has_key_:
self.has_key_ = 0;
if self.key_ is not None: self.key_.Clear()
def has_key(self): return self.has_key_
def start_inclusive(self): return self.start_inclusive_
def set_start_inclusive(self, x):
self.has_start_inclusive_ = 1
self.start_inclusive_ = x
def clear_start_inclusive(self):
if self.has_start_inclusive_:
self.has_start_inclusive_ = 0
self.start_inclusive_ = 1
def has_start_inclusive(self): return self.has_start_inclusive_
def before_ascending(self): return self.before_ascending_
def set_before_ascending(self, x):
self.has_before_ascending_ = 1
self.before_ascending_ = x
def clear_before_ascending(self):
if self.has_before_ascending_:
self.has_before_ascending_ = 0
self.before_ascending_ = 0
def has_before_ascending(self): return self.has_before_ascending_
def MergeFrom(self, x):
assert x is not self
if (x.has_start_key()): self.set_start_key(x.start_key())
for i in xrange(x.indexvalue_size()): self.add_indexvalue().CopyFrom(x.indexvalue(i))
if (x.has_key()): self.mutable_key().MergeFrom(x.key())
if (x.has_start_inclusive()): self.set_start_inclusive(x.start_inclusive())
if (x.has_before_ascending()): self.set_before_ascending(x.before_ascending())
def Equals(self, x):
if x is self: return 1
if self.has_start_key_ != x.has_start_key_: return 0
if self.has_start_key_ and self.start_key_ != x.start_key_: return 0
if len(self.indexvalue_) != len(x.indexvalue_): return 0
for e1, e2 in zip(self.indexvalue_, x.indexvalue_):
if e1 != e2: return 0
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_start_inclusive_ != x.has_start_inclusive_: return 0
if self.has_start_inclusive_ and self.start_inclusive_ != x.start_inclusive_: return 0
if self.has_before_ascending_ != x.has_before_ascending_: return 0
if self.has_before_ascending_ and self.before_ascending_ != x.before_ascending_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.indexvalue_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
n += 4 * len(self.indexvalue_)
for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSize()
if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSize())
if (self.has_start_inclusive_): n += 3
if (self.has_before_ascending_): n += 3
return n
def ByteSizePartial(self):
n = 0
if (self.has_start_key_): n += 2 + self.lengthString(len(self.start_key_))
n += 4 * len(self.indexvalue_)
for i in xrange(len(self.indexvalue_)): n += self.indexvalue_[i].ByteSizePartial()
if (self.has_key_): n += 2 + self.lengthString(self.key_.ByteSizePartial())
if (self.has_start_inclusive_): n += 3
if (self.has_before_ascending_): n += 3
return n
def Clear(self):
self.clear_start_key()
self.clear_indexvalue()
self.clear_key()
self.clear_start_inclusive()
self.clear_before_ascending()
def OutputUnchecked(self, out):
if (self.has_start_key_):
out.putVarInt32(218)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(224)
out.putBoolean(self.start_inclusive_)
for i in xrange(len(self.indexvalue_)):
out.putVarInt32(235)
self.indexvalue_[i].OutputUnchecked(out)
out.putVarInt32(236)
if (self.has_key_):
out.putVarInt32(258)
out.putVarInt32(self.key_.ByteSize())
self.key_.OutputUnchecked(out)
if (self.has_before_ascending_):
out.putVarInt32(264)
out.putBoolean(self.before_ascending_)
def OutputPartial(self, out):
if (self.has_start_key_):
out.putVarInt32(218)
out.putPrefixedString(self.start_key_)
if (self.has_start_inclusive_):
out.putVarInt32(224)
out.putBoolean(self.start_inclusive_)
for i in xrange(len(self.indexvalue_)):
out.putVarInt32(235)
self.indexvalue_[i].OutputPartial(out)
out.putVarInt32(236)
if (self.has_key_):
out.putVarInt32(258)
out.putVarInt32(self.key_.ByteSizePartial())
self.key_.OutputPartial(out)
if (self.has_before_ascending_):
out.putVarInt32(264)
out.putBoolean(self.before_ascending_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 20: break
if tt == 218:
self.set_start_key(d.getPrefixedString())
continue
if tt == 224:
self.set_start_inclusive(d.getBoolean())
continue
if tt == 235:
self.add_indexvalue().TryMerge(d)
continue
if tt == 258:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_key().TryMerge(tmp)
continue
if tt == 264:
self.set_before_ascending(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_start_key_: res+=prefix+("start_key: %s\n" % self.DebugFormatString(self.start_key_))
cnt=0
for e in self.indexvalue_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("IndexValue%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
if self.has_key_:
res+=prefix+"key <\n"
res+=self.key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_start_inclusive_: res+=prefix+("start_inclusive: %s\n" % self.DebugFormatBool(self.start_inclusive_))
if self.has_before_ascending_: res+=prefix+("before_ascending: %s\n" % self.DebugFormatBool(self.before_ascending_))
return res
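# CompiledCursor wraps one of three position encodings: the Position group
# above, an IndexPostfix (postfix_position), or an IndexPosition
# (absolute_position).  All three sub-messages are lazily allocated.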
class CompiledCursor(ProtocolBuffer.ProtocolMessage):
has_position_ = 0
position_ = None
has_postfix_position_ = 0
postfix_position_ = None
has_absolute_position_ = 0
absolute_position_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def position(self):
if self.position_ is None:
self.lazy_init_lock_.acquire()
try:
if self.position_ is None: self.position_ = CompiledCursor_Position()
finally:
self.lazy_init_lock_.release()
return self.position_
def mutable_position(self): self.has_position_ = 1; return self.position()
def clear_position(self):
if self.has_position_:
self.has_position_ = 0;
if self.position_ is not None: self.position_.Clear()
def has_position(self): return self.has_position_
def postfix_position(self):
if self.postfix_position_ is None:
self.lazy_init_lock_.acquire()
try:
if self.postfix_position_ is None: self.postfix_position_ = IndexPostfix()
finally:
self.lazy_init_lock_.release()
return self.postfix_position_
def mutable_postfix_position(self): self.has_postfix_position_ = 1; return self.postfix_position()
def clear_postfix_position(self):
if self.has_postfix_position_:
self.has_postfix_position_ = 0;
if self.postfix_position_ is not None: self.postfix_position_.Clear()
def has_postfix_position(self): return self.has_postfix_position_
def absolute_position(self):
if self.absolute_position_ is None:
self.lazy_init_lock_.acquire()
try:
if self.absolute_position_ is None: self.absolute_position_ = IndexPosition()
finally:
self.lazy_init_lock_.release()
return self.absolute_position_
def mutable_absolute_position(self): self.has_absolute_position_ = 1; return self.absolute_position()
def clear_absolute_position(self):
if self.has_absolute_position_:
self.has_absolute_position_ = 0;
if self.absolute_position_ is not None: self.absolute_position_.Clear()
def has_absolute_position(self): return self.has_absolute_position_
def MergeFrom(self, x):
assert x is not self
if (x.has_position()): self.mutable_position().MergeFrom(x.position())
if (x.has_postfix_position()): self.mutable_postfix_position().MergeFrom(x.postfix_position())
if (x.has_absolute_position()): self.mutable_absolute_position().MergeFrom(x.absolute_position())
def Equals(self, x):
if x is self: return 1
if self.has_position_ != x.has_position_: return 0
if self.has_position_ and self.position_ != x.position_: return 0
if self.has_postfix_position_ != x.has_postfix_position_: return 0
if self.has_postfix_position_ and self.postfix_position_ != x.postfix_position_: return 0
if self.has_absolute_position_ != x.has_absolute_position_: return 0
if self.has_absolute_position_ and self.absolute_position_ != x.absolute_position_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_position_ and not self.position_.IsInitialized(debug_strs)): initialized = 0
if (self.has_postfix_position_ and not self.postfix_position_.IsInitialized(debug_strs)): initialized = 0
if (self.has_absolute_position_ and not self.absolute_position_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_position_): n += 2 + self.position_.ByteSize()
if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSize())
if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_position_): n += 2 + self.position_.ByteSizePartial()
if (self.has_postfix_position_): n += 1 + self.lengthString(self.postfix_position_.ByteSizePartial())
if (self.has_absolute_position_): n += 1 + self.lengthString(self.absolute_position_.ByteSizePartial())
return n
def Clear(self):
self.clear_position()
self.clear_postfix_position()
self.clear_absolute_position()
def OutputUnchecked(self, out):
if (self.has_postfix_position_):
out.putVarInt32(10)
out.putVarInt32(self.postfix_position_.ByteSize())
self.postfix_position_.OutputUnchecked(out)
if (self.has_position_):
out.putVarInt32(19)
self.position_.OutputUnchecked(out)
out.putVarInt32(20)
if (self.has_absolute_position_):
out.putVarInt32(26)
out.putVarInt32(self.absolute_position_.ByteSize())
self.absolute_position_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_postfix_position_):
out.putVarInt32(10)
out.putVarInt32(self.postfix_position_.ByteSizePartial())
self.postfix_position_.OutputPartial(out)
if (self.has_position_):
out.putVarInt32(19)
self.position_.OutputPartial(out)
out.putVarInt32(20)
if (self.has_absolute_position_):
out.putVarInt32(26)
out.putVarInt32(self.absolute_position_.ByteSizePartial())
self.absolute_position_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_postfix_position().TryMerge(tmp)
continue
if tt == 19:
self.mutable_position().TryMerge(d)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_absolute_position().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_position_:
res+=prefix+"Position {\n"
res+=self.position_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_postfix_position_:
res+=prefix+"postfix_position <\n"
res+=self.postfix_position_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_absolute_position_:
res+=prefix+"absolute_position <\n"
res+=self.absolute_position_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kPositionGroup = 2
kPositionstart_key = 27
kPositionIndexValueGroup = 29
kPositionIndexValueproperty = 30
kPositionIndexValuevalue = 31
kPositionkey = 32
kPositionstart_inclusive = 28
kPositionbefore_ascending = 33
kpostfix_position = 1
kabsolute_position = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "postfix_position",
2: "Position",
3: "absolute_position",
27: "start_key",
28: "start_inclusive",
29: "IndexValue",
30: "property",
31: "value",
32: "key",
33: "before_ascending",
}, 33)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STARTGROUP,
3: ProtocolBuffer.Encoder.STRING,
27: ProtocolBuffer.Encoder.STRING,
28: ProtocolBuffer.Encoder.NUMERIC,
29: ProtocolBuffer.Encoder.STARTGROUP,
30: ProtocolBuffer.Encoder.STRING,
31: ProtocolBuffer.Encoder.STRING,
32: ProtocolBuffer.Encoder.STRING,
33: ProtocolBuffer.Encoder.NUMERIC,
}, 33, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompiledCursor'
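# Cursor is the opaque handle handed back to callers: a required fixed64
# cursor value plus an optional app id.
#
# Round-trip sketch ('example-app' is a hypothetical app id; Encode/Equals
# come from the ProtocolMessage base class):
#
#   c = Cursor()
#   c.set_cursor(12345)
#   c.set_app('example-app')
#   data = c.Encode()             # Encode() fails while required fields are unset
#   assert Cursor(data).Equals(c)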
class Cursor(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = 0
has_app_ = 0
app_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def cursor(self): return self.cursor_
def set_cursor(self, x):
self.has_cursor_ = 1
self.cursor_ = x
def clear_cursor(self):
if self.has_cursor_:
self.has_cursor_ = 0
self.cursor_ = 0
def has_cursor(self): return self.has_cursor_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def MergeFrom(self, x):
assert x is not self
if (x.has_cursor()): self.set_cursor(x.cursor())
if (x.has_app()): self.set_app(x.app())
def Equals(self, x):
if x is self: return 1
if self.has_cursor_ != x.has_cursor_: return 0
if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_cursor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: cursor not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
return n + 9
def ByteSizePartial(self):
n = 0
if (self.has_cursor_):
n += 9
if (self.has_app_): n += 1 + self.lengthString(len(self.app_))
return n
def Clear(self):
self.clear_cursor()
self.clear_app()
def OutputUnchecked(self, out):
out.putVarInt32(9)
out.put64(self.cursor_)
if (self.has_app_):
out.putVarInt32(18)
out.putPrefixedString(self.app_)
def OutputPartial(self, out):
if (self.has_cursor_):
out.putVarInt32(9)
out.put64(self.cursor_)
if (self.has_app_):
out.putVarInt32(18)
out.putPrefixedString(self.app_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 9:
self.set_cursor(d.get64())
continue
if tt == 18:
self.set_app(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cursor_: res+=prefix+("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcursor = 1
kapp = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cursor",
2: "app",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.DOUBLE,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cursor'
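# Error is a field-less message that exists to scope the ErrorCode enum
# (BAD_REQUEST through SAFE_TIME_TOO_OLD); ErrorCode_Name maps a numeric
# code back to its symbolic name, returning "" for unknown values.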
class Error(ProtocolBuffer.ProtocolMessage):
BAD_REQUEST = 1
CONCURRENT_TRANSACTION = 2
INTERNAL_ERROR = 3
NEED_INDEX = 4
TIMEOUT = 5
PERMISSION_DENIED = 6
BIGTABLE_ERROR = 7
COMMITTED_BUT_STILL_APPLYING = 8
CAPABILITY_DISABLED = 9
TRY_ALTERNATE_BACKEND = 10
SAFE_TIME_TOO_OLD = 11
_ErrorCode_NAMES = {
1: "BAD_REQUEST",
2: "CONCURRENT_TRANSACTION",
3: "INTERNAL_ERROR",
4: "NEED_INDEX",
5: "TIMEOUT",
6: "PERMISSION_DENIED",
7: "BIGTABLE_ERROR",
8: "COMMITTED_BUT_STILL_APPLYING",
9: "CAPABILITY_DISABLED",
10: "TRY_ALTERNATE_BACKEND",
11: "SAFE_TIME_TOO_OLD",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
ErrorCode_Name = classmethod(ErrorCode_Name)
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Error'
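# Cost_CommitCost is the group counting the entity puts and deletes
# requested by a commit.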
class Cost_CommitCost(ProtocolBuffer.ProtocolMessage):
has_requested_entity_puts_ = 0
requested_entity_puts_ = 0
has_requested_entity_deletes_ = 0
requested_entity_deletes_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def requested_entity_puts(self): return self.requested_entity_puts_
def set_requested_entity_puts(self, x):
self.has_requested_entity_puts_ = 1
self.requested_entity_puts_ = x
def clear_requested_entity_puts(self):
if self.has_requested_entity_puts_:
self.has_requested_entity_puts_ = 0
self.requested_entity_puts_ = 0
def has_requested_entity_puts(self): return self.has_requested_entity_puts_
def requested_entity_deletes(self): return self.requested_entity_deletes_
def set_requested_entity_deletes(self, x):
self.has_requested_entity_deletes_ = 1
self.requested_entity_deletes_ = x
def clear_requested_entity_deletes(self):
if self.has_requested_entity_deletes_:
self.has_requested_entity_deletes_ = 0
self.requested_entity_deletes_ = 0
def has_requested_entity_deletes(self): return self.has_requested_entity_deletes_
def MergeFrom(self, x):
assert x is not self
if (x.has_requested_entity_puts()): self.set_requested_entity_puts(x.requested_entity_puts())
if (x.has_requested_entity_deletes()): self.set_requested_entity_deletes(x.requested_entity_deletes())
def Equals(self, x):
if x is self: return 1
if self.has_requested_entity_puts_ != x.has_requested_entity_puts_: return 0
if self.has_requested_entity_puts_ and self.requested_entity_puts_ != x.requested_entity_puts_: return 0
if self.has_requested_entity_deletes_ != x.has_requested_entity_deletes_: return 0
if self.has_requested_entity_deletes_ and self.requested_entity_deletes_ != x.requested_entity_deletes_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_requested_entity_puts_): n += 1 + self.lengthVarInt64(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_): n += 1 + self.lengthVarInt64(self.requested_entity_deletes_)
return n
def Clear(self):
self.clear_requested_entity_puts()
self.clear_requested_entity_deletes()
def OutputUnchecked(self, out):
if (self.has_requested_entity_puts_):
out.putVarInt32(48)
out.putVarInt32(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_):
out.putVarInt32(56)
out.putVarInt32(self.requested_entity_deletes_)
def OutputPartial(self, out):
if (self.has_requested_entity_puts_):
out.putVarInt32(48)
out.putVarInt32(self.requested_entity_puts_)
if (self.has_requested_entity_deletes_):
out.putVarInt32(56)
out.putVarInt32(self.requested_entity_deletes_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 44: break
if tt == 48:
self.set_requested_entity_puts(d.getVarInt32())
continue
if tt == 56:
self.set_requested_entity_deletes(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_requested_entity_puts_: res+=prefix+("requested_entity_puts: %s\n" % self.DebugFormatInt32(self.requested_entity_puts_))
if self.has_requested_entity_deletes_: res+=prefix+("requested_entity_deletes: %s\n" % self.DebugFormatInt32(self.requested_entity_deletes_))
return res
class Cost(ProtocolBuffer.ProtocolMessage):
has_index_writes_ = 0
index_writes_ = 0
has_index_write_bytes_ = 0
index_write_bytes_ = 0
has_entity_writes_ = 0
entity_writes_ = 0
has_entity_write_bytes_ = 0
entity_write_bytes_ = 0
has_commitcost_ = 0
commitcost_ = None
has_approximate_storage_delta_ = 0
approximate_storage_delta_ = 0
has_id_sequence_updates_ = 0
id_sequence_updates_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def index_writes(self): return self.index_writes_
def set_index_writes(self, x):
self.has_index_writes_ = 1
self.index_writes_ = x
def clear_index_writes(self):
if self.has_index_writes_:
self.has_index_writes_ = 0
self.index_writes_ = 0
def has_index_writes(self): return self.has_index_writes_
def index_write_bytes(self): return self.index_write_bytes_
def set_index_write_bytes(self, x):
self.has_index_write_bytes_ = 1
self.index_write_bytes_ = x
def clear_index_write_bytes(self):
if self.has_index_write_bytes_:
self.has_index_write_bytes_ = 0
self.index_write_bytes_ = 0
def has_index_write_bytes(self): return self.has_index_write_bytes_
def entity_writes(self): return self.entity_writes_
def set_entity_writes(self, x):
self.has_entity_writes_ = 1
self.entity_writes_ = x
def clear_entity_writes(self):
if self.has_entity_writes_:
self.has_entity_writes_ = 0
self.entity_writes_ = 0
def has_entity_writes(self): return self.has_entity_writes_
def entity_write_bytes(self): return self.entity_write_bytes_
def set_entity_write_bytes(self, x):
self.has_entity_write_bytes_ = 1
self.entity_write_bytes_ = x
def clear_entity_write_bytes(self):
if self.has_entity_write_bytes_:
self.has_entity_write_bytes_ = 0
self.entity_write_bytes_ = 0
def has_entity_write_bytes(self): return self.has_entity_write_bytes_
def commitcost(self):
if self.commitcost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.commitcost_ is None: self.commitcost_ = Cost_CommitCost()
finally:
self.lazy_init_lock_.release()
return self.commitcost_
def mutable_commitcost(self): self.has_commitcost_ = 1; return self.commitcost()
def clear_commitcost(self):
if self.has_commitcost_:
      self.has_commitcost_ = 0
if self.commitcost_ is not None: self.commitcost_.Clear()
def has_commitcost(self): return self.has_commitcost_
def approximate_storage_delta(self): return self.approximate_storage_delta_
def set_approximate_storage_delta(self, x):
self.has_approximate_storage_delta_ = 1
self.approximate_storage_delta_ = x
def clear_approximate_storage_delta(self):
if self.has_approximate_storage_delta_:
self.has_approximate_storage_delta_ = 0
self.approximate_storage_delta_ = 0
def has_approximate_storage_delta(self): return self.has_approximate_storage_delta_
def id_sequence_updates(self): return self.id_sequence_updates_
def set_id_sequence_updates(self, x):
self.has_id_sequence_updates_ = 1
self.id_sequence_updates_ = x
def clear_id_sequence_updates(self):
if self.has_id_sequence_updates_:
self.has_id_sequence_updates_ = 0
self.id_sequence_updates_ = 0
def has_id_sequence_updates(self): return self.has_id_sequence_updates_
def MergeFrom(self, x):
assert x is not self
if (x.has_index_writes()): self.set_index_writes(x.index_writes())
if (x.has_index_write_bytes()): self.set_index_write_bytes(x.index_write_bytes())
if (x.has_entity_writes()): self.set_entity_writes(x.entity_writes())
if (x.has_entity_write_bytes()): self.set_entity_write_bytes(x.entity_write_bytes())
if (x.has_commitcost()): self.mutable_commitcost().MergeFrom(x.commitcost())
if (x.has_approximate_storage_delta()): self.set_approximate_storage_delta(x.approximate_storage_delta())
if (x.has_id_sequence_updates()): self.set_id_sequence_updates(x.id_sequence_updates())
def Equals(self, x):
if x is self: return 1
if self.has_index_writes_ != x.has_index_writes_: return 0
if self.has_index_writes_ and self.index_writes_ != x.index_writes_: return 0
if self.has_index_write_bytes_ != x.has_index_write_bytes_: return 0
if self.has_index_write_bytes_ and self.index_write_bytes_ != x.index_write_bytes_: return 0
if self.has_entity_writes_ != x.has_entity_writes_: return 0
if self.has_entity_writes_ and self.entity_writes_ != x.entity_writes_: return 0
if self.has_entity_write_bytes_ != x.has_entity_write_bytes_: return 0
if self.has_entity_write_bytes_ and self.entity_write_bytes_ != x.entity_write_bytes_: return 0
if self.has_commitcost_ != x.has_commitcost_: return 0
if self.has_commitcost_ and self.commitcost_ != x.commitcost_: return 0
if self.has_approximate_storage_delta_ != x.has_approximate_storage_delta_: return 0
if self.has_approximate_storage_delta_ and self.approximate_storage_delta_ != x.approximate_storage_delta_: return 0
if self.has_id_sequence_updates_ != x.has_id_sequence_updates_: return 0
if self.has_id_sequence_updates_ and self.id_sequence_updates_ != x.id_sequence_updates_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_commitcost_ and not self.commitcost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSize()
if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_index_writes_): n += 1 + self.lengthVarInt64(self.index_writes_)
if (self.has_index_write_bytes_): n += 1 + self.lengthVarInt64(self.index_write_bytes_)
if (self.has_entity_writes_): n += 1 + self.lengthVarInt64(self.entity_writes_)
if (self.has_entity_write_bytes_): n += 1 + self.lengthVarInt64(self.entity_write_bytes_)
if (self.has_commitcost_): n += 2 + self.commitcost_.ByteSizePartial()
if (self.has_approximate_storage_delta_): n += 1 + self.lengthVarInt64(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_): n += 1 + self.lengthVarInt64(self.id_sequence_updates_)
return n
def Clear(self):
self.clear_index_writes()
self.clear_index_write_bytes()
self.clear_entity_writes()
self.clear_entity_write_bytes()
self.clear_commitcost()
self.clear_approximate_storage_delta()
self.clear_id_sequence_updates()
def OutputUnchecked(self, out):
if (self.has_index_writes_):
out.putVarInt32(8)
out.putVarInt32(self.index_writes_)
if (self.has_index_write_bytes_):
out.putVarInt32(16)
out.putVarInt32(self.index_write_bytes_)
if (self.has_entity_writes_):
out.putVarInt32(24)
out.putVarInt32(self.entity_writes_)
if (self.has_entity_write_bytes_):
out.putVarInt32(32)
out.putVarInt32(self.entity_write_bytes_)
if (self.has_commitcost_):
out.putVarInt32(43)
self.commitcost_.OutputUnchecked(out)
out.putVarInt32(44)
if (self.has_approximate_storage_delta_):
out.putVarInt32(64)
out.putVarInt32(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_):
out.putVarInt32(72)
out.putVarInt32(self.id_sequence_updates_)
def OutputPartial(self, out):
if (self.has_index_writes_):
out.putVarInt32(8)
out.putVarInt32(self.index_writes_)
if (self.has_index_write_bytes_):
out.putVarInt32(16)
out.putVarInt32(self.index_write_bytes_)
if (self.has_entity_writes_):
out.putVarInt32(24)
out.putVarInt32(self.entity_writes_)
if (self.has_entity_write_bytes_):
out.putVarInt32(32)
out.putVarInt32(self.entity_write_bytes_)
if (self.has_commitcost_):
out.putVarInt32(43)
self.commitcost_.OutputPartial(out)
out.putVarInt32(44)
if (self.has_approximate_storage_delta_):
out.putVarInt32(64)
out.putVarInt32(self.approximate_storage_delta_)
if (self.has_id_sequence_updates_):
out.putVarInt32(72)
out.putVarInt32(self.id_sequence_updates_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_index_writes(d.getVarInt32())
continue
if tt == 16:
self.set_index_write_bytes(d.getVarInt32())
continue
if tt == 24:
self.set_entity_writes(d.getVarInt32())
continue
if tt == 32:
self.set_entity_write_bytes(d.getVarInt32())
continue
if tt == 43:
self.mutable_commitcost().TryMerge(d)
continue
if tt == 64:
self.set_approximate_storage_delta(d.getVarInt32())
continue
if tt == 72:
self.set_id_sequence_updates(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_index_writes_: res+=prefix+("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_))
if self.has_index_write_bytes_: res+=prefix+("index_write_bytes: %s\n" % self.DebugFormatInt32(self.index_write_bytes_))
if self.has_entity_writes_: res+=prefix+("entity_writes: %s\n" % self.DebugFormatInt32(self.entity_writes_))
if self.has_entity_write_bytes_: res+=prefix+("entity_write_bytes: %s\n" % self.DebugFormatInt32(self.entity_write_bytes_))
if self.has_commitcost_:
res+=prefix+"CommitCost {\n"
res+=self.commitcost_.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
if self.has_approximate_storage_delta_: res+=prefix+("approximate_storage_delta: %s\n" % self.DebugFormatInt32(self.approximate_storage_delta_))
if self.has_id_sequence_updates_: res+=prefix+("id_sequence_updates: %s\n" % self.DebugFormatInt32(self.id_sequence_updates_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kindex_writes = 1
kindex_write_bytes = 2
kentity_writes = 3
kentity_write_bytes = 4
kCommitCostGroup = 5
kCommitCostrequested_entity_puts = 6
kCommitCostrequested_entity_deletes = 7
kapproximate_storage_delta = 8
kid_sequence_updates = 9
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "index_writes",
2: "index_write_bytes",
3: "entity_writes",
4: "entity_write_bytes",
5: "CommitCost",
6: "requested_entity_puts",
7: "requested_entity_deletes",
8: "approximate_storage_delta",
9: "id_sequence_updates",
}, 9)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STARTGROUP,
6: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.NUMERIC,
}, 9, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.Cost'
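# GetRequest is the request body for a datastore_v3 Get call: a repeated
# list of entity keys (Reference, defined earlier in this module), an
# optional Transaction, a failover_ms latency hint, a `strong` flag for a
# strongly consistent read, and allow_deferred, which lets the backend
# report some keys back in GetResponse.deferred instead of inlining them.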
class GetRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_transaction_ = 0
transaction_ = None
has_failover_ms_ = 0
failover_ms_ = 0
has_strong_ = 0
strong_ = 0
has_allow_deferred_ = 0
allow_deferred_ = 0
def __init__(self, contents=None):
self.key_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
      self.has_header_ = 0
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
      self.has_transaction_ = 0
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def failover_ms(self): return self.failover_ms_
def set_failover_ms(self, x):
self.has_failover_ms_ = 1
self.failover_ms_ = x
def clear_failover_ms(self):
if self.has_failover_ms_:
self.has_failover_ms_ = 0
self.failover_ms_ = 0
def has_failover_ms(self): return self.has_failover_ms_
def strong(self): return self.strong_
def set_strong(self, x):
self.has_strong_ = 1
self.strong_ = x
def clear_strong(self):
if self.has_strong_:
self.has_strong_ = 0
self.strong_ = 0
def has_strong(self): return self.has_strong_
def allow_deferred(self): return self.allow_deferred_
def set_allow_deferred(self, x):
self.has_allow_deferred_ = 1
self.allow_deferred_ = x
def clear_allow_deferred(self):
if self.has_allow_deferred_:
self.has_allow_deferred_ = 0
self.allow_deferred_ = 0
def has_allow_deferred(self): return self.has_allow_deferred_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
if (x.has_failover_ms()): self.set_failover_ms(x.failover_ms())
if (x.has_strong()): self.set_strong(x.strong())
if (x.has_allow_deferred()): self.set_allow_deferred(x.allow_deferred())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if self.has_failover_ms_ != x.has_failover_ms_: return 0
if self.has_failover_ms_ and self.failover_ms_ != x.failover_ms_: return 0
if self.has_strong_ != x.has_strong_: return 0
if self.has_strong_ and self.strong_ != x.strong_: return 0
if self.has_allow_deferred_ != x.has_allow_deferred_: return 0
if self.has_allow_deferred_ and self.allow_deferred_ != x.allow_deferred_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 2
if (self.has_allow_deferred_): n += 2
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
if (self.has_failover_ms_): n += 1 + self.lengthVarInt64(self.failover_ms_)
if (self.has_strong_): n += 2
if (self.has_allow_deferred_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_key()
self.clear_transaction()
self.clear_failover_ms()
self.clear_strong()
self.clear_allow_deferred()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
if (self.has_failover_ms_):
out.putVarInt32(24)
out.putVarInt64(self.failover_ms_)
if (self.has_strong_):
out.putVarInt32(32)
out.putBoolean(self.strong_)
if (self.has_allow_deferred_):
out.putVarInt32(40)
out.putBoolean(self.allow_deferred_)
if (self.has_header_):
out.putVarInt32(50)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
if (self.has_failover_ms_):
out.putVarInt32(24)
out.putVarInt64(self.failover_ms_)
if (self.has_strong_):
out.putVarInt32(32)
out.putBoolean(self.strong_)
if (self.has_allow_deferred_):
out.putVarInt32(40)
out.putBoolean(self.allow_deferred_)
if (self.has_header_):
out.putVarInt32(50)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 24:
self.set_failover_ms(d.getVarInt64())
continue
if tt == 32:
self.set_strong(d.getBoolean())
continue
if tt == 40:
self.set_allow_deferred(d.getBoolean())
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_failover_ms_: res+=prefix+("failover_ms: %s\n" % self.DebugFormatInt64(self.failover_ms_))
if self.has_strong_: res+=prefix+("strong: %s\n" % self.DebugFormatBool(self.strong_))
if self.has_allow_deferred_: res+=prefix+("allow_deferred: %s\n" % self.DebugFormatBool(self.allow_deferred_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 6
kkey = 1
ktransaction = 2
kfailover_ms = 3
kstrong = 4
kallow_deferred = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "transaction",
3: "failover_ms",
4: "strong",
5: "allow_deferred",
6: "header",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.STRING,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetRequest'
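# A minimal usage sketch for GetRequest -- illustrative only. It assumes
# the Reference message defined earlier in this module and the standard
# ProtocolMessage.Encode()/constructor round trip used throughout this
# file:
#
#   req = GetRequest()
#   ref = req.add_key()        # appends and returns a fresh Reference;
#                              # a real call must fill its required fields
#   req.set_strong(1)          # booleans are stored as 0/1 in this codegen
#   payload = req.Encode()     # raises unless every message is initialized
#   same = GetRequest(payload) # the constructor merges from the string
#
# GetResponse_Entity below is the repeated "Entity" group of GetResponse;
# each element holds an optional entity, an optional key and an optional
# version. As with CommitCost, the group is framed by STARTGROUP/ENDGROUP
# tags for field 1 (1 << 3 | 3 == 11 and 1 << 3 | 4 == 12).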
class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
has_entity_ = 0
entity_ = None
has_key_ = 0
key_ = None
has_version_ = 0
version_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def entity(self):
if self.entity_ is None:
self.lazy_init_lock_.acquire()
try:
if self.entity_ is None: self.entity_ = EntityProto()
finally:
self.lazy_init_lock_.release()
return self.entity_
def mutable_entity(self): self.has_entity_ = 1; return self.entity()
def clear_entity(self):
if self.has_entity_:
      self.has_entity_ = 0
if self.entity_ is not None: self.entity_.Clear()
def has_entity(self): return self.has_entity_
def key(self):
if self.key_ is None:
self.lazy_init_lock_.acquire()
try:
if self.key_ is None: self.key_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.key_
def mutable_key(self): self.has_key_ = 1; return self.key()
def clear_key(self):
if self.has_key_:
      self.has_key_ = 0
if self.key_ is not None: self.key_.Clear()
def has_key(self): return self.has_key_
def version(self): return self.version_
def set_version(self, x):
self.has_version_ = 1
self.version_ = x
def clear_version(self):
if self.has_version_:
self.has_version_ = 0
self.version_ = 0
def has_version(self): return self.has_version_
def MergeFrom(self, x):
assert x is not self
if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
if (x.has_key()): self.mutable_key().MergeFrom(x.key())
if (x.has_version()): self.set_version(x.version())
def Equals(self, x):
if x is self: return 1
if self.has_entity_ != x.has_entity_: return 0
if self.has_entity_ and self.entity_ != x.entity_: return 0
if self.has_key_ != x.has_key_: return 0
if self.has_key_ and self.key_ != x.key_: return 0
if self.has_version_ != x.has_version_: return 0
if self.has_version_ and self.version_ != x.version_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
if (self.has_key_ and not self.key_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSize())
if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSizePartial())
if (self.has_key_): n += 1 + self.lengthString(self.key_.ByteSizePartial())
if (self.has_version_): n += 1 + self.lengthVarInt64(self.version_)
return n
def Clear(self):
self.clear_entity()
self.clear_key()
self.clear_version()
def OutputUnchecked(self, out):
if (self.has_entity_):
out.putVarInt32(18)
out.putVarInt32(self.entity_.ByteSize())
self.entity_.OutputUnchecked(out)
if (self.has_version_):
out.putVarInt32(24)
out.putVarInt64(self.version_)
if (self.has_key_):
out.putVarInt32(34)
out.putVarInt32(self.key_.ByteSize())
self.key_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_entity_):
out.putVarInt32(18)
out.putVarInt32(self.entity_.ByteSizePartial())
self.entity_.OutputPartial(out)
if (self.has_version_):
out.putVarInt32(24)
out.putVarInt64(self.version_)
if (self.has_key_):
out.putVarInt32(34)
out.putVarInt32(self.key_.ByteSizePartial())
self.key_.OutputPartial(out)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 12: break
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_entity().TryMerge(tmp)
continue
if tt == 24:
self.set_version(d.getVarInt64())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_key().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_entity_:
res+=prefix+"entity <\n"
res+=self.entity_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_key_:
res+=prefix+"key <\n"
res+=self.key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
return res
class GetResponse(ProtocolBuffer.ProtocolMessage):
has_in_order_ = 0
in_order_ = 1
def __init__(self, contents=None):
self.entity_ = []
self.deferred_ = []
if contents is not None: self.MergeFromString(contents)
def entity_size(self): return len(self.entity_)
def entity_list(self): return self.entity_
def entity(self, i):
return self.entity_[i]
def mutable_entity(self, i):
return self.entity_[i]
def add_entity(self):
x = GetResponse_Entity()
self.entity_.append(x)
return x
def clear_entity(self):
self.entity_ = []
def deferred_size(self): return len(self.deferred_)
def deferred_list(self): return self.deferred_
def deferred(self, i):
return self.deferred_[i]
def mutable_deferred(self, i):
return self.deferred_[i]
def add_deferred(self):
x = Reference()
self.deferred_.append(x)
return x
def clear_deferred(self):
self.deferred_ = []
def in_order(self): return self.in_order_
def set_in_order(self, x):
self.has_in_order_ = 1
self.in_order_ = x
def clear_in_order(self):
if self.has_in_order_:
self.has_in_order_ = 0
self.in_order_ = 1
def has_in_order(self): return self.has_in_order_
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
for i in xrange(x.deferred_size()): self.add_deferred().CopyFrom(x.deferred(i))
if (x.has_in_order()): self.set_in_order(x.in_order())
def Equals(self, x):
if x is self: return 1
if len(self.entity_) != len(x.entity_): return 0
for e1, e2 in zip(self.entity_, x.entity_):
if e1 != e2: return 0
if len(self.deferred_) != len(x.deferred_): return 0
for e1, e2 in zip(self.deferred_, x.deferred_):
if e1 != e2: return 0
if self.has_in_order_ != x.has_in_order_: return 0
if self.has_in_order_ and self.in_order_ != x.in_order_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.entity_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.deferred_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 2 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
n += 1 * len(self.deferred_)
for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSize())
if (self.has_in_order_): n += 2
return n
def ByteSizePartial(self):
n = 0
n += 2 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSizePartial()
n += 1 * len(self.deferred_)
for i in xrange(len(self.deferred_)): n += self.lengthString(self.deferred_[i].ByteSizePartial())
if (self.has_in_order_): n += 2
return n
def Clear(self):
self.clear_entity()
self.clear_deferred()
self.clear_in_order()
def OutputUnchecked(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(11)
self.entity_[i].OutputUnchecked(out)
out.putVarInt32(12)
for i in xrange(len(self.deferred_)):
out.putVarInt32(42)
out.putVarInt32(self.deferred_[i].ByteSize())
self.deferred_[i].OutputUnchecked(out)
if (self.has_in_order_):
out.putVarInt32(48)
out.putBoolean(self.in_order_)
def OutputPartial(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(11)
self.entity_[i].OutputPartial(out)
out.putVarInt32(12)
for i in xrange(len(self.deferred_)):
out.putVarInt32(42)
out.putVarInt32(self.deferred_[i].ByteSizePartial())
self.deferred_[i].OutputPartial(out)
if (self.has_in_order_):
out.putVarInt32(48)
out.putBoolean(self.in_order_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 11:
self.add_entity().TryMerge(d)
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_deferred().TryMerge(tmp)
continue
if tt == 48:
self.set_in_order(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.entity_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Entity%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
cnt=0
for e in self.deferred_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("deferred%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_in_order_: res+=prefix+("in_order: %s\n" % self.DebugFormatBool(self.in_order_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kEntityGroup = 1
kEntityentity = 2
kEntitykey = 4
kEntityversion = 3
kdeferred = 5
kin_order = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "Entity",
2: "entity",
3: "version",
4: "key",
5: "deferred",
6: "in_order",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STARTGROUP,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.GetResponse'
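# PutRequest carries the entities to write (EntityProto), an optional
# Transaction, the composite indexes affected, trusted/force/mark_changes
# flags, optional Snapshot messages, and an auto_id_policy choosing how
# ids for incomplete keys are allocated -- CURRENT (0) or SEQUENTIAL (1),
# per _AutoIdPolicy_NAMES below.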
class PutRequest(ProtocolBuffer.ProtocolMessage):
CURRENT = 0
SEQUENTIAL = 1
_AutoIdPolicy_NAMES = {
0: "CURRENT",
1: "SEQUENTIAL",
}
def AutoIdPolicy_Name(cls, x): return cls._AutoIdPolicy_NAMES.get(x, "")
AutoIdPolicy_Name = classmethod(AutoIdPolicy_Name)
has_header_ = 0
header_ = None
has_transaction_ = 0
transaction_ = None
has_trusted_ = 0
trusted_ = 0
has_force_ = 0
force_ = 0
has_mark_changes_ = 0
mark_changes_ = 0
has_auto_id_policy_ = 0
auto_id_policy_ = 0
def __init__(self, contents=None):
self.entity_ = []
self.composite_index_ = []
self.snapshot_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
      self.has_header_ = 0
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def entity_size(self): return len(self.entity_)
def entity_list(self): return self.entity_
def entity(self, i):
return self.entity_[i]
def mutable_entity(self, i):
return self.entity_[i]
def add_entity(self):
x = EntityProto()
self.entity_.append(x)
return x
def clear_entity(self):
self.entity_ = []
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
      self.has_transaction_ = 0
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def trusted(self): return self.trusted_
def set_trusted(self, x):
self.has_trusted_ = 1
self.trusted_ = x
def clear_trusted(self):
if self.has_trusted_:
self.has_trusted_ = 0
self.trusted_ = 0
def has_trusted(self): return self.has_trusted_
def force(self): return self.force_
def set_force(self, x):
self.has_force_ = 1
self.force_ = x
def clear_force(self):
if self.has_force_:
self.has_force_ = 0
self.force_ = 0
def has_force(self): return self.has_force_
def mark_changes(self): return self.mark_changes_
def set_mark_changes(self, x):
self.has_mark_changes_ = 1
self.mark_changes_ = x
def clear_mark_changes(self):
if self.has_mark_changes_:
self.has_mark_changes_ = 0
self.mark_changes_ = 0
def has_mark_changes(self): return self.has_mark_changes_
def snapshot_size(self): return len(self.snapshot_)
def snapshot_list(self): return self.snapshot_
def snapshot(self, i):
return self.snapshot_[i]
def mutable_snapshot(self, i):
return self.snapshot_[i]
def add_snapshot(self):
x = Snapshot()
self.snapshot_.append(x)
return x
def clear_snapshot(self):
self.snapshot_ = []
def auto_id_policy(self): return self.auto_id_policy_
def set_auto_id_policy(self, x):
self.has_auto_id_policy_ = 1
self.auto_id_policy_ = x
def clear_auto_id_policy(self):
if self.has_auto_id_policy_:
self.has_auto_id_policy_ = 0
self.auto_id_policy_ = 0
def has_auto_id_policy(self): return self.has_auto_id_policy_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_trusted()): self.set_trusted(x.trusted())
if (x.has_force()): self.set_force(x.force())
if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
if (x.has_auto_id_policy()): self.set_auto_id_policy(x.auto_id_policy())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.entity_) != len(x.entity_): return 0
for e1, e2 in zip(self.entity_, x.entity_):
if e1 != e2: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_trusted_ != x.has_trusted_: return 0
if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
if self.has_force_ != x.has_force_: return 0
if self.has_force_ and self.force_ != x.force_: return 0
if self.has_mark_changes_ != x.has_mark_changes_: return 0
if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
if len(self.snapshot_) != len(x.snapshot_): return 0
for e1, e2 in zip(self.snapshot_, x.snapshot_):
if e1 != e2: return 0
if self.has_auto_id_policy_ != x.has_auto_id_policy_: return 0
if self.has_auto_id_policy_ and self.auto_id_policy_ != x.auto_id_policy_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.entity_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.snapshot_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.entity_)
for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSizePartial())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
if (self.has_auto_id_policy_): n += 1 + self.lengthVarInt64(self.auto_id_policy_)
return n
def Clear(self):
self.clear_header()
self.clear_entity()
self.clear_transaction()
self.clear_composite_index()
self.clear_trusted()
self.clear_force()
self.clear_mark_changes()
self.clear_snapshot()
self.clear_auto_id_policy()
def OutputUnchecked(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(10)
out.putVarInt32(self.entity_[i].ByteSize())
self.entity_[i].OutputUnchecked(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(26)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSize())
self.snapshot_[i].OutputUnchecked(out)
if (self.has_auto_id_policy_):
out.putVarInt32(80)
out.putVarInt32(self.auto_id_policy_)
if (self.has_header_):
out.putVarInt32(90)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.entity_)):
out.putVarInt32(10)
out.putVarInt32(self.entity_[i].ByteSizePartial())
self.entity_[i].OutputPartial(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(26)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSizePartial())
self.snapshot_[i].OutputPartial(out)
if (self.has_auto_id_policy_):
out.putVarInt32(80)
out.putVarInt32(self.auto_id_policy_)
if (self.has_header_):
out.putVarInt32(90)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_entity().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if tt == 32:
self.set_trusted(d.getBoolean())
continue
if tt == 56:
self.set_force(d.getBoolean())
continue
if tt == 64:
self.set_mark_changes(d.getBoolean())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_snapshot().TryMerge(tmp)
continue
if tt == 80:
self.set_auto_id_policy(d.getVarInt32())
continue
if tt == 90:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.entity_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("entity%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
cnt=0
for e in self.snapshot_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("snapshot%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_auto_id_policy_: res+=prefix+("auto_id_policy: %s\n" % self.DebugFormatInt32(self.auto_id_policy_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 11
kentity = 1
ktransaction = 2
kcomposite_index = 3
ktrusted = 4
kforce = 7
kmark_changes = 8
ksnapshot = 9
kauto_id_policy = 10
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "entity",
2: "transaction",
3: "composite_index",
4: "trusted",
7: "force",
8: "mark_changes",
9: "snapshot",
10: "auto_id_policy",
11: "header",
}, 11)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.NUMERIC,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.STRING,
}, 11, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutRequest'
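# PutResponse returns the written keys, the operation Cost and one int64
# version per entity. A round-trip sketch (illustrative; assumes the
# ProtocolMessage.Encode() helper from the SDK's ProtocolBuffer runtime):
#
#   resp = PutResponse()
#   resp.add_version(7)
#   resp.mutable_cost().set_index_writes(2)
#   copy = PutResponse(resp.Encode())
#   assert copy.version(0) == 7 and copy.cost().index_writes() == 2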
class PutResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.key_ = []
self.version_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
      self.has_cost_ = 0
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def set_version(self, i, x):
self.version_[i] = x
def add_version(self, x):
self.version_.append(x)
def clear_version(self):
self.version_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
for i in xrange(x.version_size()): self.add_version(x.version(i))
def Equals(self, x):
if x is self: return 1
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def Clear(self):
self.clear_key()
self.clear_cost()
self.clear_version()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
if (self.has_cost_):
out.putVarInt32(18)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def OutputPartial(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
if (self.has_cost_):
out.putVarInt32(18)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if tt == 24:
self.add_version(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kkey = 1
kcost = 2
kversion = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "cost",
3: "version",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.PutResponse'
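# TouchRequest has the same shape as PutRequest minus the entities and
# transaction: the keys to touch, composite indexes, a force flag,
# optional snapshots, and the internal header (field 10, wire tag 82).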
class TouchRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_force_ = 0
force_ = 0
def __init__(self, contents=None):
self.key_ = []
self.composite_index_ = []
self.snapshot_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
      self.has_header_ = 0
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def force(self): return self.force_
def set_force(self, x):
self.has_force_ = 1
self.force_ = x
def clear_force(self):
if self.has_force_:
self.has_force_ = 0
self.force_ = 0
def has_force(self): return self.has_force_
def snapshot_size(self): return len(self.snapshot_)
def snapshot_list(self): return self.snapshot_
def snapshot(self, i):
return self.snapshot_[i]
def mutable_snapshot(self, i):
return self.snapshot_[i]
def add_snapshot(self):
x = Snapshot()
self.snapshot_.append(x)
return x
def clear_snapshot(self):
self.snapshot_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_force()): self.set_force(x.force())
for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_force_ != x.has_force_: return 0
if self.has_force_ and self.force_ != x.force_: return 0
if len(self.snapshot_) != len(x.snapshot_): return 0
for e1, e2 in zip(self.snapshot_, x.snapshot_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.snapshot_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_force_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_force_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_key()
self.clear_composite_index()
self.clear_force()
self.clear_snapshot()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(18)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
if (self.has_force_):
out.putVarInt32(24)
out.putBoolean(self.force_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSize())
self.snapshot_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(18)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
if (self.has_force_):
out.putVarInt32(24)
out.putBoolean(self.force_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSizePartial())
self.snapshot_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if tt == 24:
self.set_force(d.getBoolean())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_snapshot().TryMerge(tmp)
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
cnt=0
for e in self.snapshot_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("snapshot%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 10
kkey = 1
kcomposite_index = 2
kforce = 3
ksnapshot = 9
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "key",
2: "composite_index",
3: "force",
9: "snapshot",
10: "header",
}, 10)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
}, 10, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchRequest'
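# --- TouchResponse ---------------------------------------------------------
# Generated binding for apphosting_datastore_v3.TouchResponse: the reply to a
# datastore Touch RPC. Its only field is an optional, lazily allocated Cost
# submessage (tag 1). Throughout these generated classes, ByteSize() assumes
# every required field is set, while ByteSizePartial()/OutputPartial() also
# tolerate messages whose required fields are missing.
# Minimal decode sketch (only names defined in this file are used; the raw
# bytes are an assumption about the caller):
#
#   resp = TouchResponse(raw_bytes)   # constructor calls MergeFromString()
#   if resp.has_cost():
#       cost = resp.cost()            # Cost submessage, defined elsewhere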
class TouchResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def MergeFrom(self, x):
assert x is not self
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
def Equals(self, x):
if x is self: return 1
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
return n
def Clear(self):
self.clear_cost()
def OutputUnchecked(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcost = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cost",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.TouchResponse'
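# --- DeleteRequest ---------------------------------------------------------
# Generated binding for apphosting_datastore_v3.DeleteRequest. Holds the keys
# to delete (repeated Reference, tag 6) plus an optional Transaction (tag 5),
# composite indexes, snapshots, and the trusted/force/mark_changes booleans.
# Note that the wire tags (kkey = 6, ktransaction = 5, ...) are independent
# of the order in which the accessors appear below.
# Build-and-encode sketch (Reference's own setters live elsewhere in this
# file; Encode() is assumed to be inherited from the ProtocolMessage base):
#
#   req = DeleteRequest()
#   key = req.add_key()        # returns a fresh Reference to populate
#   req.set_force(1)
#   data = req.Encode()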
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_transaction_ = 0
transaction_ = None
has_trusted_ = 0
trusted_ = 0
has_force_ = 0
force_ = 0
has_mark_changes_ = 0
mark_changes_ = 0
def __init__(self, contents=None):
self.key_ = []
self.composite_index_ = []
self.snapshot_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def composite_index_size(self): return len(self.composite_index_)
def composite_index_list(self): return self.composite_index_
def composite_index(self, i):
return self.composite_index_[i]
def mutable_composite_index(self, i):
return self.composite_index_[i]
def add_composite_index(self):
x = CompositeIndex()
self.composite_index_.append(x)
return x
def clear_composite_index(self):
self.composite_index_ = []
def trusted(self): return self.trusted_
def set_trusted(self, x):
self.has_trusted_ = 1
self.trusted_ = x
def clear_trusted(self):
if self.has_trusted_:
self.has_trusted_ = 0
self.trusted_ = 0
def has_trusted(self): return self.has_trusted_
def force(self): return self.force_
def set_force(self, x):
self.has_force_ = 1
self.force_ = x
def clear_force(self):
if self.has_force_:
self.has_force_ = 0
self.force_ = 0
def has_force(self): return self.has_force_
def mark_changes(self): return self.mark_changes_
def set_mark_changes(self, x):
self.has_mark_changes_ = 1
self.mark_changes_ = x
def clear_mark_changes(self):
if self.has_mark_changes_:
self.has_mark_changes_ = 0
self.mark_changes_ = 0
def has_mark_changes(self): return self.has_mark_changes_
def snapshot_size(self): return len(self.snapshot_)
def snapshot_list(self): return self.snapshot_
def snapshot(self, i):
return self.snapshot_[i]
def mutable_snapshot(self, i):
return self.snapshot_[i]
def add_snapshot(self):
x = Snapshot()
self.snapshot_.append(x)
return x
def clear_snapshot(self):
self.snapshot_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
if (x.has_trusted()): self.set_trusted(x.trusted())
if (x.has_force()): self.set_force(x.force())
if (x.has_mark_changes()): self.set_mark_changes(x.mark_changes())
for i in xrange(x.snapshot_size()): self.add_snapshot().CopyFrom(x.snapshot(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if len(self.composite_index_) != len(x.composite_index_): return 0
for e1, e2 in zip(self.composite_index_, x.composite_index_):
if e1 != e2: return 0
if self.has_trusted_ != x.has_trusted_: return 0
if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
if self.has_force_ != x.has_force_: return 0
if self.has_force_ and self.force_ != x.force_: return 0
if self.has_mark_changes_ != x.has_mark_changes_: return 0
if self.has_mark_changes_ and self.mark_changes_ != x.mark_changes_: return 0
if len(self.snapshot_) != len(x.snapshot_): return 0
for e1, e2 in zip(self.snapshot_, x.snapshot_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
for p in self.composite_index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.snapshot_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSizePartial())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSizePartial())
n += 1 * len(self.composite_index_)
for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSizePartial())
if (self.has_trusted_): n += 2
if (self.has_force_): n += 2
if (self.has_mark_changes_): n += 2
n += 1 * len(self.snapshot_)
for i in xrange(len(self.snapshot_)): n += self.lengthString(self.snapshot_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_key()
self.clear_transaction()
self.clear_composite_index()
self.clear_trusted()
self.clear_force()
self.clear_mark_changes()
self.clear_snapshot()
def OutputUnchecked(self, out):
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_transaction_):
out.putVarInt32(42)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
for i in xrange(len(self.key_)):
out.putVarInt32(50)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSize())
self.snapshot_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(90)
out.putVarInt32(self.composite_index_[i].ByteSize())
self.composite_index_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_trusted_):
out.putVarInt32(32)
out.putBoolean(self.trusted_)
if (self.has_transaction_):
out.putVarInt32(42)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
for i in xrange(len(self.key_)):
out.putVarInt32(50)
out.putVarInt32(self.key_[i].ByteSizePartial())
self.key_[i].OutputPartial(out)
if (self.has_force_):
out.putVarInt32(56)
out.putBoolean(self.force_)
if (self.has_mark_changes_):
out.putVarInt32(64)
out.putBoolean(self.mark_changes_)
for i in xrange(len(self.snapshot_)):
out.putVarInt32(74)
out.putVarInt32(self.snapshot_[i].ByteSizePartial())
self.snapshot_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(82)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
for i in xrange(len(self.composite_index_)):
out.putVarInt32(90)
out.putVarInt32(self.composite_index_[i].ByteSizePartial())
self.composite_index_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 32:
self.set_trusted(d.getBoolean())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 56:
self.set_force(d.getBoolean())
continue
if tt == 64:
self.set_mark_changes(d.getBoolean())
continue
if tt == 74:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_snapshot().TryMerge(tmp)
continue
if tt == 82:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if tt == 90:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_composite_index().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.key_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("key%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.composite_index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("composite_index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
if self.has_force_: res+=prefix+("force: %s\n" % self.DebugFormatBool(self.force_))
if self.has_mark_changes_: res+=prefix+("mark_changes: %s\n" % self.DebugFormatBool(self.mark_changes_))
cnt=0
for e in self.snapshot_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("snapshot%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 10
kkey = 6
ktransaction = 5
kcomposite_index = 11
ktrusted = 4
kforce = 7
kmark_changes = 8
ksnapshot = 9
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
4: "trusted",
5: "transaction",
6: "key",
7: "force",
8: "mark_changes",
9: "snapshot",
10: "header",
11: "composite_index",
}, 11)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.NUMERIC,
9: ProtocolBuffer.Encoder.STRING,
10: ProtocolBuffer.Encoder.STRING,
11: ProtocolBuffer.Encoder.STRING,
}, 11, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteRequest'
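# --- DeleteResponse --------------------------------------------------------
# Generated binding for apphosting_datastore_v3.DeleteResponse: an optional
# Cost (tag 1) and a repeated int64 `version` field (tag 3), presumably one
# entry per deleted key. Scalar repeated fields get set_/add_ accessors
# instead of the mutable_ pattern used for message-typed fields:
#
#   resp = DeleteResponse()
#   resp.add_version(42)
#   assert resp.version_size() == 1 and resp.version(0) == 42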
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.version_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def set_version(self, i, x):
self.version_[i] = x
def add_version(self, x):
self.version_.append(x)
def clear_version(self):
self.version_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
for i in xrange(x.version_size()): self.add_version(x.version(i))
def Equals(self, x):
if x is self: return 1
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def ByteSizePartial(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
return n
def Clear(self):
self.clear_cost()
self.clear_version()
def OutputUnchecked(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def OutputPartial(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
for i in xrange(len(self.version_)):
out.putVarInt32(24)
out.putVarInt64(self.version_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if tt == 24:
self.add_version(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcost = 1
kversion = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cost",
3: "version",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.DeleteResponse'
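# --- NextRequest -----------------------------------------------------------
# Generated binding for apphosting_datastore_v3.NextRequest, used to page
# through query results. `cursor` (tag 1) is a *required* field, so it is
# constructed eagerly in __init__ rather than lazily behind a lock, and
# IsInitialized() reports an error if it was never set. count, offset and
# compile are optional scalars. Sketch (previous_result is assumed to be a
# QueryResult from an earlier RPC):
#
#   req = NextRequest()
#   req.mutable_cursor().MergeFrom(previous_result.cursor())
#   req.set_count(20)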
class NextRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_cursor_ = 0
has_count_ = 0
count_ = 0
has_offset_ = 0
offset_ = 0
has_compile_ = 0
compile_ = 0
def __init__(self, contents=None):
self.cursor_ = Cursor()
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def cursor(self): return self.cursor_
def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_
  def clear_cursor(self): self.has_cursor_ = 0; self.cursor_.Clear()
def has_cursor(self): return self.has_cursor_
def count(self): return self.count_
def set_count(self, x):
self.has_count_ = 1
self.count_ = x
def clear_count(self):
if self.has_count_:
self.has_count_ = 0
self.count_ = 0
def has_count(self): return self.has_count_
def offset(self): return self.offset_
def set_offset(self, x):
self.has_offset_ = 1
self.offset_ = x
def clear_offset(self):
if self.has_offset_:
self.has_offset_ = 0
self.offset_ = 0
def has_offset(self): return self.has_offset_
def compile(self): return self.compile_
def set_compile(self, x):
self.has_compile_ = 1
self.compile_ = x
def clear_compile(self):
if self.has_compile_:
self.has_compile_ = 0
self.compile_ = 0
def has_compile(self): return self.has_compile_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
if (x.has_count()): self.set_count(x.count())
if (x.has_offset()): self.set_offset(x.offset())
if (x.has_compile()): self.set_compile(x.compile())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_cursor_ != x.has_cursor_: return 0
if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
if self.has_count_ != x.has_count_: return 0
if self.has_count_ and self.count_ != x.count_: return 0
if self.has_offset_ != x.has_offset_: return 0
if self.has_offset_ and self.offset_ != x.offset_: return 0
if self.has_compile_ != x.has_compile_: return 0
if self.has_compile_ and self.compile_ != x.compile_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_cursor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: cursor not set.')
elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(self.cursor_.ByteSize())
if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_compile_): n += 2
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_cursor_):
n += 1
n += self.lengthString(self.cursor_.ByteSizePartial())
if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
if (self.has_compile_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_cursor()
self.clear_count()
self.clear_offset()
self.clear_compile()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSize())
self.cursor_.OutputUnchecked(out)
if (self.has_count_):
out.putVarInt32(16)
out.putVarInt32(self.count_)
if (self.has_compile_):
out.putVarInt32(24)
out.putBoolean(self.compile_)
if (self.has_offset_):
out.putVarInt32(32)
out.putVarInt32(self.offset_)
if (self.has_header_):
out.putVarInt32(42)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_cursor_):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSizePartial())
self.cursor_.OutputPartial(out)
if (self.has_count_):
out.putVarInt32(16)
out.putVarInt32(self.count_)
if (self.has_compile_):
out.putVarInt32(24)
out.putBoolean(self.compile_)
if (self.has_offset_):
out.putVarInt32(32)
out.putVarInt32(self.offset_)
if (self.has_header_):
out.putVarInt32(42)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cursor().TryMerge(tmp)
continue
if tt == 16:
self.set_count(d.getVarInt32())
continue
if tt == 24:
self.set_compile(d.getBoolean())
continue
if tt == 32:
self.set_offset(d.getVarInt32())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_cursor_:
res+=prefix+"cursor <\n"
res+=self.cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
if self.has_compile_: res+=prefix+("compile: %s\n" % self.DebugFormatBool(self.compile_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 5
kcursor = 1
kcount = 2
koffset = 4
kcompile = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cursor",
2: "count",
3: "compile",
4: "offset",
5: "header",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.NextRequest'
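# --- QueryResult -----------------------------------------------------------
# Generated binding for apphosting_datastore_v3.QueryResult: one batch of
# results from RunQuery/Next. Carries the repeated EntityProto results
# (tag 2), the required more_results flag (tag 3), pagination state (cursor,
# compiled cursors, skipped_results) and per-result versions. A typical read
# loop is roughly the following (how the response bytes are obtained is an
# assumption about the surrounding RPC layer):
#
#   result = QueryResult(rpc_response_bytes)
#   for i in xrange(result.result_size()):
#       handle(result.result(i))          # handle() is hypothetical
#   # if result.more_results(): fetch more via NextRequest + result.cursor()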
class QueryResult(ProtocolBuffer.ProtocolMessage):
has_cursor_ = 0
cursor_ = None
has_skipped_results_ = 0
skipped_results_ = 0
has_more_results_ = 0
more_results_ = 0
has_keys_only_ = 0
keys_only_ = 0
has_index_only_ = 0
index_only_ = 0
has_small_ops_ = 0
small_ops_ = 0
has_compiled_query_ = 0
compiled_query_ = None
has_compiled_cursor_ = 0
compiled_cursor_ = None
has_skipped_results_compiled_cursor_ = 0
skipped_results_compiled_cursor_ = None
def __init__(self, contents=None):
self.result_ = []
self.index_ = []
self.version_ = []
self.result_compiled_cursor_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cursor(self):
if self.cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cursor_ is None: self.cursor_ = Cursor()
finally:
self.lazy_init_lock_.release()
return self.cursor_
def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()
def clear_cursor(self):
if self.has_cursor_:
self.has_cursor_ = 0;
if self.cursor_ is not None: self.cursor_.Clear()
def has_cursor(self): return self.has_cursor_
def result_size(self): return len(self.result_)
def result_list(self): return self.result_
def result(self, i):
return self.result_[i]
def mutable_result(self, i):
return self.result_[i]
def add_result(self):
x = EntityProto()
self.result_.append(x)
return x
def clear_result(self):
self.result_ = []
def skipped_results(self): return self.skipped_results_
def set_skipped_results(self, x):
self.has_skipped_results_ = 1
self.skipped_results_ = x
def clear_skipped_results(self):
if self.has_skipped_results_:
self.has_skipped_results_ = 0
self.skipped_results_ = 0
def has_skipped_results(self): return self.has_skipped_results_
def more_results(self): return self.more_results_
def set_more_results(self, x):
self.has_more_results_ = 1
self.more_results_ = x
def clear_more_results(self):
if self.has_more_results_:
self.has_more_results_ = 0
self.more_results_ = 0
def has_more_results(self): return self.has_more_results_
def keys_only(self): return self.keys_only_
def set_keys_only(self, x):
self.has_keys_only_ = 1
self.keys_only_ = x
def clear_keys_only(self):
if self.has_keys_only_:
self.has_keys_only_ = 0
self.keys_only_ = 0
def has_keys_only(self): return self.has_keys_only_
def index_only(self): return self.index_only_
def set_index_only(self, x):
self.has_index_only_ = 1
self.index_only_ = x
def clear_index_only(self):
if self.has_index_only_:
self.has_index_only_ = 0
self.index_only_ = 0
def has_index_only(self): return self.has_index_only_
def small_ops(self): return self.small_ops_
def set_small_ops(self, x):
self.has_small_ops_ = 1
self.small_ops_ = x
def clear_small_ops(self):
if self.has_small_ops_:
self.has_small_ops_ = 0
self.small_ops_ = 0
def has_small_ops(self): return self.has_small_ops_
def compiled_query(self):
if self.compiled_query_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_query_ is None: self.compiled_query_ = CompiledQuery()
finally:
self.lazy_init_lock_.release()
return self.compiled_query_
def mutable_compiled_query(self): self.has_compiled_query_ = 1; return self.compiled_query()
def clear_compiled_query(self):
if self.has_compiled_query_:
self.has_compiled_query_ = 0;
if self.compiled_query_ is not None: self.compiled_query_.Clear()
def has_compiled_query(self): return self.has_compiled_query_
def compiled_cursor(self):
if self.compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.compiled_cursor_ is None: self.compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.compiled_cursor_
def mutable_compiled_cursor(self): self.has_compiled_cursor_ = 1; return self.compiled_cursor()
def clear_compiled_cursor(self):
if self.has_compiled_cursor_:
self.has_compiled_cursor_ = 0;
if self.compiled_cursor_ is not None: self.compiled_cursor_.Clear()
def has_compiled_cursor(self): return self.has_compiled_cursor_
def index_size(self): return len(self.index_)
def index_list(self): return self.index_
def index(self, i):
return self.index_[i]
def mutable_index(self, i):
return self.index_[i]
def add_index(self):
x = CompositeIndex()
self.index_.append(x)
return x
def clear_index(self):
self.index_ = []
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def set_version(self, i, x):
self.version_[i] = x
def add_version(self, x):
self.version_.append(x)
def clear_version(self):
self.version_ = []
def result_compiled_cursor_size(self): return len(self.result_compiled_cursor_)
def result_compiled_cursor_list(self): return self.result_compiled_cursor_
def result_compiled_cursor(self, i):
return self.result_compiled_cursor_[i]
def mutable_result_compiled_cursor(self, i):
return self.result_compiled_cursor_[i]
def add_result_compiled_cursor(self):
x = CompiledCursor()
self.result_compiled_cursor_.append(x)
return x
def clear_result_compiled_cursor(self):
self.result_compiled_cursor_ = []
def skipped_results_compiled_cursor(self):
if self.skipped_results_compiled_cursor_ is None:
self.lazy_init_lock_.acquire()
try:
if self.skipped_results_compiled_cursor_ is None: self.skipped_results_compiled_cursor_ = CompiledCursor()
finally:
self.lazy_init_lock_.release()
return self.skipped_results_compiled_cursor_
def mutable_skipped_results_compiled_cursor(self): self.has_skipped_results_compiled_cursor_ = 1; return self.skipped_results_compiled_cursor()
def clear_skipped_results_compiled_cursor(self):
if self.has_skipped_results_compiled_cursor_:
self.has_skipped_results_compiled_cursor_ = 0;
if self.skipped_results_compiled_cursor_ is not None: self.skipped_results_compiled_cursor_.Clear()
def has_skipped_results_compiled_cursor(self): return self.has_skipped_results_compiled_cursor_
def MergeFrom(self, x):
assert x is not self
if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
if (x.has_skipped_results()): self.set_skipped_results(x.skipped_results())
if (x.has_more_results()): self.set_more_results(x.more_results())
if (x.has_keys_only()): self.set_keys_only(x.keys_only())
if (x.has_index_only()): self.set_index_only(x.index_only())
if (x.has_small_ops()): self.set_small_ops(x.small_ops())
if (x.has_compiled_query()): self.mutable_compiled_query().MergeFrom(x.compiled_query())
if (x.has_compiled_cursor()): self.mutable_compiled_cursor().MergeFrom(x.compiled_cursor())
for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
for i in xrange(x.version_size()): self.add_version(x.version(i))
for i in xrange(x.result_compiled_cursor_size()): self.add_result_compiled_cursor().CopyFrom(x.result_compiled_cursor(i))
if (x.has_skipped_results_compiled_cursor()): self.mutable_skipped_results_compiled_cursor().MergeFrom(x.skipped_results_compiled_cursor())
def Equals(self, x):
if x is self: return 1
if self.has_cursor_ != x.has_cursor_: return 0
if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
if len(self.result_) != len(x.result_): return 0
for e1, e2 in zip(self.result_, x.result_):
if e1 != e2: return 0
if self.has_skipped_results_ != x.has_skipped_results_: return 0
if self.has_skipped_results_ and self.skipped_results_ != x.skipped_results_: return 0
if self.has_more_results_ != x.has_more_results_: return 0
if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
if self.has_keys_only_ != x.has_keys_only_: return 0
if self.has_keys_only_ and self.keys_only_ != x.keys_only_: return 0
if self.has_index_only_ != x.has_index_only_: return 0
if self.has_index_only_ and self.index_only_ != x.index_only_: return 0
if self.has_small_ops_ != x.has_small_ops_: return 0
if self.has_small_ops_ and self.small_ops_ != x.small_ops_: return 0
if self.has_compiled_query_ != x.has_compiled_query_: return 0
if self.has_compiled_query_ and self.compiled_query_ != x.compiled_query_: return 0
if self.has_compiled_cursor_ != x.has_compiled_cursor_: return 0
if self.has_compiled_cursor_ and self.compiled_cursor_ != x.compiled_cursor_: return 0
if len(self.index_) != len(x.index_): return 0
for e1, e2 in zip(self.index_, x.index_):
if e1 != e2: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
if len(self.result_compiled_cursor_) != len(x.result_compiled_cursor_): return 0
for e1, e2 in zip(self.result_compiled_cursor_, x.result_compiled_cursor_):
if e1 != e2: return 0
if self.has_skipped_results_compiled_cursor_ != x.has_skipped_results_compiled_cursor_: return 0
if self.has_skipped_results_compiled_cursor_ and self.skipped_results_compiled_cursor_ != x.skipped_results_compiled_cursor_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
for p in self.result_:
if not p.IsInitialized(debug_strs): initialized=0
if (not self.has_more_results_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: more_results not set.')
if (self.has_compiled_query_ and not self.compiled_query_.IsInitialized(debug_strs)): initialized = 0
if (self.has_compiled_cursor_ and not self.compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
for p in self.index_:
if not p.IsInitialized(debug_strs): initialized=0
for p in self.result_compiled_cursor_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_skipped_results_compiled_cursor_ and not self.skipped_results_compiled_cursor_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
n += 1 * len(self.result_)
for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
if (self.has_keys_only_): n += 2
if (self.has_index_only_): n += 2
if (self.has_small_ops_): n += 2
if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSize())
if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSize())
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
n += 1 * len(self.result_compiled_cursor_)
for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSize())
if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSizePartial())
n += 1 * len(self.result_)
for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSizePartial())
if (self.has_skipped_results_): n += 1 + self.lengthVarInt64(self.skipped_results_)
if (self.has_more_results_):
n += 2
if (self.has_keys_only_): n += 2
if (self.has_index_only_): n += 2
if (self.has_small_ops_): n += 2
if (self.has_compiled_query_): n += 1 + self.lengthString(self.compiled_query_.ByteSizePartial())
if (self.has_compiled_cursor_): n += 1 + self.lengthString(self.compiled_cursor_.ByteSizePartial())
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
n += 1 * len(self.version_)
for i in xrange(len(self.version_)): n += self.lengthVarInt64(self.version_[i])
n += 1 * len(self.result_compiled_cursor_)
for i in xrange(len(self.result_compiled_cursor_)): n += self.lengthString(self.result_compiled_cursor_[i].ByteSizePartial())
if (self.has_skipped_results_compiled_cursor_): n += 1 + self.lengthString(self.skipped_results_compiled_cursor_.ByteSizePartial())
return n
def Clear(self):
self.clear_cursor()
self.clear_result()
self.clear_skipped_results()
self.clear_more_results()
self.clear_keys_only()
self.clear_index_only()
self.clear_small_ops()
self.clear_compiled_query()
self.clear_compiled_cursor()
self.clear_index()
self.clear_version()
self.clear_result_compiled_cursor()
self.clear_skipped_results_compiled_cursor()
def OutputUnchecked(self, out):
if (self.has_cursor_):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSize())
self.cursor_.OutputUnchecked(out)
for i in xrange(len(self.result_)):
out.putVarInt32(18)
out.putVarInt32(self.result_[i].ByteSize())
self.result_[i].OutputUnchecked(out)
out.putVarInt32(24)
out.putBoolean(self.more_results_)
if (self.has_keys_only_):
out.putVarInt32(32)
out.putBoolean(self.keys_only_)
if (self.has_compiled_query_):
out.putVarInt32(42)
out.putVarInt32(self.compiled_query_.ByteSize())
self.compiled_query_.OutputUnchecked(out)
if (self.has_compiled_cursor_):
out.putVarInt32(50)
out.putVarInt32(self.compiled_cursor_.ByteSize())
self.compiled_cursor_.OutputUnchecked(out)
if (self.has_skipped_results_):
out.putVarInt32(56)
out.putVarInt32(self.skipped_results_)
for i in xrange(len(self.index_)):
out.putVarInt32(66)
out.putVarInt32(self.index_[i].ByteSize())
self.index_[i].OutputUnchecked(out)
if (self.has_index_only_):
out.putVarInt32(72)
out.putBoolean(self.index_only_)
if (self.has_small_ops_):
out.putVarInt32(80)
out.putBoolean(self.small_ops_)
for i in xrange(len(self.version_)):
out.putVarInt32(88)
out.putVarInt64(self.version_[i])
for i in xrange(len(self.result_compiled_cursor_)):
out.putVarInt32(98)
out.putVarInt32(self.result_compiled_cursor_[i].ByteSize())
self.result_compiled_cursor_[i].OutputUnchecked(out)
if (self.has_skipped_results_compiled_cursor_):
out.putVarInt32(106)
out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSize())
self.skipped_results_compiled_cursor_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_cursor_):
out.putVarInt32(10)
out.putVarInt32(self.cursor_.ByteSizePartial())
self.cursor_.OutputPartial(out)
for i in xrange(len(self.result_)):
out.putVarInt32(18)
out.putVarInt32(self.result_[i].ByteSizePartial())
self.result_[i].OutputPartial(out)
if (self.has_more_results_):
out.putVarInt32(24)
out.putBoolean(self.more_results_)
if (self.has_keys_only_):
out.putVarInt32(32)
out.putBoolean(self.keys_only_)
if (self.has_compiled_query_):
out.putVarInt32(42)
out.putVarInt32(self.compiled_query_.ByteSizePartial())
self.compiled_query_.OutputPartial(out)
if (self.has_compiled_cursor_):
out.putVarInt32(50)
out.putVarInt32(self.compiled_cursor_.ByteSizePartial())
self.compiled_cursor_.OutputPartial(out)
if (self.has_skipped_results_):
out.putVarInt32(56)
out.putVarInt32(self.skipped_results_)
for i in xrange(len(self.index_)):
out.putVarInt32(66)
out.putVarInt32(self.index_[i].ByteSizePartial())
self.index_[i].OutputPartial(out)
if (self.has_index_only_):
out.putVarInt32(72)
out.putBoolean(self.index_only_)
if (self.has_small_ops_):
out.putVarInt32(80)
out.putBoolean(self.small_ops_)
for i in xrange(len(self.version_)):
out.putVarInt32(88)
out.putVarInt64(self.version_[i])
for i in xrange(len(self.result_compiled_cursor_)):
out.putVarInt32(98)
out.putVarInt32(self.result_compiled_cursor_[i].ByteSizePartial())
self.result_compiled_cursor_[i].OutputPartial(out)
if (self.has_skipped_results_compiled_cursor_):
out.putVarInt32(106)
out.putVarInt32(self.skipped_results_compiled_cursor_.ByteSizePartial())
self.skipped_results_compiled_cursor_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cursor().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_result().TryMerge(tmp)
continue
if tt == 24:
self.set_more_results(d.getBoolean())
continue
if tt == 32:
self.set_keys_only(d.getBoolean())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_compiled_query().TryMerge(tmp)
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_compiled_cursor().TryMerge(tmp)
continue
if tt == 56:
self.set_skipped_results(d.getVarInt32())
continue
if tt == 66:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_index().TryMerge(tmp)
continue
if tt == 72:
self.set_index_only(d.getBoolean())
continue
if tt == 80:
self.set_small_ops(d.getBoolean())
continue
if tt == 88:
self.add_version(d.getVarInt64())
continue
if tt == 98:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_result_compiled_cursor().TryMerge(tmp)
continue
if tt == 106:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_skipped_results_compiled_cursor().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cursor_:
res+=prefix+"cursor <\n"
res+=self.cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.result_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("result%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_skipped_results_: res+=prefix+("skipped_results: %s\n" % self.DebugFormatInt32(self.skipped_results_))
if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
if self.has_keys_only_: res+=prefix+("keys_only: %s\n" % self.DebugFormatBool(self.keys_only_))
if self.has_index_only_: res+=prefix+("index_only: %s\n" % self.DebugFormatBool(self.index_only_))
if self.has_small_ops_: res+=prefix+("small_ops: %s\n" % self.DebugFormatBool(self.small_ops_))
if self.has_compiled_query_:
res+=prefix+"compiled_query <\n"
res+=self.compiled_query_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_compiled_cursor_:
res+=prefix+"compiled_cursor <\n"
res+=self.compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("version%s: %s\n" % (elm, self.DebugFormatInt64(e)))
cnt+=1
cnt=0
for e in self.result_compiled_cursor_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("result_compiled_cursor%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_skipped_results_compiled_cursor_:
res+=prefix+"skipped_results_compiled_cursor <\n"
res+=self.skipped_results_compiled_cursor_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcursor = 1
kresult = 2
kskipped_results = 7
kmore_results = 3
kkeys_only = 4
kindex_only = 9
ksmall_ops = 10
kcompiled_query = 5
kcompiled_cursor = 6
kindex = 8
kversion = 11
kresult_compiled_cursor = 12
kskipped_results_compiled_cursor = 13
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cursor",
2: "result",
3: "more_results",
4: "keys_only",
5: "compiled_query",
6: "compiled_cursor",
7: "skipped_results",
8: "index",
9: "index_only",
10: "small_ops",
11: "version",
12: "result_compiled_cursor",
13: "skipped_results_compiled_cursor",
}, 13)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.STRING,
7: ProtocolBuffer.Encoder.NUMERIC,
8: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.NUMERIC,
10: ProtocolBuffer.Encoder.NUMERIC,
11: ProtocolBuffer.Encoder.NUMERIC,
12: ProtocolBuffer.Encoder.STRING,
13: ProtocolBuffer.Encoder.STRING,
}, 13, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.QueryResult'
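# --- AllocateIdsRequest ----------------------------------------------------
# Generated binding for apphosting_datastore_v3.AllocateIdsRequest. Asks the
# datastore to reserve numeric IDs: either `size` sequential IDs, or all IDs
# up to `max`, scoped under `model_key`, or the specific keys listed in the
# repeated `reserve` field. size and max are encoded as varints (wire tags
# 16 and 24 below).
#
#   req = AllocateIdsRequest()
#   req.mutable_model_key()    # Reference; its path is populated elsewhere
#   req.set_size(10)           # request a block of 10 IDs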
class AllocateIdsRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_model_key_ = 0
model_key_ = None
has_size_ = 0
size_ = 0
has_max_ = 0
max_ = 0
has_trusted_ = 0
trusted_ = 0
def __init__(self, contents=None):
self.reserve_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def model_key(self):
if self.model_key_ is None:
self.lazy_init_lock_.acquire()
try:
if self.model_key_ is None: self.model_key_ = Reference()
finally:
self.lazy_init_lock_.release()
return self.model_key_
def mutable_model_key(self): self.has_model_key_ = 1; return self.model_key()
def clear_model_key(self):
if self.has_model_key_:
self.has_model_key_ = 0;
if self.model_key_ is not None: self.model_key_.Clear()
def has_model_key(self): return self.has_model_key_
def size(self): return self.size_
def set_size(self, x):
self.has_size_ = 1
self.size_ = x
def clear_size(self):
if self.has_size_:
self.has_size_ = 0
self.size_ = 0
def has_size(self): return self.has_size_
def max(self): return self.max_
def set_max(self, x):
self.has_max_ = 1
self.max_ = x
def clear_max(self):
if self.has_max_:
self.has_max_ = 0
self.max_ = 0
def has_max(self): return self.has_max_
def reserve_size(self): return len(self.reserve_)
def reserve_list(self): return self.reserve_
def reserve(self, i):
return self.reserve_[i]
def mutable_reserve(self, i):
return self.reserve_[i]
def add_reserve(self):
x = Reference()
self.reserve_.append(x)
return x
def clear_reserve(self):
self.reserve_ = []
def trusted(self): return self.trusted_
def set_trusted(self, x):
self.has_trusted_ = 1
self.trusted_ = x
def clear_trusted(self):
if self.has_trusted_:
self.has_trusted_ = 0
self.trusted_ = 0
def has_trusted(self): return self.has_trusted_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_model_key()): self.mutable_model_key().MergeFrom(x.model_key())
if (x.has_size()): self.set_size(x.size())
if (x.has_max()): self.set_max(x.max())
for i in xrange(x.reserve_size()): self.add_reserve().CopyFrom(x.reserve(i))
if (x.has_trusted()): self.set_trusted(x.trusted())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_model_key_ != x.has_model_key_: return 0
if self.has_model_key_ and self.model_key_ != x.model_key_: return 0
if self.has_size_ != x.has_size_: return 0
if self.has_size_ and self.size_ != x.size_: return 0
if self.has_max_ != x.has_max_: return 0
if self.has_max_ and self.max_ != x.max_: return 0
if len(self.reserve_) != len(x.reserve_): return 0
for e1, e2 in zip(self.reserve_, x.reserve_):
if e1 != e2: return 0
if self.has_trusted_ != x.has_trusted_: return 0
if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (self.has_model_key_ and not self.model_key_.IsInitialized(debug_strs)): initialized = 0
for p in self.reserve_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSize())
if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
n += 1 * len(self.reserve_)
for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSize())
if (self.has_trusted_): n += 2
return n
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_model_key_): n += 1 + self.lengthString(self.model_key_.ByteSizePartial())
if (self.has_size_): n += 1 + self.lengthVarInt64(self.size_)
if (self.has_max_): n += 1 + self.lengthVarInt64(self.max_)
n += 1 * len(self.reserve_)
for i in xrange(len(self.reserve_)): n += self.lengthString(self.reserve_[i].ByteSizePartial())
if (self.has_trusted_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_model_key()
self.clear_size()
self.clear_max()
self.clear_reserve()
self.clear_trusted()
def OutputUnchecked(self, out):
if (self.has_model_key_):
out.putVarInt32(10)
out.putVarInt32(self.model_key_.ByteSize())
self.model_key_.OutputUnchecked(out)
if (self.has_size_):
out.putVarInt32(16)
out.putVarInt64(self.size_)
if (self.has_max_):
out.putVarInt32(24)
out.putVarInt64(self.max_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
for i in xrange(len(self.reserve_)):
out.putVarInt32(42)
out.putVarInt32(self.reserve_[i].ByteSize())
self.reserve_[i].OutputUnchecked(out)
if (self.has_trusted_):
out.putVarInt32(48)
out.putBoolean(self.trusted_)
def OutputPartial(self, out):
if (self.has_model_key_):
out.putVarInt32(10)
out.putVarInt32(self.model_key_.ByteSizePartial())
self.model_key_.OutputPartial(out)
if (self.has_size_):
out.putVarInt32(16)
out.putVarInt64(self.size_)
if (self.has_max_):
out.putVarInt32(24)
out.putVarInt64(self.max_)
if (self.has_header_):
out.putVarInt32(34)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
for i in xrange(len(self.reserve_)):
out.putVarInt32(42)
out.putVarInt32(self.reserve_[i].ByteSizePartial())
self.reserve_[i].OutputPartial(out)
if (self.has_trusted_):
out.putVarInt32(48)
out.putBoolean(self.trusted_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_model_key().TryMerge(tmp)
continue
if tt == 16:
self.set_size(d.getVarInt64())
continue
if tt == 24:
self.set_max(d.getVarInt64())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_reserve().TryMerge(tmp)
continue
if tt == 48:
self.set_trusted(d.getBoolean())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_model_key_:
res+=prefix+"model_key <\n"
res+=self.model_key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_size_: res+=prefix+("size: %s\n" % self.DebugFormatInt64(self.size_))
if self.has_max_: res+=prefix+("max: %s\n" % self.DebugFormatInt64(self.max_))
cnt=0
for e in self.reserve_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("reserve%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 4
kmodel_key = 1
ksize = 2
kmax = 3
kreserve = 5
ktrusted = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "model_key",
2: "size",
3: "max",
4: "header",
5: "reserve",
6: "trusted",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.NUMERIC,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsRequest'
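# --- AllocateIdsResponse ---------------------------------------------------
# Generated binding for apphosting_datastore_v3.AllocateIdsResponse. start
# and end (both required int64s) delimit the allocated ID range; cost is
# optional. Decode sketch (treating the range as inclusive on both ends is
# an assumption about the service's contract, not enforced by this binding):
#
#   resp = AllocateIdsResponse(raw_bytes)
#   ids = range(resp.start(), resp.end() + 1)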
class AllocateIdsResponse(ProtocolBuffer.ProtocolMessage):
has_start_ = 0
start_ = 0
has_end_ = 0
end_ = 0
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def start(self): return self.start_
def set_start(self, x):
self.has_start_ = 1
self.start_ = x
def clear_start(self):
if self.has_start_:
self.has_start_ = 0
self.start_ = 0
def has_start(self): return self.has_start_
def end(self): return self.end_
def set_end(self, x):
self.has_end_ = 1
self.end_ = x
def clear_end(self):
if self.has_end_:
self.has_end_ = 0
self.end_ = 0
def has_end(self): return self.has_end_
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def MergeFrom(self, x):
assert x is not self
if (x.has_start()): self.set_start(x.start())
if (x.has_end()): self.set_end(x.end())
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
def Equals(self, x):
if x is self: return 1
if self.has_start_ != x.has_start_: return 0
if self.has_start_ and self.start_ != x.start_: return 0
if self.has_end_ != x.has_end_: return 0
if self.has_end_ and self.end_ != x.end_: return 0
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_start_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: start not set.')
if (not self.has_end_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: end not set.')
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.start_)
n += self.lengthVarInt64(self.end_)
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_start_):
n += 1
n += self.lengthVarInt64(self.start_)
if (self.has_end_):
n += 1
n += self.lengthVarInt64(self.end_)
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
return n
def Clear(self):
self.clear_start()
self.clear_end()
self.clear_cost()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt64(self.start_)
out.putVarInt32(16)
out.putVarInt64(self.end_)
if (self.has_cost_):
out.putVarInt32(26)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_start_):
out.putVarInt32(8)
out.putVarInt64(self.start_)
if (self.has_end_):
out.putVarInt32(16)
out.putVarInt64(self.end_)
if (self.has_cost_):
out.putVarInt32(26)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_start(d.getVarInt64())
continue
if tt == 16:
self.set_end(d.getVarInt64())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_start_: res+=prefix+("start: %s\n" % self.DebugFormatInt64(self.start_))
if self.has_end_: res+=prefix+("end: %s\n" % self.DebugFormatInt64(self.end_))
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kstart = 1
kend = 2
kcost = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "start",
2: "end",
3: "cost",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AllocateIdsResponse'
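# Generated container message: a bare repeated list of CompositeIndex
# entries (field 1), used where a list of an app's composite indices must
# cross the RPC boundary on its own.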
class CompositeIndices(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.index_ = []
if contents is not None: self.MergeFromString(contents)
def index_size(self): return len(self.index_)
def index_list(self): return self.index_
def index(self, i):
return self.index_[i]
def mutable_index(self, i):
return self.index_[i]
def add_index(self):
x = CompositeIndex()
self.index_.append(x)
return x
def clear_index(self):
self.index_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
def Equals(self, x):
if x is self: return 1
if len(self.index_) != len(x.index_): return 0
for e1, e2 in zip(self.index_, x.index_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.index_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_index()
def OutputUnchecked(self, out):
for i in xrange(len(self.index_)):
out.putVarInt32(10)
out.putVarInt32(self.index_[i].ByteSize())
self.index_[i].OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.index_)):
out.putVarInt32(10)
out.putVarInt32(self.index_[i].ByteSizePartial())
self.index_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_index().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kindex = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "index",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CompositeIndices'
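# Generated request message for AddActions: a required Transaction (field 1),
# the repeated Action payloads to enqueue (field 2), and an optional
# InternalHeader (field 3). Note the generator's pattern: required and
# repeated fields are initialized eagerly in __init__, while the optional
# submessage is guarded by lazy_init_lock_.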
class AddActionsRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_transaction_ = 0
def __init__(self, contents=None):
self.transaction_ = Transaction()
self.action_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def transaction(self): return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction_
def clear_transaction(self):self.has_transaction_ = 0; self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def action_size(self): return len(self.action_)
def action_list(self): return self.action_
def action(self, i):
return self.action_[i]
def mutable_action(self, i):
return self.action_[i]
def add_action(self):
x = Action()
self.action_.append(x)
return x
def clear_action(self):
self.action_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
for i in xrange(x.action_size()): self.add_action().CopyFrom(x.action(i))
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
if len(self.action_) != len(x.action_): return 0
for e1, e2 in zip(self.action_, x.action_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_transaction_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: transaction not set.')
elif not self.transaction_.IsInitialized(debug_strs): initialized = 0
for p in self.action_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(self.transaction_.ByteSize())
n += 1 * len(self.action_)
for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_transaction_):
n += 1
n += self.lengthString(self.transaction_.ByteSizePartial())
n += 1 * len(self.action_)
for i in xrange(len(self.action_)): n += self.lengthString(self.action_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_header()
self.clear_transaction()
self.clear_action()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
for i in xrange(len(self.action_)):
out.putVarInt32(18)
out.putVarInt32(self.action_[i].ByteSize())
self.action_[i].OutputUnchecked(out)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_transaction_):
out.putVarInt32(10)
out.putVarInt32(self.transaction_.ByteSizePartial())
self.transaction_.OutputPartial(out)
for i in xrange(len(self.action_)):
out.putVarInt32(18)
out.putVarInt32(self.action_[i].ByteSizePartial())
self.action_[i].OutputPartial(out)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_action().TryMerge(tmp)
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_transaction_:
res+=prefix+"transaction <\n"
res+=self.transaction_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.action_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("action%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 3
ktransaction = 1
kaction = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "transaction",
2: "action",
3: "header",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsRequest'
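# Generated response for AddActions. The message defines no fields, so every
# size and serialization method is a no-op and TryMerge only rejects tag 0
# and skips unknown data.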
class AddActionsResponse(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.AddActionsResponse'
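# Generated request message for BeginTransaction: the required app id
# (string field 1), the optional allow_multiple_eg flag (bool field 2) that
# opts into cross-entity-group transactions, and an optional InternalHeader
# (field 3).
#
# A minimal, hypothetical round trip, assuming the ProtocolBuffer runtime
# imported at the top of this module; the app id below is illustrative only:
#
#   req = BeginTransactionRequest()
#   req.set_app('example-app')          # required field 1
#   req.set_allow_multiple_eg(1)        # optional cross-group flag
#   wire = req.Encode()                 # fails if required fields are unset
#   parsed = BeginTransactionRequest(wire)
#   assert parsed.Equals(req)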
class BeginTransactionRequest(ProtocolBuffer.ProtocolMessage):
has_header_ = 0
header_ = None
has_app_ = 0
app_ = ""
has_allow_multiple_eg_ = 0
allow_multiple_eg_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def header(self):
if self.header_ is None:
self.lazy_init_lock_.acquire()
try:
if self.header_ is None: self.header_ = InternalHeader()
finally:
self.lazy_init_lock_.release()
return self.header_
def mutable_header(self): self.has_header_ = 1; return self.header()
def clear_header(self):
if self.has_header_:
self.has_header_ = 0;
if self.header_ is not None: self.header_.Clear()
def has_header(self): return self.has_header_
def app(self): return self.app_
def set_app(self, x):
self.has_app_ = 1
self.app_ = x
def clear_app(self):
if self.has_app_:
self.has_app_ = 0
self.app_ = ""
def has_app(self): return self.has_app_
def allow_multiple_eg(self): return self.allow_multiple_eg_
def set_allow_multiple_eg(self, x):
self.has_allow_multiple_eg_ = 1
self.allow_multiple_eg_ = x
def clear_allow_multiple_eg(self):
if self.has_allow_multiple_eg_:
self.has_allow_multiple_eg_ = 0
self.allow_multiple_eg_ = 0
def has_allow_multiple_eg(self): return self.has_allow_multiple_eg_
def MergeFrom(self, x):
assert x is not self
if (x.has_header()): self.mutable_header().MergeFrom(x.header())
if (x.has_app()): self.set_app(x.app())
if (x.has_allow_multiple_eg()): self.set_allow_multiple_eg(x.allow_multiple_eg())
def Equals(self, x):
if x is self: return 1
if self.has_header_ != x.has_header_: return 0
if self.has_header_ and self.header_ != x.header_: return 0
if self.has_app_ != x.has_app_: return 0
if self.has_app_ and self.app_ != x.app_: return 0
if self.has_allow_multiple_eg_ != x.has_allow_multiple_eg_: return 0
if self.has_allow_multiple_eg_ and self.allow_multiple_eg_ != x.allow_multiple_eg_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_header_ and not self.header_.IsInitialized(debug_strs)): initialized = 0
if (not self.has_app_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: app not set.')
return initialized
def ByteSize(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSize())
n += self.lengthString(len(self.app_))
if (self.has_allow_multiple_eg_): n += 2
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_header_): n += 1 + self.lengthString(self.header_.ByteSizePartial())
if (self.has_app_):
n += 1
n += self.lengthString(len(self.app_))
if (self.has_allow_multiple_eg_): n += 2
return n
def Clear(self):
self.clear_header()
self.clear_app()
self.clear_allow_multiple_eg()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_allow_multiple_eg_):
out.putVarInt32(16)
out.putBoolean(self.allow_multiple_eg_)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSize())
self.header_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_app_):
out.putVarInt32(10)
out.putPrefixedString(self.app_)
if (self.has_allow_multiple_eg_):
out.putVarInt32(16)
out.putBoolean(self.allow_multiple_eg_)
if (self.has_header_):
out.putVarInt32(26)
out.putVarInt32(self.header_.ByteSizePartial())
self.header_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_app(d.getPrefixedString())
continue
if tt == 16:
self.set_allow_multiple_eg(d.getBoolean())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_header().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_header_:
res+=prefix+"header <\n"
res+=self.header_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
if self.has_allow_multiple_eg_: res+=prefix+("allow_multiple_eg: %s\n" % self.DebugFormatBool(self.allow_multiple_eg_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kheader = 3
kapp = 1
kallow_multiple_eg = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "app",
2: "allow_multiple_eg",
3: "header",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.BeginTransactionRequest'
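# Generated member class for the repeated group `Version` inside
# CommitResponse (group field 3). Groups are delimited by START/END tags
# rather than a length prefix: (3 << 3) | 3 == 27 opens the group and
# (3 << 3) | 4 == 28 closes it, which is why TryMerge below breaks on
# tt == 28 instead of draining the decoder. Both root_entity_key (field 4)
# and version (field 5) are required.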
class CommitResponse_Version(ProtocolBuffer.ProtocolMessage):
has_root_entity_key_ = 0
has_version_ = 0
version_ = 0
def __init__(self, contents=None):
self.root_entity_key_ = Reference()
if contents is not None: self.MergeFromString(contents)
def root_entity_key(self): return self.root_entity_key_
def mutable_root_entity_key(self): self.has_root_entity_key_ = 1; return self.root_entity_key_
def clear_root_entity_key(self):self.has_root_entity_key_ = 0; self.root_entity_key_.Clear()
def has_root_entity_key(self): return self.has_root_entity_key_
def version(self): return self.version_
def set_version(self, x):
self.has_version_ = 1
self.version_ = x
def clear_version(self):
if self.has_version_:
self.has_version_ = 0
self.version_ = 0
def has_version(self): return self.has_version_
def MergeFrom(self, x):
assert x is not self
if (x.has_root_entity_key()): self.mutable_root_entity_key().MergeFrom(x.root_entity_key())
if (x.has_version()): self.set_version(x.version())
def Equals(self, x):
if x is self: return 1
if self.has_root_entity_key_ != x.has_root_entity_key_: return 0
if self.has_root_entity_key_ and self.root_entity_key_ != x.root_entity_key_: return 0
if self.has_version_ != x.has_version_: return 0
if self.has_version_ and self.version_ != x.version_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_root_entity_key_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: root_entity_key not set.')
elif not self.root_entity_key_.IsInitialized(debug_strs): initialized = 0
if (not self.has_version_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: version not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(self.root_entity_key_.ByteSize())
n += self.lengthVarInt64(self.version_)
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_root_entity_key_):
n += 1
n += self.lengthString(self.root_entity_key_.ByteSizePartial())
if (self.has_version_):
n += 1
n += self.lengthVarInt64(self.version_)
return n
def Clear(self):
self.clear_root_entity_key()
self.clear_version()
def OutputUnchecked(self, out):
out.putVarInt32(34)
out.putVarInt32(self.root_entity_key_.ByteSize())
self.root_entity_key_.OutputUnchecked(out)
out.putVarInt32(40)
out.putVarInt64(self.version_)
def OutputPartial(self, out):
if (self.has_root_entity_key_):
out.putVarInt32(34)
out.putVarInt32(self.root_entity_key_.ByteSizePartial())
self.root_entity_key_.OutputPartial(out)
if (self.has_version_):
out.putVarInt32(40)
out.putVarInt64(self.version_)
def TryMerge(self, d):
while 1:
tt = d.getVarInt32()
if tt == 28: break
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_root_entity_key().TryMerge(tmp)
continue
if tt == 40:
self.set_version(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_root_entity_key_:
res+=prefix+"root_entity_key <\n"
res+=self.root_entity_key_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_version_: res+=prefix+("version: %s\n" % self.DebugFormatInt64(self.version_))
return res
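# Generated response for Commit: an optional Cost (field 1) and the repeated
# Version group (field 3) mapping each written entity group's root key to
# its new version. The tag lookup tables below cover the group's inner
# fields (kVersionroot_entity_key, kVersionversion) on the parent's behalf,
# which is why CommitResponse_Version defines no tables of its own.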
class CommitResponse(ProtocolBuffer.ProtocolMessage):
has_cost_ = 0
cost_ = None
def __init__(self, contents=None):
self.version_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def cost(self):
if self.cost_ is None:
self.lazy_init_lock_.acquire()
try:
if self.cost_ is None: self.cost_ = Cost()
finally:
self.lazy_init_lock_.release()
return self.cost_
def mutable_cost(self): self.has_cost_ = 1; return self.cost()
def clear_cost(self):
if self.has_cost_:
self.has_cost_ = 0;
if self.cost_ is not None: self.cost_.Clear()
def has_cost(self): return self.has_cost_
def version_size(self): return len(self.version_)
def version_list(self): return self.version_
def version(self, i):
return self.version_[i]
def mutable_version(self, i):
return self.version_[i]
def add_version(self):
x = CommitResponse_Version()
self.version_.append(x)
return x
def clear_version(self):
self.version_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
for i in xrange(x.version_size()): self.add_version().CopyFrom(x.version(i))
def Equals(self, x):
if x is self: return 1
if self.has_cost_ != x.has_cost_: return 0
if self.has_cost_ and self.cost_ != x.cost_: return 0
if len(self.version_) != len(x.version_): return 0
for e1, e2 in zip(self.version_, x.version_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
for p in self.version_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
n += 2 * len(self.version_)
for i in xrange(len(self.version_)): n += self.version_[i].ByteSize()
return n
def ByteSizePartial(self):
n = 0
if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSizePartial())
n += 2 * len(self.version_)
for i in xrange(len(self.version_)): n += self.version_[i].ByteSizePartial()
return n
def Clear(self):
self.clear_cost()
self.clear_version()
def OutputUnchecked(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSize())
self.cost_.OutputUnchecked(out)
for i in xrange(len(self.version_)):
out.putVarInt32(27)
self.version_[i].OutputUnchecked(out)
out.putVarInt32(28)
def OutputPartial(self, out):
if (self.has_cost_):
out.putVarInt32(10)
out.putVarInt32(self.cost_.ByteSizePartial())
self.cost_.OutputPartial(out)
for i in xrange(len(self.version_)):
out.putVarInt32(27)
self.version_[i].OutputPartial(out)
out.putVarInt32(28)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_cost().TryMerge(tmp)
continue
if tt == 27:
self.add_version().TryMerge(d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_cost_:
res+=prefix+"cost <\n"
res+=self.cost_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt=0
for e in self.version_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("Version%s {\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+"}\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kcost = 1
kVersionGroup = 3
kVersionroot_entity_key = 4
kVersionversion = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "cost",
3: "Version",
4: "root_entity_key",
5: "version",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STARTGROUP,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting_datastore_v3.CommitResponse'
if _extension_runtime:
pass
__all__ = ['InternalHeader', 'Transaction', 'Query', 'Query_Filter',
           'Query_Order', 'CompiledQuery', 'CompiledQuery_PrimaryScan',
           'CompiledQuery_MergeJoinScan', 'CompiledQuery_EntityFilter',
           'CompiledCursor', 'CompiledCursor_PositionIndexValue',
           'CompiledCursor_Position', 'Cursor', 'Error', 'Cost',
           'Cost_CommitCost', 'GetRequest', 'GetResponse',
           'GetResponse_Entity', 'PutRequest', 'PutResponse', 'TouchRequest',
           'TouchResponse', 'DeleteRequest', 'DeleteResponse', 'NextRequest',
           'QueryResult', 'AllocateIdsRequest', 'AllocateIdsResponse',
           'CompositeIndices', 'AddActionsRequest', 'AddActionsResponse',
           'BeginTransactionRequest', 'CommitResponse',
           'CommitResponse_Version']
| ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/google/appengine/datastore/datastore_v3_pb.py | Python | bsd-3-clause | 282,355 |
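# Package initializer for the application's models. Importing every model
# module here re-exports the classes at package level (e.g.
# `from psi.app.models import Product`) and, presumably, ensures SQLAlchemy
# sees all table definitions as soon as the package is imported.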
from .image import Image
from .product_category import ProductCategory
from .supplier import Supplier, PaymentMethod
from .product import Product
from .product import ProductImage
from .enum_values import EnumValues
from .related_values import RelatedValues
from .customer import Customer
from .expense import Expense
from .incoming import Incoming
from .shipping import Shipping, ShippingLine
from .receiving import Receiving, ReceivingLine
from .inventory_transaction import InventoryTransaction, InventoryTransactionLine
from .purchase_order import PurchaseOrder, PurchaseOrderLine
from .sales_order import SalesOrder, SalesOrderLine
from .user import User
from .role import Role, roles_users
from .organization import Organization
from .inventory_in_out_link import InventoryInOutLink
from .aspects import update_menemonic
from .product_inventory import ProductInventory
| betterlife/psi | psi/app/models/__init__.py | Python | mit | 875 |