# Copyright 2013-2019 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import os.path as osp
class VisionaryDevTools(Package):
"""Developer convenience packages common to all visionary
development meta packages. Application specific build tools belong
to the dedicated meta packages."""
homepage = ''
# some random tarball, to make `spack fetch --dependencies visionary-defaults` work
url = 'https://github.com/electronicvisions/spack/archive/v0.8.tar.gz'
# This is only a dummy tarball (see difference between version numbers)
# TODO: as soon as a MetaPackage-concept has been merged, please update this package
version('1.0', '372ce038842f20bf0ae02de50c26e85d', url='https://github.com/electronicvisions/spack/archive/v0.8.tar.gz')
depends_on('ack')
depends_on('autoconf')
depends_on('automake')
depends_on('bash-completion')
depends_on('bazel')
depends_on('bear')
depends_on('cairo +X')
depends_on('cloc')
depends_on('cmake')
depends_on('connect-proxy')
depends_on('cppcheck +htmlreport')
depends_on('cquery')
depends_on('doxygen+graphviz')
depends_on('emacs ~X')
depends_on('gdb')
depends_on('genpybind')
depends_on('git+tcltk')
depends_on('git-fat-git')
depends_on('gtkplus')
depends_on('imagemagick')
depends_on('jq')
depends_on('libpcap')
depends_on('libtool')
depends_on('llvm+visionary+python~libcxx build_type=Release')
depends_on('mercurial')
depends_on('mosh')
depends_on('munge')
depends_on('ncdu')
depends_on('node-js')
depends_on('octave+fftw')
depends_on('openssh')
depends_on('pigz')
depends_on('pkg-config')
depends_on('py-autopep8')
depends_on('py-black', when="^python@3.6.0:")
depends_on('py-configargparse')
depends_on('py-doxypypy')
depends_on('py-flake8')
depends_on('py-gdbgui')
depends_on('py-git-review')
depends_on('py-ipython')
depends_on('py-jedi')
depends_on('py-junit-xml')
depends_on('py-language-server')
depends_on('py-line-profiler')
depends_on('py-nose')
depends_on('py-nose2')
depends_on('py-memory-profiler')
depends_on('py-pudb')
depends_on('py-pylint@:1.999.999', when="^python@:2.999.999")
depends_on('py-pylint', when="^python@3.4.0:")
depends_on('py-pyserial')
depends_on('py-pytest')
depends_on('py-pytest-xdist')
depends_on('py-ranger-fm')
depends_on('py-sqlalchemy')
depends_on('py-virtualenv')
depends_on('py-xmlrunner')
depends_on('py-yq')
depends_on('rtags')
depends_on('tar')
depends_on('texinfo')
# ECM (2020-05-14): removed 'the-silver-searcher' due to build fail on gcc@10.1.0
depends_on('tig')
depends_on('time')
depends_on('tmux')
depends_on('units')
depends_on('valgrind')
depends_on('verilator')
depends_on('vim +python +ruby +perl +cscope +huge +x')
depends_on('visionary-xilinx')
depends_on('wget')
depends_on('yaml-cpp+shared')
depends_on('zsh')
def install(self, spec, prefix):
mkdirp(prefix.etc)
# store a copy of this package.
filename = osp.basename(osp.dirname(__file__)) # gives name of parent folder
install(__file__, join_path(prefix.etc, filename + '.py'))
# we could create some filesystem view here?
#!/usr/bin/env python
## useful imports
import time
import io
import os
import re
import sys
from sys import argv
import subprocess
## ARGV
if len(sys.argv) < 6:  # the script itself plus the 5 required arguments
print ("\nUsage:")
print ("python3 %s bam_file folder bedtools_bin samtools_bin logfile\n" %os.path.realpath(__file__))
exit()
bam_file = os.path.abspath(argv[1])
folder = argv[2]
bedtools_exe = argv[3]
samtools_exe = argv[4]
logFile = argv[5]
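# Illustrative invocation (script name and paths are placeholders, not taken from a real run):
#   python3 bam_to_pilfer.py sample.bam ./results /usr/bin/bedtools /usr/bin/samtools run.log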
# start
output_file = open(logFile, 'a')
output_file.write("\nConvert BAM to Pilfer Input file:\n")
## Variables
dirname_name = os.path.dirname(bam_file)
split_name = os.path.splitext( os.path.basename(bam_file) )
bed_file = folder + '/' + split_name[0] + '.bed'
sam_file = folder + '/' + split_name[0] + '.sam'
pilfer_tmp = folder + '/' + split_name[0] + '.tmp.pilfer.bed'
pilfer_file = folder + '/' + split_name[0] + '.pilfer.bed'
## START
print ("\n+ Converting BAM file into PILFER input file")
## generate bed file with bedtools bamtobed -i bam_file
if (os.path.isfile(bed_file)):
print ("\t+ File %s already exists" %bed_file)
else:
cmd_bedtools = "%s bamtobed -i %s > %s" %(bedtools_exe, bam_file, bed_file)
output_file.write(cmd_bedtools)
output_file.write("\n")
try:
subprocess.check_output(cmd_bedtools, shell = True)
except Exception as exc:
print ('***ERROR:')
print (cmd_bedtools)
print('bedtools command generated an exception: %s' %exc)
exit()
## generate samtools
if (os.path.isfile(sam_file)):
print ("\t+ File %s already exists" %sam_file)
else:
cmd_samtools = "%s view %s > %s" %(samtools_exe, bam_file, sam_file)
output_file.write(cmd_samtools)
output_file.write("\n")
try:
subprocess.check_output(cmd_samtools, shell = True)
except Exception as exc:
print ('***ERROR:')
print (cmd_samtools)
print('samtools view command generated an exception: %s' %exc)
exit()
## generate paste filter tmp file
if (os.path.isfile(pilfer_tmp)):
print ("\t+ File %s already exists" %pilfer_tmp)
else:
## paste Aligned.sortedByCoord.out.bed Aligned.sortedByCoord.out.sam | awk -v "OFS=\t" '{print $1, $2, $3, $16, $6}'
cmd_paste = "paste %s %s | awk -v \"OFS=\t\" \'{print $1, $2, $3, $16, $6}\' > %s" %(bed_file, sam_file, pilfer_tmp)
output_file.write(cmd_paste)
output_file.write("\n")
try:
subprocess.check_output(cmd_paste, shell = True)
except Exception as exc:
print ('***ERROR:')
print (cmd_paste)
print('paste bed sam command generated an exception: %s' %exc)
exit()
## parse pilfer tmp file
counter = 1
previous_line = ()
# Open file OUT
output_file = open(pilfer_file, 'w')
# Open file IN
fileHandler = open (pilfer_tmp, "r")
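# The block below is intended to collapse consecutive duplicate records of the tmp file:
# each repeated entry is written once, with '::PI' appended to the sequence field and the
# number of occurrences inserted as an extra column before the strand.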
while True:
# Get next line from file
line = fileHandler.readline().strip()
# If line is empty then end of file reached
if not line:
break
seq = line.split('\t')[3]
real_seq = seq.split('::PU')
seq_len = len(str(real_seq[0]))
## Discard smaller
if (previous_line):
if (previous_line == line):
line = previous_line
counter += 1
else:
line_split = previous_line.split('\t')
output_file.write('%s\t%s\t%s\t%s::PI\t%s\t%s\n' %(line_split[0], line_split[1], line_split[2], line_split[3], counter, line_split[4]))
#counter += 1
while True:
#get next line
next_line = fileHandler.readline().strip()
if (next_line == line):
counter += 1
else:
line_split = line.split('\t')
output_file.write('%s\t%s\t%s\t%s::PI\t%s\t%s\n' %(line_split[0], line_split[1], line_split[2], line_split[3], counter, line_split[4]))
previous_line = next_line
counter = 1
break
## close and finish
fileHandler.close()
output_file.close()
#!/usr/bin/env python3
from sys import stderr, exit, argv
from random import randrange
#from TALinputs import TALinput
from multilanguage import Env, Lang, TALcolors
# METADATA OF THIS TAL_SERVICE:
problem="tiling_mxn-boards_with_1x2-boards"
service="is_tilable"
args_list = [
('m',int),
('n',int),
('my_conjecture',str),
('h',int),
('k',int),
('lang',str),
('ISATTY',bool),
]
ENV =Env(problem, service, args_list)
TAc =TALcolors(ENV)
LANG=Lang(ENV, TAc, lambda fstring: eval(f"f'{fstring}'"))
TAc.print(LANG.opening_msg, "green")
# START CODING YOUR SERVICE:
assert ENV['h']==1
assert ENV['k']==2
print()
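# Parity argument: an m x n board admits a 1x2-domino tiling iff m*n is even, since every
# domino covers exactly two cells; conversely, if m*n is even then one side length is even
# and the board can be tiled row by row (or column by column) with dominoes.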
if (ENV['m'] * ENV['n']) % 2 == 1:
if ENV['my_conjecture'] == "yes":
TAc.NO()
print(LANG.render_feedback("FALSE-is-not-tilable", f"Contrary to what you have asserted, the {ENV['m']}x{ENV['n']}-grid is NOT tilable. If you are not convinced you can submit a tiling of that grid to the service 'check_my_tiling'."))
if ENV['my_conjecture'] == "no":
TAc.OK()
print(LANG.render_feedback("TRUE-is-not-tilable", f"You are perfecty right: the {ENV['m']}x{ENV['n']}-grid is NOT tilable."))
if (ENV['m'] * ENV['n']) % 2 == 0:
if ENV['my_conjecture'] == "yes":
TAc.OK()
print(LANG.render_feedback("TRUE-is-tilable", f"We agree on the fact that the {ENV['m']}x{ENV['n']}-grid is tilable. If you want to exhibit us a tiling for this grid you can submit it to the service 'check_my_tiling'."))
if ENV['my_conjecture'] == "no":
TAc.NO()
print(LANG.render_feedback("FALSE-is-tilable", f"No, the {ENV['m']}x{ENV['n']}-grid is tilable. If you can not believe a tiling of the {ENV['m']}x{ENV['n']}-grid exists try the service 'gimme_hints_on_a_tiling'."))
exit(0)
import pytest
from jina.enums import RemoteAccessType
from jina.flow import Flow
from jina.parser import set_pea_parser, set_pod_parser
from jina.peapods.pods import BasePod
from jina.peapods.runtimes.remote.ssh import SSHRuntime
from jina.proto import jina_pb2
@pytest.mark.skip('works locally, but until I find out how to mock ssh, this has to be skipped')
def test_ssh_pea():
p = set_pea_parser().parse_args(['--host', 'pi@172.16.1.110', '--timeout', '5000'])
with SSHRuntime(p, kind='pea') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('works locally, but until I find out how to mock ssh, this has to be skipped')
def test_ssh_pod():
p = set_pod_parser().parse_args(['--host', 'pi@172.16.1.110', '--timeout', '5000'])
with SSHRuntime(p, kind='pod') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('not implemented yet')
def test_ssh_mutable_pod():
p = set_pod_parser().parse_args(['--host', 'pi@172.16.1.110', '--timeout', '5000'])
p = BasePod(p)
with SSHRuntime(p, kind='pod') as pp:
assert pp.status.envelope.status.code == jina_pb2.StatusProto.READY
assert pp.status is None
@pytest.mark.skip('not implemented yet')
def test_flow():
f = Flow().add().add(host='pi@172.16.1.110', remote_access=RemoteAccessType.SSH)
with f:
pass
import os
import sys
from copy import deepcopy
import traceback
import functools
from collections import defaultdict
import yaml
from argparse import ArgumentParser,\
RawDescriptionHelpFormatter, ArgumentDefaultsHelpFormatter
from sgains.configuration.parser import SgainsValidator, Config
from sgains.configuration.schema import sgains_schema
from sgains.executor import Executor
from sgains.pipelines.mappableregions_pipeline import MappableRegionsPipeline
from sgains.pipelines.genomeindex_pipeline import GenomeIndexPipeline
from sgains.pipelines.bins_pipeline import BinsPipeline
from sgains.pipelines.mapping_pipeline import MappingPipeline
from sgains.pipelines.extract_10x_pipeline import Extract10xPipeline
from sgains.pipelines.varbin_10x_pipeline import Varbin10xPipeline
from sgains.pipelines.varbin_pipeline import VarbinPipeline
from sgains.pipelines.r_pipeline import Rpipeline
from sgains.pipelines.composite_pipeline import CompositePipeline
SGAINS_COMMANDS = {
"genomeindex": {
"config_groups": ["aligner", "genome"],
"help": "builds appropriate hisat2 or bowtie index for the "
"reference genome",
},
"mappable_regions": {
"config_groups": ["aligner", "genome", "mappable_regions", "sge"],
"help": "finds all mappable regions in specified genome",
},
"bins": {
"config_groups": ["genome", "mappable_regions", "bins", "sge"],
"help": "calculates all bins boundaries for specified bins count "
"and read length",
},
"prepare": {
"config_groups": [
"aligner", "genome", "mappable_regions", "bins", "sge"],
"help": "combines all preparation steps ('genome', 'mappable-regions' "
"and 'bins') into single command",
},
"mapping": {
"config_groups": ["aligner", "genome", "reads", "mapping", "sge"],
"help": "performs mapping of cells reads to the reference genome",
},
"extract_10x": {
"config_groups": [
"data_10x", "reads", "sge"],
"help": "extracts cells reads from 10x Genomics datasets",
},
"varbin": {
"config_groups": ["bins", "mapping", "varbin", "sge"],
"help": "applies varbin algorithm to count read mappings in each bin",
},
"varbin_10x": {
"config_groups": [
"data_10x", "bins", "varbin", "sge"],
"help": "applies varbin algorithm to count read mappings in each bin "
"to 10x Genomics datasets without realigning",
},
"scclust": {
"config_groups": ["bins", "varbin", "scclust"],
"help": "segmentation and clustering based bin counts and "
"preparation of the SCGV input data"
},
"process": {
"config_groups": [
"aligner", "genome", "reads", "mapping", "bins", "varbin",
"scclust", "sge"],
"help": "combines all process steps ('mapping', 'varbin' "
"and 'scclust') into single command"
},
}
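# Each key of SGAINS_COMMANDS becomes a CLI sub-command (underscores replaced by dashes),
# e.g. a hypothetical invocation: `<prog> mappable-regions --config sgains.yml`.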
def build_common_options(parser):
parser.add_argument(
"-v", "--verbose",
dest="verbose",
action="count",
help="set verbosity level [default: %(default)s]",
default=0
)
parser.add_argument(
"-c", "--config",
dest="config",
help="configuration file",
metavar="path"
)
parser.add_argument(
"-n", "--dry-run",
dest="dry_run",
action="store_true",
help="perform a trial run with no changes made",
default=False
)
parser.add_argument(
"--force", "-F",
dest="force",
action="store_true",
help="allows overwriting nonempty results directory",
default=False
)
parser.add_argument(
"--parallel", "-p",
dest="parallel",
help="number of task to run in parallel",
type=int,
default=1
)
parser.add_argument(
"--sge",
dest="sge",
action="store_true",
help="parallelilizes commands using SGE cluster manager",
default=False
)
def _get_config_value(config, group_name, name):
if config is None:
return None
group = config.config.get(group_name)
if group is None:
return None
result = getattr(group, name)
return result
def build_cli_options(argparser, command=None, config=None, sge_flag=False):
work_dirname = os.getcwd()
if config is not None:
work_dirname = config.work_dirname
validator = SgainsValidator(
deepcopy(sgains_schema), work_dirname=work_dirname)
if command is None:
config_groups = list(validator.schema.keys())
else:
assert command in SGAINS_COMMANDS
command = SGAINS_COMMANDS[command]
config_groups = command["config_groups"]
for group_name in config_groups:
if group_name == "sge" and not sge_flag:
continue
group = validator.schema.get(group_name)
group_parser = argparser.add_argument_group(f"{group_name} group:")
assert group["type"] == "dict", (group_name, group)
group_schema = group["schema"]
for arg_name, arg_spec in group_schema.items():
name = f"--{arg_name.replace('_', '-')}"
arg_type = str
arg_type = arg_spec.get("type", "string")
if arg_type == "string":
arg_type = str
elif arg_type == "integer":
arg_type = int
elif arg_type == "float":
arg_type = float
elif arg_type == "list":
arg_type = list
else:
raise ValueError(f"wrong argument type {arg_type}")
help_data = None
meta_data = arg_spec.get("meta")
if meta_data is not None:
help_data = meta_data.get("help")
arg_default = _get_config_value(config, group_name, arg_name)
if arg_default is None:
arg_default = arg_spec.get("default")
group_parser.add_argument(
name,
help=help_data,
dest=arg_name,
type=arg_type,
default=arg_default)
return argparser
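# Configuration files are plain YAML: one top-level mapping per option group (e.g. "genome",
# "bins", "mapping"), whose keys use the same names as the corresponding command-line options.
# Values given on the command line take precedence over values read from the file.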
def parse_cli_options(args):
config_dict = defaultdict(dict)
work_dirname = os.getcwd()
if args.config is not None:
assert os.path.exists(args.config), args.config
with open(args.config, "r") as infile:
config_dict = yaml.safe_load(infile)
work_dirname = os.path.dirname(args.config)
validator = SgainsValidator(
deepcopy(sgains_schema), work_dirname=work_dirname)
result = defaultdict(dict)
config_groups = list(validator.schema.keys())
for group_name in config_groups:
if group_name == "sge" and not args.sge:
continue
group = validator.schema.get(group_name)
group_schema = group.get("schema")
if group_schema is None:
continue
group_result = {}
for arg_name in group_schema.keys():
arg_value = getattr(args, arg_name, None)
if arg_value is not None:
group_result[arg_name] = arg_value
else:
config_value = config_dict.get(group_name, None)
if config_value is not None:
config_value = config_value.get(arg_name, None)
if config_value is not None:
group_result[arg_name] = config_value
if group_result:
result[group_name] = group_result
config = Config.from_dict(result, work_dirname)
config.verbose = args.verbose
config.config_file = args.config
config.dry_run = args.dry_run
config.force = args.force
config.parallel = args.parallel
config.sge = args.sge
return config
def main(argv=sys.argv[1:]):
program_name = os.path.basename(sys.argv[0])
program_shortdesc = \
'sgains - sparse genomic analysis of individual nuclei by ' \
'sequencing pipeline'
program_description = '''%s
USAGE
''' % (program_shortdesc, )
try:
config = Config.parse_argv(argv)
sge_flag = Config.check_sge_argv(argv)
argparser = ArgumentParser(
description=program_description,
formatter_class=ArgumentDefaultsHelpFormatter)
build_common_options(argparser)
subparsers = argparser.add_subparsers(
title="sGAINS subcommands"
)
for command in SGAINS_COMMANDS:
command_name = command.replace("_", "-")
command_help = SGAINS_COMMANDS[command].get("help", "")
subparser = subparsers.add_parser(
name=command_name,
help=command_help,
formatter_class=ArgumentDefaultsHelpFormatter
)
build_cli_options(subparser, command, config, sge_flag=sge_flag)
subparser.set_defaults(func=functools.partial(execute, command))
args = argparser.parse_args(argv)
args.func(args)
except KeyboardInterrupt:
traceback.print_exc()
return 0
except Exception as e:
traceback.print_exc()
indent = len(program_name) * " "
sys.stderr.write(program_name + ": " + repr(e) + "\n")
sys.stderr.write(indent + " for help use --help")
sys.stderr.write('\n')
return 2
def create_pipeline(command, config):
if command == "genomeindex":
return GenomeIndexPipeline(config)
elif command == "mappable_regions":
return MappableRegionsPipeline(config)
elif command == "bins":
return BinsPipeline(config)
elif command == "mapping":
return MappingPipeline(config)
elif command == "varbin":
return VarbinPipeline(config)
elif command == "scclust":
return Rpipeline(config)
elif command == "extract_10x":
return Extract10xPipeline(config)
elif command == "varbin_10x":
return Varbin10xPipeline(config)
elif command == "prepare":
pipelines = [
GenomeIndexPipeline(config),
MappableRegionsPipeline(config),
BinsPipeline(config),
]
return CompositePipeline(config, pipelines)
elif command == "process":
pipelines = [
MappingPipeline(config),
VarbinPipeline(config),
Rpipeline(config),
]
return CompositePipeline(config, pipelines)
raise ValueError(f"Unexpected command: {command}")
def execute(command, args):
config = parse_cli_options(args)
pipeline = create_pipeline(command, config)
assert pipeline is not None, command
executor = Executor(config)
executor.run_pipeline(pipeline)
if __name__ == "__main__":
sys.exit(main())
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import io
import os
import sys
from shutil import rmtree
from setuptools import setup, find_packages, Command
about = {}
here = os.path.abspath(os.path.dirname(__file__))
with io.open(os.path.join(here, 'xmind2testcase', '__about__.py'), encoding='utf-8') as f: # custom
exec(f.read(), about)
with io.open('README.md', encoding='utf-8') as f:
long_description = f.read()
install_requires = [ # custom
"xmind",
"flask",
"arrow",
]
class PyPiCommand(Command):
""" Build and publish this package and make a tag.
Support: python setup.py pypi
Copied from requests_html
"""
user_options = []
@staticmethod
def status(s):
"""Prints things in green color."""
print('\033[0;32m{0}\033[0m'.format(s))
def initialize_options(self):
""" override
"""
pass
def finalize_options(self):
""" override
"""
pass
def run(self):
self.status('Building Source and Wheel (universal) distribution...')
os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
self.status('Uploading the package to PyPi via Twine...')
os.system('twine upload dist/*')
self.status('Publishing git tags...')
os.system('git tag v{0}'.format(about['__version__']))
os.system('git push --tags')
try:
self.status('Removing current build artifacts...')
rmtree(os.path.join(here, 'dist'))
rmtree(os.path.join(here, 'build'))
rmtree(os.path.join(here, 'xmind2testcase.egg-info')) # custom
except OSError:
pass
self.status('Congratulations! Upload PyPi and publish git tag successfully...')
sys.exit()
setup(
name=about['__title__'],
version=about['__version__'],
description=about['__description__'],
long_description=long_description,
long_description_content_type='text/markdown',
keywords=about['__keywords__'],
author=about['__author__'],
author_email=about['__author_email__'],
url=about['__url__'],
license=about['__license__'],
packages=find_packages(exclude=['tests', 'test.*', 'docs']), # custom
package_data={ # custom
'': ['README.md'],
'webtool': ['static/*', 'static/css/*', 'static/guide/*', 'templates/*', 'schema.sql'],
},
install_requires=install_requires,
extras_require={},
python_requires='>=3.0, <4', # custom
classifiers=[
"Programming Language :: Python :: 3",
"License :: OSI Approved :: MIT License",
"Operating System :: OS Independent",
],
entry_points={ # custom
'console_scripts': [
'xmind2testcase=xmind2testcase.cli:cli_main',
]
},
cmdclass={
# python3 setup.py pypi
'pypi': PyPiCommand
}
)
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from .build_run_source import BuildRunSource
from oci.util import formatted_flat_dict, NONE_SENTINEL, value_allowed_none_or_none_sentinel # noqa: F401
from oci.decorators import init_model_state_from_kwargs
@init_model_state_from_kwargs
class GithubBuildRunSource(BuildRunSource):
"""
Specifies details of build run through GitHub.
"""
def __init__(self, **kwargs):
"""
Initializes a new GithubBuildRunSource object with values from keyword arguments. The default value of the :py:attr:`~oci.devops.models.GithubBuildRunSource.source_type` attribute
of this class is ``GITHUB`` and it should not be changed.
The following keyword arguments are supported (corresponding to the getters/setters of this class):
:param source_type:
The value to assign to the source_type property of this GithubBuildRunSource.
Allowed values for this property are: "MANUAL", "GITHUB", "GITLAB", "DEVOPS_CODE_REPOSITORY"
:type source_type: str
:param trigger_id:
The value to assign to the trigger_id property of this GithubBuildRunSource.
:type trigger_id: str
:param trigger_info:
The value to assign to the trigger_info property of this GithubBuildRunSource.
:type trigger_info: oci.devops.models.TriggerInfo
"""
self.swagger_types = {
'source_type': 'str',
'trigger_id': 'str',
'trigger_info': 'TriggerInfo'
}
self.attribute_map = {
'source_type': 'sourceType',
'trigger_id': 'triggerId',
'trigger_info': 'triggerInfo'
}
self._source_type = None
self._trigger_id = None
self._trigger_info = None
self._source_type = 'GITHUB'
@property
def trigger_id(self):
"""
**[Required]** Gets the trigger_id of this GithubBuildRunSource.
The trigger that invoked the build run.
:return: The trigger_id of this GithubBuildRunSource.
:rtype: str
"""
return self._trigger_id
@trigger_id.setter
def trigger_id(self, trigger_id):
"""
Sets the trigger_id of this GithubBuildRunSource.
The trigger that invoked the build run.
:param trigger_id: The trigger_id of this GithubBuildRunSource.
:type: str
"""
self._trigger_id = trigger_id
@property
def trigger_info(self):
"""
**[Required]** Gets the trigger_info of this GithubBuildRunSource.
:return: The trigger_info of this GithubBuildRunSource.
:rtype: oci.devops.models.TriggerInfo
"""
return self._trigger_info
@trigger_info.setter
def trigger_info(self, trigger_info):
"""
Sets the trigger_info of this GithubBuildRunSource.
:param trigger_info: The trigger_info of this GithubBuildRunSource.
:type: oci.devops.models.TriggerInfo
"""
self._trigger_info = trigger_info
def __repr__(self):
return formatted_flat_dict(self)
def __eq__(self, other):
if other is None:
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
return not self == other
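# Illustrative usage (values are placeholders, not real OCIDs):
#   source = GithubBuildRunSource(
#       trigger_id="ocid1.trigger.oc1..example",
#       trigger_info=trigger_info,  # an oci.devops.models.TriggerInfo instance
#   )
#   # source.source_type is preset to 'GITHUB' by the constructor.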
"""
Random Variables.
This module implements random variables. Random variables are the main in- and outputs
of probabilistic numerical methods.
"""
from typing import Any, Callable, Dict, Generic, Optional, Tuple, TypeVar, Union
import numpy as np
from probnum import utils as _utils
from probnum.type import (
ArrayLikeGetitemArgType,
DTypeArgType,
FloatArgType,
RandomStateArgType,
RandomStateType,
ShapeArgType,
ShapeType,
)
try:
# functools.cached_property is only available in Python >=3.8
from functools import cached_property
except ImportError:
from cached_property import cached_property
_ValueType = TypeVar("ValueType")
class RandomVariable(Generic[_ValueType]):
"""
Random variables are the main objects used by probabilistic numerical methods.
Every probabilistic numerical method takes a random variable encoding the prior
distribution as input and outputs a random variable whose distribution encodes the
uncertainty arising from finite computation. The generic signature of a
probabilistic numerical method is:
``output_rv = probnum_method(input_rv, method_params)``
In practice, most random variables used by methods in ProbNum have Dirac or Gaussian
measure.
Instances of :class:`RandomVariable` can be added, multiplied, etc. with arrays and
linear operators. This may change their ``distribution`` and not necessarily all
previously available methods are retained.
The internals of :class:`RandomVariable` objects are assumed to be constant over
their whole lifecycle. This is due to the caches used to make certain computations
more efficient. As a consequence, altering the internal state of a
:class:`RandomVariable` (e.g. its mean, cov, sampling function, etc.) will result in
undefined behavior. In particular, this should be kept in mind when subclassing
:class:`RandomVariable` or any of its descendants.
Parameters
----------
shape :
Shape of realizations of this random variable.
dtype :
Data type of realizations of this random variable. If ``object`` will be
converted to ``numpy.dtype``.
as_value_type :
Function which can be used to transform user-supplied arguments, interpreted as
realizations of this random variable, to an easy-to-process, normalized format.
Will be called internally to transform the argument of functions like
``in_support``, ``cdf`` and ``logcdf``, ``pmf`` and ``logpmf`` (in
:class:`DiscreteRandomVariable`), ``pdf`` and ``logpdf`` (in
:class:`ContinuousRandomVariable`), and potentially by similar functions in
subclasses.
For instance, this method is useful if (``log``)``cdf`` and (``log``)``pdf``
both only work on :class:`np.float_` arguments, but we still want the user to be
able to pass Python :class:`float`. Then ``as_value_type`` should be set to
something like ``lambda x: np.float64(x)``.
See Also
--------
asrandvar : Transform into a :class:`RandomVariable`.
Examples
--------
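A minimal construction, shown only as a sketch (the callables passed here are
arbitrary placeholders rather than part of any fixed API):

>>> import numpy as np
>>> rv = RandomVariable(
...     shape=(),
...     dtype=np.float_,
...     sample=lambda size: np.random.standard_normal(size=size),
...     mean=lambda: np.float_(0.0),
... )
>>> rv.shape
()
>>> float(rv.mean)
0.0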
"""
# pylint: disable=too-many-instance-attributes,too-many-public-methods
def __init__(
self,
shape: ShapeArgType,
dtype: DTypeArgType,
random_state: RandomStateArgType = None,
parameters: Optional[Dict[str, Any]] = None,
sample: Optional[Callable[[ShapeType], _ValueType]] = None,
in_support: Optional[Callable[[_ValueType], bool]] = None,
cdf: Optional[Callable[[_ValueType], np.float_]] = None,
logcdf: Optional[Callable[[_ValueType], np.float_]] = None,
quantile: Optional[Callable[[FloatArgType], _ValueType]] = None,
mode: Optional[Callable[[], _ValueType]] = None,
median: Optional[Callable[[], _ValueType]] = None,
mean: Optional[Callable[[], _ValueType]] = None,
cov: Optional[Callable[[], _ValueType]] = None,
var: Optional[Callable[[], _ValueType]] = None,
std: Optional[Callable[[], _ValueType]] = None,
entropy: Optional[Callable[[], np.float_]] = None,
as_value_type: Optional[Callable[[Any], _ValueType]] = None,
):
# pylint: disable=too-many-arguments,too-many-locals
"""Create a new random variable."""
self.__shape = _utils.as_shape(shape)
# Data Types
self.__dtype = np.dtype(dtype)
self.__median_dtype = RandomVariable.infer_median_dtype(self.__dtype)
self.__moment_dtype = RandomVariable.infer_moment_dtype(self.__dtype)
self._random_state = _utils.as_random_state(random_state)
# Probability distribution of the random variable
self.__parameters = parameters.copy() if parameters is not None else {}
self.__sample = sample
self.__in_support = in_support
self.__cdf = cdf
self.__logcdf = logcdf
self.__quantile = quantile
# Properties of the random variable
self.__mode = mode
self.__median = median
self.__mean = mean
self.__cov = cov
self.__var = var
self.__std = std
self.__entropy = entropy
# Utilities
self.__as_value_type = as_value_type
def __repr__(self) -> str:
return f"<{self.shape} {self.__class__.__name__} with dtype={self.dtype}>"
@property
def shape(self) -> ShapeType:
"""Shape of realizations of the random variable."""
return self.__shape
@cached_property
def ndim(self) -> int:
return len(self.__shape)
@cached_property
def size(self) -> int:
return int(np.prod(self.__shape))
@property
def dtype(self) -> np.dtype:
"""Data type of (elements of) a realization of this random variable."""
return self.__dtype
@property
def median_dtype(self) -> np.dtype:
"""The dtype of the :attr:`median`. It will be set to the dtype arising from
the multiplication of values with dtypes :attr:`dtype` and :class:`np.float_`.
This is motivated by the fact that, even for discrete random variables, e.g.
integer-valued random variables, the :attr:`median` might lie in between two
values in which case these values are averaged. For example, a uniform random
variable on :math:`\\{ 1, 2, 3, 4 \\}` will have a median of :math:`2.5`.
"""
return self.__median_dtype
@property
def moment_dtype(self) -> np.dtype:
"""The dtype of any (function of a) moment of the random variable, e.g. its
:attr:`mean`, :attr:`cov`, :attr:`var`, or :attr:`std`. It will be set to the
dtype arising from the multiplication of values with dtypes :attr:`dtype`
and :class:`np.float_`. This is motivated by the mathematical definition of a
moment as a sum or an integral over products of probabilities and values of the
random variable, which are represented as using the dtypes :class:`np.float_`
and :attr:`dtype`, respectively.
"""
return self.__moment_dtype
@property
def random_state(self) -> RandomStateType:
"""Random state of the random variable.
This attribute defines the RandomState object to use for drawing
realizations from this random variable.
If None (or np.random), the global np.random state is used.
If integer, it is used to seed the local :class:`~numpy.random.RandomState`
instance.
"""
return self._random_state
@random_state.setter
def random_state(self, seed: RandomStateArgType):
"""Get or set the RandomState object of the underlying distribution.
This can be either None or an existing RandomState object.
If None (or np.random), use the RandomState singleton used by np.random.
If already a RandomState instance, use it.
If an int, use a new RandomState instance seeded with seed.
"""
self._random_state = _utils.as_random_state(seed)
@property
def parameters(self) -> Dict[str, Any]:
"""
Parameters of the probability distribution.
The parameters of the distribution such as mean, variance, et cetera stored in a
``dict``.
"""
return self.__parameters.copy()
@cached_property
def mode(self) -> _ValueType:
"""
Mode of the random variable.
Returns
-------
mode : float
The mode of the random variable.
"""
if self.__mode is None:
raise NotImplementedError
mode = self.__mode()
RandomVariable._check_property_value(
"mode",
mode,
shape=self.__shape,
dtype=self.__dtype,
)
# Make immutable
if isinstance(mode, np.ndarray):
mode.setflags(write=False)
return mode
@cached_property
def median(self) -> _ValueType:
"""
Median of the random variable.
To learn about the dtype of the median, see :attr:`median_dtype`.
Returns
-------
median : float
The median of the distribution.
"""
if self.__shape != ():
raise NotImplementedError(
"The median is only defined for scalar random variables."
)
if self.__median is None:
raise NotImplementedError
median = self.__median()
RandomVariable._check_property_value(
"median",
median,
shape=self.__shape,
dtype=self.__median_dtype,
)
# Make immutable
if isinstance(median, np.ndarray):
median.setflags(write=False)
return median
@cached_property
def mean(self) -> _ValueType:
"""
Mean :math:`\\mathbb{E}(X)` of the distribution.
To learn about the dtype of the mean, see :attr:`moment_dtype`.
Returns
-------
mean : array-like
The mean of the distribution.
"""
if self.__mean is None:
raise NotImplementedError
mean = self.__mean()
RandomVariable._check_property_value(
"mean",
mean,
shape=self.__shape,
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(mean, np.ndarray):
mean.setflags(write=False)
return mean
@cached_property
def cov(self) -> _ValueType:
"""
Covariance :math:`\\operatorname{Cov}(X) = \\mathbb{E}((X-\\mathbb{E}(X))(X-\\mathbb{E}(X))^\\top)`
of the random variable.
To learn about the dtype of the covariance, see :attr:`moment_dtype`.
Returns
-------
cov : array-like
The covariance of the random variable.
""" # pylint: disable=line-too-long
if self.__cov is None:
raise NotImplementedError
cov = self.__cov()
RandomVariable._check_property_value(
"covariance",
cov,
shape=(self.size, self.size) if self.ndim > 0 else (),
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(cov, np.ndarray):
cov.setflags(write=False)
return cov
@cached_property
def var(self) -> _ValueType:
"""
Variance :math:`\\operatorname{Var}(X) = \\mathbb{E}((X-\\mathbb{E}(X))^2)` of
the distribution.
To learn about the dtype of the variance, see :attr:`moment_dtype`.
Returns
-------
var : array-like
The variance of the distribution.
"""
if self.__var is None:
try:
var = np.diag(self.cov).reshape(self.__shape).copy()
except NotImplementedError as exc:
raise NotImplementedError from exc
else:
var = self.__var()
RandomVariable._check_property_value(
"variance",
var,
shape=self.__shape,
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(var, np.ndarray):
var.setflags(write=False)
return var
@cached_property
def std(self) -> _ValueType:
"""
Standard deviation of the distribution.
To learn about the dtype of the standard deviation, see :attr:`moment_dtype`.
Returns
-------
std : array-like
The standard deviation of the distribution.
"""
if self.__std is None:
try:
std = np.sqrt(self.var)
except NotImplementedError as exc:
raise NotImplementedError from exc
else:
std = self.__std()
RandomVariable._check_property_value(
"standard deviation",
std,
shape=self.__shape,
dtype=self.__moment_dtype,
)
# Make immutable
if isinstance(std, np.ndarray):
std.setflags(write=False)
return std
@cached_property
def entropy(self) -> np.float_:
if self.__entropy is None:
raise NotImplementedError
entropy = self.__entropy()
entropy = RandomVariable._ensure_numpy_float(
"entropy", entropy, force_scalar=True
)
return entropy
def in_support(self, x: _ValueType) -> bool:
if self.__in_support is None:
raise NotImplementedError
in_support = self.__in_support(self._as_value_type(x))
if not isinstance(in_support, bool):
raise ValueError(
f"The function `in_support` must return a `bool`, but its return value "
f"is of type `{type(x)}`."
)
return in_support
def sample(self, size: ShapeArgType = ()) -> _ValueType:
"""
Draw realizations from a random variable.
Parameters
----------
size : tuple
Size of the drawn sample of realizations.
Returns
-------
sample : array-like
Sample of realizations with the given ``size`` and the inherent ``shape``.
"""
if self.__sample is None:
raise NotImplementedError("No sampling method provided.")
return self.__sample(size=_utils.as_shape(size))
def cdf(self, x: _ValueType) -> np.float_:
"""
Cumulative distribution function.
Parameters
----------
x : array-like
Evaluation points of the cumulative distribution function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The cdf evaluation will be broadcast over all additional dimensions.
Returns
-------
q : array-like
Value of the cumulative density function at the given points.
"""
if self.__cdf is not None:
return RandomVariable._ensure_numpy_float(
"cdf", self.__cdf(self._as_value_type(x))
)
elif self.__logcdf is not None:
cdf = np.exp(self.logcdf(self._as_value_type(x)))
assert isinstance(cdf, np.float_)
return cdf
else:
raise NotImplementedError(
f"Neither the `cdf` nor the `logcdf` of the random variable object "
f"with type `{type(self).__name__}` is implemented."
)
def logcdf(self, x: _ValueType) -> np.float_:
"""
Log-cumulative distribution function.
Parameters
----------
x : array-like
Evaluation points of the cumulative distribution function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The logcdf evaluation will be broadcast over all additional dimensions.
Returns
-------
q : array-like
Value of the log-cumulative density function at the given points.
"""
if self.__logcdf is not None:
return RandomVariable._ensure_numpy_float(
"logcdf", self.__logcdf(self._as_value_type(x))
)
elif self.__cdf is not None:
logcdf = np.log(self.__cdf(x))
assert isinstance(logcdf, np.float_)
return logcdf
else:
raise NotImplementedError(
f"Neither the `logcdf` nor the `cdf` of the random variable object "
f"with type `{type(self).__name__}` is implemented."
)
def quantile(self, p: FloatArgType) -> _ValueType:
"""Quantile function.
The quantile function :math:`Q \\colon [0, 1] \\to \\mathbb{R}` of a random
variable :math:`X` is defined as
:math:`Q(p) = \\inf\\{ x \\in \\mathbb{R} \\colon p \\le F_X(x) \\}`, where
:math:`F_X \\colon \\mathbb{R} \\to [0, 1]` is the :meth:`cdf` of the random
variable. From the definition it follows that the quantile function always
returns values of the same dtype as the random variable. For instance, for a
discrete distribution over the integers, the returned quantiles will also be
integers. This means that, in general, :math:`Q(0.5)` is not equal to the
:attr:`median` as it is defined in this class. See
https://en.wikipedia.org/wiki/Quantile_function for more details and examples.
"""
if self.__shape != ():
raise NotImplementedError(
"The quantile function is only defined for scalar random variables."
)
if self.__quantile is None:
raise NotImplementedError
try:
p = _utils.as_numpy_scalar(p, dtype=np.floating)
except TypeError as exc:
raise TypeError(
"The given argument `p` can not be cast to a `np.floating` object."
) from exc
quantile = self.__quantile(p)
if quantile.shape != self.__shape:
raise ValueError(
f"The quantile function should return values of the same shape as the "
f"random variable, i.e. {self.__shape}, but it returned a value with "
f"{quantile.shape}."
)
if quantile.dtype != self.__dtype:
raise ValueError(
f"The quantile function should return values of the same dtype as the "
f"random variable, i.e. `{self.__dtype.name}`, but it returned a value "
f"with dtype `{quantile.dtype.name}`."
)
return quantile
def __getitem__(self, key: ArrayLikeGetitemArgType) -> "RandomVariable":
return RandomVariable(
shape=np.empty(shape=self.shape)[key].shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: self.sample(size)[key],
mode=lambda: self.mode[key],
mean=lambda: self.mean[key],
var=lambda: self.var[key],
std=lambda: self.std[key],
entropy=lambda: self.entropy,
as_value_type=self.__as_value_type,
)
def reshape(self, newshape: ShapeArgType) -> "RandomVariable":
"""
Give a new shape to a random variable.
Parameters
----------
newshape : int or tuple of ints
New shape for the random variable. It must be compatible with the original
shape.
Returns
-------
reshaped_rv : ``self`` with the new dimensions of ``shape``.
"""
newshape = _utils.as_shape(newshape)
return RandomVariable(
shape=newshape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: self.sample(size).reshape(size + newshape),
mode=lambda: self.mode.reshape(newshape),
median=lambda: self.median.reshape(newshape),
mean=lambda: self.mean.reshape(newshape),
cov=lambda: self.cov,
var=lambda: self.var.reshape(newshape),
std=lambda: self.std.reshape(newshape),
entropy=lambda: self.entropy,
as_value_type=self.__as_value_type,
)
def transpose(self, *axes: int) -> "RandomVariable":
"""
Transpose the random variable.
Parameters
----------
axes : None, tuple of ints, or n ints
See documentation of numpy.ndarray.transpose.
Returns
-------
transposed_rv : The transposed random variable.
"""
return RandomVariable(
shape=np.empty(shape=self.shape).transpose(*axes).shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: self.sample(size).transpose(*axes),
mode=lambda: self.mode.transpose(*axes),
median=lambda: self.median.transpose(*axes),
mean=lambda: self.mean.transpose(*axes),
cov=lambda: self.cov,
var=lambda: self.var.transpose(*axes),
std=lambda: self.std.transpose(*axes),
entropy=lambda: self.entropy,
as_value_type=self.__as_value_type,
)
T = property(transpose)
# Unary arithmetic operations
def __neg__(self) -> "RandomVariable":
return RandomVariable(
shape=self.shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: -self.sample(size=size),
in_support=lambda x: self.in_support(-x),
mode=lambda: -self.mode,
median=lambda: -self.median,
mean=lambda: -self.mean,
cov=lambda: self.cov,
var=lambda: self.var,
std=lambda: self.std,
as_value_type=self.__as_value_type,
)
def __pos__(self) -> "RandomVariable":
return RandomVariable(
shape=self.shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: +self.sample(size=size),
in_support=lambda x: self.in_support(+x),
mode=lambda: +self.mode,
median=lambda: +self.median,
mean=lambda: +self.mean,
cov=lambda: self.cov,
var=lambda: self.var,
std=lambda: self.std,
as_value_type=self.__as_value_type,
)
def __abs__(self) -> "RandomVariable":
return RandomVariable(
shape=self.shape,
dtype=self.dtype,
random_state=_utils.derive_random_seed(self.random_state),
sample=lambda size: abs(self.sample(size=size)),
)
# Binary arithmetic operations
__array_ufunc__ = None
"""
This prevents numpy from calling elementwise arithmetic
operations allowing expressions like: y = np.array([1, 1]) + RV
to call the arithmetic operations defined by RandomVariable
instead of elementwise. Thus no array of RandomVariables but a
RandomVariable with the correct shape is returned.
"""
def __add__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import add
return add(self, other)
def __radd__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import add
return add(other, self)
def __sub__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import sub
return sub(self, other)
def __rsub__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import sub
return sub(other, self)
def __mul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mul
return mul(self, other)
def __rmul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mul
return mul(other, self)
def __matmul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import matmul
return matmul(self, other)
def __rmatmul__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import matmul
return matmul(other, self)
def __truediv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import truediv
return truediv(self, other)
def __rtruediv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import truediv
return truediv(other, self)
def __floordiv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import floordiv
return floordiv(self, other)
def __rfloordiv__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import floordiv
return floordiv(other, self)
def __mod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mod
return mod(self, other)
def __rmod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import mod
return mod(other, self)
def __divmod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import divmod_
return divmod_(self, other)
def __rdivmod__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import divmod_
return divmod_(other, self)
def __pow__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import pow_
return pow_(self, other)
def __rpow__(self, other: Any) -> "RandomVariable":
# pylint: disable=import-outside-toplevel,cyclic-import
from ._arithmetic import pow_
return pow_(other, self)
@staticmethod
def infer_median_dtype(value_dtype: DTypeArgType) -> np.dtype:
return RandomVariable.infer_moment_dtype(value_dtype)
@staticmethod
def infer_moment_dtype(value_dtype: DTypeArgType) -> np.dtype:
return np.promote_types(value_dtype, np.float_)
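    # For instance, ``infer_moment_dtype(np.int32)`` promotes to
    # ``np.dtype('float64')``: moments are always represented with a floating
    # point dtype of at least double precision.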
def _as_value_type(self, x: Any) -> _ValueType:
if self.__as_value_type is not None:
return self.__as_value_type(x)
return x
@staticmethod
def _check_property_value(
name: str,
value: Any,
shape: Optional[Tuple[int, ...]] = None,
dtype: Optional[np.dtype] = None,
):
if shape is not None:
if value.shape != shape:
raise ValueError(
f"The {name} of the random variable does not have the correct "
f"shape. Expected {shape} but got {value.shape}."
)
if dtype is not None:
if not np.issubdtype(value.dtype, dtype):
raise ValueError(
f"The {name} of the random variable does not have the correct "
f"dtype. Expected {dtype.name} but got {value.dtype.name}."
)
@classmethod
def _ensure_numpy_float(
cls, name: str, value: Any, force_scalar: bool = False
) -> Union[np.float_, np.ndarray]:
if np.isscalar(value):
if not isinstance(value, np.float_):
try:
value = _utils.as_numpy_scalar(value, dtype=np.float_)
except TypeError as err:
raise TypeError(
f"The function `{name}` specified via the constructor of "
f"`{cls.__name__}` must return a scalar value that can be "
f"converted to a `np.float_`, which is not possible for "
f"{value} of type {type(value)}."
) from err
elif not force_scalar:
try:
value = np.asarray(value, dtype=np.float_)
except TypeError as err:
raise TypeError(
f"The function `{name}` specified via the constructor of "
f"`{cls.__name__}` must return a value that can be converted "
f"to a `np.ndarray` of type `np.float_`, which is not possible "
f"for {value} of type {type(value)}."
) from err
else:
raise TypeError(
f"The function `{name}` specified via the constructor of "
f"`{cls.__name__}` must return a scalar value, but {value} of type "
f"{type(value)} is not scalar."
)
assert isinstance(value, (np.float_, np.ndarray))
return value
class DiscreteRandomVariable(RandomVariable[_ValueType]):
def __init__(
self,
shape: ShapeArgType,
dtype: DTypeArgType,
random_state: Optional[RandomStateType] = None,
parameters: Optional[Dict[str, Any]] = None,
sample: Optional[Callable[[ShapeArgType], _ValueType]] = None,
in_support: Optional[Callable[[_ValueType], bool]] = None,
pmf: Optional[Callable[[_ValueType], np.float_]] = None,
logpmf: Optional[Callable[[_ValueType], np.float_]] = None,
cdf: Optional[Callable[[_ValueType], np.float_]] = None,
logcdf: Optional[Callable[[_ValueType], np.float_]] = None,
quantile: Optional[Callable[[FloatArgType], _ValueType]] = None,
mode: Optional[Callable[[], _ValueType]] = None,
median: Optional[Callable[[], _ValueType]] = None,
mean: Optional[Callable[[], _ValueType]] = None,
cov: Optional[Callable[[], _ValueType]] = None,
var: Optional[Callable[[], _ValueType]] = None,
std: Optional[Callable[[], _ValueType]] = None,
entropy: Optional[Callable[[], np.float_]] = None,
):
# Probability mass function
self.__pmf = pmf
self.__logpmf = logpmf
super().__init__(
shape=shape,
dtype=dtype,
random_state=random_state,
parameters=parameters,
sample=sample,
in_support=in_support,
cdf=cdf,
logcdf=logcdf,
quantile=quantile,
mode=mode,
median=median,
mean=mean,
cov=cov,
var=var,
std=std,
entropy=entropy,
)
def pmf(self, x: _ValueType) -> np.float_:
if self.__pmf is not None:
return DiscreteRandomVariable._ensure_numpy_float("pmf", self.__pmf(x))
elif self.__logpmf is not None:
pmf = np.exp(self.__logpmf(x))
assert isinstance(pmf, np.float_)
return pmf
else:
raise NotImplementedError(
f"Neither the `pmf` nor the `logpmf` of the discrete random variable "
f"object with type `{type(self).__name__}` is implemented."
)
def logpmf(self, x: _ValueType) -> np.float_:
if self.__logpmf is not None:
return DiscreteRandomVariable._ensure_numpy_float(
"logpmf", self.__logpmf(self._as_value_type(x))
)
elif self.__pmf is not None:
logpmf = np.log(self.__pmf(self._as_value_type(x)))
assert isinstance(logpmf, np.float_)
return logpmf
else:
raise NotImplementedError(
f"Neither the `logpmf` nor the `pmf` of the discrete random variable "
f"object with type `{type(self).__name__}` is implemented."
)
class ContinuousRandomVariable(RandomVariable[_ValueType]):
def __init__(
self,
shape: ShapeArgType,
dtype: DTypeArgType,
random_state: Optional[RandomStateType] = None,
parameters: Optional[Dict[str, Any]] = None,
sample: Optional[Callable[[ShapeArgType], _ValueType]] = None,
in_support: Optional[Callable[[_ValueType], bool]] = None,
pdf: Optional[Callable[[_ValueType], np.float_]] = None,
logpdf: Optional[Callable[[_ValueType], np.float_]] = None,
cdf: Optional[Callable[[_ValueType], np.float_]] = None,
logcdf: Optional[Callable[[_ValueType], np.float_]] = None,
quantile: Optional[Callable[[FloatArgType], _ValueType]] = None,
mode: Optional[Callable[[], _ValueType]] = None,
median: Optional[Callable[[], _ValueType]] = None,
mean: Optional[Callable[[], _ValueType]] = None,
cov: Optional[Callable[[], _ValueType]] = None,
var: Optional[Callable[[], _ValueType]] = None,
std: Optional[Callable[[], _ValueType]] = None,
entropy: Optional[Callable[[], np.float_]] = None,
):
# Probability density function
self.__pdf = pdf
self.__logpdf = logpdf
super().__init__(
shape=shape,
dtype=dtype,
random_state=random_state,
parameters=parameters,
sample=sample,
in_support=in_support,
cdf=cdf,
logcdf=logcdf,
quantile=quantile,
mode=mode,
median=median,
mean=mean,
cov=cov,
var=var,
std=std,
entropy=entropy,
)
def pdf(self, x: _ValueType) -> np.float_:
"""
Probability density or mass function.
Following the predominant convention in mathematics, we express pdfs with
respect to the Lebesgue measure unless stated otherwise.
Parameters
----------
x : array-like
Evaluation points of the probability density / mass function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The pdf evaluation will be broadcast over all additional dimensions.
Returns
-------
p : array-like
Value of the probability density / mass function at the given points.
"""
if self.__pdf is not None:
return ContinuousRandomVariable._ensure_numpy_float(
"pdf", self.__pdf(self._as_value_type(x))
)
if self.__logpdf is not None:
pdf = np.exp(self.__logpdf(self._as_value_type(x)))
assert isinstance(pdf, np.float_)
return pdf
raise NotImplementedError(
f"Neither the `pdf` nor the `logpdf` of the continuous random variable "
f"object with type `{type(self).__name__}` is implemented."
)
def logpdf(self, x: _ValueType) -> np.float_:
"""
Natural logarithm of the probability density function.
Parameters
----------
x : array-like
Evaluation points of the log-probability density/mass function.
The shape of this argument should be :code:`(..., S1, ..., SN)`, where
:code:`(S1, ..., SN)` is the :attr:`shape` of the random variable.
The logpdf evaluation will be broadcast over all additional dimensions.
Returns
-------
logp : array-like
Value of the log-probability density / mass function at the given points.
"""
if self.__logpdf is not None:
return ContinuousRandomVariable._ensure_numpy_float(
"logpdf", self.__logpdf(self._as_value_type(x))
)
elif self.__pdf is not None:
logpdf = np.log(self.__pdf(self._as_value_type(x)))
assert isinstance(logpdf, np.float_)
return logpdf
else:
raise NotImplementedError(
f"Neither the `logpdf` nor the `pdf` of the continuous random variable "
f"object with type `{type(self).__name__}` is implemented."
)
| [
"numpy.prod",
"probnum.utils.as_shape",
"numpy.sqrt",
"numpy.isscalar",
"numpy.promote_types",
"probnum.utils.as_random_state",
"numpy.asarray",
"probnum.utils.as_numpy_scalar",
"numpy.diag",
"numpy.issubdtype",
"numpy.empty",
"probnum.utils.derive_random_seed",
"numpy.dtype",
"typing.TypeVar"
] | [((658, 678), 'typing.TypeVar', 'TypeVar', (['"""ValueType"""'], {}), "('ValueType')\n", (665, 678), False, 'from typing import Any, Callable, Dict, Generic, Optional, Tuple, TypeVar, Union\n'), ((4355, 4377), 'probnum.utils.as_shape', '_utils.as_shape', (['shape'], {}), '(shape)\n', (4370, 4377), True, 'from probnum import utils as _utils\n'), ((4423, 4438), 'numpy.dtype', 'np.dtype', (['dtype'], {}), '(dtype)\n', (4431, 4438), True, 'import numpy as np\n'), ((4625, 4661), 'probnum.utils.as_random_state', '_utils.as_random_state', (['random_state'], {}), '(random_state)\n', (4647, 4661), True, 'from probnum import utils as _utils\n'), ((8030, 8058), 'probnum.utils.as_random_state', '_utils.as_random_state', (['seed'], {}), '(seed)\n', (8052, 8058), True, 'from probnum import utils as _utils\n'), ((19830, 19855), 'probnum.utils.as_shape', '_utils.as_shape', (['newshape'], {}), '(newshape)\n', (19845, 19855), True, 'from probnum import utils as _utils\n'), ((27213, 27253), 'numpy.promote_types', 'np.promote_types', (['value_dtype', 'np.float_'], {}), '(value_dtype, np.float_)\n', (27229, 27253), True, 'import numpy as np\n'), ((28334, 28352), 'numpy.isscalar', 'np.isscalar', (['value'], {}), '(value)\n', (28345, 28352), True, 'import numpy as np\n'), ((5670, 5691), 'numpy.prod', 'np.prod', (['self.__shape'], {}), '(self.__shape)\n', (5677, 5691), True, 'import numpy as np\n'), ((17904, 17948), 'probnum.utils.as_numpy_scalar', '_utils.as_numpy_scalar', (['p'], {'dtype': 'np.floating'}), '(p, dtype=np.floating)\n', (17926, 17948), True, 'from probnum import utils as _utils\n'), ((12628, 12645), 'numpy.sqrt', 'np.sqrt', (['self.var'], {}), '(self.var)\n', (12635, 12645), True, 'import numpy as np\n'), ((14374, 14395), 'probnum.utils.as_shape', '_utils.as_shape', (['size'], {}), '(size)\n', (14389, 14395), True, 'from probnum import utils as _utils\n'), ((19023, 19067), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (19048, 19067), True, 'from probnum import utils as _utils\n'), ((19971, 20015), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (19996, 20015), True, 'from probnum import utils as _utils\n'), ((21005, 21049), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (21030, 21049), True, 'from probnum import utils as _utils\n'), ((21742, 21786), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (21767, 21786), True, 'from probnum import utils as _utils\n'), ((22334, 22378), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (22359, 22378), True, 'from probnum import utils as _utils\n'), ((22926, 22970), 'probnum.utils.derive_random_seed', '_utils.derive_random_seed', (['self.random_state'], {}), '(self.random_state)\n', (22951, 22970), True, 'from probnum import utils as _utils\n'), ((27923, 27956), 'numpy.issubdtype', 'np.issubdtype', (['value.dtype', 'dtype'], {}), '(value.dtype, dtype)\n', (27936, 27956), True, 'import numpy as np\n'), ((28452, 28498), 'probnum.utils.as_numpy_scalar', '_utils.as_numpy_scalar', (['value'], {'dtype': 'np.float_'}), '(value, dtype=np.float_)\n', (28474, 28498), True, 'from probnum import utils as _utils\n'), ((28987, 29021), 'numpy.asarray', 'np.asarray', (['value'], {'dtype': 'np.float_'}), '(value, 
dtype=np.float_)\n', (28997, 29021), True, 'import numpy as np\n'), ((18929, 18955), 'numpy.empty', 'np.empty', ([], {'shape': 'self.shape'}), '(shape=self.shape)\n', (18937, 18955), True, 'import numpy as np\n'), ((20899, 20925), 'numpy.empty', 'np.empty', ([], {'shape': 'self.shape'}), '(shape=self.shape)\n', (20907, 20925), True, 'import numpy as np\n'), ((11747, 11764), 'numpy.diag', 'np.diag', (['self.cov'], {}), '(self.cov)\n', (11754, 11764), True, 'import numpy as np\n')] |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for address collection commands."""
import path_initializer
path_initializer.InitSysPath()
import json
import unittest
import gflags as flags
from gcutil_lib import address_cmds
from gcutil_lib import gcutil_unittest
from gcutil_lib import mock_api
from gcutil_lib import mock_lists
FLAGS = flags.FLAGS
class AddressCmdsTest(gcutil_unittest.GcutilTestCase):
def setUp(self):
self.mock, self.api = mock_api.CreateApi(self.version)
def testReserveAddressPromptsForRegion(self):
expected_project = 'test_project'
expected_address = 'test_address'
expected_description = 'test address'
expected_region = 'test-region'
expected_source_address = '123.123.123.1'
set_flags = {
'project': expected_project,
'description': expected_description,
'source_address': expected_source_address,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReserveAddress, 'reserveaddress', set_flags=set_flags)
mock_lists.GetSampleRegionListCall(
command, self.mock, num_responses=1, name=[expected_region])
call = self.mock.Respond('compute.addresses.insert', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('POST', request.method)
self.assertEqual(expected_project, request.parameters['project'])
self.assertEquals(expected_region, request.parameters['region'])
body = json.loads(request.body)
self.assertEqual(body['name'], expected_address)
self.assertEqual(body['description'], expected_description)
self.assertEquals(body['address'], expected_source_address)
def testReserveAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
expected_description = 'test address'
submitted_region = 'test-region'
expected_source_address = '192.168.127.12'
set_flags = {
'project': expected_project,
'description': expected_description,
'region': submitted_region,
'source_address': expected_source_address,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReserveAddress, 'reserveaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.insert', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('POST', request.method)
self.assertEqual(expected_project, request.parameters['project'])
self.assertEquals(submitted_region, request.parameters['region'])
body = json.loads(request.body)
self.assertEqual(body['name'], expected_address)
self.assertEqual(body['description'], expected_description)
self.assertEquals(body['address'], expected_source_address)
def testGetAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.get', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('GET', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], submitted_region)
self.assertEqual(parameters['address'], expected_address)
def testGetAddressPrintNonEmptyUsers(self):
expected_project = 'test_project'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
data = command.GetDetailRow({'users': ['fr-1', 'fr-2']})
expected_data = {
'v1': [
('users', ['fr-1', 'fr-2'])
],
}
self.assertEquals(
gcutil_unittest.SelectTemplateForVersion(
expected_data, command.api.version),
data)
def testGetAddressPrintEmptyUsers(self):
expected_project = 'test_project'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.GetAddress, 'getaddress', set_flags=set_flags)
data = command.GetDetailRow({'users': []})
expected_data = {
'v1': [
('users', [])
],
}
self.assertEquals(
gcutil_unittest.SelectTemplateForVersion(
expected_data, command.api.version),
data)
def testReleaseAddressGeneratesCorrectRequest(self):
expected_project = 'test_project'
expected_address = 'test_address'
submitted_region = 'test-region'
set_flags = {
'project': expected_project,
'region': submitted_region,
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.delete', {})
command.Handle(expected_address)
request = call.GetRequest()
self.assertEqual('DELETE', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], submitted_region)
self.assertEqual(parameters['address'], expected_address)
def testReleaseAddressWithoutRegionFlag(self):
expected_project = 'test_project'
expected_region = 'test-region'
expected_address = 'test_address'
address = ('projects/%s/regions/%s/addresses/%s' %
(expected_project, expected_region, expected_address))
set_flags = {
'project': 'incorrect_project',
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
call = self.mock.Respond('compute.addresses.delete', {})
command.Handle(address)
request = call.GetRequest()
self.assertEqual('DELETE', request.method)
self.assertEqual(None, request.body)
parameters = request.parameters
self.assertEqual(parameters['project'], expected_project)
self.assertEqual(parameters['region'], expected_region)
self.assertEqual(parameters['address'], expected_address)
def testReleaseMultipleAddresses(self):
expected_project = 'test_project'
expected_addresses = [
'test-addresses-%02d' % x for x in xrange(100)]
set_flags = {
'project': expected_project,
'region': 'region-a',
}
command = self._CreateAndInitializeCommand(
address_cmds.ReleaseAddress, 'releaseaddress', set_flags=set_flags)
calls = [self.mock.Respond('compute.addresses.delete', {})
for x in xrange(len(expected_addresses))]
_, exceptions = command.Handle(*expected_addresses)
self.assertEqual(0, len(exceptions))
sorted_calls = sorted([call.GetRequest().parameters['address'] for
call in calls])
self.assertEqual(expected_addresses, sorted_calls)
if __name__ == '__main__':
unittest.main(testLoader=gcutil_unittest.GcutilLoader())
| [
"json.loads",
"gcutil_lib.gcutil_unittest.SelectTemplateForVersion",
"gcutil_lib.mock_lists.GetSampleRegionListCall",
"gcutil_lib.mock_api.CreateApi",
"gcutil_lib.gcutil_unittest.GcutilLoader",
"path_initializer.InitSysPath"
] | [((674, 704), 'path_initializer.InitSysPath', 'path_initializer.InitSysPath', ([], {}), '()\n', (702, 704), False, 'import path_initializer\n'), ((1023, 1055), 'gcutil_lib.mock_api.CreateApi', 'mock_api.CreateApi', (['self.version'], {}), '(self.version)\n', (1041, 1055), False, 'from gcutil_lib import mock_api\n'), ((1593, 1692), 'gcutil_lib.mock_lists.GetSampleRegionListCall', 'mock_lists.GetSampleRegionListCall', (['command', 'self.mock'], {'num_responses': '(1)', 'name': '[expected_region]'}), '(command, self.mock, num_responses=1,\n name=[expected_region])\n', (1627, 1692), False, 'from gcutil_lib import mock_lists\n'), ((2026, 2050), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (2036, 2050), False, 'import json\n'), ((3137, 3161), 'json.loads', 'json.loads', (['request.body'], {}), '(request.body)\n', (3147, 3161), False, 'import json\n'), ((4700, 4776), 'gcutil_lib.gcutil_unittest.SelectTemplateForVersion', 'gcutil_unittest.SelectTemplateForVersion', (['expected_data', 'command.api.version'], {}), '(expected_data, command.api.version)\n', (4740, 4776), False, 'from gcutil_lib import gcutil_unittest\n'), ((5311, 5387), 'gcutil_lib.gcutil_unittest.SelectTemplateForVersion', 'gcutil_unittest.SelectTemplateForVersion', (['expected_data', 'command.api.version'], {}), '(expected_data, command.api.version)\n', (5351, 5387), False, 'from gcutil_lib import gcutil_unittest\n'), ((7995, 8025), 'gcutil_lib.gcutil_unittest.GcutilLoader', 'gcutil_unittest.GcutilLoader', ([], {}), '()\n', (8023, 8025), False, 'from gcutil_lib import gcutil_unittest\n')] |
#!/usr/bin/env python
# Copyright 2018 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
"""Utility tools that extracts DWARF information encoded in a wasm output
produced by the LLVM tools, and encodes it as a wasm source map. Additionally,
it can collect original sources, change files prefixes, and strip debug
sections from a wasm file.
"""
import argparse
from collections import OrderedDict, namedtuple
import json
import logging
from math import floor, log
import os
import re
from subprocess import Popen, PIPE
import sys
sys.path.insert(1, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from tools.shared import asstr
logger = logging.getLogger('wasm-sourcemap')
def parse_args():
parser = argparse.ArgumentParser(prog='wasm-sourcemap.py', description=__doc__)
parser.add_argument('wasm', help='wasm file')
parser.add_argument('-o', '--output', help='output source map')
parser.add_argument('-p', '--prefix', nargs='*', help='replace source debug filename prefix for source map', default=[])
parser.add_argument('-s', '--sources', action='store_true', help='read and embed source files from file system into source map')
parser.add_argument('-l', '--load-prefix', nargs='*', help='replace source debug filename prefix for reading sources from file system (see also --sources)', default=[])
parser.add_argument('-w', nargs='?', help='set output wasm file')
parser.add_argument('-x', '--strip', action='store_true', help='removes debug and linking sections')
  parser.add_argument('-u', '--source-map-url', nargs='?', help='specifies sourceMappingURL section contents')
parser.add_argument('--dwarfdump', help="path to llvm-dwarfdump executable")
parser.add_argument('--dwarfdump-output', nargs='?', help=argparse.SUPPRESS)
return parser.parse_args()
class Prefixes:
def __init__(self, args):
prefixes = []
for p in args:
if '=' in p:
prefix, replacement = p.split('=')
prefixes.append({'prefix': prefix, 'replacement': replacement})
else:
prefixes.append({'prefix': p, 'replacement': None})
self.prefixes = prefixes
self.cache = {}
def resolve(self, name):
if name in self.cache:
return self.cache[name]
result = name
for p in self.prefixes:
if name.startswith(p['prefix']):
if p['replacement'] is None:
result = name[len(p['prefix'])::]
else:
result = p['replacement'] + name[len(p['prefix'])::]
break
self.cache[name] = result
return result
# SourceMapPrefixes contains resolver for file names that are:
# - "sources" is for names that output to source maps JSON
# - "load" is for paths that used to load source text
SourceMapPrefixes = namedtuple('SourceMapPrefixes', 'sources, load')
def encode_vlq(n):
VLQ_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
x = (n << 1) if n >= 0 else ((-n << 1) + 1)
result = ""
while x > 31:
result = result + VLQ_CHARS[32 + (x & 31)]
x = x >> 5
return result + VLQ_CHARS[x]
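# For example, encode_vlq(2) yields "E" and encode_vlq(-1) yields "D"; values
# above 31 spill into continuation digits, e.g. encode_vlq(16) == "gB", which
# matches the base64 VLQ scheme used by the source map format.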
def read_var_uint(wasm, pos):
n = 0
shift = 0
b = ord(wasm[pos:pos + 1])
pos = pos + 1
while b >= 128:
n = n | ((b - 128) << shift)
b = ord(wasm[pos:pos + 1])
pos = pos + 1
shift += 7
return n + (b << shift), pos
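# LEB128 decoding: e.g. read_var_uint(b'\xe5\x8e\x26', 0) == (624485, 3). The
# low 7 bits of each byte are accumulated until a byte below 0x80 ends the
# sequence, and the updated read position is returned with the value.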
def strip_debug_sections(wasm):
logger.debug('Strip debug sections')
pos = 8
stripped = wasm[:pos]
while pos < len(wasm):
section_start = pos
section_id, pos_ = read_var_uint(wasm, pos)
section_size, section_body = read_var_uint(wasm, pos_)
pos = section_body + section_size
if section_id == 0:
name_len, name_pos = read_var_uint(wasm, section_body)
name_end = name_pos + name_len
name = wasm[name_pos:name_end]
if name == "linking" or name == "sourceMappingURL" or name.startswith("reloc..debug_") or name.startswith(".debug_"):
continue # skip debug related sections
stripped = stripped + wasm[section_start:pos]
return stripped
def encode_uint_var(n):
result = bytearray()
while n > 127:
result.append(128 | (n & 127))
n = n >> 7
result.append(n)
return bytes(result)
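# Unsigned LEB128 encoding, the inverse of read_var_uint: encode_uint_var(300)
# returns b'\xac\x02' and encode_uint_var(5) returns b'\x05'.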
def append_source_mapping(wasm, url):
logger.debug('Append sourceMappingURL section')
section_name = "sourceMappingURL"
section_content = encode_uint_var(len(section_name)) + section_name + encode_uint_var(len(url)) + url
return wasm + encode_uint_var(0) + encode_uint_var(len(section_content)) + section_content
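# The offset 8 used below skips the wasm preamble (4-byte "\0asm" magic plus
# the 4-byte version field); section id 10 identifies the Code section, so
# get_code_section_offset returns the file offset of the code section payload.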
def get_code_section_offset(wasm):
logger.debug('Read sections index')
pos = 8
while pos < len(wasm):
section_id, pos_ = read_var_uint(wasm, pos)
section_size, pos = read_var_uint(wasm, pos_)
if section_id == 10:
return pos
pos = pos + section_size
def remove_dead_entries(entries):
# Remove entries for dead functions. It is a heuristics to ignore data if the
# function starting address near to 0 (is equal to its size field length).
block_start = 0
cur_entry = 0
while cur_entry < len(entries):
if not entries[cur_entry]['eos']:
cur_entry += 1
continue
fn_start = entries[block_start]['address']
# Calculate the LEB encoded function size (including size field)
fn_size_length = floor(log(entries[cur_entry]['address'] - fn_start + 1, 128)) + 1
min_live_offset = 1 + fn_size_length # 1 byte is for code section entries
if fn_start < min_live_offset:
# Remove dead code debug info block.
del entries[block_start:cur_entry + 1]
cur_entry = block_start
continue
cur_entry += 1
block_start = cur_entry
def read_dwarf_entries(wasm, options):
if options.dwarfdump_output:
output = open(options.dwarfdump_output, 'r').read()
elif options.dwarfdump:
logger.debug('Reading DWARF information from %s' % wasm)
if not os.path.exists(options.dwarfdump):
logger.error('llvm-dwarfdump not found: ' + options.dwarfdump)
sys.exit(1)
process = Popen([options.dwarfdump, "-debug-info", "-debug-line", wasm], stdout=PIPE)
output, err = process.communicate()
exit_code = process.wait()
if exit_code != 0:
logger.error('Error during llvm-dwarfdump execution (%s)' % exit_code)
sys.exit(1)
else:
logger.error('Please specify either --dwarfdump or --dwarfdump-output')
sys.exit(1)
entries = []
debug_line_chunks = re.split(r"debug_line\[(0x[0-9a-f]*)\]", asstr(output))
maybe_debug_info_content = debug_line_chunks[0]
for i in range(1, len(debug_line_chunks), 2):
stmt_list = debug_line_chunks[i]
comp_dir_match = re.search(r"DW_AT_stmt_list\s+\(" + stmt_list + r"\)\s+" +
r"DW_AT_comp_dir\s+\(\"([^\"]+)", maybe_debug_info_content)
comp_dir = comp_dir_match.group(1) if comp_dir_match is not None else ""
line_chunk = debug_line_chunks[i + 1]
# include_directories[ 1] = "/Users/yury/Work/junk/sqlite-playground/src"
# file_names[ 1]:
# name: "playground.c"
# dir_index: 1
# mod_time: 0x00000000
# length: 0x00000000
#
# Address Line Column File ISA Discriminator Flags
# ------------------ ------ ------ ------ --- ------------- -------------
# 0x0000000000000006 22 0 1 0 0 is_stmt
# 0x0000000000000007 23 10 1 0 0 is_stmt prologue_end
# 0x000000000000000f 23 3 1 0 0
# 0x0000000000000010 23 3 1 0 0 end_sequence
# 0x0000000000000011 28 0 1 0 0 is_stmt
include_directories = {'0': comp_dir}
for dir in re.finditer(r"include_directories\[\s*(\d+)\] = \"([^\"]*)", line_chunk):
include_directories[dir.group(1)] = dir.group(2)
files = {}
for file in re.finditer(r"file_names\[\s*(\d+)\]:\s+name: \"([^\"]*)\"\s+dir_index: (\d+)", line_chunk):
dir = include_directories[file.group(3)]
file_path = (dir + '/' if file.group(2)[0] != '/' else '') + file.group(2)
files[file.group(1)] = file_path
for line in re.finditer(r"\n0x([0-9a-f]+)\s+(\d+)\s+(\d+)\s+(\d+)(.*?end_sequence)?", line_chunk):
entry = {'address': int(line.group(1), 16), 'line': int(line.group(2)), 'column': int(line.group(3)), 'file': files[line.group(4)], 'eos': line.group(5) is not None}
if not entry['eos']:
entries.append(entry)
else:
# move end of function to the last END operator
entry['address'] -= 1
if entries[-1]['address'] == entry['address']:
# last entry has the same address, reusing
entries[-1]['eos'] = True
else:
entries.append(entry)
remove_dead_entries(entries)
# return entries sorted by the address field
return sorted(entries, key=lambda entry: entry['address'])
def build_sourcemap(entries, code_section_offset, prefixes, collect_sources):
sources = []
sources_content = [] if collect_sources else None
mappings = []
sources_map = {}
last_address = 0
last_source_id = 0
last_line = 1
last_column = 1
for entry in entries:
line = entry['line']
column = entry['column']
# ignore entries with line 0
if line == 0:
continue
# start at least at column 1
if column == 0:
column = 1
address = entry['address'] + code_section_offset
file_name = entry['file']
source_name = prefixes.sources.resolve(file_name)
if source_name not in sources_map:
source_id = len(sources)
sources_map[source_name] = source_id
sources.append(source_name)
if collect_sources:
load_name = prefixes.load.resolve(file_name)
try:
with open(load_name, 'r') as infile:
source_content = infile.read()
sources_content.append(source_content)
except IOError:
print('Failed to read source: %s' % load_name)
sources_content.append(None)
else:
source_id = sources_map[source_name]
address_delta = address - last_address
source_id_delta = source_id - last_source_id
line_delta = line - last_line
column_delta = column - last_column
mappings.append(encode_vlq(address_delta) + encode_vlq(source_id_delta) + encode_vlq(line_delta) + encode_vlq(column_delta))
last_address = address
last_source_id = source_id
last_line = line
last_column = column
return OrderedDict([('version', 3),
('names', []),
('sources', sources),
('sourcesContent', sources_content),
('mappings', ','.join(mappings))])
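# The result is a Source Map v3 dictionary: "mappings" is a comma-separated
# sequence of base64 VLQ segments (code offset delta, source index delta,
# line delta, column delta), with wasm code offsets standing in for generated
# columns; main() below serializes it to JSON as the .map file.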
def main():
options = parse_args()
wasm_input = options.wasm
with open(wasm_input, 'rb') as infile:
wasm = infile.read()
entries = read_dwarf_entries(wasm_input, options)
code_section_offset = get_code_section_offset(wasm)
prefixes = SourceMapPrefixes(sources=Prefixes(options.prefix), load=Prefixes(options.load_prefix))
logger.debug('Saving to %s' % options.output)
map = build_sourcemap(entries, code_section_offset, prefixes, options.sources)
with open(options.output, 'w') as outfile:
json.dump(map, outfile, separators=(',', ':'))
if options.strip:
wasm = strip_debug_sections(wasm)
if options.source_map_url:
wasm = append_source_mapping(wasm, options.source_map_url)
if options.w:
logger.debug('Saving wasm to %s' % options.w)
with open(options.w, 'wb') as outfile:
outfile.write(wasm)
logger.debug('Done')
return 0
if __name__ == '__main__':
logging.basicConfig(level=logging.DEBUG if os.environ.get('EMCC_DEBUG') else logging.INFO)
sys.exit(main())
| [
"logging.getLogger",
"os.path.exists",
"collections.namedtuple",
"argparse.ArgumentParser",
"subprocess.Popen",
"os.environ.get",
"math.log",
"tools.shared.asstr",
"re.finditer",
"sys.exit",
"os.path.abspath",
"json.dump",
"re.search"
] | [((838, 873), 'logging.getLogger', 'logging.getLogger', (['"""wasm-sourcemap"""'], {}), "('wasm-sourcemap')\n", (855, 873), False, 'import logging\n'), ((2912, 2960), 'collections.namedtuple', 'namedtuple', (['"""SourceMapPrefixes"""', '"""sources, load"""'], {}), "('SourceMapPrefixes', 'sources, load')\n", (2922, 2960), False, 'from collections import OrderedDict, namedtuple\n'), ((905, 975), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""wasm-sourcemap.py"""', 'description': '__doc__'}), "(prog='wasm-sourcemap.py', description=__doc__)\n", (928, 975), False, 'import argparse\n'), ((6575, 6588), 'tools.shared.asstr', 'asstr', (['output'], {}), '(output)\n', (6580, 6588), False, 'from tools.shared import asstr\n'), ((6746, 6873), 're.search', 're.search', (['(\'DW_AT_stmt_list\\\\s+\\\\(\' + stmt_list + \'\\\\)\\\\s+\' +\n \'DW_AT_comp_dir\\\\s+\\\\(\\\\"([^\\\\"]+)\')', 'maybe_debug_info_content'], {}), '(\'DW_AT_stmt_list\\\\s+\\\\(\' + stmt_list + \'\\\\)\\\\s+\' +\n \'DW_AT_comp_dir\\\\s+\\\\(\\\\"([^\\\\"]+)\', maybe_debug_info_content)\n', (6755, 6873), False, 'import re\n'), ((7836, 7913), 're.finditer', 're.finditer', (['"""include_directories\\\\[\\\\s*(\\\\d+)\\\\] = \\\\"([^\\\\"]*)"""', 'line_chunk'], {}), '(\'include_directories\\\\[\\\\s*(\\\\d+)\\\\] = \\\\"([^\\\\"]*)\', line_chunk)\n', (7847, 7913), False, 'import re\n'), ((7997, 8107), 're.finditer', 're.finditer', (['"""file_names\\\\[\\\\s*(\\\\d+)\\\\]:\\\\s+name: \\\\"([^\\\\"]*)\\\\"\\\\s+dir_index: (\\\\d+)"""', 'line_chunk'], {}), '(\n \'file_names\\\\[\\\\s*(\\\\d+)\\\\]:\\\\s+name: \\\\"([^\\\\"]*)\\\\"\\\\s+dir_index: (\\\\d+)\'\n , line_chunk)\n', (8008, 8107), False, 'import re\n'), ((8274, 8369), 're.finditer', 're.finditer', (['"""\\\\n0x([0-9a-f]+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)(.*?end_sequence)?"""', 'line_chunk'], {}), "('\\\\n0x([0-9a-f]+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)\\\\s+(\\\\d+)(.*?end_sequence)?',\n line_chunk)\n", (8285, 8369), False, 'import re\n'), ((11336, 11382), 'json.dump', 'json.dump', (['map', 'outfile'], {'separators': "(',', ':')"}), "(map, outfile, separators=(',', ':'))\n", (11345, 11382), False, 'import json\n'), ((767, 792), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (782, 792), False, 'import os\n'), ((6131, 6206), 'subprocess.Popen', 'Popen', (["[options.dwarfdump, '-debug-info', '-debug-line', wasm]"], {'stdout': 'PIPE'}), "([options.dwarfdump, '-debug-info', '-debug-line', wasm], stdout=PIPE)\n", (6136, 6206), False, 'from subprocess import Popen, PIPE\n'), ((6484, 6495), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6492, 6495), False, 'import sys\n'), ((5416, 5470), 'math.log', 'log', (["(entries[cur_entry]['address'] - fn_start + 1)", '(128)'], {}), "(entries[cur_entry]['address'] - fn_start + 1, 128)\n", (5419, 5470), False, 'from math import floor, log\n'), ((5995, 6028), 'os.path.exists', 'os.path.exists', (['options.dwarfdump'], {}), '(options.dwarfdump)\n', (6009, 6028), False, 'import os\n'), ((6105, 6116), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6113, 6116), False, 'import sys\n'), ((6384, 6395), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (6392, 6395), False, 'import sys\n'), ((11780, 11808), 'os.environ.get', 'os.environ.get', (['"""EMCC_DEBUG"""'], {}), "('EMCC_DEBUG')\n", (11794, 11808), False, 'import os\n')] |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import
import atexit
import os
import shutil
import signal
import subprocess
import sys
import tempfile
import threading
import grpc
from apache_beam.portability.api import beam_job_api_pb2_grpc
from apache_beam.runners.portability import local_job_service
from apache_beam.utils import subprocess_server
from apache_beam.version import __version__ as beam_version
class JobServer(object):
def start(self):
"""Starts this JobServer, returning a grpc service to which to submit jobs.
"""
raise NotImplementedError(type(self))
def stop(self):
"""Stops this job server."""
raise NotImplementedError(type(self))
class ExternalJobServer(JobServer):
def __init__(self, endpoint, timeout=None):
self._endpoint = endpoint
self._timeout = timeout
def start(self):
channel = grpc.insecure_channel(self._endpoint)
grpc.channel_ready_future(channel).result(timeout=self._timeout)
return beam_job_api_pb2_grpc.JobServiceStub(channel)
def stop(self):
pass
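  # Illustrative use: ExternalJobServer('localhost:8099').start() returns a
  # JobServiceStub bound to an already-running job server at that endpoint.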
class EmbeddedJobServer(JobServer):
def start(self):
return local_job_service.LocalJobServicer()
def stop(self):
pass
class StopOnExitJobServer(JobServer):
"""Wraps a JobServer such that its stop will automatically be called on exit.
"""
def __init__(self, job_server):
self._lock = threading.Lock()
self._job_server = job_server
self._started = False
def start(self):
with self._lock:
if not self._started:
self._endpoint = self._job_server.start()
self._started = True
atexit.register(self.stop)
signal.signal(signal.SIGINT, self.stop)
return self._endpoint
def stop(self):
with self._lock:
if self._started:
self._job_server.stop()
self._started = False
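  # Illustrative use:
  #   job_server = StopOnExitJobServer(EmbeddedJobServer())
  #   endpoint = job_server.start()  # stop() is hooked to atexit and SIGINT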
class SubprocessJobServer(JobServer):
"""An abstract base class for JobServers run as an external process."""
def __init__(self):
self._local_temp_root = None
self._server = None
def subprocess_cmd_and_endpoint(self):
raise NotImplementedError(type(self))
def start(self):
if self._server is None:
self._local_temp_root = tempfile.mkdtemp(prefix='beam-temp')
cmd, endpoint = self.subprocess_cmd_and_endpoint()
port = int(endpoint.split(':')[-1])
self._server = subprocess_server.SubprocessServer(
beam_job_api_pb2_grpc.JobServiceStub, cmd, port=port)
return self._server.start()
def stop(self):
if self._local_temp_root:
shutil.rmtree(self._local_temp_root)
self._local_temp_root = None
return self._server.stop()
def local_temp_dir(self, **kwargs):
return tempfile.mkdtemp(dir=self._local_temp_root, **kwargs)
class JavaJarJobServer(SubprocessJobServer):
MAVEN_REPOSITORY = 'https://repo.maven.apache.org/maven2/org/apache/beam'
JAR_CACHE = os.path.expanduser("~/.apache_beam/cache")
def java_arguments(self, job_port, artifacts_dir):
raise NotImplementedError(type(self))
def path_to_jar(self):
raise NotImplementedError(type(self))
@staticmethod
def path_to_beam_jar(gradle_target):
return subprocess_server.JavaJarServer.path_to_beam_jar(gradle_target)
@staticmethod
def local_jar(url):
return subprocess_server.JavaJarServer.local_jar(url)
def subprocess_cmd_and_endpoint(self):
jar_path = self.local_jar(self.path_to_jar())
artifacts_dir = self.local_temp_dir(prefix='artifacts')
job_port, = subprocess_server.pick_port(None)
return (
['java', '-jar', jar_path] + list(
self.java_arguments(job_port, artifacts_dir)),
'localhost:%s' % job_port)
class DockerizedJobServer(SubprocessJobServer):
"""
Spins up the JobServer in a docker container for local execution.
"""
def __init__(self, job_host="localhost",
job_port=None,
artifact_port=None,
expansion_port=None,
harness_port_range=(8100, 8200),
max_connection_retries=5):
super(DockerizedJobServer, self).__init__()
self.job_host = job_host
self.job_port = job_port
self.expansion_port = expansion_port
self.artifact_port = artifact_port
self.harness_port_range = harness_port_range
self.max_connection_retries = max_connection_retries
def subprocess_cmd_and_endpoint(self):
# TODO This is hardcoded to Flink at the moment but should be changed
job_server_image_name = os.environ['USER'] + \
"-docker-apache.bintray.io/beam/flink-job-server:latest"
docker_path = subprocess.check_output(
['which', 'docker']).strip().decode('utf-8')
cmd = ["docker", "run",
# We mount the docker binary and socket to be able to spin up
# "sibling" containers for the SDK harness.
"-v", ':'.join([docker_path, "/bin/docker"]),
"-v", "/var/run/docker.sock:/var/run/docker.sock"]
self.job_port, self.artifact_port, self.expansion_port = (
subprocess_server.pick_port(
self.job_port, self.artifact_port, self.expansion_port))
args = ['--job-host', self.job_host,
'--job-port', str(self.job_port),
'--artifact-port', str(self.artifact_port),
'--expansion-port', str(self.expansion_port)]
if sys.platform == "darwin":
      # Docker-for-Mac doesn't support host networking, so we need to explicitly
# publish ports from the Docker container to be able to connect to it.
# Also, all other containers need to be aware that they run Docker-on-Mac
# to connect against the internal Docker-for-Mac address.
cmd += ["-e", "DOCKER_MAC_CONTAINER=1"]
cmd += ["-p", "{}:{}".format(self.job_port, self.job_port)]
cmd += ["-p", "{}:{}".format(self.artifact_port, self.artifact_port)]
cmd += ["-p", "{}:{}".format(self.expansion_port, self.expansion_port)]
cmd += ["-p", "{0}-{1}:{0}-{1}".format(
self.harness_port_range[0], self.harness_port_range[1])]
else:
      # This shouldn't be set for MacOS because it destroys port forwarding,
# even though host networking is not supported on MacOS.
cmd.append("--network=host")
cmd.append(job_server_image_name)
return cmd + args, '%s:%s' % (self.job_host, self.job_port)
| [
"subprocess.check_output",
"signal.signal",
"apache_beam.utils.subprocess_server.JavaJarServer.path_to_beam_jar",
"apache_beam.runners.portability.local_job_service.LocalJobServicer",
"apache_beam.utils.subprocess_server.pick_port",
"threading.Lock",
"apache_beam.utils.subprocess_server.SubprocessServer",
"grpc.insecure_channel",
"apache_beam.portability.api.beam_job_api_pb2_grpc.JobServiceStub",
"apache_beam.utils.subprocess_server.JavaJarServer.local_jar",
"tempfile.mkdtemp",
"shutil.rmtree",
"grpc.channel_ready_future",
"atexit.register",
"os.path.expanduser"
] | [((3638, 3680), 'os.path.expanduser', 'os.path.expanduser', (['"""~/.apache_beam/cache"""'], {}), "('~/.apache_beam/cache')\n", (3656, 3680), False, 'import os\n'), ((1631, 1668), 'grpc.insecure_channel', 'grpc.insecure_channel', (['self._endpoint'], {}), '(self._endpoint)\n', (1652, 1668), False, 'import grpc\n'), ((1749, 1794), 'apache_beam.portability.api.beam_job_api_pb2_grpc.JobServiceStub', 'beam_job_api_pb2_grpc.JobServiceStub', (['channel'], {}), '(channel)\n', (1785, 1794), False, 'from apache_beam.portability.api import beam_job_api_pb2_grpc\n'), ((1891, 1927), 'apache_beam.runners.portability.local_job_service.LocalJobServicer', 'local_job_service.LocalJobServicer', ([], {}), '()\n', (1925, 1927), False, 'from apache_beam.runners.portability import local_job_service\n'), ((2133, 2149), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2147, 2149), False, 'import threading\n'), ((3446, 3499), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'dir': 'self._local_temp_root'}), '(dir=self._local_temp_root, **kwargs)\n', (3462, 3499), False, 'import tempfile\n'), ((3912, 3975), 'apache_beam.utils.subprocess_server.JavaJarServer.path_to_beam_jar', 'subprocess_server.JavaJarServer.path_to_beam_jar', (['gradle_target'], {}), '(gradle_target)\n', (3960, 3975), False, 'from apache_beam.utils import subprocess_server\n'), ((4026, 4072), 'apache_beam.utils.subprocess_server.JavaJarServer.local_jar', 'subprocess_server.JavaJarServer.local_jar', (['url'], {}), '(url)\n', (4067, 4072), False, 'from apache_beam.utils import subprocess_server\n'), ((4241, 4274), 'apache_beam.utils.subprocess_server.pick_port', 'subprocess_server.pick_port', (['None'], {}), '(None)\n', (4268, 4274), False, 'from apache_beam.utils import subprocess_server\n'), ((5757, 5845), 'apache_beam.utils.subprocess_server.pick_port', 'subprocess_server.pick_port', (['self.job_port', 'self.artifact_port', 'self.expansion_port'], {}), '(self.job_port, self.artifact_port, self.\n expansion_port)\n', (5784, 5845), False, 'from apache_beam.utils import subprocess_server\n'), ((2949, 2985), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""beam-temp"""'}), "(prefix='beam-temp')\n", (2965, 2985), False, 'import tempfile\n'), ((3106, 3198), 'apache_beam.utils.subprocess_server.SubprocessServer', 'subprocess_server.SubprocessServer', (['beam_job_api_pb2_grpc.JobServiceStub', 'cmd'], {'port': 'port'}), '(beam_job_api_pb2_grpc.JobServiceStub,\n cmd, port=port)\n', (3140, 3198), False, 'from apache_beam.utils import subprocess_server\n'), ((3293, 3329), 'shutil.rmtree', 'shutil.rmtree', (['self._local_temp_root'], {}), '(self._local_temp_root)\n', (3306, 3329), False, 'import shutil\n'), ((1673, 1707), 'grpc.channel_ready_future', 'grpc.channel_ready_future', (['channel'], {}), '(channel)\n', (1698, 1707), False, 'import grpc\n'), ((2366, 2392), 'atexit.register', 'atexit.register', (['self.stop'], {}), '(self.stop)\n', (2381, 2392), False, 'import atexit\n'), ((2401, 2440), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'self.stop'], {}), '(signal.SIGINT, self.stop)\n', (2414, 2440), False, 'import signal\n'), ((5332, 5376), 'subprocess.check_output', 'subprocess.check_output', (["['which', 'docker']"], {}), "(['which', 'docker'])\n", (5355, 5376), False, 'import subprocess\n')] |
from collections import defaultdict
from functools import wraps
from itertools import chain
from sympy.core import sympify
from .precedence import precedence
from .codeprinter import CodePrinter
_kw_py2and3 = {
'and', 'as', 'assert', 'break', 'class', 'continue', 'def', 'del', 'elif',
'else', 'except', 'finally', 'for', 'from', 'global', 'if', 'import', 'in',
'is', 'lambda', 'not', 'or', 'pass', 'raise', 'return', 'try', 'while',
'with', 'yield', 'None' # 'None' is actually not in Python 2's keyword.kwlist
}
_kw_only_py2 = {'exec', 'print'}
_kw_only_py3 = {'False', 'nonlocal', 'True'}
_known_functions = {
'Abs': 'abs',
}
_known_functions_math = {
'acos': 'acos',
'acosh': 'acosh',
'asin': 'asin',
'asinh': 'asinh',
'atan': 'atan',
'atan2': 'atan2',
'atanh': 'atanh',
'ceiling': 'ceil',
'cos': 'cos',
'cosh': 'cosh',
'erf': 'erf',
'erfc': 'erfc',
'exp': 'exp',
'expm1': 'expm1',
'factorial': 'factorial',
'floor': 'floor',
'gamma': 'gamma',
'hypot': 'hypot',
'loggamma': 'lgamma',
'log': 'log',
'log10': 'log10',
'log1p': 'log1p',
'log2': 'log2',
'sin': 'sin',
'sinh': 'sinh',
'Sqrt': 'sqrt',
'tan': 'tan',
'tanh': 'tanh'
} # Not used from ``math``: [copysign isclose isfinite isinf isnan ldexp frexp pow modf
# radians trunc fmod fsum gcd degrees fabs]
_known_constants_math = {
'Exp1': 'e',
'Pi': 'pi',
# Only in python >= 3.5:
# 'Infinity': 'inf',
# 'NaN': 'nan'
}
def _print_known_func(self, expr):
known = self.known_functions[expr.__class__.__name__]
return '{name}({args})'.format(name=self._module_format(known),
args=', '.join(map(self._print, expr.args)))
def _print_known_const(self, expr):
known = self.known_constants[expr.__class__.__name__]
return self._module_format(known)
class PythonCodePrinter(CodePrinter):
printmethod = "_pythoncode"
language = "Python"
standard = "python3"
reserved_words = _kw_py2and3.union(_kw_only_py3)
modules = None # initialized to a set in __init__
tab = ' '
_kf = dict(chain(
_known_functions.items(),
[(k, 'math.' + v) for k, v in _known_functions_math.items()]
))
_kc = {k: 'math.'+v for k, v in _known_constants_math.items()}
_operators = {'and': 'and', 'or': 'or', 'not': 'not'}
_default_settings = dict(
CodePrinter._default_settings,
user_functions={},
precision=17,
inline=True,
fully_qualified_modules=True
)
def __init__(self, settings=None):
super(PythonCodePrinter, self).__init__(settings)
self.module_imports = defaultdict(set)
self.known_functions = dict(self._kf, **(settings or {}).get(
'user_functions', {}))
self.known_constants = dict(self._kc, **(settings or {}).get(
'user_constants', {}))
def _declare_number_const(self, name, value):
return "%s = %s" % (name, value)
def _module_format(self, fqn, register=True):
parts = fqn.split('.')
if register and len(parts) > 1:
self.module_imports['.'.join(parts[:-1])].add(parts[-1])
if self._settings['fully_qualified_modules']:
return fqn
else:
return fqn.split('(')[0].split('[')[0].split('.')[-1]
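    # For example, _module_format('math.sin') records that ``sin`` must be
    # imported from ``math`` and returns 'math.sin'; with
    # ``fully_qualified_modules=False`` it would return the bare name 'sin'.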
def _format_code(self, lines):
return lines
def _get_comment(self, text):
return " # {0}".format(text)
def _print_NaN(self, expr):
return "float('nan')"
def _print_Infinity(self, expr):
return "float('inf')"
def _print_Mod(self, expr):
PREC = precedence(expr)
return ('{0} % {1}'.format(*map(lambda x: self.parenthesize(x, PREC), expr.args)))
def _print_Piecewise(self, expr):
result = []
i = 0
for arg in expr.args:
e = arg.expr
c = arg.cond
result.append('((')
result.append(self._print(e))
result.append(') if (')
result.append(self._print(c))
result.append(') else (')
i += 1
result = result[:-1]
result.append(') else None)')
result.append(')'*(2*i - 2))
return ''.join(result)
def _print_ITE(self, expr):
from sympy.functions.elementary.piecewise import Piecewise
return self._print(expr.rewrite(Piecewise))
def _print_Sum(self, expr):
loops = (
'for {i} in range({a}, {b}+1)'.format(
i=self._print(i),
a=self._print(a),
b=self._print(b))
for i, a, b in expr.limits)
return '(builtins.sum({function} {loops}))'.format(
function=self._print(expr.function),
loops=' '.join(loops))
def _print_ImaginaryUnit(self, expr):
return '1j'
def _print_MatrixBase(self, expr):
name = expr.__class__.__name__
func = self.known_functions.get(name, name)
return "%s(%s)" % (func, self._print(expr.tolist()))
_print_SparseMatrix = \
_print_MutableSparseMatrix = \
_print_ImmutableSparseMatrix = \
_print_Matrix = \
_print_DenseMatrix = \
_print_MutableDenseMatrix = \
_print_ImmutableMatrix = \
_print_ImmutableDenseMatrix = \
lambda self, expr: self._print_MatrixBase(expr)
for k in PythonCodePrinter._kf:
setattr(PythonCodePrinter, '_print_%s' % k, _print_known_func)
for k in _known_constants_math:
setattr(PythonCodePrinter, '_print_%s' % k, _print_known_const)
def pycode(expr, **settings):
return PythonCodePrinter(settings).doprint(expr)
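# A small usage sketch (assuming ``x`` is a SymPy Symbol): pycode(sin(x)**2)
# should render roughly as 'math.sin(x)**2', with known functions and
# constants qualified via the printer's module imports.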
_not_in_mpmath = 'log1p log2'.split()
_in_mpmath = [(k, v) for k, v in _known_functions_math.items() if k not in _not_in_mpmath]
_known_functions_mpmath = dict(_in_mpmath)
_known_constants_mpmath = {
'Pi': 'pi'
}
class MpmathPrinter(PythonCodePrinter):
"""
Lambda printer for mpmath which maintains precision for floats
"""
printmethod = "_mpmathcode"
_kf = dict(chain(
_known_functions.items(),
[(k, 'mpmath.' + v) for k, v in _known_functions_mpmath.items()]
))
def _print_Integer(self, e):
return '%s(%d)' % (self._module_format('mpmath.mpf'), e)
def _print_Float(self, e):
# XXX: This does not handle setting mpmath.mp.dps. It is assumed that
# the caller of the lambdified function will have set it to sufficient
# precision to match the Floats in the expression.
# Remove 'mpz' if gmpy is installed.
args = str(tuple(map(int, e._mpf_)))
return '{func}({args})'.format(func=self._module_format('mpmath.mpf'), args=args)
    def _print_uppergamma(self, e):  # printer for the uppergamma function
return "{0}({1}, {2}, {3})".format(
self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]),
self._module_format('mpmath.inf'))
    def _print_lowergamma(self, e):  # printer for the lowergamma function
return "{0}({1}, 0, {2})".format(
self._module_format('mpmath.gammainc'), self._print(e.args[0]), self._print(e.args[1]))
def _print_log2(self, e):
return '{0}({1})/{0}(2)'.format(
self._module_format('mpmath.log'), self._print(e.args[0]))
def _print_log1p(self, e):
return '{0}({1}+1)'.format(
self._module_format('mpmath.log'), self._print(e.args[0]))
for k in MpmathPrinter._kf:
setattr(MpmathPrinter, '_print_%s' % k, _print_known_func)
for k in _known_constants_mpmath:
setattr(MpmathPrinter, '_print_%s' % k, _print_known_const)
_not_in_numpy = 'erf erfc factorial gamma lgamma'.split()
_in_numpy = [(k, v) for k, v in _known_functions_math.items() if k not in _not_in_numpy]
_known_functions_numpy = dict(_in_numpy, **{
'acos': 'arccos',
'acosh': 'arccosh',
'asin': 'arcsin',
'asinh': 'arcsinh',
'atan': 'arctan',
'atan2': 'arctan2',
'atanh': 'arctanh',
'exp2': 'exp2',
})
class NumPyPrinter(PythonCodePrinter):
"""
Numpy printer which handles vectorized piecewise functions,
logical operators, etc.
"""
printmethod = "_numpycode"
_kf = dict(chain(
PythonCodePrinter._kf.items(),
[(k, 'numpy.' + v) for k, v in _known_functions_numpy.items()]
))
_kc = {k: 'numpy.'+v for k, v in _known_constants_math.items()}
def _print_seq(self, seq, delimiter=', '):
"General sequence printer: converts to tuple"
# Print tuples here instead of lists because numba supports
# tuples in nopython mode.
return '({},)'.format(delimiter.join(self._print(item) for item in seq))
def _print_MatMul(self, expr):
"Matrix multiplication printer"
return '({0})'.format(').dot('.join(self._print(i) for i in expr.args))
def _print_DotProduct(self, expr):
# DotProduct allows any shape order, but numpy.dot does matrix
# multiplication, so we have to make sure it gets 1 x n by n x 1.
arg1, arg2 = expr.args
if arg1.shape[0] != 1:
arg1 = arg1.T
if arg2.shape[1] != 1:
arg2 = arg2.T
return "%s(%s, %s)" % (self._module_format('numpy.dot'), self._print(arg1), self._print(arg2))
def _print_Piecewise(self, expr):
"Piecewise function printer"
exprs = '[{0}]'.format(','.join(self._print(arg.expr) for arg in expr.args))
conds = '[{0}]'.format(','.join(self._print(arg.cond) for arg in expr.args))
# If [default_value, True] is a (expr, cond) sequence in a Piecewise object
# it will behave the same as passing the 'default' kwarg to select()
# *as long as* it is the last element in expr.args.
# If this is not the case, it may be triggered prematurely.
return '{0}({1}, {2}, default=numpy.nan)'.format(self._module_format('numpy.select'), conds, exprs)
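    # e.g. Piecewise((x, x > 0), (0, True)) renders roughly as
    # 'numpy.select([numpy.greater(x, 0),True], [x,0], default=numpy.nan)'.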
def _print_Relational(self, expr):
"Relational printer for Equality and Unequality"
op = {
'==' :'equal',
'!=' :'not_equal',
'<' :'less',
'<=' :'less_equal',
'>' :'greater',
'>=' :'greater_equal',
}
if expr.rel_op in op:
lhs = self._print(expr.lhs)
rhs = self._print(expr.rhs)
return '{op}({lhs}, {rhs})'.format(op=self._module_format('numpy.'+op[expr.rel_op]),
lhs=lhs, rhs=rhs)
return super(NumPyPrinter, self)._print_Relational(expr)
def _print_And(self, expr):
"Logical And printer"
# We have to override LambdaPrinter because it uses Python 'and' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_and' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format(self._module_format('numpy.logical_and'), ','.join(self._print(i) for i in expr.args))
def _print_Or(self, expr):
"Logical Or printer"
# We have to override LambdaPrinter because it uses Python 'or' keyword.
# If LambdaPrinter didn't define it, we could use StrPrinter's
# version of the function and add 'logical_or' to NUMPY_TRANSLATIONS.
return '{0}({1})'.format(self._module_format('numpy.logical_or'), ','.join(self._print(i) for i in expr.args))
def _print_Not(self, expr):
"Logical Not printer"
# We have to override LambdaPrinter because it uses Python 'not' keyword.
# If LambdaPrinter didn't define it, we would still have to define our
# own because StrPrinter doesn't define it.
return '{0}({1})'.format(self._module_format('numpy.logical_not'), ','.join(self._print(i) for i in expr.args))
def _print_Min(self, expr):
return '{0}(({1}))'.format(self._module_format('numpy.amin'), ','.join(self._print(i) for i in expr.args))
def _print_Max(self, expr):
return '{0}(({1}))'.format(self._module_format('numpy.amax'), ','.join(self._print(i) for i in expr.args))
def _print_Pow(self, expr):
if expr.exp == 0.5:
return '{0}({1})'.format(self._module_format('numpy.sqrt'), self._print(expr.base))
else:
return super(NumPyPrinter, self)._print_Pow(expr)
def _print_arg(self, expr):
return "%s(%s)" % (self._module_format('numpy.angle'), self._print(expr.args[0]))
def _print_im(self, expr):
return "%s(%s)" % (self._module_format('numpy.imag', self._print(expr.args[0])))
def _print_Mod(self, expr):
return "%s(%s)" % (self._module_format('numpy.mod'), ', '.join(map(self._print, expr.args)))
def _print_re(self, expr):
return "%s(%s)" % (self._module_format('numpy.real'), self._print(expr.args[0]))
def _print_MatrixBase(self, expr):
func = self.known_functions.get(expr.__class__.__name__, None)
if func is None:
func = self._module_format('numpy.array')
return "%s(%s)" % (func, self._print(expr.tolist()))
for k in NumPyPrinter._kf:
setattr(NumPyPrinter, '_print_%s' % k, _print_known_func)
for k in NumPyPrinter._kc:
setattr(NumPyPrinter, '_print_%s' % k, _print_known_const)
_known_functions_scipy_special = {
'erf': 'erf',
'erfc': 'erfc',
'gamma': 'gamma',
'loggamma': 'gammaln'
}
_known_constants_scipy_constants = {
'GoldenRatio': 'golden_ratio'
}
class SciPyPrinter(NumPyPrinter):
_kf = dict(chain(
NumPyPrinter._kf.items(),
[(k, 'scipy.special.' + v) for k, v in _known_functions_scipy_special.items()]
))
_kc = {k: 'scipy.constants.' + v for k, v in _known_constants_scipy_constants.items()}
def _print_SparseMatrix(self, expr):
i, j, data = [], [], []
for (r, c), v in expr._smat.items():
i.append(r)
j.append(c)
data.append(v)
return "{name}({data}, ({i}, {j}), shape={shape})".format(
name=self._module_format('scipy.sparse.coo_matrix'),
data=data, i=i, j=j, shape=expr.shape
)
_print_ImmutableSparseMatrix = _print_SparseMatrix
for k in SciPyPrinter._kf:
setattr(SciPyPrinter, '_print_%s' % k, _print_known_func)
for k in SciPyPrinter._kc:
setattr(SciPyPrinter, '_print_%s' % k, _print_known_const)
class SymPyPrinter(PythonCodePrinter):
_kf = dict([(k, 'sympy.' + v) for k, v in chain(
_known_functions.items(),
_known_functions_math.items()
)])
def _print_Function(self, expr):
mod = expr.func.__module__ or ''
return '%s(%s)' % (self._module_format(mod + ('.' if mod else '') + expr.func.__name__),
', '.join(map(self._print, expr.args)))
| [
"collections.defaultdict"
] | [((2720, 2736), 'collections.defaultdict', 'defaultdict', (['set'], {}), '(set)\n', (2731, 2736), False, 'from collections import defaultdict\n')] |
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 28 12:10:11 2019
@author: Omer
"""
## File handler
## This file was initially intended purely to generate the matrices for the near earth code found in: https://public.ccsds.org/Pubs/131x1o2e2s.pdf
## The values from the above pdf were copied manually to a txt file, and it is the purpose of this file to parse it.
## The emphasis here is on correctness, I currently do not see a reason to generalise this file, since matrices will be saved in either json or some matrix friendly format.
import numpy as np
from scipy.linalg import circulant
#import matplotlib.pyplot as plt
import scipy.io
import common
import hashlib
import os
projectDir = os.environ.get('LDPC')
if projectDir == None:
import pathlib
projectDir = pathlib.Path(__file__).parent.absolute()
## <NAME>: added on 01/12/2020, need to make sure this doesn't break anything.
import sys
sys.path.insert(1, projectDir)
FILE_HANDLER_INT_DATA_TYPE = np.int32
GENERAL_CODE_MATRIX_DATA_TYPE = np.int32
NIBBLE_CONVERTER = np.array([8, 4, 2, 1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
def nibbleToHex(inputArray):
n = NIBBLE_CONVERTER.dot(inputArray)
if n == 10:
h = 'A'
elif n== 11:
h = 'B'
elif n== 12:
h = 'C'
elif n== 13:
h = 'D'
elif n== 14:
h = 'E'
elif n== 15:
h = 'F'
else:
h = str(n)
return h
def binaryArraytoHex(inputArray):
d1 = len(inputArray)
assert (d1 % 4 == 0)
outputArray = np.zeros(d1//4, dtype = str)
outputString = ''
for j in range(d1//4):
nibble = inputArray[4 * j : 4 * j + 4]
h = nibbleToHex(nibble)
outputArray[j] = h
outputString = outputString + h
return outputArray, outputString
def hexStringToBinaryArray(hexString):
outputBinary = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
for i in hexString:
if i == '0':
nibble = np.array([0,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '1':
nibble = np.array([0,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '2':
nibble = np.array([0,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '3':
nibble = np.array([0,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '4':
nibble = np.array([0,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '5':
nibble = np.array([0,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '6':
nibble = np.array([0,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '7':
nibble = np.array([0,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '8':
nibble = np.array([1,0,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == '9':
nibble = np.array([1,0,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'A':
nibble = np.array([1,0,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'B':
nibble = np.array([1,0,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'C':
nibble = np.array([1,1,0,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'D':
nibble = np.array([1,1,0,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'E':
nibble = np.array([1,1,1,0], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
elif i == 'F':
nibble = np.array([1,1,1,1], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
else:
#print('Error, 0-9 or A-F')
pass
nibble = np.array([], dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
outputBinary = np.hstack((outputBinary, nibble))
return outputBinary
def hexToCirculant(hexStr, circulantSize):
binaryArray = hexStringToBinaryArray(hexStr)
if len(binaryArray) < circulantSize:
binaryArray = np.hstack(np.zeros(circulantSize-len(binaryArray), dtype = GENERAL_CODE_MATRIX_DATA_TYPE))
else:
binaryArray = binaryArray[1:]
circulantMatrix = circulant(binaryArray)
circulantMatrix = circulantMatrix.T
return circulantMatrix
def hotLocationsToCirculant(locationList, circulantSize):
generatingVector = np.zeros(circulantSize, dtype = GENERAL_CODE_MATRIX_DATA_TYPE)
generatingVector[locationList] = 1
newCirculant = circulant(generatingVector)
newCirculant = newCirculant.T
return newCirculant
def readMatrixFromFile(fileName, dim0, dim1, circulantSize, isRow = True, isHex = True, isGenerator = True ):
# This function assumes that each line in the file contains the non zero locations of the first row of a circulant.
# Each line in the file then defines a circulant, and the order in which they are defined is top to bottom left to right, i.e.:
# line 0 defines circulant 0,0
with open(fileName) as fid:
lines = fid.readlines()
if isGenerator:
for i in range((dim0 // circulantSize) ):
bLeft = hexToCirculant(lines[2 * i], circulantSize)
bRight = hexToCirculant(lines[2 * i + 1], circulantSize)
newBlock = np.hstack((bLeft, bRight))
if i == 0:
accumulatedBlock = newBlock
else:
accumulatedBlock = np.vstack((accumulatedBlock, newBlock))
newMatrix = np.hstack((np.eye(dim0, dtype = GENERAL_CODE_MATRIX_DATA_TYPE), accumulatedBlock))
else:
for i in range((dim1 // circulantSize)):
locationList1 = list(lines[ i].rstrip('\n').split(','))
locationList1 = list(map(int, locationList1))
upBlock = hotLocationsToCirculant(locationList1, circulantSize)
if i == 0:
accumulatedUpBlock1 = upBlock
else:
accumulatedUpBlock1 = np.hstack((accumulatedUpBlock1, upBlock))
for i in range((dim1 // circulantSize)):
locationList = list(lines[(dim1 // circulantSize) + i].rstrip('\n').split(','))
locationList = list(map(int, locationList))
newBlock = hotLocationsToCirculant(locationList, circulantSize)
if i == 0:
accumulatedBlock2 = newBlock
else:
accumulatedBlock2 = np.hstack((accumulatedBlock2, newBlock))
newMatrix = np.vstack((accumulatedUpBlock1, accumulatedBlock2))
return newMatrix
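# Illustrative (hypothetical) parity-file fragment for the non-generator branch above:
# a line such as "0,176,409" is parsed into locationList = [0, 176, 409] and expanded by
# hotLocationsToCirculant into a circulantSize x circulantSize block whose first row has
# ones exactly at those column indices.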
def binaryMatrixToHexString(binaryMatrix, circulantSize):
leftPadding = np.array(4 - (circulantSize % 4))
m,n = binaryMatrix.shape
#print(m)
#print(n)
assert( m % circulantSize == 0)
assert (n % circulantSize == 0)
M = m // circulantSize
N = n // circulantSize
hexName = ''
for r in range(M):
for k in range(N):
nextLine = np.hstack((leftPadding, binaryMatrix[ r * circulantSize , k * circulantSize : (k + 1) * circulantSize]))
hexArray, hexString = binaryArraytoHex(nextLine)
hexName = hexName + hexString
return hexName
def saveCodeInstance(parityMatrix, circulantSize, codewordSize, evaluationData = None, path = None, evaluationTime = 0, numberOfNonZero = 0, fileName = None):
print("*** in saveCodeInstance ...")
m, n = parityMatrix.shape
M = m // circulantSize
N = n // circulantSize
if fileName == None:
fileName = binaryMatrixToHexString(parityMatrix, circulantSize)
fileNameSHA224 = str(circulantSize) + '_' + str(M) + '_' + str(N) + '_' + str(hashlib.sha224(str(fileName).encode('utf-8')).hexdigest())
fileNameWithPath = path + fileNameSHA224
else:
fileNameWithPath = path + fileName
print("*** " + fileName)
workspaceDict = {}
workspaceDict['parityMatrix'] = parityMatrix
workspaceDict['fileName'] = fileName
if evaluationData != None:
scatterSNR, scatterBER, scatterITR, snrAxis, averageSnrAxis, berData, averageNumberOfIterations = evaluationData.getStatsV2()
workspaceDict['snrData'] = scatterSNR
workspaceDict['berData'] = scatterBER
workspaceDict['itrData'] = scatterITR
workspaceDict['averageSnrAxis'] = averageSnrAxis
workspaceDict['averageNumberOfIterations'] = averageNumberOfIterations
workspaceDict['evaluationTime'] = evaluationTime
workspaceDict['nonZero'] = numberOfNonZero
scipy.io.savemat((fileNameWithPath + '.mat'), workspaceDict)
#evaluationData.plotStats(codewordSize, fileNameWithPath)
print("*** Finishing saveCodeInstance !")
return fileName
def testFileHandler():
nearEarthGenerator = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthGenerator.txt', 7154, 8176, 511, True, True, True)
nearEarthParity = readMatrixFromFile(projectDir + '/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)
return 'OK'
def plotResults(path, makeMat = False):
i = 10
evaluationFaildAt = np.zeros(4, dtype = FILE_HANDLER_INT_DATA_TYPE)
evalTimes = []
numberOfIterationsAtHigh = []
for root, dirs, files in os.walk(path):
for file in files:
if str(file).endswith('.mat'):
i = i + 1
mat = scipy.io.loadmat(str(os.path.join(root, file)))
snrAxis = mat['snrAxis']
snrActual = mat['averageSnrAxis']
if len(snrAxis) < 3:
evaluationFaildAt[len(snrAxis)] = evaluationFaildAt[len(snrAxis)] + 1
berAxis = mat['berData']
if ('evaluationTime' in mat.keys()):
evalTimes.append(mat['evaluationTime'])
averageNumberOfIterations = mat['averageNumberOfIterations']
numberOfIterationsAtHigh.append(averageNumberOfIterations[-1])
common.plotSNRvsBER(snrActual, berAxis, fileName = None, inputLabel = '', figureNumber = i, figureName = str(file))
else:
pass
return evalTimes, evaluationFaildAt, numberOfIterationsAtHigh
#plt.imshow(nearEarthParity)
#nearEarthParity = readMatrixFromFile('/home/oss22/swift/swift/codeMatrices/nearEarthParity.txt', 1022, 8176, 511, True, False, False)
#import networkx as nx
#from networkx.algorithms import bipartite
#B = nx.Graph()
#B.add_nodes_from(range(1022), bipartite=0)
#B.add_nodes_from(range(1022, 7156 + 1022), bipartite=1)
# Add edges only between nodes of opposite node sets
#for i in range(8176):
# for j in range(1022):
# if nearEarthParity[j,i] != 0:
# B.add_edges_from([(j, 7156 + i)])
#X, Y = bipartite.sets(B)
#pos = dict()
#pos.update( (n, (1, i)) for i, n in enumerate(X) )
#pos.update( (n, (2, i)) for i, n in enumerate(Y) )
#nx.draw(B, pos=pos)
#plt.show()
| [
"numpy.eye",
"sys.path.insert",
"numpy.hstack",
"pathlib.Path",
"os.environ.get",
"os.path.join",
"numpy.array",
"numpy.zeros",
"scipy.linalg.circulant",
"numpy.vstack",
"os.walk"
] | [((694, 716), 'os.environ.get', 'os.environ.get', (['"""LDPC"""'], {}), "('LDPC')\n", (708, 716), False, 'import os\n'), ((907, 937), 'sys.path.insert', 'sys.path.insert', (['(1)', 'projectDir'], {}), '(1, projectDir)\n', (922, 937), False, 'import sys\n'), ((1038, 1097), 'numpy.array', 'np.array', (['[8, 4, 2, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([8, 4, 2, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (1046, 1097), True, 'import numpy as np\n'), ((1524, 1552), 'numpy.zeros', 'np.zeros', (['(d1 // 4)'], {'dtype': 'str'}), '(d1 // 4, dtype=str)\n', (1532, 1552), True, 'import numpy as np\n'), ((1848, 1897), 'numpy.array', 'np.array', (['[]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (1856, 1897), True, 'import numpy as np\n'), ((4356, 4378), 'scipy.linalg.circulant', 'circulant', (['binaryArray'], {}), '(binaryArray)\n', (4365, 4378), False, 'from scipy.linalg import circulant\n'), ((4528, 4588), 'numpy.zeros', 'np.zeros', (['circulantSize'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '(circulantSize, dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (4536, 4588), True, 'import numpy as np\n'), ((4649, 4676), 'scipy.linalg.circulant', 'circulant', (['generatingVector'], {}), '(generatingVector)\n', (4658, 4676), False, 'from scipy.linalg import circulant\n'), ((6792, 6823), 'numpy.array', 'np.array', (['(4 - circulantSize % 4)'], {}), '(4 - circulantSize % 4)\n', (6800, 6823), True, 'import numpy as np\n'), ((9217, 9262), 'numpy.zeros', 'np.zeros', (['(4)'], {'dtype': 'FILE_HANDLER_INT_DATA_TYPE'}), '(4, dtype=FILE_HANDLER_INT_DATA_TYPE)\n', (9225, 9262), True, 'import numpy as np\n'), ((9347, 9360), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (9354, 9360), False, 'import os\n'), ((3963, 3996), 'numpy.hstack', 'np.hstack', (['(outputBinary, nibble)'], {}), '((outputBinary, nibble))\n', (3972, 3996), True, 'import numpy as np\n'), ((6625, 6676), 'numpy.vstack', 'np.vstack', (['(accumulatedUpBlock1, accumulatedBlock2)'], {}), '((accumulatedUpBlock1, accumulatedBlock2))\n', (6634, 6676), True, 'import numpy as np\n'), ((1967, 2026), 'numpy.array', 'np.array', (['[0, 0, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (1975, 2026), True, 'import numpy as np\n'), ((5433, 5459), 'numpy.hstack', 'np.hstack', (['(bLeft, bRight)'], {}), '((bLeft, bRight))\n', (5442, 5459), True, 'import numpy as np\n'), ((7104, 7209), 'numpy.hstack', 'np.hstack', (['(leftPadding, binaryMatrix[r * circulantSize, k * circulantSize:(k + 1) *\n circulantSize])'], {}), '((leftPadding, binaryMatrix[r * circulantSize, k * circulantSize:(\n k + 1) * circulantSize]))\n', (7113, 7209), True, 'import numpy as np\n'), ((776, 798), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (788, 798), False, 'import pathlib\n'), ((2072, 2131), 'numpy.array', 'np.array', (['[0, 0, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2080, 2131), True, 'import numpy as np\n'), ((5580, 5619), 'numpy.vstack', 'np.vstack', (['(accumulatedBlock, newBlock)'], {}), '((accumulatedBlock, newBlock))\n', (5589, 5619), True, 'import numpy as np\n'), ((5651, 5700), 'numpy.eye', 'np.eye', (['dim0'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '(dim0, dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (5657, 5700), True, 'import numpy as np\n'), ((6118, 6159), 'numpy.hstack', 'np.hstack', (['(accumulatedUpBlock1, upBlock)'], {}), '((accumulatedUpBlock1, 
upBlock))\n', (6127, 6159), True, 'import numpy as np\n'), ((6564, 6604), 'numpy.hstack', 'np.hstack', (['(accumulatedBlock2, newBlock)'], {}), '((accumulatedBlock2, newBlock))\n', (6573, 6604), True, 'import numpy as np\n'), ((2190, 2249), 'numpy.array', 'np.array', (['[0, 0, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2198, 2249), True, 'import numpy as np\n'), ((2308, 2367), 'numpy.array', 'np.array', (['[0, 0, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 0, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2316, 2367), True, 'import numpy as np\n'), ((9501, 9525), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (9513, 9525), False, 'import os\n'), ((2426, 2485), 'numpy.array', 'np.array', (['[0, 1, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2434, 2485), True, 'import numpy as np\n'), ((2544, 2603), 'numpy.array', 'np.array', (['[0, 1, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2552, 2603), True, 'import numpy as np\n'), ((2662, 2721), 'numpy.array', 'np.array', (['[0, 1, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2670, 2721), True, 'import numpy as np\n'), ((2780, 2839), 'numpy.array', 'np.array', (['[0, 1, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([0, 1, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2788, 2839), True, 'import numpy as np\n'), ((2898, 2957), 'numpy.array', 'np.array', (['[1, 0, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (2906, 2957), True, 'import numpy as np\n'), ((3016, 3075), 'numpy.array', 'np.array', (['[1, 0, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3024, 3075), True, 'import numpy as np\n'), ((3134, 3193), 'numpy.array', 'np.array', (['[1, 0, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3142, 3193), True, 'import numpy as np\n'), ((3252, 3311), 'numpy.array', 'np.array', (['[1, 0, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 0, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3260, 3311), True, 'import numpy as np\n'), ((3370, 3429), 'numpy.array', 'np.array', (['[1, 1, 0, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 0, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3378, 3429), True, 'import numpy as np\n'), ((3488, 3547), 'numpy.array', 'np.array', (['[1, 1, 0, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 0, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3496, 3547), True, 'import numpy as np\n'), ((3606, 3665), 'numpy.array', 'np.array', (['[1, 1, 1, 0]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 1, 0], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3614, 3665), True, 'import numpy as np\n'), ((3724, 3783), 'numpy.array', 'np.array', (['[1, 1, 1, 1]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([1, 1, 1, 1], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3732, 3783), True, 'import numpy as np\n'), ((3888, 3937), 'numpy.array', 'np.array', (['[]'], {'dtype': 'GENERAL_CODE_MATRIX_DATA_TYPE'}), '([], dtype=GENERAL_CODE_MATRIX_DATA_TYPE)\n', (3896, 3937), True, 'import numpy as np\n')] |
import unittest
from ..adl import ADL
import six
if six.PY3:
from unittest.mock import Mock, MagicMock
else:
from mock import Mock, MagicMock
class ADLTest(unittest.TestCase):
"""
Tests for `ADL`
"""
def setUp(self):
self.ls = Mock(return_value=["foo", "bar", "baz"])
self.fakeFile = MagicMock()
self.fakeFile.__iter__.return_value = [b"a", b"b", b"c"]
self.fakeFile.__enter__.return_value = self.fakeFile
self.open = Mock(return_value=self.fakeFile)
self.fakeAdapter = Mock(open=self.open, ls=self.ls)
self.adl = ADL()
self.adl._create_adapter = Mock(return_value=self.fakeAdapter)
def test_split_url_raises_exception_on_invalid_url(self):
with self.assertRaises(Exception) as context:
ADL._split_url("this_is_not_a_valid_url")
self.assertTrue("Invalid ADL url 'this_is_not_a_valid_url'" in str(context.exception))
def test_split_url_splits_valid_url(self):
(store_name, path) = ADL._split_url("adl://foo.azuredatalakestore.net/bar/baz")
self.assertEqual(store_name, "foo")
self.assertEqual(path, "bar/baz")
def test_listdir_calls_ls_on_adl_adapter(self):
self.assertEqual(
self.adl.listdir("adl://foo_store.azuredatalakestore.net/path/to/file"),
["foo", "bar", "baz"],
)
self.ls.assert_called_once_with("path/to/file")
def test_read_opens_and_reads_file(self):
        self.assertEqual(
self.adl.read("adl://foo_store.azuredatalakestore.net/path/to/file"), ["a", "b", "c"]
)
self.fakeFile.__iter__.assert_called_once_with()
def test_write_opens_file_and_writes_to_it(self):
self.adl.write("hello world", "adl://foo_store.azuredatalakestore.net/path/to/file")
self.fakeFile.write.assert_called_once_with(b"hello world")
| [
"mock.Mock",
"mock.MagicMock"
] | [((263, 303), 'mock.Mock', 'Mock', ([], {'return_value': "['foo', 'bar', 'baz']"}), "(return_value=['foo', 'bar', 'baz'])\n", (267, 303), False, 'from mock import Mock, MagicMock\n'), ((328, 339), 'mock.MagicMock', 'MagicMock', ([], {}), '()\n', (337, 339), False, 'from mock import Mock, MagicMock\n'), ((486, 518), 'mock.Mock', 'Mock', ([], {'return_value': 'self.fakeFile'}), '(return_value=self.fakeFile)\n', (490, 518), False, 'from mock import Mock, MagicMock\n'), ((546, 578), 'mock.Mock', 'Mock', ([], {'open': 'self.open', 'ls': 'self.ls'}), '(open=self.open, ls=self.ls)\n', (550, 578), False, 'from mock import Mock, MagicMock\n'), ((639, 674), 'mock.Mock', 'Mock', ([], {'return_value': 'self.fakeAdapter'}), '(return_value=self.fakeAdapter)\n', (643, 674), False, 'from mock import Mock, MagicMock\n')] |
#!/usr/bin/python3.4
__author__ = "<NAME>"
import sys
import os
sys.path.append("..")
import unittest
from modules import storage
from modules.service import Service
from modules.database import Database
class TestStorage(unittest.TestCase):
def setUp(self):
self.service = Service()
self.database = Database()
open("test.service", "w+").close()
open("test.db", "w+").close()
def test_write_read_service(self):
self.service.service_name = "Hello"
self.service.username = "This"
self.service.password = "<PASSWORD>"
storage.write("test", self.service, "test.service")
service2 = Service()
storage.read("test", service2, "test.service")
self.assertEqual(service2.service_name, self.service.service_name)
self.assertEqual(service2.username, self.service.username)
self.assertEqual(service2.password, self.service.password)
def test_write_read_database(self):
self.database.add_service(Service())
self.database.add_service(Service())
self.database.name = "Hey"
storage.write("test", self.database, "test.db")
database2 = Database()
storage.read("test", database2, "test.db")
self.assertEqual(database2.name, self.database.name)
for i in range(len(self.database.services)):
self.assertEqual(database2.services[i].service_name, self.database.services[i].service_name)
self.assertEqual(database2.services[i].username, self.database.services[i].username)
self.assertEqual(database2.services[i].password, self.database.services[i].password)
def tearDown(self):
os.remove(os.getcwd() + "/test.service")
os.remove(os.getcwd() + "/test.db")
if __name__ == "__main__":
    unittest.main()
| [
"modules.service.Service",
"modules.storage.read",
"modules.storage.write",
"os.getcwd",
"unittest.main",
"modules.database.Database",
"sys.path.append"
] | [((65, 86), 'sys.path.append', 'sys.path.append', (['""".."""'], {}), "('..')\n", (80, 86), False, 'import sys\n'), ((1806, 1821), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1819, 1821), False, 'import unittest\n'), ((290, 299), 'modules.service.Service', 'Service', ([], {}), '()\n', (297, 299), False, 'from modules.service import Service\n'), ((324, 334), 'modules.database.Database', 'Database', ([], {}), '()\n', (332, 334), False, 'from modules.database import Database\n'), ((593, 644), 'modules.storage.write', 'storage.write', (['"""test"""', 'self.service', '"""test.service"""'], {}), "('test', self.service, 'test.service')\n", (606, 644), False, 'from modules import storage\n'), ((664, 673), 'modules.service.Service', 'Service', ([], {}), '()\n', (671, 673), False, 'from modules.service import Service\n'), ((682, 728), 'modules.storage.read', 'storage.read', (['"""test"""', 'service2', '"""test.service"""'], {}), "('test', service2, 'test.service')\n", (694, 728), False, 'from modules import storage\n'), ((1113, 1160), 'modules.storage.write', 'storage.write', (['"""test"""', 'self.database', '"""test.db"""'], {}), "('test', self.database, 'test.db')\n", (1126, 1160), False, 'from modules import storage\n'), ((1181, 1191), 'modules.database.Database', 'Database', ([], {}), '()\n', (1189, 1191), False, 'from modules.database import Database\n'), ((1200, 1242), 'modules.storage.read', 'storage.read', (['"""test"""', 'database2', '"""test.db"""'], {}), "('test', database2, 'test.db')\n", (1212, 1242), False, 'from modules import storage\n'), ((1013, 1022), 'modules.service.Service', 'Service', ([], {}), '()\n', (1020, 1022), False, 'from modules.service import Service\n'), ((1058, 1067), 'modules.service.Service', 'Service', ([], {}), '()\n', (1065, 1067), False, 'from modules.service import Service\n'), ((1699, 1710), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1708, 1710), False, 'import os\n'), ((1748, 1759), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1757, 1759), False, 'import os\n')] |
from setuptools import setup, find_packages
from os.path import join, dirname
setup(
name="fant_sizer",
version="0.7",
author="<NAME>",
author_email="<EMAIL>",
description="fant_sizer command-line file-information",
url="https://github.com/ripiuk/fant_sizer",
keywords="file command-line information size tool recursively",
license="MIT",
classifiers=[
'Topic :: Utilities',
'Environment :: Console',
'Natural Language :: English',
'License :: OSI Approved :: MIT License',
'Intended Audience :: Developers',
'Intended Audience :: Information Technology',
'Development Status :: 5 - Production/Stable',
'Programming Language :: Python :: 3.6'
],
packages=find_packages(),
long_description=open(join(dirname(__file__), "README.rst")).read(),
entry_points={
"console_scripts":
['fant_sizer = fant_sizer.fant_sizer:_main'],
},
)
| [
"os.path.dirname",
"setuptools.find_packages"
] | [((840, 855), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (853, 855), False, 'from setuptools import setup, find_packages\n'), ((892, 909), 'os.path.dirname', 'dirname', (['__file__'], {}), '(__file__)\n', (899, 909), False, 'from os.path import join, dirname\n')] |
import math
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.utils.model_zoo as model_zoo
from model.sync_batchnorm.batchnorm import SynchronizedBatchNorm2d
def fixed_padding(inputs, kernel_size, dilation):
kernel_size_effective = kernel_size + (kernel_size - 1) * (dilation - 1)
pad_total = kernel_size_effective - 1
pad_beg = pad_total // 2
pad_end = pad_total - pad_beg
padded_inputs = F.pad(inputs, (pad_beg, pad_end, pad_beg, pad_end))
return padded_inputs
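# Worked example (illustrative, not from the original source): with kernel_size=3 and
# dilation=2 the effective kernel size is 3 + (3 - 1) * (2 - 1) = 5, so pad_total = 4 and
# the input is padded by 2 pixels on each side, which preserves spatial size for a
# stride-1 convolution.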
class SeparableConv2d(nn.Module):
def __init__(self, inplanes, planes, kernel_size=3, stride=1, dilation=1, bias=False, BatchNorm=None):
super(SeparableConv2d, self).__init__()
self.conv1 = nn.Conv2d(inplanes, inplanes, kernel_size, stride, 0, dilation,
groups=inplanes, bias=bias)
self.bn = BatchNorm(inplanes)
self.pointwise = nn.Conv2d(inplanes, planes, 1, 1, 0, 1, 1, bias=bias)
def forward(self, x):
x = fixed_padding(x, self.conv1.kernel_size[0], dilation=self.conv1.dilation[0])
x = self.conv1(x)
x = self.bn(x)
x = self.pointwise(x)
return x
class Block(nn.Module):
def __init__(self, inplanes, planes, reps, stride=1, dilation=1, BatchNorm=None,
start_with_relu=True, grow_first=True, is_last=False):
super(Block, self).__init__()
if planes != inplanes or stride != 1:
self.skip = nn.Conv2d(inplanes, planes, 1, stride=stride, bias=False)
self.skipbn = BatchNorm(planes)
else:
self.skip = None
self.relu = nn.ReLU(inplace=True)
rep = []
filters = inplanes
if grow_first:
rep.append(self.relu)
rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
filters = planes
for i in range(reps - 1):
rep.append(self.relu)
rep.append(SeparableConv2d(filters, filters, 3, 1, dilation, BatchNorm=BatchNorm))
rep.append(BatchNorm(filters))
if not grow_first:
rep.append(self.relu)
rep.append(SeparableConv2d(inplanes, planes, 3, 1, dilation, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
if stride != 1:
rep.append(self.relu)
rep.append(SeparableConv2d(planes, planes, 3, 2, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
if stride == 1 and is_last:
rep.append(self.relu)
rep.append(SeparableConv2d(planes, planes, 3, 1, BatchNorm=BatchNorm))
rep.append(BatchNorm(planes))
if not start_with_relu:
rep = rep[1:]
self.rep = nn.Sequential(*rep)
def forward(self, inp):
x = self.rep(inp)
if self.skip is not None:
skip = self.skip(inp)
skip = self.skipbn(skip)
else:
skip = inp
x = x + skip
return x
class AlignedXception(nn.Module):
"""
    Modified Aligned Xception
"""
def __init__(self, output_stride, BatchNorm,
pretrained=True):
super(AlignedXception, self).__init__()
if output_stride == 16:
entry_block3_stride = 2
middle_block_dilation = 1
exit_block_dilations = (1, 2)
elif output_stride == 8:
entry_block3_stride = 1
middle_block_dilation = 2
exit_block_dilations = (2, 4)
else:
raise NotImplementedError
# Entry flow
self.conv1 = nn.Conv2d(3, 32, 3, stride=2, padding=1, bias=False)
self.bn1 = BatchNorm(32)
self.relu = nn.ReLU(inplace=True)
self.conv2 = nn.Conv2d(32, 64, 3, stride=1, padding=1, bias=False)
self.bn2 = BatchNorm(64)
self.block1 = Block(64, 128, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False)
self.block2 = Block(128, 256, reps=2, stride=2, BatchNorm=BatchNorm, start_with_relu=False,
grow_first=True)
self.block3 = Block(256, 728, reps=2, stride=entry_block3_stride, BatchNorm=BatchNorm,
start_with_relu=True, grow_first=True, is_last=True)
# Middle flow
self.block4 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block5 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block6 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block7 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block8 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block9 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block10 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block11 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block12 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block13 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block14 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block15 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block16 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block17 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block18 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
self.block19 = Block(728, 728, reps=3, stride=1, dilation=middle_block_dilation,
BatchNorm=BatchNorm, start_with_relu=True, grow_first=True)
# Exit flow
self.block20 = Block(728, 1024, reps=2, stride=1, dilation=exit_block_dilations[0],
BatchNorm=BatchNorm, start_with_relu=True, grow_first=False, is_last=True)
self.conv3 = SeparableConv2d(1024, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)
self.bn3 = BatchNorm(1536)
self.conv4 = SeparableConv2d(1536, 1536, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)
self.bn4 = BatchNorm(1536)
self.conv5 = SeparableConv2d(1536, 2048, 3, stride=1, dilation=exit_block_dilations[1], BatchNorm=BatchNorm)
self.bn5 = BatchNorm(2048)
# Init weights
self._init_weight()
# Load pretrained model
if pretrained:
self._load_pretrained_model()
def forward(self, x):
# Entry flow
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x = self.conv2(x)
x = self.bn2(x)
x = self.relu(x)
x = self.block1(x)
# add relu here
x = self.relu(x)
low_level_feat = x
x = self.block2(x)
x = self.block3(x)
# Middle flow
x = self.block4(x)
x = self.block5(x)
x = self.block6(x)
x = self.block7(x)
x = self.block8(x)
x = self.block9(x)
x = self.block10(x)
x = self.block11(x)
x = self.block12(x)
x = self.block13(x)
x = self.block14(x)
x = self.block15(x)
x = self.block16(x)
x = self.block17(x)
x = self.block18(x)
x = self.block19(x)
# Exit flow
x = self.block20(x)
x = self.relu(x)
x = self.conv3(x)
x = self.bn3(x)
x = self.relu(x)
x = self.conv4(x)
x = self.bn4(x)
x = self.relu(x)
x = self.conv5(x)
x = self.bn5(x)
x = self.relu(x)
return x, low_level_feat
def _init_weight(self):
for m in self.modules():
if isinstance(m, nn.Conv2d):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, SynchronizedBatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
elif isinstance(m, nn.BatchNorm2d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _load_pretrained_model(self):
pretrain_dict = model_zoo.load_url('http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth')
model_dict = {}
state_dict = self.state_dict()
for k, v in pretrain_dict.items():
if k in model_dict:
if 'pointwise' in k:
v = v.unsqueeze(-1).unsqueeze(-1)
if k.startswith('block11'):
model_dict[k] = v
model_dict[k.replace('block11', 'block12')] = v
model_dict[k.replace('block11', 'block13')] = v
model_dict[k.replace('block11', 'block14')] = v
model_dict[k.replace('block11', 'block15')] = v
model_dict[k.replace('block11', 'block16')] = v
model_dict[k.replace('block11', 'block17')] = v
model_dict[k.replace('block11', 'block18')] = v
model_dict[k.replace('block11', 'block19')] = v
elif k.startswith('block12'):
model_dict[k.replace('block12', 'block20')] = v
elif k.startswith('bn3'):
model_dict[k] = v
model_dict[k.replace('bn3', 'bn4')] = v
elif k.startswith('conv4'):
model_dict[k.replace('conv4', 'conv5')] = v
elif k.startswith('bn4'):
model_dict[k.replace('bn4', 'bn5')] = v
else:
model_dict[k] = v
state_dict.update(model_dict)
self.load_state_dict(state_dict)
if __name__ == "__main__":
import torch
model = AlignedXception(BatchNorm=nn.BatchNorm2d, pretrained=True, output_stride=16)
input = torch.rand(1, 3, 512, 512)
output, low_level_feat = model(input)
print(output.size())
    print(low_level_feat.size())
| [
"torch.nn.ReLU",
"torch.nn.Sequential",
"torch.utils.model_zoo.load_url",
"math.sqrt",
"torch.nn.Conv2d",
"torch.nn.functional.pad",
"torch.rand"
] | [((441, 492), 'torch.nn.functional.pad', 'F.pad', (['inputs', '(pad_beg, pad_end, pad_beg, pad_end)'], {}), '(inputs, (pad_beg, pad_end, pad_beg, pad_end))\n', (446, 492), True, 'import torch.nn.functional as F\n'), ((11424, 11450), 'torch.rand', 'torch.rand', (['(1)', '(3)', '(512)', '(512)'], {}), '(1, 3, 512, 512)\n', (11434, 11450), False, 'import torch\n'), ((731, 827), 'torch.nn.Conv2d', 'nn.Conv2d', (['inplanes', 'inplanes', 'kernel_size', 'stride', '(0)', 'dilation'], {'groups': 'inplanes', 'bias': 'bias'}), '(inplanes, inplanes, kernel_size, stride, 0, dilation, groups=\n inplanes, bias=bias)\n', (740, 827), True, 'import torch.nn as nn\n'), ((917, 970), 'torch.nn.Conv2d', 'nn.Conv2d', (['inplanes', 'planes', '(1)', '(1)', '(0)', '(1)', '(1)'], {'bias': 'bias'}), '(inplanes, planes, 1, 1, 0, 1, 1, bias=bias)\n', (926, 970), True, 'import torch.nn as nn\n'), ((1641, 1662), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (1648, 1662), True, 'import torch.nn as nn\n'), ((2796, 2815), 'torch.nn.Sequential', 'nn.Sequential', (['*rep'], {}), '(*rep)\n', (2809, 2815), True, 'import torch.nn as nn\n'), ((3662, 3714), 'torch.nn.Conv2d', 'nn.Conv2d', (['(3)', '(32)', '(3)'], {'stride': '(2)', 'padding': '(1)', 'bias': '(False)'}), '(3, 32, 3, stride=2, padding=1, bias=False)\n', (3671, 3714), True, 'import torch.nn as nn\n'), ((3768, 3789), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (3775, 3789), True, 'import torch.nn as nn\n'), ((3812, 3865), 'torch.nn.Conv2d', 'nn.Conv2d', (['(32)', '(64)', '(3)'], {'stride': '(1)', 'padding': '(1)', 'bias': '(False)'}), '(32, 64, 3, stride=1, padding=1, bias=False)\n', (3821, 3865), True, 'import torch.nn as nn\n'), ((9729, 9821), 'torch.utils.model_zoo.load_url', 'model_zoo.load_url', (['"""http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth"""'], {}), "(\n 'http://data.lip6.fr/cadene/pretrainedmodels/xception-b5690688.pth')\n", (9747, 9821), True, 'import torch.utils.model_zoo as model_zoo\n'), ((1475, 1532), 'torch.nn.Conv2d', 'nn.Conv2d', (['inplanes', 'planes', '(1)'], {'stride': 'stride', 'bias': '(False)'}), '(inplanes, planes, 1, stride=stride, bias=False)\n', (1484, 1532), True, 'import torch.nn as nn\n'), ((9391, 9409), 'math.sqrt', 'math.sqrt', (['(2.0 / n)'], {}), '(2.0 / n)\n', (9400, 9409), False, 'import math\n')] |
"""
"""
# Created on 2015.08.19
#
# Author: <NAME>
#
# Copyright 2015 - 2018 <NAME>
#
# This file is part of ldap3.
#
# ldap3 is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ldap3 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with ldap3 in the COPYING and COPYING.LESSER files.
# If not, see <http://www.gnu.org/licenses/>.
from pyasn1 import __version__ as pyasn1_version
from pyasn1.codec.ber import decoder # for usage in other modules
from pyasn1.codec.ber.encoder import Encoder # for monkeypatching of boolean value
from ..core.results import RESULT_CODES
from ..utils.conv import to_unicode
from ..protocol.convert import referrals_to_list
CLASSES = {(False, False): 0, # Universal
(False, True): 1, # Application
(True, False): 2, # Context
(True, True): 3} # Private
# Monkeypatching of pyasn1 for encoding Boolean with the value 0xFF for TRUE
# THIS IS NOT PART OF THE FAST BER DECODER
if pyasn1_version == 'xxx0.2.3':
from pyasn1.codec.ber.encoder import tagMap, BooleanEncoder, encode
from pyasn1.type.univ import Boolean
from pyasn1.compat.octets import ints2octs
class BooleanCEREncoder(BooleanEncoder):
_true = ints2octs((255,))
tagMap[Boolean.tagSet] = BooleanCEREncoder()
else:
from pyasn1.codec.ber.encoder import tagMap, typeMap, AbstractItemEncoder
from pyasn1.type.univ import Boolean
from copy import deepcopy
class LDAPBooleanEncoder(AbstractItemEncoder):
supportIndefLenMode = False
if pyasn1_version <= '0.2.3':
from pyasn1.compat.octets import ints2octs
_true = ints2octs((255,))
_false = ints2octs((0,))
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value and self._true or self._false, 0
elif pyasn1_version <= '0.3.1':
def encodeValue(self, encodeFun, value, defMode, maxChunkSize):
return value and (255,) or (0,), False, False
elif pyasn1_version <= '0.3.4':
def encodeValue(self, encodeFun, value, defMode, maxChunkSize, ifNotEmpty=False):
return value and (255,) or (0,), False, False
elif pyasn1_version <= '0.3.7':
def encodeValue(self, value, encodeFun, **options):
return value and (255,) or (0,), False, False
else:
def encodeValue(self, value, asn1Spec, encodeFun, **options):
return value and (255,) or (0,), False, False
customTagMap = deepcopy(tagMap)
customTypeMap = deepcopy(typeMap)
customTagMap[Boolean.tagSet] = LDAPBooleanEncoder()
customTypeMap[Boolean.typeId] = LDAPBooleanEncoder()
encode = Encoder(customTagMap, customTypeMap)
# end of monkey patching
# a fast BER decoder for LDAP responses only
def compute_ber_size(data):
"""
Compute size according to BER definite length rules
Returns size of value and value offset
"""
if data[1] <= 127: # BER definite length - short form. Highest bit of byte 1 is 0, message length is in the last 7 bits - Value can be up to 127 bytes long
return data[1], 2
else: # BER definite length - long form. Highest bit of byte 1 is 1, last 7 bits counts the number of following octets containing the value length
bytes_length = data[1] - 128
value_length = 0
cont = bytes_length
for byte in data[2: 2 + bytes_length]:
cont -= 1
value_length += byte * (256 ** cont)
return value_length, bytes_length + 2
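# Illustrative examples (hypothetical byte sequences, not from the original module):
#   compute_ber_size(get_bytes(b'\x30\x0a...'))       -> (10, 2)   # short form, length in the low 7 bits
#   compute_ber_size(get_bytes(b'\x30\x82\x01\xf4'))  -> (500, 4)  # long form, 0x82 announces 2 length octets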
def decode_message_fast(message):
ber_len, ber_value_offset = compute_ber_size(get_bytes(message[:10])) # get start of sequence, at maximum 3 bytes for length
decoded = decode_sequence(message, ber_value_offset, ber_len + ber_value_offset, LDAP_MESSAGE_CONTEXT)
return {
'messageID': decoded[0][3],
'protocolOp': decoded[1][2],
'payload': decoded[1][3],
'controls': decoded[2][3] if len(decoded) == 3 else None
}
def decode_sequence(message, start, stop, context_decoders=None):
decoded = []
while start < stop:
octet = get_byte(message[start])
ber_class = CLASSES[(bool(octet & 0b10000000), bool(octet & 0b01000000))]
ber_constructed = bool(octet & 0b00100000)
ber_type = octet & 0b00011111
ber_decoder = DECODERS[(ber_class, octet & 0b00011111)] if ber_class < 2 else None
ber_len, ber_value_offset = compute_ber_size(get_bytes(message[start: start + 10]))
start += ber_value_offset
if ber_decoder:
value = ber_decoder(message, start, start + ber_len, context_decoders) # call value decode function
else:
# try:
value = context_decoders[ber_type](message, start, start + ber_len) # call value decode function for context class
# except KeyError:
# if ber_type == 3: # Referral in result
# value = decode_sequence(message, start, start + ber_len)
# else:
# raise # re-raise, should never happen
decoded.append((ber_class, ber_constructed, ber_type, value))
start += ber_len
return decoded
def decode_integer(message, start, stop, context_decoders=None):
first = message[start]
value = -1 if get_byte(first) & 0x80 else 0
for octet in message[start: stop]:
value = value << 8 | get_byte(octet)
return value
def decode_octet_string(message, start, stop, context_decoders=None):
return message[start: stop]
def decode_boolean(message, start, stop, context_decoders=None):
return False if message[start: stop] == 0 else True
def decode_bind_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, BIND_RESPONSE_CONTEXT)
def decode_extended_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, EXTENDED_RESPONSE_CONTEXT)
def decode_intermediate_response(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, INTERMEDIATE_RESPONSE_CONTEXT)
def decode_controls(message, start, stop, context_decoders=None):
return decode_sequence(message, start, stop, CONTROLS_CONTEXT)
def ldap_result_to_dict_fast(response):
response_dict = dict()
response_dict['result'] = int(response[0][3]) # resultCode
response_dict['description'] = RESULT_CODES[response_dict['result']]
response_dict['dn'] = to_unicode(response[1][3], from_server=True) # matchedDN
response_dict['message'] = to_unicode(response[2][3], from_server=True) # diagnosticMessage
if len(response) == 4:
response_dict['referrals'] = referrals_to_list([to_unicode(referral[3], from_server=True) for referral in response[3][3]]) # referrals
else:
response_dict['referrals'] = None
return response_dict
######
if str is not bytes: # Python 3
def get_byte(x):
return x
def get_bytes(x):
return x
else: # Python 2
def get_byte(x):
return ord(x)
def get_bytes(x):
return bytearray(x)
DECODERS = {
# Universal
(0, 1): decode_boolean, # Boolean
(0, 2): decode_integer, # Integer
(0, 4): decode_octet_string, # Octet String
(0, 10): decode_integer, # Enumerated
(0, 16): decode_sequence, # Sequence
(0, 17): decode_sequence, # Set
# Application
(1, 1): decode_bind_response, # Bind response
(1, 4): decode_sequence, # Search result entry
(1, 5): decode_sequence, # Search result done
(1, 7): decode_sequence, # Modify response
(1, 9): decode_sequence, # Add response
(1, 11): decode_sequence, # Delete response
(1, 13): decode_sequence, # ModifyDN response
(1, 15): decode_sequence, # Compare response
(1, 19): decode_sequence, # Search result reference
(1, 24): decode_extended_response, # Extended response
(1, 25): decode_intermediate_response, # intermediate response
(2, 3): decode_octet_string #
}
BIND_RESPONSE_CONTEXT = {
7: decode_octet_string # SaslCredentials
}
EXTENDED_RESPONSE_CONTEXT = {
10: decode_octet_string, # ResponseName
11: decode_octet_string # Response Value
}
INTERMEDIATE_RESPONSE_CONTEXT = {
0: decode_octet_string, # IntermediateResponseName
1: decode_octet_string # IntermediateResponseValue
}
LDAP_MESSAGE_CONTEXT = {
0: decode_controls, # Controls
3: decode_sequence # Referral
}
CONTROLS_CONTEXT = {
0: decode_sequence # Control
}
| [
"pyasn1.codec.ber.encoder.Encoder",
"pyasn1.compat.octets.ints2octs",
"copy.deepcopy"
] | [((3063, 3079), 'copy.deepcopy', 'deepcopy', (['tagMap'], {}), '(tagMap)\n', (3071, 3079), False, 'from copy import deepcopy\n'), ((3101, 3118), 'copy.deepcopy', 'deepcopy', (['typeMap'], {}), '(typeMap)\n', (3109, 3118), False, 'from copy import deepcopy\n'), ((3250, 3286), 'pyasn1.codec.ber.encoder.Encoder', 'Encoder', (['customTagMap', 'customTypeMap'], {}), '(customTagMap, customTypeMap)\n', (3257, 3286), False, 'from pyasn1.codec.ber.encoder import Encoder\n'), ((1707, 1724), 'pyasn1.compat.octets.ints2octs', 'ints2octs', (['(255,)'], {}), '((255,))\n', (1716, 1724), False, 'from pyasn1.compat.octets import ints2octs\n'), ((2143, 2160), 'pyasn1.compat.octets.ints2octs', 'ints2octs', (['(255,)'], {}), '((255,))\n', (2152, 2160), False, 'from pyasn1.compat.octets import ints2octs\n'), ((2183, 2198), 'pyasn1.compat.octets.ints2octs', 'ints2octs', (['(0,)'], {}), '((0,))\n', (2192, 2198), False, 'from pyasn1.compat.octets import ints2octs\n')] |
import copy
from contextlib import contextmanager
from functools import wraps
from collections import defaultdict
import tensorflow as tf
_ArgScopeStack = []
@contextmanager
def arg_scope(layers, **kwargs):
"""
Args:
        layers (list or layer): layer or list of layers to which the arguments apply.
    Returns:
        a context in which every appearance of these layers will, by default, use the
        arguments specified by kwargs.
Example:
.. code-block:: python
with arg_scope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):
x = Conv2D('conv0', x)
x = Conv2D('conv1', x)
x = Conv2D('conv2', x, out_channel=64) # override argscope
"""
if not isinstance(layers, list):
layers = [layers]
for l in layers:
assert hasattr(l, '__arg_scope_enabled__'), "Argscope not supported for {}".format(l)
# need to deepcopy so that changes to new_scope does not affect outer scope
new_scope = copy.deepcopy(get_arg_scope())
for l in layers:
new_scope[l.__name__].update(kwargs)
_ArgScopeStack.append(new_scope)
yield
del _ArgScopeStack[-1]
def get_arg_scope():
"""
Returns:
dict: the current argscope.
An argscope is a dict of dict: ``dict[layername] = {arg: val}``
"""
if len(_ArgScopeStack) > 0:
return _ArgScopeStack[-1]
else:
return defaultdict(dict)
def add_arg_scope(cls):
"""Decorator for function to support argscope
Example:
.. code-block:: python
from mylib import MyClass
myfunc = add_arg_scope(MyClass)
Args:
func: A function mapping one or multiple tensors to one or multiple
tensors.
Remarks:
If the function ``func`` returns multiple input or output tensors,
only the first input/output tensor shape is displayed during logging.
Returns:
The decorated function.
"""
original_init = cls.__init__
@wraps(original_init)
def wrapped_init(self, *args, **kwargs):
actual_args = copy.copy(get_arg_scope()[cls.__name__])
actual_args.update(kwargs)
instance = original_init(self, *args, **actual_args)
return instance
cls.__arg_scope_enabled__ = True
cls.__init__ = wrapped_init
return cls
| [
"collections.defaultdict",
"functools.wraps"
] | [((2018, 2038), 'functools.wraps', 'wraps', (['original_init'], {}), '(original_init)\n', (2023, 2038), False, 'from functools import wraps\n'), ((1427, 1444), 'collections.defaultdict', 'defaultdict', (['dict'], {}), '(dict)\n', (1438, 1444), False, 'from collections import defaultdict\n')] |
import pygame
class Player(pygame.sprite.Sprite):
death = [pygame.image.load('Images/death1.png'), pygame.image.load('Images/death2.png'),
pygame.image.load('Images/death3.png'),
pygame.image.load('Images/death4.png'), pygame.image.load('Images/death5.png'),
pygame.image.load('Images/death6.png'),
pygame.image.load('Images/death7.png'), pygame.image.load('Images/death8.png'),
pygame.image.load('Images/death9.png'),
pygame.image.load('Images/death10.png'), pygame.image.load('Images/death11.png'),
pygame.image.load('Images/death12.png'),
pygame.image.load('Images/death13.png'), pygame.image.load('Images/death14.png'),
pygame.image.load('Images/death15.png'),
pygame.image.load('Images/death16.png'), pygame.image.load('Images/death17.png'),
pygame.image.load('Images/death18.png'),pygame.image.load('Images/death19.png'),
pygame.image.load('Images/death20.png')]
p1walkLeft = [pygame.image.load('Images/p1left.png'), pygame.image.load('Images/p1left1.png'),
pygame.image.load('Images/p1left2.png'),
pygame.image.load('Images/p1left3.png'), pygame.image.load('Images/p1left2.png'),
pygame.image.load('Images/p1left1.png'), pygame.image.load('Images/p1left.png')]
p1walkLeftshield = [pygame.image.load('Images/p1leftshield.png'), pygame.image.load('Images/p1left1shield.png'),
pygame.image.load('Images/p1left2shield.png'),
pygame.image.load('Images/p1left3shield.png'), pygame.image.load('Images/p1left2shield.png'),
pygame.image.load('Images/p1left1shield.png'), pygame.image.load('Images/p1leftshield.png')]
p1walkRight = [pygame.image.load('Images/p1right.png'), pygame.image.load('Images/p1right1.png'),
pygame.image.load('Images/p1right2.png'),
pygame.image.load('Images/p1right3.png'), pygame.image.load('Images/p1right2.png'),
pygame.image.load('Images/p1right1.png'), pygame.image.load('Images/p1right.png')]
p1walkRightshield = [pygame.image.load('Images/p1rightshield.png'), pygame.image.load('Images/p1right1shield.png'),
pygame.image.load('Images/p1right2shield.png'),
pygame.image.load('Images/p1right3shield.png'), pygame.image.load('Images/p1right2shield.png'),
pygame.image.load('Images/p1right1shield.png'), pygame.image.load('Images/p1rightshield.png')]
p1walkFront = [pygame.image.load('Images/p1front.png'), pygame.image.load('Images/p1front1.png'),
pygame.image.load('Images/p1front2.png'),
pygame.image.load('Images/p1front3.png'), pygame.image.load('Images/p1front2.png'),
pygame.image.load('Images/p1front1.png'), pygame.image.load('Images/p1front.png')]
p1walkFrontshield = [pygame.image.load('Images/p1frontshield.png'), pygame.image.load('Images/p1front1shield.png'),
pygame.image.load('Images/p1front2shield.png'),
pygame.image.load('Images/p1front3shield.png'), pygame.image.load('Images/p1front2shield.png'),
pygame.image.load('Images/p1front1shield.png'), pygame.image.load('Images/p1frontshield.png')]
p1walkBack = [pygame.image.load('Images/p1back.png'), pygame.image.load('Images/p1back1.png'),
pygame.image.load('Images/p1back2.png'),
pygame.image.load('Images/p1back3.png'), pygame.image.load('Images/p1back2.png'),
pygame.image.load('Images/p1back1.png'), pygame.image.load('Images/p1back.png')]
p1walkBackshield = [pygame.image.load('Images/p1backshield.png'), pygame.image.load('Images/p1back1shield.png'),
pygame.image.load('Images/p1back2shield.png'),
pygame.image.load('Images/p1back3shield.png'), pygame.image.load('Images/p1back2shield.png'),
pygame.image.load('Images/p1back1shield.png'), pygame.image.load('Images/p1backshield.png')]
p2walkLeft = [pygame.image.load('Images/p2left.png'), pygame.image.load('Images/p2left1.png'),
pygame.image.load('Images/p2left2.png'),
pygame.image.load('Images/p2left3.png'), pygame.image.load('Images/p2left2.png'),
pygame.image.load('Images/p2left1.png'), pygame.image.load('Images/p2left.png')]
p2walkRight = [pygame.image.load('Images/p2right.png'), pygame.image.load('Images/p2right1.png'),
pygame.image.load('Images/p2right2.png'),
pygame.image.load('Images/p2right3.png'), pygame.image.load('Images/p2right2.png'),
pygame.image.load('Images/p2right1.png'), pygame.image.load('Images/p2right.png')]
p2walkFront = [pygame.image.load('Images/p2front.png'), pygame.image.load('Images/p2front1.png'),
pygame.image.load('Images/p2front2.png'),
pygame.image.load('Images/p2front3.png'), pygame.image.load('Images/p2front2.png'),
pygame.image.load('Images/p2front1.png'), pygame.image.load('Images/p2front.png')]
p2walkBack = [pygame.image.load('Images/p2back.png'), pygame.image.load('Images/p2back1.png'),
pygame.image.load('Images/p2back2.png'),
pygame.image.load('Images/p2back3.png'), pygame.image.load('Images/p2back2.png'),
pygame.image.load('Images/p2back1.png'), pygame.image.load('Images/p2back.png')]
p2walkLeftshield = [pygame.image.load('Images/p2leftshield.png'), pygame.image.load('Images/p2left1shield.png'),
pygame.image.load('Images/p2left2shield.png'),
pygame.image.load('Images/p2left3shield.png'), pygame.image.load('Images/p2left2shield.png'),
pygame.image.load('Images/p2left1shield.png'), pygame.image.load('Images/p2leftshield.png')]
p2walkRightshield = [pygame.image.load('Images/p2rightshield.png'), pygame.image.load('Images/p2right1shield.png'),
pygame.image.load('Images/p2right2shield.png'),
pygame.image.load('Images/p2right3shield.png'), pygame.image.load('Images/p2right2shield.png'),
pygame.image.load('Images/p2right1shield.png'), pygame.image.load('Images/p2rightshield.png')]
p2walkFrontshield = [pygame.image.load('Images/p2frontshield.png'), pygame.image.load('Images/p2front1shield.png'),
pygame.image.load('Images/p2front2shield.png'),
pygame.image.load('Images/p2front3shield.png'), pygame.image.load('Images/p2front2shield.png'),
pygame.image.load('Images/p2front1shield.png'), pygame.image.load('Images/p2frontshield.png')]
p2walkBackshield = [pygame.image.load('Images/p2backshield.png'), pygame.image.load('Images/p2back1shield.png'),
pygame.image.load('Images/p2back2shield.png'),
pygame.image.load('Images/p2back3shield.png'), pygame.image.load('Images/p2back2shield.png'),
pygame.image.load('Images/p2back1shield.png'), pygame.image.load('Images/p2backshield.png')]
# Constructor function
def __init__(self, x, y, number):
super().__init__()
self.image = pygame.Surface([24, 28])
self.image.fill((0,0,0))
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.front = True
self.back = False
self.left = False
self.right = False
self.number = number
self.change_x = 0
self.change_y = 0
self.walkCount = 0
self.walls = None
self.alive = True
self.canmove = True
self.deathCount = 0
self.gotomenu=False
self.speed=3
self.superspeed=False
self.superspeedcount=0
self.shield=False
self.shieldcount=0
self.megabombs=False
self.megabombcount = 0
def changespeed(self, x, y):
self.change_x += x
self.change_y += y
if self.superspeed and self.change_x==0 and self.change_y==0:
self.speed=6
if self.superspeedcount>=150:
self.superspeed = False
self.speed=3
self.superspeedcount=0
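    # Descriptive note: update() moves the player one axis at a time, snapping the
    # player flush against any wall it collides with after each axis move, and
    # updates the facing-direction flags (front/back/left/right) used by draw().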
def update(self):
if self.canmove:
self.rect.x += self.change_x
if self.change_x <0:
self.left=True
self.right=False
self.front=False
self.back=False
elif self.change_x >0:
self.left=False
self.right=True
self.front=False
self.back=False
block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)
for block in block_hit_list:
if self.change_x > 0:
self.rect.right = block.rect.left
else:
self.rect.left = block.rect.right
self.rect.y += self.change_y
if self.change_y <0:
self.left=False
self.right=False
self.front=False
self.back=True
elif self.change_y >0:
self.left=False
self.right=False
self.front=True
self.back=False
block_hit_list = pygame.sprite.spritecollide(self, self.walls, False)
for block in block_hit_list:
if self.change_y > 0:
self.rect.bottom = block.rect.top
else:
self.rect.top = block.rect.bottom
def draw(self, screen):
if self.number == 1:
screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (2, 0))
if self.alive:
if self.front:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1frontshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1front.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.back:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1backshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p1back.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.left:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1leftshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1left.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.right:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1rightshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p1right.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p1walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.number == 2:
screen.blit(pygame.image.load('Images2/'+str(self.megabombcount)+'megabombs.png'), (415, 0))
if self.alive:
if self.front:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2frontshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkFrontshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2front.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkFront[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.back:
if self.shield:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2backshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkBackshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_y == 0:
screen.blit(pygame.image.load('Images/p2back.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkBack[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.left:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2leftshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkLeftshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2left.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkLeft[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
elif self.right:
if self.shield:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2rightshield.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkRightshield[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
else:
if self.change_x == 0:
screen.blit(pygame.image.load('Images/p2right.png'),
(self.rect.x, self.rect.y))
else:
if self.walkCount + 1 >= 21:
self.walkCount = 0
screen.blit(self.p2walkRight[self.walkCount // 3], (self.rect.x, self.rect.y))
self.walkCount += 1
if self.alive == False and self.deathCount < 200:
screen.blit(self.death[self.deathCount // 10], (self.rect.x, self.rect.y))
self.deathCount += 1
if self.deathCount >= 200:
self.rect.x = 1000
self.gotomenu=True
def reset(self,x,y):
self.gotomenu = False
self.alive = True
self.deathCount = 0
self.rect.x = x
self.rect.y = y
self.canmove = True
self.front = True
self.change_x=0
self.change_y=0
self.superspeed=False
self.speed=3
self.shield=False
self.megabombs=False
self.megabombcount=0
class Wall(pygame.sprite.Sprite):
def __init__(self, x, y, width, height):
super().__init__()
self.image = pygame.Surface([width,height], pygame.SRCALPHA, 32)
image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
class powerup(pygame.sprite.Sprite):
superspeedanimation=[pygame.image.load('Images/superspeed1.png'), pygame.image.load('Images/superspeed2.png'),
pygame.image.load('Images/superspeed3.png'), pygame.image.load('Images/superspeed3.png'),
pygame.image.load('Images/superspeed2.png'), pygame.image.load('Images/superspeed1.png')]
shieldanimation = [pygame.image.load('Images/shield1.png'), pygame.image.load('Images/shield2.png'),
pygame.image.load('Images/shield3.png'), pygame.image.load('Images/shield3.png'),
pygame.image.load('Images/shield2.png'), pygame.image.load('Images/shield1.png')]
megabombanimation = [pygame.image.load('Images2/megabombicon1.png'), pygame.image.load('Images2/megabombicon2.png'),
pygame.image.load('Images2/megabombicon3.png'), pygame.image.load('Images2/megabombicon3.png'),
pygame.image.load('Images2/megabombicon2.png'), pygame.image.load('Images2/megabombicon1.png')]
def __init__(self, x, y, number):
super().__init__()
self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)
image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.number = number
self.spawntimer=0
self.respawntimer=0
self.exists=True
self.animationcount=0
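    # Descriptive note: number selects the power-up type rendered below
    # (1 = superspeed, 2 = shield, anything else = megabomb). The icon is only
    # drawn while exists is True and after spawntimer has passed 50 ticks.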
def draw(self, screen):
if self.number==1:
if self.exists and self.spawntimer>50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.superspeedanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
elif self.number==2:
if self.exists and self.spawntimer > 50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.shieldanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
else:
if self.exists and self.spawntimer > 50:
if self.animationcount + 1 >= 30:
self.animationcount = 0
screen.blit(self.megabombanimation[self.animationcount // 5], (self.rect.x, self.rect.y))
self.animationcount += 1
def reset(self):
self.spawntimer=0
self.respawntimer=0
self.exists=True
class bomb(pygame.sprite.Sprite):
def __init__(self, x, y, width, height, bomb_count, bomb_type):
super().__init__()
self.image = pygame.Surface([22, 28], pygame.SRCALPHA, 32)
image = self.image.convert_alpha()
self.rect = self.image.get_rect()
self.rect.y = y
self.rect.x = x
self.width = width
self.height = height
self.bomb_count = bomb_count
self.bomb_type = bomb_type
self.walls = None
self.leftcheck = self.rect.x - 32
self.rightcheck = self.rect.x + self.width
self.upcheck = self.rect.y - 32
self.downcheck = self.rect.y + self.height
self.expleft = True
self.doubleexpleft = True
self.expright = True
self.doubleexpright = True
self.expup = True
self.doubleexpup = True
self.expdown = True
self.doubleexpdown = True
self.expboxlist = []
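    # Descriptive note: draw() animates the bomb fuse in three stages (bomb_count
    # thresholds 30/60/90) and then renders the explosion. Walls suppress the blast
    # in each blocked direction; bomb_type 1 (the megabomb) reaches two tiles out
    # instead of one. Each explosion tile's Rect is appended to expboxlist,
    # presumably consumed elsewhere for hit detection.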
def draw(self, screen):
if self.bomb_count < 30:
if self.bomb_type==0:
screen.blit(pygame.image.load('Images/bomb3.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb3.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 60:
if self.bomb_type == 0:
screen.blit(pygame.image.load('Images/bomb2.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb2.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 90:
if self.bomb_type == 0:
screen.blit(pygame.image.load('Images/bomb1.png'), (self.rect.x, self.rect.y))
else:
screen.blit(pygame.image.load('Images2/megabomb1.png'), (self.rect.x, self.rect.y))
elif self.bomb_count < 120:
if self.bomb_type==0:
for i in self.walls:
if i.rect.collidepoint(self.leftcheck,self.rect.y):
self.expleft = False
if i.rect.collidepoint(self.rightcheck,self.rect.y):
self.expright = False
if i.rect.collidepoint(self.rect.x,self.upcheck):
self.expup = False
if i.rect.collidepoint(self.rect.x,self.downcheck):
self.expdown = False
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32))
if self.expleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck+16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck+16, self.rect.y, 32, 32))
if self.expright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck-16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck-16, self.rect.y, 32, 32))
if self.expup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck+16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck+16, 32, 32))
if self.expdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck-16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck-16, 32, 32))
elif self.bomb_type==1:
for i in self.walls:
if i.rect.collidepoint(self.leftcheck, self.rect.y):
self.expleft = False
if i.rect.collidepoint(self.leftcheck-32, self.rect.y):
self.doubleexpleft = False
if i.rect.collidepoint(self.rightcheck, self.rect.y):
self.expright = False
if i.rect.collidepoint(self.rightcheck+32, self.rect.y):
self.doubleexpright = False
if i.rect.collidepoint(self.rect.x, self.upcheck):
self.expup = False
if i.rect.collidepoint(self.rect.x, self.upcheck-32):
self.doubleexpup = False
if i.rect.collidepoint(self.rect.x, self.downcheck):
self.expdown = False
if i.rect.collidepoint(self.rect.x, self.downcheck+32):
self.doubleexpdown = False
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rect.x, self.rect.y, 32, 32))
if self.expleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck + 16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck + 16, self.rect.y, 32, 32))
if self.doubleexpleft:
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-32, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck-32, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.leftcheck-16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.leftcheck-16, self.rect.y, 32, 32))
if self.expright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck - 16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck - 16, self.rect.y, 32, 32))
if self.doubleexpright:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+32, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck+32, self.rect.y, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rightcheck+16, self.rect.y))
self.expboxlist.append(pygame.Rect(self.rightcheck+16, self.rect.y, 32, 32))
if self.expup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck + 16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck + 16, 32, 32))
if self.doubleexpup:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-32))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-32, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.upcheck-16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.upcheck-16, 32, 32))
if self.expdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck - 16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck - 16, 32, 32))
if self.doubleexpdown:
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+32))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+32, 32, 32))
screen.blit(pygame.image.load('Images/explosion.png'), (self.rect.x, self.downcheck+16))
self.expboxlist.append(pygame.Rect(self.rect.x, self.downcheck+16, 32, 32)) | [
"pygame.image.load",
"pygame.Rect",
"pygame.sprite.spritecollide",
"pygame.Surface"
] | [((69, 107), 'pygame.image.load', 'pygame.image.load', (['"""Images/death1.png"""'], {}), "('Images/death1.png')\n", (86, 107), False, 'import pygame\n'), ((109, 147), 'pygame.image.load', 'pygame.image.load', (['"""Images/death2.png"""'], {}), "('Images/death2.png')\n", (126, 147), False, 'import pygame\n'), ((163, 201), 'pygame.image.load', 'pygame.image.load', (['"""Images/death3.png"""'], {}), "('Images/death3.png')\n", (180, 201), False, 'import pygame\n'), ((217, 255), 'pygame.image.load', 'pygame.image.load', (['"""Images/death4.png"""'], {}), "('Images/death4.png')\n", (234, 255), False, 'import pygame\n'), ((257, 295), 'pygame.image.load', 'pygame.image.load', (['"""Images/death5.png"""'], {}), "('Images/death5.png')\n", (274, 295), False, 'import pygame\n'), ((311, 349), 'pygame.image.load', 'pygame.image.load', (['"""Images/death6.png"""'], {}), "('Images/death6.png')\n", (328, 349), False, 'import pygame\n'), ((365, 403), 'pygame.image.load', 'pygame.image.load', (['"""Images/death7.png"""'], {}), "('Images/death7.png')\n", (382, 403), False, 'import pygame\n'), ((405, 443), 'pygame.image.load', 'pygame.image.load', (['"""Images/death8.png"""'], {}), "('Images/death8.png')\n", (422, 443), False, 'import pygame\n'), ((459, 497), 'pygame.image.load', 'pygame.image.load', (['"""Images/death9.png"""'], {}), "('Images/death9.png')\n", (476, 497), False, 'import pygame\n'), ((513, 552), 'pygame.image.load', 'pygame.image.load', (['"""Images/death10.png"""'], {}), "('Images/death10.png')\n", (530, 552), False, 'import pygame\n'), ((554, 593), 'pygame.image.load', 'pygame.image.load', (['"""Images/death11.png"""'], {}), "('Images/death11.png')\n", (571, 593), False, 'import pygame\n'), ((609, 648), 'pygame.image.load', 'pygame.image.load', (['"""Images/death12.png"""'], {}), "('Images/death12.png')\n", (626, 648), False, 'import pygame\n'), ((664, 703), 'pygame.image.load', 'pygame.image.load', (['"""Images/death13.png"""'], {}), "('Images/death13.png')\n", (681, 703), False, 'import pygame\n'), ((705, 744), 'pygame.image.load', 'pygame.image.load', (['"""Images/death14.png"""'], {}), "('Images/death14.png')\n", (722, 744), False, 'import pygame\n'), ((760, 799), 'pygame.image.load', 'pygame.image.load', (['"""Images/death15.png"""'], {}), "('Images/death15.png')\n", (777, 799), False, 'import pygame\n'), ((815, 854), 'pygame.image.load', 'pygame.image.load', (['"""Images/death16.png"""'], {}), "('Images/death16.png')\n", (832, 854), False, 'import pygame\n'), ((856, 895), 'pygame.image.load', 'pygame.image.load', (['"""Images/death17.png"""'], {}), "('Images/death17.png')\n", (873, 895), False, 'import pygame\n'), ((911, 950), 'pygame.image.load', 'pygame.image.load', (['"""Images/death18.png"""'], {}), "('Images/death18.png')\n", (928, 950), False, 'import pygame\n'), ((951, 990), 'pygame.image.load', 'pygame.image.load', (['"""Images/death19.png"""'], {}), "('Images/death19.png')\n", (968, 990), False, 'import pygame\n'), ((1006, 1045), 'pygame.image.load', 'pygame.image.load', (['"""Images/death20.png"""'], {}), "('Images/death20.png')\n", (1023, 1045), False, 'import pygame\n'), ((1066, 1104), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left.png"""'], {}), "('Images/p1left.png')\n", (1083, 1104), False, 'import pygame\n'), ((1106, 1145), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1.png"""'], {}), "('Images/p1left1.png')\n", (1123, 1145), False, 'import pygame\n'), ((1161, 1200), 'pygame.image.load', 'pygame.image.load', 
(['"""Images/p1left2.png"""'], {}), "('Images/p1left2.png')\n", (1178, 1200), False, 'import pygame\n'), ((1216, 1255), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left3.png"""'], {}), "('Images/p1left3.png')\n", (1233, 1255), False, 'import pygame\n'), ((1257, 1296), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left2.png"""'], {}), "('Images/p1left2.png')\n", (1274, 1296), False, 'import pygame\n'), ((1312, 1351), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1.png"""'], {}), "('Images/p1left1.png')\n", (1329, 1351), False, 'import pygame\n'), ((1353, 1391), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left.png"""'], {}), "('Images/p1left.png')\n", (1370, 1391), False, 'import pygame\n'), ((1418, 1462), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1leftshield.png"""'], {}), "('Images/p1leftshield.png')\n", (1435, 1462), False, 'import pygame\n'), ((1464, 1509), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1shield.png"""'], {}), "('Images/p1left1shield.png')\n", (1481, 1509), False, 'import pygame\n'), ((1530, 1575), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left2shield.png"""'], {}), "('Images/p1left2shield.png')\n", (1547, 1575), False, 'import pygame\n'), ((1596, 1641), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left3shield.png"""'], {}), "('Images/p1left3shield.png')\n", (1613, 1641), False, 'import pygame\n'), ((1643, 1688), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left2shield.png"""'], {}), "('Images/p1left2shield.png')\n", (1660, 1688), False, 'import pygame\n'), ((1709, 1754), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left1shield.png"""'], {}), "('Images/p1left1shield.png')\n", (1726, 1754), False, 'import pygame\n'), ((1756, 1800), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1leftshield.png"""'], {}), "('Images/p1leftshield.png')\n", (1773, 1800), False, 'import pygame\n'), ((1822, 1861), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right.png"""'], {}), "('Images/p1right.png')\n", (1839, 1861), False, 'import pygame\n'), ((1863, 1903), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1.png"""'], {}), "('Images/p1right1.png')\n", (1880, 1903), False, 'import pygame\n'), ((1924, 1964), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right2.png"""'], {}), "('Images/p1right2.png')\n", (1941, 1964), False, 'import pygame\n'), ((1985, 2025), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right3.png"""'], {}), "('Images/p1right3.png')\n", (2002, 2025), False, 'import pygame\n'), ((2027, 2067), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right2.png"""'], {}), "('Images/p1right2.png')\n", (2044, 2067), False, 'import pygame\n'), ((2088, 2128), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1.png"""'], {}), "('Images/p1right1.png')\n", (2105, 2128), False, 'import pygame\n'), ((2130, 2169), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right.png"""'], {}), "('Images/p1right.png')\n", (2147, 2169), False, 'import pygame\n'), ((2197, 2242), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1rightshield.png"""'], {}), "('Images/p1rightshield.png')\n", (2214, 2242), False, 'import pygame\n'), ((2244, 2290), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1shield.png"""'], {}), "('Images/p1right1shield.png')\n", (2261, 2290), False, 'import pygame\n'), ((2312, 2358), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right2shield.png"""'], 
{}), "('Images/p1right2shield.png')\n", (2329, 2358), False, 'import pygame\n'), ((2380, 2426), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right3shield.png"""'], {}), "('Images/p1right3shield.png')\n", (2397, 2426), False, 'import pygame\n'), ((2428, 2474), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right2shield.png"""'], {}), "('Images/p1right2shield.png')\n", (2445, 2474), False, 'import pygame\n'), ((2496, 2542), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right1shield.png"""'], {}), "('Images/p1right1shield.png')\n", (2513, 2542), False, 'import pygame\n'), ((2544, 2589), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1rightshield.png"""'], {}), "('Images/p1rightshield.png')\n", (2561, 2589), False, 'import pygame\n'), ((2611, 2650), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front.png"""'], {}), "('Images/p1front.png')\n", (2628, 2650), False, 'import pygame\n'), ((2652, 2692), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1.png"""'], {}), "('Images/p1front1.png')\n", (2669, 2692), False, 'import pygame\n'), ((2714, 2754), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2.png"""'], {}), "('Images/p1front2.png')\n", (2731, 2754), False, 'import pygame\n'), ((2776, 2816), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front3.png"""'], {}), "('Images/p1front3.png')\n", (2793, 2816), False, 'import pygame\n'), ((2818, 2858), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2.png"""'], {}), "('Images/p1front2.png')\n", (2835, 2858), False, 'import pygame\n'), ((2880, 2920), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1.png"""'], {}), "('Images/p1front1.png')\n", (2897, 2920), False, 'import pygame\n'), ((2922, 2961), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front.png"""'], {}), "('Images/p1front.png')\n", (2939, 2961), False, 'import pygame\n'), ((2989, 3034), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1frontshield.png"""'], {}), "('Images/p1frontshield.png')\n", (3006, 3034), False, 'import pygame\n'), ((3036, 3082), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1shield.png"""'], {}), "('Images/p1front1shield.png')\n", (3053, 3082), False, 'import pygame\n'), ((3104, 3150), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2shield.png"""'], {}), "('Images/p1front2shield.png')\n", (3121, 3150), False, 'import pygame\n'), ((3172, 3218), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front3shield.png"""'], {}), "('Images/p1front3shield.png')\n", (3189, 3218), False, 'import pygame\n'), ((3220, 3266), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front2shield.png"""'], {}), "('Images/p1front2shield.png')\n", (3237, 3266), False, 'import pygame\n'), ((3288, 3334), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front1shield.png"""'], {}), "('Images/p1front1shield.png')\n", (3305, 3334), False, 'import pygame\n'), ((3336, 3381), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1frontshield.png"""'], {}), "('Images/p1frontshield.png')\n", (3353, 3381), False, 'import pygame\n'), ((3402, 3440), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back.png"""'], {}), "('Images/p1back.png')\n", (3419, 3440), False, 'import pygame\n'), ((3442, 3481), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1.png"""'], {}), "('Images/p1back1.png')\n", (3459, 3481), False, 'import pygame\n'), ((3503, 3542), 'pygame.image.load', 'pygame.image.load', 
(['"""Images/p1back2.png"""'], {}), "('Images/p1back2.png')\n", (3520, 3542), False, 'import pygame\n'), ((3564, 3603), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back3.png"""'], {}), "('Images/p1back3.png')\n", (3581, 3603), False, 'import pygame\n'), ((3605, 3644), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back2.png"""'], {}), "('Images/p1back2.png')\n", (3622, 3644), False, 'import pygame\n'), ((3666, 3705), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1.png"""'], {}), "('Images/p1back1.png')\n", (3683, 3705), False, 'import pygame\n'), ((3707, 3745), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back.png"""'], {}), "('Images/p1back.png')\n", (3724, 3745), False, 'import pygame\n'), ((3772, 3816), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1backshield.png"""'], {}), "('Images/p1backshield.png')\n", (3789, 3816), False, 'import pygame\n'), ((3818, 3863), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1shield.png"""'], {}), "('Images/p1back1shield.png')\n", (3835, 3863), False, 'import pygame\n'), ((3884, 3929), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back2shield.png"""'], {}), "('Images/p1back2shield.png')\n", (3901, 3929), False, 'import pygame\n'), ((3950, 3995), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back3shield.png"""'], {}), "('Images/p1back3shield.png')\n", (3967, 3995), False, 'import pygame\n'), ((3997, 4042), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back2shield.png"""'], {}), "('Images/p1back2shield.png')\n", (4014, 4042), False, 'import pygame\n'), ((4063, 4108), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back1shield.png"""'], {}), "('Images/p1back1shield.png')\n", (4080, 4108), False, 'import pygame\n'), ((4110, 4154), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1backshield.png"""'], {}), "('Images/p1backshield.png')\n", (4127, 4154), False, 'import pygame\n'), ((4175, 4213), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left.png"""'], {}), "('Images/p2left.png')\n", (4192, 4213), False, 'import pygame\n'), ((4215, 4254), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1.png"""'], {}), "('Images/p2left1.png')\n", (4232, 4254), False, 'import pygame\n'), ((4275, 4314), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2.png"""'], {}), "('Images/p2left2.png')\n", (4292, 4314), False, 'import pygame\n'), ((4335, 4374), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left3.png"""'], {}), "('Images/p2left3.png')\n", (4352, 4374), False, 'import pygame\n'), ((4376, 4415), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2.png"""'], {}), "('Images/p2left2.png')\n", (4393, 4415), False, 'import pygame\n'), ((4436, 4475), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1.png"""'], {}), "('Images/p2left1.png')\n", (4453, 4475), False, 'import pygame\n'), ((4477, 4515), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left.png"""'], {}), "('Images/p2left.png')\n", (4494, 4515), False, 'import pygame\n'), ((4537, 4576), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right.png"""'], {}), "('Images/p2right.png')\n", (4554, 4576), False, 'import pygame\n'), ((4578, 4618), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1.png"""'], {}), "('Images/p2right1.png')\n", (4595, 4618), False, 'import pygame\n'), ((4640, 4680), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2.png"""'], {}), "('Images/p2right2.png')\n", (4657, 
4680), False, 'import pygame\n'), ((4702, 4742), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right3.png"""'], {}), "('Images/p2right3.png')\n", (4719, 4742), False, 'import pygame\n'), ((4744, 4784), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2.png"""'], {}), "('Images/p2right2.png')\n", (4761, 4784), False, 'import pygame\n'), ((4806, 4846), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1.png"""'], {}), "('Images/p2right1.png')\n", (4823, 4846), False, 'import pygame\n'), ((4848, 4887), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right.png"""'], {}), "('Images/p2right.png')\n", (4865, 4887), False, 'import pygame\n'), ((4909, 4948), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front.png"""'], {}), "('Images/p2front.png')\n", (4926, 4948), False, 'import pygame\n'), ((4950, 4990), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1.png"""'], {}), "('Images/p2front1.png')\n", (4967, 4990), False, 'import pygame\n'), ((5012, 5052), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2.png"""'], {}), "('Images/p2front2.png')\n", (5029, 5052), False, 'import pygame\n'), ((5074, 5114), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front3.png"""'], {}), "('Images/p2front3.png')\n", (5091, 5114), False, 'import pygame\n'), ((5116, 5156), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2.png"""'], {}), "('Images/p2front2.png')\n", (5133, 5156), False, 'import pygame\n'), ((5178, 5218), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1.png"""'], {}), "('Images/p2front1.png')\n", (5195, 5218), False, 'import pygame\n'), ((5220, 5259), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front.png"""'], {}), "('Images/p2front.png')\n", (5237, 5259), False, 'import pygame\n'), ((5280, 5318), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back.png"""'], {}), "('Images/p2back.png')\n", (5297, 5318), False, 'import pygame\n'), ((5320, 5359), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1.png"""'], {}), "('Images/p2back1.png')\n", (5337, 5359), False, 'import pygame\n'), ((5380, 5419), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back2.png"""'], {}), "('Images/p2back2.png')\n", (5397, 5419), False, 'import pygame\n'), ((5440, 5479), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back3.png"""'], {}), "('Images/p2back3.png')\n", (5457, 5479), False, 'import pygame\n'), ((5481, 5520), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back2.png"""'], {}), "('Images/p2back2.png')\n", (5498, 5520), False, 'import pygame\n'), ((5541, 5580), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1.png"""'], {}), "('Images/p2back1.png')\n", (5558, 5580), False, 'import pygame\n'), ((5582, 5620), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back.png"""'], {}), "('Images/p2back.png')\n", (5599, 5620), False, 'import pygame\n'), ((5647, 5691), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2leftshield.png"""'], {}), "('Images/p2leftshield.png')\n", (5664, 5691), False, 'import pygame\n'), ((5693, 5738), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1shield.png"""'], {}), "('Images/p2left1shield.png')\n", (5710, 5738), False, 'import pygame\n'), ((5759, 5804), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2shield.png"""'], {}), "('Images/p2left2shield.png')\n", (5776, 5804), False, 'import pygame\n'), ((5825, 5870), 'pygame.image.load', 'pygame.image.load', 
(['"""Images/p2left3shield.png"""'], {}), "('Images/p2left3shield.png')\n", (5842, 5870), False, 'import pygame\n'), ((5872, 5917), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left2shield.png"""'], {}), "('Images/p2left2shield.png')\n", (5889, 5917), False, 'import pygame\n'), ((5938, 5983), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left1shield.png"""'], {}), "('Images/p2left1shield.png')\n", (5955, 5983), False, 'import pygame\n'), ((5985, 6029), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2leftshield.png"""'], {}), "('Images/p2leftshield.png')\n", (6002, 6029), False, 'import pygame\n'), ((6057, 6102), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2rightshield.png"""'], {}), "('Images/p2rightshield.png')\n", (6074, 6102), False, 'import pygame\n'), ((6104, 6150), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1shield.png"""'], {}), "('Images/p2right1shield.png')\n", (6121, 6150), False, 'import pygame\n'), ((6172, 6218), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2shield.png"""'], {}), "('Images/p2right2shield.png')\n", (6189, 6218), False, 'import pygame\n'), ((6240, 6286), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right3shield.png"""'], {}), "('Images/p2right3shield.png')\n", (6257, 6286), False, 'import pygame\n'), ((6288, 6334), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right2shield.png"""'], {}), "('Images/p2right2shield.png')\n", (6305, 6334), False, 'import pygame\n'), ((6356, 6402), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right1shield.png"""'], {}), "('Images/p2right1shield.png')\n", (6373, 6402), False, 'import pygame\n'), ((6404, 6449), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2rightshield.png"""'], {}), "('Images/p2rightshield.png')\n", (6421, 6449), False, 'import pygame\n'), ((6477, 6522), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2frontshield.png"""'], {}), "('Images/p2frontshield.png')\n", (6494, 6522), False, 'import pygame\n'), ((6524, 6570), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1shield.png"""'], {}), "('Images/p2front1shield.png')\n", (6541, 6570), False, 'import pygame\n'), ((6592, 6638), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2shield.png"""'], {}), "('Images/p2front2shield.png')\n", (6609, 6638), False, 'import pygame\n'), ((6660, 6706), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front3shield.png"""'], {}), "('Images/p2front3shield.png')\n", (6677, 6706), False, 'import pygame\n'), ((6708, 6754), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front2shield.png"""'], {}), "('Images/p2front2shield.png')\n", (6725, 6754), False, 'import pygame\n'), ((6776, 6822), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front1shield.png"""'], {}), "('Images/p2front1shield.png')\n", (6793, 6822), False, 'import pygame\n'), ((6824, 6869), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2frontshield.png"""'], {}), "('Images/p2frontshield.png')\n", (6841, 6869), False, 'import pygame\n'), ((6896, 6940), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2backshield.png"""'], {}), "('Images/p2backshield.png')\n", (6913, 6940), False, 'import pygame\n'), ((6942, 6987), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1shield.png"""'], {}), "('Images/p2back1shield.png')\n", (6959, 6987), False, 'import pygame\n'), ((7008, 7053), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back2shield.png"""'], {}), 
"('Images/p2back2shield.png')\n", (7025, 7053), False, 'import pygame\n'), ((7074, 7119), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back3shield.png"""'], {}), "('Images/p2back3shield.png')\n", (7091, 7119), False, 'import pygame\n'), ((7121, 7166), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back2shield.png"""'], {}), "('Images/p2back2shield.png')\n", (7138, 7166), False, 'import pygame\n'), ((7187, 7232), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back1shield.png"""'], {}), "('Images/p2back1shield.png')\n", (7204, 7232), False, 'import pygame\n'), ((7234, 7278), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2backshield.png"""'], {}), "('Images/p2backshield.png')\n", (7251, 7278), False, 'import pygame\n'), ((7397, 7421), 'pygame.Surface', 'pygame.Surface', (['[24, 28]'], {}), '([24, 28])\n', (7411, 7421), False, 'import pygame\n'), ((19857, 19909), 'pygame.Surface', 'pygame.Surface', (['[width, height]', 'pygame.SRCALPHA', '(32)'], {}), '([width, height], pygame.SRCALPHA, 32)\n', (19871, 19909), False, 'import pygame\n'), ((20112, 20155), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed1.png"""'], {}), "('Images/superspeed1.png')\n", (20129, 20155), False, 'import pygame\n'), ((20157, 20200), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed2.png"""'], {}), "('Images/superspeed2.png')\n", (20174, 20200), False, 'import pygame\n'), ((20216, 20259), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed3.png"""'], {}), "('Images/superspeed3.png')\n", (20233, 20259), False, 'import pygame\n'), ((20261, 20304), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed3.png"""'], {}), "('Images/superspeed3.png')\n", (20278, 20304), False, 'import pygame\n'), ((20320, 20363), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed2.png"""'], {}), "('Images/superspeed2.png')\n", (20337, 20363), False, 'import pygame\n'), ((20365, 20408), 'pygame.image.load', 'pygame.image.load', (['"""Images/superspeed1.png"""'], {}), "('Images/superspeed1.png')\n", (20382, 20408), False, 'import pygame\n'), ((20434, 20473), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield1.png"""'], {}), "('Images/shield1.png')\n", (20451, 20473), False, 'import pygame\n'), ((20475, 20514), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield2.png"""'], {}), "('Images/shield2.png')\n", (20492, 20514), False, 'import pygame\n'), ((20544, 20583), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield3.png"""'], {}), "('Images/shield3.png')\n", (20561, 20583), False, 'import pygame\n'), ((20585, 20624), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield3.png"""'], {}), "('Images/shield3.png')\n", (20602, 20624), False, 'import pygame\n'), ((20654, 20693), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield2.png"""'], {}), "('Images/shield2.png')\n", (20671, 20693), False, 'import pygame\n'), ((20695, 20734), 'pygame.image.load', 'pygame.image.load', (['"""Images/shield1.png"""'], {}), "('Images/shield1.png')\n", (20712, 20734), False, 'import pygame\n'), ((20762, 20808), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon1.png"""'], {}), "('Images2/megabombicon1.png')\n", (20779, 20808), False, 'import pygame\n'), ((20810, 20856), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon2.png"""'], {}), "('Images2/megabombicon2.png')\n", (20827, 20856), False, 'import pygame\n'), ((20882, 20928), 'pygame.image.load', 'pygame.image.load', 
(['"""Images2/megabombicon3.png"""'], {}), "('Images2/megabombicon3.png')\n", (20899, 20928), False, 'import pygame\n'), ((20930, 20976), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon3.png"""'], {}), "('Images2/megabombicon3.png')\n", (20947, 20976), False, 'import pygame\n'), ((21002, 21048), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon2.png"""'], {}), "('Images2/megabombicon2.png')\n", (21019, 21048), False, 'import pygame\n'), ((21050, 21096), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabombicon1.png"""'], {}), "('Images2/megabombicon1.png')\n", (21067, 21096), False, 'import pygame\n'), ((21187, 21232), 'pygame.Surface', 'pygame.Surface', (['[22, 28]', 'pygame.SRCALPHA', '(32)'], {}), '([22, 28], pygame.SRCALPHA, 32)\n', (21201, 21232), False, 'import pygame\n'), ((22768, 22813), 'pygame.Surface', 'pygame.Surface', (['[22, 28]', 'pygame.SRCALPHA', '(32)'], {}), '([22, 28], pygame.SRCALPHA, 32)\n', (22782, 22813), False, 'import pygame\n'), ((8927, 8979), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.walls', '(False)'], {}), '(self, self.walls, False)\n', (8954, 8979), False, 'import pygame\n'), ((9602, 9654), 'pygame.sprite.spritecollide', 'pygame.sprite.spritecollide', (['self', 'self.walls', '(False)'], {}), '(self, self.walls, False)\n', (9629, 9654), False, 'import pygame\n'), ((23713, 23750), 'pygame.image.load', 'pygame.image.load', (['"""Images/bomb3.png"""'], {}), "('Images/bomb3.png')\n", (23730, 23750), False, 'import pygame\n'), ((23828, 23870), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabomb3.png"""'], {}), "('Images2/megabomb3.png')\n", (23845, 23870), False, 'import pygame\n'), ((24002, 24039), 'pygame.image.load', 'pygame.image.load', (['"""Images/bomb2.png"""'], {}), "('Images/bomb2.png')\n", (24019, 24039), False, 'import pygame\n'), ((24117, 24159), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabomb2.png"""'], {}), "('Images2/megabomb2.png')\n", (24134, 24159), False, 'import pygame\n'), ((24291, 24328), 'pygame.image.load', 'pygame.image.load', (['"""Images/bomb1.png"""'], {}), "('Images/bomb1.png')\n", (24308, 24328), False, 'import pygame\n'), ((24406, 24448), 'pygame.image.load', 'pygame.image.load', (['"""Images2/megabomb1.png"""'], {}), "('Images2/megabomb1.png')\n", (24423, 24448), False, 'import pygame\n'), ((10220, 10265), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1frontshield.png"""'], {}), "('Images/p1frontshield.png')\n", (10237, 10265), False, 'import pygame\n'), ((10756, 10795), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1front.png"""'], {}), "('Images/p1front.png')\n", (10773, 10795), False, 'import pygame\n'), ((25091, 25132), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25108, 25132), False, 'import pygame\n'), ((25202, 25247), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.rect.y', '(32)', '(32)'], {}), '(self.rect.x, self.rect.y, 32, 32)\n', (25213, 25247), False, 'import pygame\n'), ((11323, 11367), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1backshield.png"""'], {}), "('Images/p1backshield.png')\n", (11340, 11367), False, 'import pygame\n'), ((11857, 11895), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1back.png"""'], {}), "('Images/p1back.png')\n", (11874, 11895), False, 'import pygame\n'), ((14790, 14835), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2frontshield.png"""'], {}), 
"('Images/p2frontshield.png')\n", (14807, 14835), False, 'import pygame\n'), ((15326, 15365), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2front.png"""'], {}), "('Images/p2front.png')\n", (15343, 15365), False, 'import pygame\n'), ((25316, 25357), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25333, 25357), False, 'import pygame\n'), ((25434, 25482), 'pygame.Rect', 'pygame.Rect', (['self.leftcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck, self.rect.y, 32, 32)\n', (25445, 25482), False, 'import pygame\n'), ((25517, 25558), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25534, 25558), False, 'import pygame\n'), ((25638, 25691), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck + 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck + 16, self.rect.y, 32, 32)\n', (25649, 25691), False, 'import pygame\n'), ((25759, 25800), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25776, 25800), False, 'import pygame\n'), ((25878, 25927), 'pygame.Rect', 'pygame.Rect', (['self.rightcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck, self.rect.y, 32, 32)\n', (25889, 25927), False, 'import pygame\n'), ((25962, 26003), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (25979, 26003), False, 'import pygame\n'), ((26084, 26138), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck - 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck - 16, self.rect.y, 32, 32)\n', (26095, 26138), False, 'import pygame\n'), ((26203, 26244), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26220, 26244), False, 'import pygame\n'), ((26319, 26365), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.upcheck', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck, 32, 32)\n', (26330, 26365), False, 'import pygame\n'), ((26400, 26441), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26417, 26441), False, 'import pygame\n'), ((26519, 26570), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck + 16)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck + 16, 32, 32)\n', (26530, 26570), False, 'import pygame\n'), ((26637, 26678), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26654, 26678), False, 'import pygame\n'), ((26755, 26803), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.downcheck', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck, 32, 32)\n', (26766, 26803), False, 'import pygame\n'), ((26838, 26879), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (26855, 26879), False, 'import pygame\n'), ((26959, 27012), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.downcheck - 16)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck - 16, 32, 32)\n', (26970, 27012), False, 'import pygame\n'), ((28124, 28165), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28141, 28165), False, 'import pygame\n'), ((28235, 28280), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.rect.y', '(32)', '(32)'], {}), '(self.rect.x, self.rect.y, 32, 32)\n', (28246, 28280), False, 'import pygame\n'), ((12422, 12466), 
'pygame.image.load', 'pygame.image.load', (['"""Images/p1leftshield.png"""'], {}), "('Images/p1leftshield.png')\n", (12439, 12466), False, 'import pygame\n'), ((12956, 12994), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1left.png"""'], {}), "('Images/p1left.png')\n", (12973, 12994), False, 'import pygame\n'), ((15893, 15937), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2backshield.png"""'], {}), "('Images/p2backshield.png')\n", (15910, 15937), False, 'import pygame\n'), ((16427, 16465), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2back.png"""'], {}), "('Images/p2back.png')\n", (16444, 16465), False, 'import pygame\n'), ((28349, 28390), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28366, 28390), False, 'import pygame\n'), ((28467, 28515), 'pygame.Rect', 'pygame.Rect', (['self.leftcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck, self.rect.y, 32, 32)\n', (28478, 28515), False, 'import pygame\n'), ((28550, 28591), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28567, 28591), False, 'import pygame\n'), ((28673, 28726), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck + 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck + 16, self.rect.y, 32, 32)\n', (28684, 28726), False, 'import pygame\n'), ((29270, 29311), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29287, 29311), False, 'import pygame\n'), ((29389, 29438), 'pygame.Rect', 'pygame.Rect', (['self.rightcheck', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck, self.rect.y, 32, 32)\n', (29400, 29438), False, 'import pygame\n'), ((29473, 29514), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29490, 29514), False, 'import pygame\n'), ((29597, 29651), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck - 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck - 16, self.rect.y, 32, 32)\n', (29608, 29651), False, 'import pygame\n'), ((30197, 30238), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30214, 30238), False, 'import pygame\n'), ((30313, 30359), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.upcheck', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck, 32, 32)\n', (30324, 30359), False, 'import pygame\n'), ((30394, 30435), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30411, 30435), False, 'import pygame\n'), ((30515, 30566), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck + 16)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck + 16, 32, 32)\n', (30526, 30566), False, 'import pygame\n'), ((31099, 31140), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31116, 31140), False, 'import pygame\n'), ((31217, 31265), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 'self.downcheck', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck, 32, 32)\n', (31228, 31265), False, 'import pygame\n'), ((31300, 31341), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31317, 31341), False, 'import pygame\n'), ((31423, 31476), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.downcheck - 16)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck - 16, 32, 32)\n', (31434, 
31476), False, 'import pygame\n'), ((13522, 13567), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1rightshield.png"""'], {}), "('Images/p1rightshield.png')\n", (13539, 13567), False, 'import pygame\n'), ((14058, 14097), 'pygame.image.load', 'pygame.image.load', (['"""Images/p1right.png"""'], {}), "('Images/p1right.png')\n", (14075, 14097), False, 'import pygame\n'), ((16992, 17036), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2leftshield.png"""'], {}), "('Images/p2leftshield.png')\n", (17009, 17036), False, 'import pygame\n'), ((17526, 17564), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2left.png"""'], {}), "('Images/p2left.png')\n", (17543, 17564), False, 'import pygame\n'), ((28809, 28850), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (28826, 28850), False, 'import pygame\n'), ((28934, 28987), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck - 32)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck - 32, self.rect.y, 32, 32)\n', (28945, 28987), False, 'import pygame\n'), ((29024, 29065), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29041, 29065), False, 'import pygame\n'), ((29149, 29202), 'pygame.Rect', 'pygame.Rect', (['(self.leftcheck - 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.leftcheck - 16, self.rect.y, 32, 32)\n', (29160, 29202), False, 'import pygame\n'), ((29735, 29776), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29752, 29776), False, 'import pygame\n'), ((29861, 29915), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck + 32)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck + 32, self.rect.y, 32, 32)\n', (29872, 29915), False, 'import pygame\n'), ((29952, 29993), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (29969, 29993), False, 'import pygame\n'), ((30078, 30132), 'pygame.Rect', 'pygame.Rect', (['(self.rightcheck + 16)', 'self.rect.y', '(32)', '(32)'], {}), '(self.rightcheck + 16, self.rect.y, 32, 32)\n', (30089, 30132), False, 'import pygame\n'), ((30647, 30688), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30664, 30688), False, 'import pygame\n'), ((30770, 30821), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck - 32)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck - 32, 32, 32)\n', (30781, 30821), False, 'import pygame\n'), ((30858, 30899), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (30875, 30899), False, 'import pygame\n'), ((30981, 31032), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.upcheck - 16)', '(32)', '(32)'], {}), '(self.rect.x, self.upcheck - 16, 32, 32)\n', (30992, 31032), False, 'import pygame\n'), ((31559, 31600), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31576, 31600), False, 'import pygame\n'), ((31684, 31737), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', '(self.downcheck + 32)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck + 32, 32, 32)\n', (31695, 31737), False, 'import pygame\n'), ((31774, 31815), 'pygame.image.load', 'pygame.image.load', (['"""Images/explosion.png"""'], {}), "('Images/explosion.png')\n", (31791, 31815), False, 'import pygame\n'), ((31899, 31952), 'pygame.Rect', 'pygame.Rect', (['self.rect.x', 
'(self.downcheck + 16)', '(32)', '(32)'], {}), '(self.rect.x, self.downcheck + 16, 32, 32)\n', (31910, 31952), False, 'import pygame\n'), ((18092, 18137), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2rightshield.png"""'], {}), "('Images/p2rightshield.png')\n", (18109, 18137), False, 'import pygame\n'), ((18628, 18667), 'pygame.image.load', 'pygame.image.load', (['"""Images/p2right.png"""'], {}), "('Images/p2right.png')\n", (18645, 18667), False, 'import pygame\n')] |
"""A file containing representations of install configurations.
The core Data representation for installSynApps. An InstallConfiguration object
is parsed from a configuration, and is then used throughout the build process.
InjectorFile objects are used for representing text that needs to be injected
into configuration files prior to builds.
"""
import os
import installSynApps
from installSynApps.data_model.install_module import InstallModule as IM
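# Illustrative usage sketch (not part of the original module; the paths and the
# module argument below are placeholders, not values mandated by installSynApps):
#
#   config = InstallConfiguration('/epics/install', 'configure')
#   valid, err = config.is_install_valid()
#   config.add_module(IM(...))            # one call per module in the loaded config
#   core_module = config.get_module_by_name('ADCORE')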
class InstallConfiguration:
"""
Class that represents an Install Configuration for installSynApps
It stores the top level install_location, the path to the configuration files,
any OS specific configurations, and the actual list of modules that will be
installed.
Attributes
----------
install_location : str
path to top level install location
path_to_configure : str
path to configure folder of installSynApps
    modules : List of InstallModule
list of InstallModule objects representing the modules that will be installed
base_path : str
abs path to install location of EPICS base
support_path : str
abs path to install location of EPICS support modules
ad_path : str
abs path to install location of EPICS area detector
    motor_path : str
        abs path to install location of EPICS motor
    extensions_path : str
        abs path to install location of the EXTENSIONS module
module_map : dict of str -> int
        Dictionary mapping module names to their index in the build order
injector_files : list of InjectorFile
list of injector files loaded by install configuration
build_flags : list of list of str
list of macro-value pairs enforced at build time
"""
def __init__(self, install_location, path_to_configure):
"""Constructor for the InstallConfiguration object
"""
# Paths to configure and output locations
self.path_to_configure = path_to_configure
self.install_location = os.path.abspath(install_location)
# Modules loaded into install config
self.modules = []
# Dict that maps module name to index in module list for easier searching.
self.module_map = {}
self.injector_files = []
self.build_flags = []
        # Key install location paths used for relative path correction
self.base_path = None
self.support_path = None
self.ad_path = None
self.motor_path = None
self.extensions_path = None
def is_install_valid(self):
"""Function that checks if given install location is valid
Parameters
----------
self : InstallConfiguration
Self object
Returns
-------
bool
True if install location is valid, false otherwise
str
Error message if applicable, None otherwise
"""
valid = True
message = None
target = self.install_location
if not os.path.exists(target):
target = os.path.dirname(self.install_location)
if not os.path.exists(target):
valid = False
message = 'Install location and parent directory do not exist'
elif not os.access(target, os.W_OK | os.X_OK):
valid = False
message = 'Permission Error: {}'.format(target)
return valid, message
def add_module(self, module):
"""Function that adds a module to the InstallConfiguration module list
First checks if parameter is a valid InstallModule, then sets the config, and abs path,
then if it is one of the three key modules to track, sets the appropriate variables. Also,
        add the module to the map of modules, which keeps track of each module's
        position in the list/build order.
Parameters
----------
module : InstallModule
new installation module being added.
"""
if isinstance(module, IM):
# Updates the abs path
module.abs_path = self.convert_path_abs(module.rel_path)
# Key paths to track
if module.name == "EPICS_BASE":
self.base_path = module.abs_path
elif module.name == "SUPPORT":
self.support_path = module.abs_path
elif module.name == "AREA_DETECTOR":
self.ad_path = module.abs_path
elif module.name == "MOTOR":
self.motor_path = module.abs_path
elif module.name == "EXTENSIONS":
self.extensions_path = module.abs_path
self.module_map[module.name] = len(self.modules)
self.modules.append(module)
def add_injector_file(self, name, contents, target):
"""Function that adds a new injector file to the install_config object
Parameters
----------
name : str
name of the file
contents : str
The contents of the file
target : str
The target location file into which contents will be injected.
"""
new_injector = InjectorFile(self.path_to_configure, name, contents, target)
self.injector_files.append(new_injector)
def add_macros(self, macro_list):
"""Function that adds macro-value pairs to a list of macros
Parameters
----------
macro_list : list of [str, str]
list of new macros to append
"""
self.build_flags = self.build_flags + macro_list
def get_module_list(self):
"""Function that gets the list of modules in the configuration
Returns
-------
List
self.modules - list of modules to install in this install configuration
"""
return self.modules
def get_module_by_name(self, name):
"""Function that returns install module object given module name
Uses module name as a key in a dictionary to return reference to given module object.
Parameters
----------
name : str
Module name
Returns
-------
obj - InstallModule
Return matching module, or None if not found.
"""
if name in self.module_map.keys():
return self.modules[self.module_map[name]]
else:
return None
def get_module_build_index(self, name):
"""Function that returns the index in the build order for the module
Used for ensuring dependencies are built before lower level packages.
Parameters
----------
name : str
Module name
Returns
-------
int
Index of module in build order if found, otherwise -1
"""
if name in self.module_map.keys():
return self.module_map[name]
else:
return -1
def get_core_version(self):
"""Funciton that returns selected version of ADCore
"""
return self.get_module_by_name('ADCORE').version
def swap_module_positions(self, module_A, module_B):
"""Swaps build order of modules
Used to ensure dependencies are built before lower level packages
Parameters
----------
module_A : str
Name of first module
module_B : str
Name of second module
"""
index_A = self.get_module_build_index(module_A)
index_B = self.get_module_build_index(module_B)
if index_A >= 0 and index_B >= 0:
temp_A = self.get_module_by_name(module_B)
temp_B = self.get_module_by_name(module_A)
self.modules[index_A] = temp_A
self.modules[index_B] = temp_B
self.module_map[module_A] = index_B
self.module_map[module_B] = index_A
def convert_path_abs(self, rel_path):
"""Function that converts a given modules relative path to an absolute path
If the macro name can be found in the list of accounted for modules, replace it with that module's absolute path
Parameters
----------
rel_path : str
The relative installation path for the given module
Returns
-------
str
The absolute installation path for the module. (Macros are replaced)
"""
temp = rel_path.split('/', 1)[-1]
if "$(INSTALL)" in rel_path and self.install_location != None:
return installSynApps.join_path(self.install_location, temp)
elif "$(EPICS_BASE)" in rel_path and self.base_path != None:
return installSynApps.join_path(self.base_path, temp)
elif "$(SUPPORT)" in rel_path and self.support_path != None:
return installSynApps.join_path(self.support_path, temp)
elif "$(AREA_DETECTOR)" in rel_path and self.ad_path != None:
return installSynApps.join_path(self.ad_path, temp)
elif "$(MOTOR)" in rel_path and self.motor_path != None:
return installSynApps.join_path(self.motor_path, temp)
elif "$(EXTENSIONS)" in rel_path and self.extensions_path != None:
return installSynApps.join_path(self.extensions_path, temp)
elif "$(" in rel_path:
macro_part = rel_path.split(')')[0]
rel_to = macro_part.split('(')[1]
rel_to_module = self.get_module_by_name(rel_to)
if rel_to_module is not None:
return installSynApps.join_path(rel_to_module.abs_path, temp)
return rel_path
def print_installation_info(self, fp = None):
"""Function that prints installation info
Prints list of all modules including clone/build/package information
Parameters
----------
fp = None : file pointer
Optional pointer to an external log file
"""
if fp == None:
print(self.get_printable_string().strip())
else:
fp.write(self.get_printable_string())
def get_printable_string(self):
"""Function that gets a toString for an InstallConfigurations
Returns
-------
str
A string representing the install configuration
"""
out = "--------------------------------\n"
out = out + "Install Location = {}\n".format(self.install_location)
out = out + "This Install Config is saved at {}\n".format(self.path_to_configure)
for module in self.modules:
if module.clone == 'YES':
out = out + module.get_printable_string()
return out
def get_module_names_list(self):
"""Function that gets list of modules being built
Returns
-------
list of str
list of module names that are set to build
"""
out = []
for module in self.modules:
if module.build == 'YES':
out.append(module.name)
return out
class InjectorFile:
"""Class that represents an injector file and stores its name, contents, and target
Injector file classes are used to represent data that needs to be appended to target files
at build time. Used to add to commonPlugins, commonPlugin_settings, etc.
TODO: This class can probably be abstracted into a simpler data structure (since its used as a struct anyway)
Attributes
----------
path_to_configure : str
path to the configure dir that houses this injector file
name : str
name of the file
contents : str
The contents of the file
target : str
The target location file into which contents will be injected.
"""
def __init__(self, path_to_configure, name, contents, target):
"""Constructor of InjectorFile class
"""
self.path_to_configure = path_to_configure
self.name = name
self.contents = contents
self.target = target
def generate_default_install_config(target_install_loc='/epics', update_versions=False, with_pva=True):
config = InstallConfiguration(target_install_loc, None)
y = 'YES'
n = 'NO'
gu = 'GIT_URL'
wu = 'WGET_URL'
base_org = 'https://github.com/epics-base/'
syn_org = 'https://github.com/EPICS-synApps/'
mod_org = 'https://github.com/epics-modules/'
ad_org = 'https://github.com/areaDetector/'
seq_rel = 'http://www-csr.bessy.de/control/SoftDist/sequencer/releases/'
psi_org = 'https://github.com/paulscherrerinstitute/'
# Add core modules that will generally always be built
config.add_module(IM("EPICS_BASE", "R7.0.3", "$(INSTALL)/base", gu, base_org, "epics-base", y, y, y))
config.add_module(IM("SUPPORT", "R6-1", "$(INSTALL)/support", gu, syn_org, "support", y, y, n))
config.add_module(IM("CONFIGURE", "R6-1", "$(SUPPORT)/configure", gu, syn_org, "configure", y, y, n))
config.add_module(IM("UTILS", "R6-1", "$(SUPPORT)/utils", gu, syn_org, "utils", y, y, n))
config.add_module(IM("SNCSEQ", "2.2.8", "$(SUPPORT)/seq", wu, seq_rel, "seq-2.2.8.tar.gz", y, y, y))
config.add_module(IM("IPAC", "2.15", "$(SUPPORT)/ipac", gu, mod_org, "ipac", y, y, y))
config.add_module(IM("ASYN", "R4-37", "$(SUPPORT)/asyn", gu, mod_org, "asyn", y, y, y))
config.add_module(IM("AUTOSAVE", "R5-10", "$(SUPPORT)/autosave", gu, mod_org, "autosave", y, y, y))
config.add_module(IM("BUSY", "R1-7-2", "$(SUPPORT)/busy", gu, mod_org, "busy", y, y, y))
config.add_module(IM("CALC", "R3-7-3", "$(SUPPORT)/calc", gu, mod_org, "calc", y, y, y))
config.add_module(IM("DEVIOCSTATS", "master", "$(SUPPORT)/iocStats", gu, mod_org, "iocStats", y, y, y))
config.add_module(IM("SSCAN", "R2-11-3", "$(SUPPORT)/sscan", gu, mod_org, "sscan", y, y, y))
config.add_module(IM("IPUNIDIG", "R2-11", "$(SUPPORT)/ipUnidig", gu, mod_org, "ipUnidig", y, y, y))
# Some modules that are commonly needed
config.add_module(IM("XSPRESS3", "master", "$(SUPPORT)/xspress3", gu, mod_org, "xspress3", y, y, y))
config.add_module(IM("MOTOR", "R7-1", "$(SUPPORT)/motor", gu, mod_org, "motor", y, y, y))
config.add_module(IM("QUADEM", "R9-3", "$(SUPPORT)/quadEM", gu, mod_org, "quadEM", y, y, y))
config.add_module(IM("STREAM", "2.8.10", "$(SUPPORT)/stream", gu, psi_org, "StreamDevice", y, y, y))
# AreaDetector and commonly used drivers
config.add_module(IM("AREA_DETECTOR", "R3-8", "$(SUPPORT)/areaDetector", gu, ad_org, "areaDetector", y, y, n))
config.add_module(IM("ADSUPPORT", "R1-9", "$(AREA_DETECTOR)/ADSupport", gu, ad_org, "ADSupport", y, y, y))
config.add_module(IM("ADCORE", "R3-8", "$(AREA_DETECTOR)/ADCore", gu, ad_org, "ADCore", y, y, y))
config.add_module(IM("ADPERKINELMER", "master", "$(AREA_DETECTOR)/ADPerkinElmer", gu, ad_org, "ADPerkinElmer", n, n, n))
config.add_module(IM("ADGENICAM", "master", "$(AREA_DETECTOR)/ADGenICam", gu, ad_org, "ADGenICam", n, n, n))
config.add_module(IM("ADANDOR3", "master", "$(AREA_DETECTOR)/ADAndor3", gu, ad_org, "ADAndor3", n, n, n))
config.add_module(IM("ADPROSILICA", "R2-5", "$(AREA_DETECTOR)/ADProsilica", gu, ad_org, "ADProsilica", n, n, n))
config.add_module(IM("ADSIMDETECTOR", "master", "$(AREA_DETECTOR)/ADSimDetector", gu, ad_org, "ADSimDetector", n, n, n))
config.add_module(IM("ADPILATUS", "R2-8", "$(AREA_DETECTOR)/ADPilatus", gu, ad_org, "ADPilatus", n, n, n))
config.add_module(IM("ADMERLIN", "master", "$(AREA_DETECTOR)/ADMerlin", gu, ad_org, "ADMerlin", n, n, n))
config.add_module(IM("ADARAVIS", "master", "$(AREA_DETECTOR)/ADAravis", gu, ad_org, "ADAravis", n, n, n))
config.add_module(IM("ADEIGER", "R2-6", "$(AREA_DETECTOR)/ADEiger", gu, ad_org, "ADEiger", n, n, n))
config.add_module(IM("ADVIMBA", "master", "$(AREA_DETECTOR)/ADVimba", gu, ad_org, "ADVimba", n, n, n))
config.add_module(IM("ADPOINTGREY", "master", "$(AREA_DETECTOR)/ADPointGrey", gu, ad_org, "ADPointGrey", n, n, n))
config.add_module(IM("ADANDOR", "R2-8", "$(AREA_DETECTOR)/ADAndor", gu, ad_org, "ADAndor", n, n, n))
config.add_module(IM("ADDEXELA", "R2-3", "$(AREA_DETECTOR)/ADDexela", gu, ad_org, "ADDexela", n, n, n))
config.add_module(IM("ADMYTHEN", "master", "$(AREA_DETECTOR)/ADMythen", gu, ad_org, "ADMythen", n, n, n))
config.add_module(IM("ADURL", "master", "$(AREA_DETECTOR)/ADURL", gu, ad_org, "ADURL", n, n, n))
common_plugins_str = 'dbLoadRecords("$(DEVIOCSTATS)/db/iocAdminSoft.db", "IOC=$(PREFIX)")\n'
autosave_str = 'file "sseqRecord_settings.req", P=$(P), S=AcquireSequence\n'
if with_pva:
autosave_str += 'file "NDPva_settings.req", P=$(P), R=Pva1:\n'
common_plugins_str += 'NDPvaConfigure("PVA1", $(QSIZE), 0, "$(PORT)", 0, $(PREFIX)Pva1:Image, 0, 0, 0)\n' \
'dbLoadRecords("NDPva.template", "P=$(PREFIX),R=Pva1:, PORT=PVA1,ADDR=0,TIMEOUT=1,NDARRAY_PORT=$(PORT)")\n' \
'# Must start PVA server if this is enabled\n' \
            'startPVAServer\n'
config.add_injector_file('PLUGIN_CONFIG', common_plugins_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugins.cmd')
config.add_injector_file('AUTOSAVE_CONFIG', autosave_str, '$(AREA_DETECTOR)/ADCore/iocBoot/EXAMPLE_commonPlugin_settings.req')
if update_versions:
installSynApps.sync_all_module_tags(config)
return config
| [
"os.path.exists",
"os.access",
"os.path.dirname",
"installSynApps.data_model.install_module.InstallModule",
"installSynApps.join_path",
"os.path.abspath",
"installSynApps.sync_all_module_tags"
] | [((1937, 1970), 'os.path.abspath', 'os.path.abspath', (['install_location'], {}), '(install_location)\n', (1952, 1970), False, 'import os\n'), ((12720, 12806), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""EPICS_BASE"""', '"""R7.0.3"""', '"""$(INSTALL)/base"""', 'gu', 'base_org', '"""epics-base"""', 'y', 'y', 'y'], {}), "('EPICS_BASE', 'R7.0.3', '$(INSTALL)/base', gu, base_org, 'epics-base', y,\n y, y)\n", (12722, 12806), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((12840, 12916), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""SUPPORT"""', '"""R6-1"""', '"""$(INSTALL)/support"""', 'gu', 'syn_org', '"""support"""', 'y', 'y', 'n'], {}), "('SUPPORT', 'R6-1', '$(INSTALL)/support', gu, syn_org, 'support', y, y, n)\n", (12842, 12916), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((12958, 13044), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""CONFIGURE"""', '"""R6-1"""', '"""$(SUPPORT)/configure"""', 'gu', 'syn_org', '"""configure"""', 'y', 'y', 'n'], {}), "('CONFIGURE', 'R6-1', '$(SUPPORT)/configure', gu, syn_org, 'configure', y,\n y, n)\n", (12960, 13044), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13077, 13147), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""UTILS"""', '"""R6-1"""', '"""$(SUPPORT)/utils"""', 'gu', 'syn_org', '"""utils"""', 'y', 'y', 'n'], {}), "('UTILS', 'R6-1', '$(SUPPORT)/utils', gu, syn_org, 'utils', y, y, n)\n", (13079, 13147), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13195, 13280), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""SNCSEQ"""', '"""2.2.8"""', '"""$(SUPPORT)/seq"""', 'wu', 'seq_rel', '"""seq-2.2.8.tar.gz"""', 'y', 'y', 'y'], {}), "('SNCSEQ', '2.2.8', '$(SUPPORT)/seq', wu, seq_rel, 'seq-2.2.8.tar.gz', y,\n y, y)\n", (13197, 13280), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13321, 13388), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""IPAC"""', '"""2.15"""', '"""$(SUPPORT)/ipac"""', 'gu', 'mod_org', '"""ipac"""', 'y', 'y', 'y'], {}), "('IPAC', '2.15', '$(SUPPORT)/ipac', gu, mod_org, 'ipac', y, y, y)\n", (13323, 13388), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13439, 13507), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ASYN"""', '"""R4-37"""', '"""$(SUPPORT)/asyn"""', 'gu', 'mod_org', '"""asyn"""', 'y', 'y', 'y'], {}), "('ASYN', 'R4-37', '$(SUPPORT)/asyn', gu, mod_org, 'asyn', y, y, y)\n", (13441, 13507), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13557, 13642), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""AUTOSAVE"""', '"""R5-10"""', '"""$(SUPPORT)/autosave"""', 'gu', 'mod_org', '"""autosave"""', 'y', 'y', 'y'], {}), "('AUTOSAVE', 'R5-10', '$(SUPPORT)/autosave', gu, mod_org, 'autosave', y, y, y\n )\n", (13559, 13642), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13675, 13744), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""BUSY"""', '"""R1-7-2"""', '"""$(SUPPORT)/busy"""', 'gu', 'mod_org', '"""busy"""', 'y', 'y', 'y'], {}), "('BUSY', 'R1-7-2', '$(SUPPORT)/busy', gu, mod_org, 'busy', y, y, y)\n", (13677, 13744), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13793, 
13862), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""CALC"""', '"""R3-7-3"""', '"""$(SUPPORT)/calc"""', 'gu', 'mod_org', '"""calc"""', 'y', 'y', 'y'], {}), "('CALC', 'R3-7-3', '$(SUPPORT)/calc', gu, mod_org, 'calc', y, y, y)\n", (13795, 13862), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((13911, 13999), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""DEVIOCSTATS"""', '"""master"""', '"""$(SUPPORT)/iocStats"""', 'gu', 'mod_org', '"""iocStats"""', 'y', 'y', 'y'], {}), "('DEVIOCSTATS', 'master', '$(SUPPORT)/iocStats', gu, mod_org, 'iocStats',\n y, y, y)\n", (13913, 13999), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14029, 14102), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""SSCAN"""', '"""R2-11-3"""', '"""$(SUPPORT)/sscan"""', 'gu', 'mod_org', '"""sscan"""', 'y', 'y', 'y'], {}), "('SSCAN', 'R2-11-3', '$(SUPPORT)/sscan', gu, mod_org, 'sscan', y, y, y)\n", (14031, 14102), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14147, 14232), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""IPUNIDIG"""', '"""R2-11"""', '"""$(SUPPORT)/ipUnidig"""', 'gu', 'mod_org', '"""ipUnidig"""', 'y', 'y', 'y'], {}), "('IPUNIDIG', 'R2-11', '$(SUPPORT)/ipUnidig', gu, mod_org, 'ipUnidig', y, y, y\n )\n", (14149, 14232), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14310, 14395), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""XSPRESS3"""', '"""master"""', '"""$(SUPPORT)/xspress3"""', 'gu', 'mod_org', '"""xspress3"""', 'y', 'y', 'y'], {}), "('XSPRESS3', 'master', '$(SUPPORT)/xspress3', gu, mod_org, 'xspress3', y,\n y, y)\n", (14312, 14395), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14425, 14495), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""MOTOR"""', '"""R7-1"""', '"""$(SUPPORT)/motor"""', 'gu', 'mod_org', '"""motor"""', 'y', 'y', 'y'], {}), "('MOTOR', 'R7-1', '$(SUPPORT)/motor', gu, mod_org, 'motor', y, y, y)\n", (14427, 14495), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14537, 14610), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""QUADEM"""', '"""R9-3"""', '"""$(SUPPORT)/quadEM"""', 'gu', 'mod_org', '"""quadEM"""', 'y', 'y', 'y'], {}), "('QUADEM', 'R9-3', '$(SUPPORT)/quadEM', gu, mod_org, 'quadEM', y, y, y)\n", (14539, 14610), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14650, 14735), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""STREAM"""', '"""2.8.10"""', '"""$(SUPPORT)/stream"""', 'gu', 'psi_org', '"""StreamDevice"""', 'y', 'y', 'y'], {}), "('STREAM', '2.8.10', '$(SUPPORT)/stream', gu, psi_org, 'StreamDevice', y,\n y, y)\n", (14652, 14735), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14815, 14910), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""AREA_DETECTOR"""', '"""R3-8"""', '"""$(SUPPORT)/areaDetector"""', 'gu', 'ad_org', '"""areaDetector"""', 'y', 'y', 'n'], {}), "('AREA_DETECTOR', 'R3-8', '$(SUPPORT)/areaDetector', gu, ad_org,\n 'areaDetector', y, y, n)\n", (14817, 14910), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((14945, 15036), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADSUPPORT"""', 
'"""R1-9"""', '"""$(AREA_DETECTOR)/ADSupport"""', 'gu', 'ad_org', '"""ADSupport"""', 'y', 'y', 'y'], {}), "('ADSUPPORT', 'R1-9', '$(AREA_DETECTOR)/ADSupport', gu, ad_org,\n 'ADSupport', y, y, y)\n", (14947, 15036), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15075, 15153), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADCORE"""', '"""R3-8"""', '"""$(AREA_DETECTOR)/ADCore"""', 'gu', 'ad_org', '"""ADCore"""', 'y', 'y', 'y'], {}), "('ADCORE', 'R3-8', '$(AREA_DETECTOR)/ADCore', gu, ad_org, 'ADCore', y, y, y)\n", (15077, 15153), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15205, 15310), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPERKINELMER"""', '"""master"""', '"""$(AREA_DETECTOR)/ADPerkinElmer"""', 'gu', 'ad_org', '"""ADPerkinElmer"""', 'n', 'n', 'n'], {}), "('ADPERKINELMER', 'master', '$(AREA_DETECTOR)/ADPerkinElmer', gu, ad_org,\n 'ADPerkinElmer', n, n, n)\n", (15207, 15310), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15336, 15429), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADGENICAM"""', '"""master"""', '"""$(AREA_DETECTOR)/ADGenICam"""', 'gu', 'ad_org', '"""ADGenICam"""', 'n', 'n', 'n'], {}), "('ADGENICAM', 'master', '$(AREA_DETECTOR)/ADGenICam', gu, ad_org,\n 'ADGenICam', n, n, n)\n", (15338, 15429), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15466, 15556), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADANDOR3"""', '"""master"""', '"""$(AREA_DETECTOR)/ADAndor3"""', 'gu', 'ad_org', '"""ADAndor3"""', 'n', 'n', 'n'], {}), "('ADANDOR3', 'master', '$(AREA_DETECTOR)/ADAndor3', gu, ad_org,\n 'ADAndor3', n, n, n)\n", (15468, 15556), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15596, 15693), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPROSILICA"""', '"""R2-5"""', '"""$(AREA_DETECTOR)/ADProsilica"""', 'gu', 'ad_org', '"""ADProsilica"""', 'n', 'n', 'n'], {}), "('ADPROSILICA', 'R2-5', '$(AREA_DETECTOR)/ADProsilica', gu, ad_org,\n 'ADProsilica', n, n, n)\n", (15598, 15693), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15726, 15831), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADSIMDETECTOR"""', '"""master"""', '"""$(AREA_DETECTOR)/ADSimDetector"""', 'gu', 'ad_org', '"""ADSimDetector"""', 'n', 'n', 'n'], {}), "('ADSIMDETECTOR', 'master', '$(AREA_DETECTOR)/ADSimDetector', gu, ad_org,\n 'ADSimDetector', n, n, n)\n", (15728, 15831), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15857, 15948), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPILATUS"""', '"""R2-8"""', '"""$(AREA_DETECTOR)/ADPilatus"""', 'gu', 'ad_org', '"""ADPilatus"""', 'n', 'n', 'n'], {}), "('ADPILATUS', 'R2-8', '$(AREA_DETECTOR)/ADPilatus', gu, ad_org,\n 'ADPilatus', n, n, n)\n", (15859, 15948), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((15987, 16077), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADMERLIN"""', '"""master"""', '"""$(AREA_DETECTOR)/ADMerlin"""', 'gu', 'ad_org', '"""ADMerlin"""', 'n', 'n', 'n'], {}), "('ADMERLIN', 'master', '$(AREA_DETECTOR)/ADMerlin', gu, ad_org,\n 'ADMerlin', n, n, n)\n", (15989, 16077), True, 'from installSynApps.data_model.install_module import InstallModule 
as IM\n'), ((16117, 16207), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADARAVIS"""', '"""master"""', '"""$(AREA_DETECTOR)/ADAravis"""', 'gu', 'ad_org', '"""ADAravis"""', 'n', 'n', 'n'], {}), "('ADARAVIS', 'master', '$(AREA_DETECTOR)/ADAravis', gu, ad_org,\n 'ADAravis', n, n, n)\n", (16119, 16207), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16247, 16332), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADEIGER"""', '"""R2-6"""', '"""$(AREA_DETECTOR)/ADEiger"""', 'gu', 'ad_org', '"""ADEiger"""', 'n', 'n', 'n'], {}), "('ADEIGER', 'R2-6', '$(AREA_DETECTOR)/ADEiger', gu, ad_org, 'ADEiger', n,\n n, n)\n", (16249, 16332), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16377, 16464), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADVIMBA"""', '"""master"""', '"""$(AREA_DETECTOR)/ADVimba"""', 'gu', 'ad_org', '"""ADVimba"""', 'n', 'n', 'n'], {}), "('ADVIMBA', 'master', '$(AREA_DETECTOR)/ADVimba', gu, ad_org, 'ADVimba',\n n, n, n)\n", (16379, 16464), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16507, 16606), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADPOINTGREY"""', '"""master"""', '"""$(AREA_DETECTOR)/ADPointGrey"""', 'gu', 'ad_org', '"""ADPointGrey"""', 'n', 'n', 'n'], {}), "('ADPOINTGREY', 'master', '$(AREA_DETECTOR)/ADPointGrey', gu, ad_org,\n 'ADPointGrey', n, n, n)\n", (16509, 16606), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16637, 16722), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADANDOR"""', '"""R2-8"""', '"""$(AREA_DETECTOR)/ADAndor"""', 'gu', 'ad_org', '"""ADAndor"""', 'n', 'n', 'n'], {}), "('ADANDOR', 'R2-8', '$(AREA_DETECTOR)/ADAndor', gu, ad_org, 'ADAndor', n,\n n, n)\n", (16639, 16722), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16767, 16855), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADDEXELA"""', '"""R2-3"""', '"""$(AREA_DETECTOR)/ADDexela"""', 'gu', 'ad_org', '"""ADDexela"""', 'n', 'n', 'n'], {}), "('ADDEXELA', 'R2-3', '$(AREA_DETECTOR)/ADDexela', gu, ad_org, 'ADDexela',\n n, n, n)\n", (16769, 16855), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((16897, 16987), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADMYTHEN"""', '"""master"""', '"""$(AREA_DETECTOR)/ADMythen"""', 'gu', 'ad_org', '"""ADMythen"""', 'n', 'n', 'n'], {}), "('ADMYTHEN', 'master', '$(AREA_DETECTOR)/ADMythen', gu, ad_org,\n 'ADMythen', n, n, n)\n", (16899, 16987), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((17027, 17104), 'installSynApps.data_model.install_module.InstallModule', 'IM', (['"""ADURL"""', '"""master"""', '"""$(AREA_DETECTOR)/ADURL"""', 'gu', 'ad_org', '"""ADURL"""', 'n', 'n', 'n'], {}), "('ADURL', 'master', '$(AREA_DETECTOR)/ADURL', gu, ad_org, 'ADURL', n, n, n)\n", (17029, 17104), True, 'from installSynApps.data_model.install_module import InstallModule as IM\n'), ((18088, 18131), 'installSynApps.sync_all_module_tags', 'installSynApps.sync_all_module_tags', (['config'], {}), '(config)\n', (18123, 18131), False, 'import installSynApps\n'), ((2990, 3012), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (3004, 3012), False, 'import os\n'), ((3035, 3073), 'os.path.dirname', 'os.path.dirname', 
(['self.install_location'], {}), '(self.install_location)\n', (3050, 3073), False, 'import os\n'), ((3090, 3112), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (3104, 3112), False, 'import os\n'), ((8557, 8610), 'installSynApps.join_path', 'installSynApps.join_path', (['self.install_location', 'temp'], {}), '(self.install_location, temp)\n', (8581, 8610), False, 'import installSynApps\n'), ((3232, 3268), 'os.access', 'os.access', (['target', '(os.W_OK | os.X_OK)'], {}), '(target, os.W_OK | os.X_OK)\n', (3241, 3268), False, 'import os\n'), ((8699, 8745), 'installSynApps.join_path', 'installSynApps.join_path', (['self.base_path', 'temp'], {}), '(self.base_path, temp)\n', (8723, 8745), False, 'import installSynApps\n'), ((8834, 8883), 'installSynApps.join_path', 'installSynApps.join_path', (['self.support_path', 'temp'], {}), '(self.support_path, temp)\n', (8858, 8883), False, 'import installSynApps\n'), ((8973, 9017), 'installSynApps.join_path', 'installSynApps.join_path', (['self.ad_path', 'temp'], {}), '(self.ad_path, temp)\n', (8997, 9017), False, 'import installSynApps\n'), ((9102, 9149), 'installSynApps.join_path', 'installSynApps.join_path', (['self.motor_path', 'temp'], {}), '(self.motor_path, temp)\n', (9126, 9149), False, 'import installSynApps\n'), ((9244, 9296), 'installSynApps.join_path', 'installSynApps.join_path', (['self.extensions_path', 'temp'], {}), '(self.extensions_path, temp)\n', (9268, 9296), False, 'import installSynApps\n'), ((9547, 9601), 'installSynApps.join_path', 'installSynApps.join_path', (['rel_to_module.abs_path', 'temp'], {}), '(rel_to_module.abs_path, temp)\n', (9571, 9601), False, 'import installSynApps\n')] |
#!/usr/bin/env python2.7
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Starts a local DNS server for use in tests"""
import argparse
import sys
import yaml
import signal
import os
import threading
import time
import twisted
import twisted.internet
import twisted.internet.reactor
import twisted.internet.threads
import twisted.internet.defer
import twisted.internet.protocol
import twisted.names
import twisted.names.client
import twisted.names.dns
import twisted.names.server
from twisted.names import client, server, common, authority, dns
import argparse
import platform
_SERVER_HEALTH_CHECK_RECORD_NAME = 'health-check-local-dns-server-is-alive.resolver-tests.grpctestingexp' # missing end '.' for twisted syntax
_SERVER_HEALTH_CHECK_RECORD_DATA = '172.16.58.3'
class NoFileAuthority(authority.FileAuthority):
def __init__(self, soa, records):
# skip FileAuthority
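        # records are supplied directly in memory rather than parsed from a zone file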
common.ResolverBase.__init__(self)
self.soa = soa
self.records = records
def start_local_dns_server(args):
all_records = {}
def _push_record(name, r):
print('pushing record: |%s|' % name)
if all_records.get(name) is not None:
all_records[name].append(r)
return
all_records[name] = [r]
def _maybe_split_up_txt_data(name, txt_data, r_ttl):
start = 0
txt_data_list = []
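        # DNS stores TXT record data as character-strings of at most 255 bytes each,
        # so longer payloads are split into 255-byte chunks before being pushed.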
while len(txt_data[start:]) > 0:
next_read = len(txt_data[start:])
if next_read > 255:
next_read = 255
txt_data_list.append(txt_data[start:start + next_read])
start += next_read
_push_record(name, dns.Record_TXT(*txt_data_list, ttl=r_ttl))
with open(args.records_config_path) as config:
test_records_config = yaml.load(config)
common_zone_name = test_records_config['resolver_tests_common_zone_name']
for group in test_records_config['resolver_component_tests']:
for name in group['records'].keys():
for record in group['records'][name]:
r_type = record['type']
r_data = record['data']
r_ttl = int(record['TTL'])
record_full_name = '%s.%s' % (name, common_zone_name)
assert record_full_name[-1] == '.'
record_full_name = record_full_name[:-1]
if r_type == 'A':
_push_record(record_full_name,
dns.Record_A(r_data, ttl=r_ttl))
if r_type == 'AAAA':
_push_record(record_full_name,
dns.Record_AAAA(r_data, ttl=r_ttl))
if r_type == 'SRV':
p, w, port, target = r_data.split(' ')
p = int(p)
w = int(w)
port = int(port)
target_full_name = '%s.%s' % (target, common_zone_name)
r_data = '%s %s %s %s' % (p, w, port, target_full_name)
_push_record(
record_full_name,
dns.Record_SRV(p, w, port, target_full_name, ttl=r_ttl))
if r_type == 'TXT':
_maybe_split_up_txt_data(record_full_name, r_data, r_ttl)
    # Add an optional IPv4 record if specified
if args.add_a_record:
extra_host, extra_host_ipv4 = args.add_a_record.split(':')
_push_record(extra_host, dns.Record_A(extra_host_ipv4, ttl=0))
# Server health check record
_push_record(_SERVER_HEALTH_CHECK_RECORD_NAME,
dns.Record_A(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0))
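    # Tests can resolve this well-known name to confirm the server is up and answering queries.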
soa_record = dns.Record_SOA(mname=common_zone_name)
test_domain_com = NoFileAuthority(
soa=(common_zone_name, soa_record),
records=all_records,
)
server = twisted.names.server.DNSServerFactory(
authorities=[test_domain_com], verbose=2)
server.noisy = 2
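    # Serve DNS over both TCP and UDP on the same port.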
twisted.internet.reactor.listenTCP(args.port, server)
dns_proto = twisted.names.dns.DNSDatagramProtocol(server)
dns_proto.noisy = 2
twisted.internet.reactor.listenUDP(args.port, dns_proto)
print('starting local dns server on 127.0.0.1:%s' % args.port)
print('starting twisted.internet.reactor')
twisted.internet.reactor.suggestThreadPoolSize(1)
twisted.internet.reactor.run()
def _quit_on_signal(signum, _frame):
print('Received SIGNAL %d. Quitting with exit code 0' % signum)
twisted.internet.reactor.stop()
sys.stdout.flush()
sys.exit(0)
def flush_stdout_loop():
num_timeouts_so_far = 0
sleep_time = 1
# Prevent zombies. Tests that use this server are short-lived.
max_timeouts = 60 * 10
while num_timeouts_so_far < max_timeouts:
sys.stdout.flush()
time.sleep(sleep_time)
num_timeouts_so_far += 1
    print('Process timeout reached, or cancelled. Exiting 0.')
os.kill(os.getpid(), signal.SIGTERM)
def main():
argp = argparse.ArgumentParser(
description='Local DNS Server for resolver tests')
argp.add_argument('-p',
'--port',
default=None,
type=int,
help='Port for DNS server to listen on for TCP and UDP.')
argp.add_argument(
'-r',
'--records_config_path',
default=None,
type=str,
help=('Directory of resolver_test_record_groups.yaml file. '
'Defaults to path needed when the test is invoked as part '
'of run_tests.py.'))
argp.add_argument(
'--add_a_record',
default=None,
type=str,
help=('Add an A record via the command line. Useful for when we '
'need to serve a one-off A record that is under a '
'different domain then the rest the records configured in '
'--records_config_path (which all need to be under the '
'same domain). Format: <name>:<ipv4 address>'))
args = argp.parse_args()
signal.signal(signal.SIGTERM, _quit_on_signal)
signal.signal(signal.SIGINT, _quit_on_signal)
output_flush_thread = threading.Thread(target=flush_stdout_loop)
output_flush_thread.setDaemon(True)
output_flush_thread.start()
start_local_dns_server(args)
if __name__ == '__main__':
main()
| [
"twisted.names.dns.Record_A",
"twisted.internet.reactor.listenUDP",
"yaml.load",
"time.sleep",
"twisted.names.server.DNSServerFactory",
"twisted.names.common.ResolverBase.__init__",
"sys.exit",
"argparse.ArgumentParser",
"twisted.names.dns.Record_TXT",
"os.getpid",
"twisted.internet.reactor.run",
"sys.stdout.flush",
"twisted.names.dns.Record_AAAA",
"twisted.internet.reactor.suggestThreadPoolSize",
"twisted.names.dns.Record_SOA",
"twisted.names.dns.DNSDatagramProtocol",
"twisted.internet.reactor.listenTCP",
"signal.signal",
"twisted.internet.reactor.stop",
"threading.Thread",
"twisted.names.dns.Record_SRV"
] | [((4162, 4200), 'twisted.names.dns.Record_SOA', 'dns.Record_SOA', ([], {'mname': 'common_zone_name'}), '(mname=common_zone_name)\n', (4176, 4200), False, 'from twisted.names import client, server, common, authority, dns\n'), ((4332, 4411), 'twisted.names.server.DNSServerFactory', 'twisted.names.server.DNSServerFactory', ([], {'authorities': '[test_domain_com]', 'verbose': '(2)'}), '(authorities=[test_domain_com], verbose=2)\n', (4369, 4411), False, 'import twisted\n'), ((4446, 4499), 'twisted.internet.reactor.listenTCP', 'twisted.internet.reactor.listenTCP', (['args.port', 'server'], {}), '(args.port, server)\n', (4480, 4499), False, 'import twisted\n'), ((4516, 4561), 'twisted.names.dns.DNSDatagramProtocol', 'twisted.names.dns.DNSDatagramProtocol', (['server'], {}), '(server)\n', (4553, 4561), False, 'import twisted\n'), ((4590, 4646), 'twisted.internet.reactor.listenUDP', 'twisted.internet.reactor.listenUDP', (['args.port', 'dns_proto'], {}), '(args.port, dns_proto)\n', (4624, 4646), False, 'import twisted\n'), ((4765, 4814), 'twisted.internet.reactor.suggestThreadPoolSize', 'twisted.internet.reactor.suggestThreadPoolSize', (['(1)'], {}), '(1)\n', (4811, 4814), False, 'import twisted\n'), ((4819, 4849), 'twisted.internet.reactor.run', 'twisted.internet.reactor.run', ([], {}), '()\n', (4847, 4849), False, 'import twisted\n'), ((4961, 4992), 'twisted.internet.reactor.stop', 'twisted.internet.reactor.stop', ([], {}), '()\n', (4990, 4992), False, 'import twisted\n'), ((4997, 5015), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5013, 5015), False, 'import sys\n'), ((5020, 5031), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (5028, 5031), False, 'import sys\n'), ((5467, 5541), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Local DNS Server for resolver tests"""'}), "(description='Local DNS Server for resolver tests')\n", (5490, 5541), False, 'import argparse\n'), ((6516, 6562), 'signal.signal', 'signal.signal', (['signal.SIGTERM', '_quit_on_signal'], {}), '(signal.SIGTERM, _quit_on_signal)\n', (6529, 6562), False, 'import signal\n'), ((6567, 6612), 'signal.signal', 'signal.signal', (['signal.SIGINT', '_quit_on_signal'], {}), '(signal.SIGINT, _quit_on_signal)\n', (6580, 6612), False, 'import signal\n'), ((6639, 6681), 'threading.Thread', 'threading.Thread', ([], {'target': 'flush_stdout_loop'}), '(target=flush_stdout_loop)\n', (6655, 6681), False, 'import threading\n'), ((1430, 1464), 'twisted.names.common.ResolverBase.__init__', 'common.ResolverBase.__init__', (['self'], {}), '(self)\n', (1458, 1464), False, 'from twisted.names import client, server, common, authority, dns\n'), ((2295, 2312), 'yaml.load', 'yaml.load', (['config'], {}), '(config)\n', (2304, 2312), False, 'import yaml\n'), ((4090, 4143), 'twisted.names.dns.Record_A', 'dns.Record_A', (['_SERVER_HEALTH_CHECK_RECORD_DATA'], {'ttl': '(0)'}), '(_SERVER_HEALTH_CHECK_RECORD_DATA, ttl=0)\n', (4102, 4143), False, 'from twisted.names import client, server, common, authority, dns\n'), ((5254, 5272), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (5270, 5272), False, 'import sys\n'), ((5281, 5303), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (5291, 5303), False, 'import time\n'), ((5413, 5424), 'os.getpid', 'os.getpid', ([], {}), '()\n', (5422, 5424), False, 'import os\n'), ((2170, 2211), 'twisted.names.dns.Record_TXT', 'dns.Record_TXT', (['*txt_data_list'], {'ttl': 'r_ttl'}), '(*txt_data_list, ttl=r_ttl)\n', (2184, 2211), False, 'from twisted.names import 
client, server, common, authority, dns\n'), ((3951, 3987), 'twisted.names.dns.Record_A', 'dns.Record_A', (['extra_host_ipv4'], {'ttl': '(0)'}), '(extra_host_ipv4, ttl=0)\n', (3963, 3987), False, 'from twisted.names import client, server, common, authority, dns\n'), ((2971, 3002), 'twisted.names.dns.Record_A', 'dns.Record_A', (['r_data'], {'ttl': 'r_ttl'}), '(r_data, ttl=r_ttl)\n', (2983, 3002), False, 'from twisted.names import client, server, common, authority, dns\n'), ((3125, 3159), 'twisted.names.dns.Record_AAAA', 'dns.Record_AAAA', (['r_data'], {'ttl': 'r_ttl'}), '(r_data, ttl=r_ttl)\n', (3140, 3159), False, 'from twisted.names import client, server, common, authority, dns\n'), ((3607, 3662), 'twisted.names.dns.Record_SRV', 'dns.Record_SRV', (['p', 'w', 'port', 'target_full_name'], {'ttl': 'r_ttl'}), '(p, w, port, target_full_name, ttl=r_ttl)\n', (3621, 3662), False, 'from twisted.names import client, server, common, authority, dns\n')] |
# -*- coding: utf-8 -*-
"""
Showcases *ICTCP* *colour encoding* computations.
"""
import numpy as np
import colour
from colour.utilities import message_box
message_box('"ICTCP" Colour Encoding Computations')
RGB = np.array([0.45620519, 0.03081071, 0.04091952])
message_box(('Converting from "ITU-R BT.2020" colourspace to "ICTCP" colour '
'encoding given "RGB" values:\n'
'\n\t{0}'.format(RGB)))
print(colour.RGB_to_ICTCP(RGB))
print('\n')
ICTCP = np.array([0.07351364, 0.00475253, 0.09351596])
message_box(('Converting from "ICTCP" colour encoding to "ITU-R BT.2020" '
'colourspace given "ICTCP" values:\n'
'\n\t{0}'.format(ICTCP)))
print(colour.ICTCP_to_RGB(ICTCP))
| [
"colour.ICTCP_to_RGB",
"colour.utilities.message_box",
"numpy.array",
"colour.RGB_to_ICTCP"
] | [((159, 210), 'colour.utilities.message_box', 'message_box', (['""""ICTCP" Colour Encoding Computations"""'], {}), '(\'"ICTCP" Colour Encoding Computations\')\n', (170, 210), False, 'from colour.utilities import message_box\n'), ((218, 264), 'numpy.array', 'np.array', (['[0.45620519, 0.03081071, 0.04091952]'], {}), '([0.45620519, 0.03081071, 0.04091952])\n', (226, 264), True, 'import numpy as np\n'), ((480, 526), 'numpy.array', 'np.array', (['[0.07351364, 0.00475253, 0.09351596]'], {}), '([0.07351364, 0.00475253, 0.09351596])\n', (488, 526), True, 'import numpy as np\n'), ((432, 456), 'colour.RGB_to_ICTCP', 'colour.RGB_to_ICTCP', (['RGB'], {}), '(RGB)\n', (451, 456), False, 'import colour\n'), ((698, 724), 'colour.ICTCP_to_RGB', 'colour.ICTCP_to_RGB', (['ICTCP'], {}), '(ICTCP)\n', (717, 724), False, 'import colour\n')] |
"""
This is a demonstration script for using the Transect class in the COAsT
package. This object has strict data formatting requirements, which are
outlined in transect.py.
Transect subsetting (a vertical slice of data between two coordinates): creating transects and performing some custom diagnostics with them.
---
In this tutorial we take a look at subsetting the model data along a transect (a custom straight line) and creating some bespoke diagnostics along it. We look at:
1. Creating a TRANSECT object, defined between two points.
2. Plotting data along a transect.
3. Calculating flow normal to the transect
"""
## Create a transect subset of the example dataset
# Load packages and define some file paths
import coast
import xarray as xr
import matplotlib.pyplot as plt
fn_nemo_dat_t = "./example_files/nemo_data_T_grid.nc"
fn_nemo_dat_u = "./example_files/nemo_data_U_grid.nc"
fn_nemo_dat_v = "./example_files/nemo_data_V_grid.nc"
fn_nemo_dom = "./example_files/COAsT_example_NEMO_domain.nc"
# Configuration files describing the data files
fn_config_t_grid = "./config/example_nemo_grid_t.json"
fn_config_f_grid = "./config/example_nemo_grid_f.json"
fn_config_u_grid = "./config/example_nemo_grid_u.json"
fn_config_v_grid = "./config/example_nemo_grid_v.json"
# %% Load data variables that are on the NEMO t-grid
nemo_t = coast.Gridded(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=fn_config_t_grid)
# Now create a transect between the points (54 N 15 W) and (56 N, 12 W) using the `coast.TransectT` object. This needs to be passed the corresponding NEMO object and transect end points. The model points closest to these coordinates will be selected as the transect end points.
tran_t = coast.TransectT(nemo_t, (54, -15), (56, -12))
# Inspect the data
tran_t.data
# where `r_dim` is the dimension along the transect.
# %% Plot the data
# It is simple to plot a scalar such as temperature along the transect:
temp_mean = tran_t.data.temperature.mean(dim="t_dim")
plt.figure()
temp_mean.plot.pcolormesh(y="depth_0", yincrease=False)
plt.show()
# %% Flow across the transect
# With NEMO’s staggered grid, the first step is to define the transect on the f-grid so that the velocity components are between f-points. We do not need any model data on the f-grid, just the grid information, so create a nemo f-grid object
nemo_f = coast.Gridded(fn_domain=fn_nemo_dom, config=fn_config_f_grid)
# and a transect on the f-grid
tran_f = coast.TransectF(nemo_f, (54, -15), (56, -12))
tran_f.data
# We also need the i- and j-components of velocity so (lazy) load the model data on the u- and v-grid grids
nemo_u = coast.Gridded(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=fn_config_u_grid)
nemo_v = coast.Gridded(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=fn_config_v_grid)
# Now we can calculate the flow across the transect with the method
tran_f.calc_flow_across_transect(nemo_u, nemo_v)
# The flow across the transect is stored in a new dataset where the variables are all defined at the points between f-points.
tran_f.data_cross_tran_flow
# For example, to plot the time averaged velocity across the transect, we can plot the ‘normal_velocities’ variable
cross_velocity_mean = tran_f.data_cross_tran_flow.normal_velocities.mean(dim="t_dim")
plt.figure()
cross_velocity_mean.rolling(r_dim=2).mean().plot.pcolormesh(yincrease=False, y="depth_0", cbar_kwargs={"label": "m/s"})
plt.show()
# or the volume transport across the transect, we can plot the ‘normal_transports’ variable
plt.figure()
cross_transport_mean = tran_f.data_cross_tran_flow.normal_transports.mean(dim="t_dim")
cross_transport_mean.rolling(r_dim=2).mean().plot()
plt.ylabel("Sv")
plt.show()
| [
"coast.Gridded",
"coast.TransectF",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.figure",
"coast.TransectT",
"matplotlib.pyplot.show"
] | [((1351, 1440), 'coast.Gridded', 'coast.Gridded', ([], {'fn_data': 'fn_nemo_dat_t', 'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_t_grid'}), '(fn_data=fn_nemo_dat_t, fn_domain=fn_nemo_dom, config=\n fn_config_t_grid)\n', (1364, 1440), False, 'import coast\n'), ((1724, 1769), 'coast.TransectT', 'coast.TransectT', (['nemo_t', '(54, -15)', '(56, -12)'], {}), '(nemo_t, (54, -15), (56, -12))\n', (1739, 1769), False, 'import coast\n'), ((2002, 2014), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2012, 2014), True, 'import matplotlib.pyplot as plt\n'), ((2071, 2081), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2079, 2081), True, 'import matplotlib.pyplot as plt\n'), ((2366, 2427), 'coast.Gridded', 'coast.Gridded', ([], {'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_f_grid'}), '(fn_domain=fn_nemo_dom, config=fn_config_f_grid)\n', (2379, 2427), False, 'import coast\n'), ((2471, 2516), 'coast.TransectF', 'coast.TransectF', (['nemo_f', '(54, -15)', '(56, -12)'], {}), '(nemo_f, (54, -15), (56, -12))\n', (2486, 2516), False, 'import coast\n'), ((2650, 2739), 'coast.Gridded', 'coast.Gridded', ([], {'fn_data': 'fn_nemo_dat_u', 'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_u_grid'}), '(fn_data=fn_nemo_dat_u, fn_domain=fn_nemo_dom, config=\n fn_config_u_grid)\n', (2663, 2739), False, 'import coast\n'), ((2744, 2833), 'coast.Gridded', 'coast.Gridded', ([], {'fn_data': 'fn_nemo_dat_v', 'fn_domain': 'fn_nemo_dom', 'config': 'fn_config_v_grid'}), '(fn_data=fn_nemo_dat_v, fn_domain=fn_nemo_dom, config=\n fn_config_v_grid)\n', (2757, 2833), False, 'import coast\n'), ((3314, 3326), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3324, 3326), True, 'import matplotlib.pyplot as plt\n'), ((3447, 3457), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3455, 3457), True, 'import matplotlib.pyplot as plt\n'), ((3552, 3564), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (3562, 3564), True, 'import matplotlib.pyplot as plt\n'), ((3704, 3720), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Sv"""'], {}), "('Sv')\n", (3714, 3720), True, 'import matplotlib.pyplot as plt\n'), ((3721, 3731), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3729, 3731), True, 'import matplotlib.pyplot as plt\n')] |
import pytest
import numpy as np
from pathlib import Path
def extract_param(data, n_states):
keys = ("initial_state_prob", "transition_prob", "means", "st_devs")
param = {"n_states": n_states}
for key in keys:
param[key] = data[f"{key}_{n_states}"]
return param
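# Parametrised over 2, 3 and 4 states: tests using this fixture run once per value.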
@pytest.fixture(scope="session", params=[2, 3, 4])
def trace_lownoise(request):
"""Trace data can be generated by running ./data/generate_trace_data.py """
data = np.load(Path(__file__).parent / "data/trace_data.npz")
n_states = request.param
param = extract_param(data, n_states)
y = data[f"y_{n_states}"]
sp = data[f"sp_{n_states}"]
return y, sp, param
@pytest.fixture(scope="session")
def trace_simple(request):
"""Trace data can be generated by running ./data/generate_trace_data.py """
data = np.load(Path(__file__).parent / "data/trace_data.npz")
n_states = 2
param = extract_param(data, n_states)
y = data[f"y_{n_states}"]
sp = data[f"sp_{n_states}"]
return y, sp, param
| [
"pytest.fixture",
"pathlib.Path"
] | [((291, 340), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""', 'params': '[2, 3, 4]'}), "(scope='session', params=[2, 3, 4])\n", (305, 340), False, 'import pytest\n'), ((679, 710), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""session"""'}), "(scope='session')\n", (693, 710), False, 'import pytest\n'), ((470, 484), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (474, 484), False, 'from pathlib import Path\n'), ((838, 852), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (842, 852), False, 'from pathlib import Path\n')] |
import filters as f
from iota import TransactionHash, Address
from iota.commands import FilterCommand, RequestFilter, ResponseFilter
from iota.filters import Trytes
__all__ = [
'GetNodeInfoCommand',
]
class GetNodeInfoCommand(FilterCommand):
"""
Executes `getNodeInfo` command.
See :py:meth:`iota.api.StrictIota.get_node_info`.
"""
command = 'getNodeInfo'
def get_request_filter(self):
return GetNodeInfoRequestFilter()
def get_response_filter(self):
return GetNodeInfoResponseFilter()
class GetNodeInfoRequestFilter(RequestFilter):
def __init__(self) -> None:
# ``getNodeInfo`` does not accept any parameters.
# Using a filter here just to enforce that the request is empty.
super(GetNodeInfoRequestFilter, self).__init__({})
class GetNodeInfoResponseFilter(ResponseFilter):
def __init__(self) -> None:
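        # Each field is returned by the node as an ASCII string; the filter chain
        # decodes it and converts it into the appropriate tryte type.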
super(GetNodeInfoResponseFilter, self).__init__({
'coordinatorAddress':
f.ByteString(encoding='ascii') | Trytes(Address),
'latestMilestone':
f.ByteString(encoding='ascii') | Trytes(TransactionHash),
'latestSolidSubtangleMilestone':
f.ByteString(encoding='ascii') | Trytes(TransactionHash),
})
| [
"iota.filters.Trytes",
"filters.ByteString"
] | [((1003, 1033), 'filters.ByteString', 'f.ByteString', ([], {'encoding': '"""ascii"""'}), "(encoding='ascii')\n", (1015, 1033), True, 'import filters as f\n'), ((1036, 1051), 'iota.filters.Trytes', 'Trytes', (['Address'], {}), '(Address)\n', (1042, 1051), False, 'from iota.filters import Trytes\n'), ((1100, 1130), 'filters.ByteString', 'f.ByteString', ([], {'encoding': '"""ascii"""'}), "(encoding='ascii')\n", (1112, 1130), True, 'import filters as f\n'), ((1133, 1156), 'iota.filters.Trytes', 'Trytes', (['TransactionHash'], {}), '(TransactionHash)\n', (1139, 1156), False, 'from iota.filters import Trytes\n'), ((1220, 1250), 'filters.ByteString', 'f.ByteString', ([], {'encoding': '"""ascii"""'}), "(encoding='ascii')\n", (1232, 1250), True, 'import filters as f\n'), ((1253, 1276), 'iota.filters.Trytes', 'Trytes', (['TransactionHash'], {}), '(TransactionHash)\n', (1259, 1276), False, 'from iota.filters import Trytes\n')] |
from Negocio import controller
import forms, functions
from flask import Flask, render_template, request, redirect, url_for, flash
def register(mysql, request):
registerForm= forms.RegisterForm(request.form)
if request.method == 'POST' and registerForm.validate():
return controller.registraUsuario(mysql, request, registerForm)
return render_template('register.html', form=registerForm)
def Index(mysql, request):
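    # GET: redirect logged-in users to 'home'; otherwise render the landing page
    # (optionally passing through a 'success' flag from the query string).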
if request.method=='GET':
success= request.args.get('success')
if success==None:
if controller.usuarioIniciado():
return redirect(url_for('home'))
else:
return render_template('Index.html')
else:
return render_template('Index.html', success=success)
return render_template('Index.html')
def home(mysql, request):
if request.method== 'POST':
controller.iniciarSesion(mysql, request)
if controller.usuarioIniciado() and request.method== 'GET':
return controller.mostrarRutinas(mysql, request)
else:
return redirect(url_for('Index'))
def historial_rutina(mysql, request):
if controller.usuarioIniciado() and request.method== 'GET':
return controller.mostrar_historial_rutina(mysql, request)
else:
return redirect(url_for('Index'))
def historial_usuario(mysql, request):
if controller.usuarioIniciado() and request.method== 'GET':
return controller.mostrar_historial_usuario(mysql, request)
else:
return redirect(url_for('Index'))
def perfil(mysql, request):
    if controller.usuarioIniciado() and request.method=='GET':
success= request.args.get('success')
usuario=controller.datosUsuario(mysql, request)
imc=functions.IMC(usuario[8], usuario[7])
m_basal= controller.calcular_metabolismo_basal(mysql, usuario[7], usuario[8])
return render_template('perfil.html', success=success, usuario=usuario, imc=imc, evaluacion=functions.evaluarIMC(imc), pg=functions.porcentajeGrasa(usuario[5], usuario[9], usuario[10], usuario[7], usuario[11]), m_basal=m_basal )
else:
return redirect(url_for('Index'))
def ActualizarPerfil(mysql, request):
actualize_form= forms.PerfilForm(request.form)
    if request.method == 'POST' and controller.usuarioIniciado():
if actualize_form.validate():
return controller.actualizar_perfil(mysql, request)
else:
flash("Alguno de los datos es incorrecto")
return redirect(url_for('actualizar_perfil', success=False))
else:
        if request.method == 'GET' and controller.usuarioIniciado():
datos=controller.formulario_perfil(mysql)
return render_template('actualizar_perfil.html', form=actualize_form, datos=datos)
return redirect(url_for('perfil'))
def administracionRutinas(mysql, request):
if controller.usuarioIniciado():
return render_template('administracion_rutinas.html')
else:
return redirect(url_for('Index'))
def crearRutina(mysql, request):
if request.method =='POST' and controller.usuarioIniciado():
return controller.agregarRutina(mysql, request)
else:
if controller.rutinaIniciada() and controller.usuarioIniciado():
return controller.rutinaEnCurso(mysql, request)
if controller.usuarioIniciado():
return redirect(url_for('adm_rutinas'))
else:
return redirect(url_for('Index'))
def registrarEjerciciosRutina(mysql, request):
if request.method == 'POST':
return controller.registrarEjerciciosRutina(mysql, request)
return redirect(url_for('adm_rutinas'))
def modificarRutina(mysql, request):
if controller.usuarioIniciado():
rutinas=controller.rutinasUsuario(mysql)
rutinaEjercicios=controller.rutinaEjercicios(mysql)
datosEjer=controller.datosEjercicios(mysql)
return render_template('modify_rutina.html', rutinas=rutinas , ejercicios=datosEjer, rutinaEjer=rutinaEjercicios)
else:
return redirect(url_for('Index'))
def registrarModiciaciones(mysql, request):
if request.method == 'POST':
return controller.registrarModificaciones(mysql, request)
return redirect(url_for('adm_rutinas'))
def eliminarRutina(mysql,request):
if controller.usuarioIniciado():
rutinas=controller.rutinasUsuario(mysql)
rutinaEjercicios=controller.rutinaEjercicios(mysql)
return render_template('delete_rutina.html', rutinas=rutinas , rutinaEjer=rutinaEjercicios)
else:
return redirect(url_for('Index'))
def registrarEliminacion(mysql, request):
if request.method=='POST' and controller.usuarioIniciado():
return controller.registrarEliminacion(mysql, request)
else:
return redirect(url_for('Index'))
def registrarEjercicios(mysql, request):
if request.method == 'POST':
return controller.registrarEjercicio(mysql, request)
return redirect(url_for('ejercicios')) | [
"flask.render_template",
"flask.request.args.get",
"Negocio.controller.datosEjercicios",
"Negocio.controller.mostrar_historial_rutina",
"Negocio.controller.registraUsuario",
"functions.porcentajeGrasa",
"Negocio.controller.formulario_perfil",
"Negocio.controller.actualizar_perfil",
"Negocio.controller.usuarioIniciado",
"flask.flash",
"functions.IMC",
"Negocio.controller.rutinaIniciada",
"Negocio.controller.agregarRutina",
"functions.evaluarIMC",
"Negocio.controller.mostrar_historial_usuario",
"Negocio.controller.rutinasUsuario",
"Negocio.controller.registrarEjercicio",
"Negocio.controller.calcular_metabolismo_basal",
"Negocio.controller.rutinaEnCurso",
"Negocio.controller.registrarModificaciones",
"Negocio.controller.mostrarRutinas",
"Negocio.controller.registrarEjerciciosRutina",
"Negocio.controller.datosUsuario",
"flask.url_for",
"Negocio.controller.iniciarSesion",
"forms.PerfilForm",
"Negocio.controller.registrarEliminacion",
"Negocio.controller.rutinaEjercicios",
"forms.RegisterForm"
] | [((182, 214), 'forms.RegisterForm', 'forms.RegisterForm', (['request.form'], {}), '(request.form)\n', (200, 214), False, 'import forms, functions\n'), ((355, 406), 'flask.render_template', 'render_template', (['"""register.html"""'], {'form': 'registerForm'}), "('register.html', form=registerForm)\n", (370, 406), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((780, 809), 'flask.render_template', 'render_template', (['"""Index.html"""'], {}), "('Index.html')\n", (795, 809), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2218, 2248), 'forms.PerfilForm', 'forms.PerfilForm', (['request.form'], {}), '(request.form)\n', (2234, 2248), False, 'import forms, functions\n'), ((2852, 2880), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (2878, 2880), False, 'from Negocio import controller\n'), ((3664, 3692), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3690, 3692), False, 'from Negocio import controller\n'), ((4247, 4275), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (4273, 4275), False, 'from Negocio import controller\n'), ((288, 344), 'Negocio.controller.registraUsuario', 'controller.registraUsuario', (['mysql', 'request', 'registerForm'], {}), '(mysql, request, registerForm)\n', (314, 344), False, 'from Negocio import controller\n'), ((479, 506), 'flask.request.args.get', 'request.args.get', (['"""success"""'], {}), "('success')\n", (495, 506), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((887, 927), 'Negocio.controller.iniciarSesion', 'controller.iniciarSesion', (['mysql', 'request'], {}), '(mysql, request)\n', (911, 927), False, 'from Negocio import controller\n'), ((935, 963), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (961, 963), False, 'from Negocio import controller\n'), ((1006, 1047), 'Negocio.controller.mostrarRutinas', 'controller.mostrarRutinas', (['mysql', 'request'], {}), '(mysql, request)\n', (1031, 1047), False, 'from Negocio import controller\n'), ((1146, 1174), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (1172, 1174), False, 'from Negocio import controller\n'), ((1217, 1268), 'Negocio.controller.mostrar_historial_rutina', 'controller.mostrar_historial_rutina', (['mysql', 'request'], {}), '(mysql, request)\n', (1252, 1268), False, 'from Negocio import controller\n'), ((1368, 1396), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (1394, 1396), False, 'from Negocio import controller\n'), ((1439, 1491), 'Negocio.controller.mostrar_historial_usuario', 'controller.mostrar_historial_usuario', (['mysql', 'request'], {}), '(mysql, request)\n', (1475, 1491), False, 'from Negocio import controller\n'), ((1651, 1678), 'flask.request.args.get', 'request.args.get', (['"""success"""'], {}), "('success')\n", (1667, 1678), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1695, 1734), 'Negocio.controller.datosUsuario', 'controller.datosUsuario', (['mysql', 'request'], {}), '(mysql, request)\n', (1718, 1734), False, 'from Negocio import controller\n'), ((1747, 1784), 'functions.IMC', 'functions.IMC', (['usuario[8]', 'usuario[7]'], {}), '(usuario[8], usuario[7])\n', (1760, 1784), False, 'import forms, functions\n'), ((1802, 1870), 'Negocio.controller.calcular_metabolismo_basal', 
'controller.calcular_metabolismo_basal', (['mysql', 'usuario[7]', 'usuario[8]'], {}), '(mysql, usuario[7], usuario[8])\n', (1839, 1870), False, 'from Negocio import controller\n'), ((2897, 2943), 'flask.render_template', 'render_template', (['"""administracion_rutinas.html"""'], {}), "('administracion_rutinas.html')\n", (2912, 2943), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3064, 3092), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3090, 3092), False, 'from Negocio import controller\n'), ((3109, 3149), 'Negocio.controller.agregarRutina', 'controller.agregarRutina', (['mysql', 'request'], {}), '(mysql, request)\n', (3133, 3149), False, 'from Negocio import controller\n'), ((3296, 3324), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3322, 3324), False, 'from Negocio import controller\n'), ((3523, 3575), 'Negocio.controller.registrarEjerciciosRutina', 'controller.registrarEjerciciosRutina', (['mysql', 'request'], {}), '(mysql, request)\n', (3559, 3575), False, 'from Negocio import controller\n'), ((3595, 3617), 'flask.url_for', 'url_for', (['"""adm_rutinas"""'], {}), "('adm_rutinas')\n", (3602, 3617), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3708, 3740), 'Negocio.controller.rutinasUsuario', 'controller.rutinasUsuario', (['mysql'], {}), '(mysql)\n', (3733, 3740), False, 'from Negocio import controller\n'), ((3764, 3798), 'Negocio.controller.rutinaEjercicios', 'controller.rutinaEjercicios', (['mysql'], {}), '(mysql)\n', (3791, 3798), False, 'from Negocio import controller\n'), ((3815, 3848), 'Negocio.controller.datosEjercicios', 'controller.datosEjercicios', (['mysql'], {}), '(mysql)\n', (3841, 3848), False, 'from Negocio import controller\n'), ((3862, 3971), 'flask.render_template', 'render_template', (['"""modify_rutina.html"""'], {'rutinas': 'rutinas', 'ejercicios': 'datosEjer', 'rutinaEjer': 'rutinaEjercicios'}), "('modify_rutina.html', rutinas=rutinas, ejercicios=datosEjer,\n rutinaEjer=rutinaEjercicios)\n", (3877, 3971), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4111, 4161), 'Negocio.controller.registrarModificaciones', 'controller.registrarModificaciones', (['mysql', 'request'], {}), '(mysql, request)\n', (4145, 4161), False, 'from Negocio import controller\n'), ((4181, 4203), 'flask.url_for', 'url_for', (['"""adm_rutinas"""'], {}), "('adm_rutinas')\n", (4188, 4203), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4291, 4323), 'Negocio.controller.rutinasUsuario', 'controller.rutinasUsuario', (['mysql'], {}), '(mysql)\n', (4316, 4323), False, 'from Negocio import controller\n'), ((4347, 4381), 'Negocio.controller.rutinaEjercicios', 'controller.rutinaEjercicios', (['mysql'], {}), '(mysql)\n', (4374, 4381), False, 'from Negocio import controller\n'), ((4395, 4483), 'flask.render_template', 'render_template', (['"""delete_rutina.html"""'], {'rutinas': 'rutinas', 'rutinaEjer': 'rutinaEjercicios'}), "('delete_rutina.html', rutinas=rutinas, rutinaEjer=\n rutinaEjercicios)\n", (4410, 4483), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4606, 4634), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (4632, 4634), False, 'from Negocio import controller\n'), ((4651, 4698), 'Negocio.controller.registrarEliminacion', 'controller.registrarEliminacion', 
(['mysql', 'request'], {}), '(mysql, request)\n', (4682, 4698), False, 'from Negocio import controller\n'), ((4838, 4883), 'Negocio.controller.registrarEjercicio', 'controller.registrarEjercicio', (['mysql', 'request'], {}), '(mysql, request)\n', (4867, 4883), False, 'from Negocio import controller\n'), ((4903, 4924), 'flask.url_for', 'url_for', (['"""ejercicios"""'], {}), "('ejercicios')\n", (4910, 4924), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((543, 571), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (569, 571), False, 'from Negocio import controller\n'), ((723, 769), 'flask.render_template', 'render_template', (['"""Index.html"""'], {'success': 'success'}), "('Index.html', success=success)\n", (738, 769), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1082, 1098), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (1089, 1098), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1303, 1319), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (1310, 1319), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1526, 1542), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (1533, 1542), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2142, 2158), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (2149, 2158), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2364, 2408), 'Negocio.controller.actualizar_perfil', 'controller.actualizar_perfil', (['mysql', 'request'], {}), '(mysql, request)\n', (2392, 2408), False, 'from Negocio import controller\n'), ((2430, 2472), 'flask.flash', 'flash', (['"""Alguno de los datos es incorrecto"""'], {}), "('Alguno de los datos es incorrecto')\n", (2435, 2472), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2632, 2667), 'Negocio.controller.formulario_perfil', 'controller.formulario_perfil', (['mysql'], {}), '(mysql)\n', (2660, 2667), False, 'from Negocio import controller\n'), ((2684, 2759), 'flask.render_template', 'render_template', (['"""actualizar_perfil.html"""'], {'form': 'actualize_form', 'datos': 'datos'}), "('actualizar_perfil.html', form=actualize_form, datos=datos)\n", (2699, 2759), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2782, 2799), 'flask.url_for', 'url_for', (['"""perfil"""'], {}), "('perfil')\n", (2789, 2799), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((2978, 2994), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (2985, 2994), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3168, 3195), 'Negocio.controller.rutinaIniciada', 'controller.rutinaIniciada', ([], {}), '()\n', (3193, 3195), False, 'from Negocio import controller\n'), ((3200, 3228), 'Negocio.controller.usuarioIniciado', 'controller.usuarioIniciado', ([], {}), '()\n', (3226, 3228), False, 'from Negocio import controller\n'), ((3246, 3286), 'Negocio.controller.rutinaEnCurso', 'controller.rutinaEnCurso', (['mysql', 'request'], {}), '(mysql, request)\n', (3270, 3286), False, 'from Negocio import controller\n'), ((4003, 4019), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (4010, 4019), False, 'from flask import Flask, 
render_template, request, redirect, url_for, flash\n'), ((4511, 4527), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (4518, 4527), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((4733, 4749), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (4740, 4749), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((655, 684), 'flask.render_template', 'render_template', (['"""Index.html"""'], {}), "('Index.html')\n", (670, 684), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((1971, 1996), 'functions.evaluarIMC', 'functions.evaluarIMC', (['imc'], {}), '(imc)\n', (1991, 1996), False, 'import forms, functions\n'), ((2001, 2092), 'functions.porcentajeGrasa', 'functions.porcentajeGrasa', (['usuario[5]', 'usuario[9]', 'usuario[10]', 'usuario[7]', 'usuario[11]'], {}), '(usuario[5], usuario[9], usuario[10], usuario[7],\n usuario[11])\n', (2026, 2092), False, 'import forms, functions\n'), ((2498, 2541), 'flask.url_for', 'url_for', (['"""actualizar_perfil"""'], {'success': '(False)'}), "('actualizar_perfil', success=False)\n", (2505, 2541), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3351, 3373), 'flask.url_for', 'url_for', (['"""adm_rutinas"""'], {}), "('adm_rutinas')\n", (3358, 3373), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((3412, 3428), 'flask.url_for', 'url_for', (['"""Index"""'], {}), "('Index')\n", (3419, 3428), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n'), ((604, 619), 'flask.url_for', 'url_for', (['"""home"""'], {}), "('home')\n", (611, 619), False, 'from flask import Flask, render_template, request, redirect, url_for, flash\n')] |
# -*- coding: UTF-8 -*-
"""
This module provides Runner class to run behave feature files (or model elements).
"""
from __future__ import absolute_import, print_function, with_statement
import contextlib
import os.path
import sys
import warnings
import weakref
import six
from behave._types import ExceptionUtil
from behave.capture import CaptureController
from behave.exception import ConfigError
from behave.formatter._registry import make_formatters
from behave.runner_util import \
collect_feature_locations, parse_features, \
exec_file, load_step_modules, PathManager
from behave.step_registry import registry as the_step_registry
from enum import Enum
if six.PY2:
# -- USE PYTHON3 BACKPORT: With unicode traceback support.
import traceback2 as traceback
else:
import traceback
class CleanupError(RuntimeError):
pass
class ContextMaskWarning(UserWarning):
"""Raised if a context variable is being overwritten in some situations.
If the variable was originally set by user code then this will be raised if
*behave* overwrites the value.
If the variable was originally set by *behave* then this will be raised if
user code overwrites the value.
"""
pass
class ContextMode(Enum):
"""Used to distinguish between the two usage modes while using the context:
* BEHAVE: Indicates "behave" (internal) mode
* USER: Indicates "user" mode (in steps, hooks, fixtures, ...)
"""
BEHAVE = 1
USER = 2
class Context(object):
"""Hold contextual information during the running of tests.
This object is a place to store information related to the tests you're
running. You may add arbitrary attributes to it of whatever value you need.
During the running of your tests the object will have additional layers of
namespace added and removed automatically. There is a "root" namespace and
additional namespaces for features and scenarios.
Certain names are used by *behave*; be wary of using them yourself as
*behave* may overwrite the value you set. These names are:
.. attribute:: feature
This is set when we start testing a new feature and holds a
:class:`~behave.model.Feature`. It will not be present outside of a
feature (i.e. within the scope of the environment before_all and
after_all).
.. attribute:: scenario
This is set when we start testing a new scenario (including the
individual scenarios of a scenario outline) and holds a
:class:`~behave.model.Scenario`. It will not be present outside of the
scope of a scenario.
.. attribute:: tags
The current set of active tags (as a Python set containing instances of
:class:`~behave.model.Tag` which are basically just glorified strings)
combined from the feature and scenario. This attribute will not be
present outside of a feature scope.
.. attribute:: aborted
This is set to true in the root namespace when the user aborts a test run
(:exc:`KeyboardInterrupt` exception). Initially: False.
.. attribute:: failed
This is set to true in the root namespace as soon as a step fails.
Initially: False.
.. attribute:: table
This is set at the step level and holds any :class:`~behave.model.Table`
associated with the step.
.. attribute:: text
This is set at the step level and holds any multiline text associated
with the step.
.. attribute:: config
The configuration of *behave* as determined by configuration files and
command-line options. The attributes of this object are the same as the
`configuration file section names`_.
.. attribute:: active_outline
This is set for each scenario in a scenario outline and references the
:class:`~behave.model.Row` that is active for the current scenario. It is
present mostly for debugging, but may be useful otherwise.
.. attribute:: log_capture
If logging capture is enabled then this attribute contains the captured
logging as an instance of :class:`~behave.log_capture.LoggingCapture`.
It is not present if logging is not being captured.
.. attribute:: stdout_capture
If stdout capture is enabled then this attribute contains the captured
output as a StringIO instance. It is not present if stdout is not being
captured.
.. attribute:: stderr_capture
If stderr capture is enabled then this attribute contains the captured
output as a StringIO instance. It is not present if stderr is not being
captured.
A :class:`behave.runner.ContextMaskWarning` warning will be raised if user
code attempts to overwrite one of these variables, or if *behave* itself
tries to overwrite a user-set variable.
You may use the "in" operator to test whether a certain value has been set
on the context, for example:
"feature" in context
checks whether there is a "feature" value in the context.
Values may be deleted from the context using "del" but only at the level
they are set. You can't delete a value set by a feature at a scenario level
but you can delete a value set for a scenario in that scenario.
.. _`configuration file section names`: behave.html#configuration-files
"""
# pylint: disable=too-many-instance-attributes
FAIL_ON_CLEANUP_ERRORS = True
def __init__(self, runner):
self._runner = weakref.proxy(runner)
self._config = runner.config
d = self._root = {
"aborted": False,
"failed": False,
"config": self._config,
"active_outline": None,
"cleanup_errors": 0,
"@cleanups": [], # -- REQUIRED-BY: before_all() hook
"@layer": "testrun",
}
self._stack = [d]
self._record = {}
self._origin = {}
self._mode = ContextMode.BEHAVE
# -- MODEL ENTITY REFERENCES/SUPPORT:
self.feature = None
# DISABLED: self.rule = None
# DISABLED: self.scenario = None
self.text = None
self.table = None
# -- RUNTIME SUPPORT:
self.stdout_capture = None
self.stderr_capture = None
self.log_capture = None
self.fail_on_cleanup_errors = self.FAIL_ON_CLEANUP_ERRORS
@staticmethod
def ignore_cleanup_error(context, cleanup_func, exception):
pass
@staticmethod
def print_cleanup_error(context, cleanup_func, exception):
cleanup_func_name = getattr(cleanup_func, "__name__", None)
if not cleanup_func_name:
cleanup_func_name = "%r" % cleanup_func
print(u"CLEANUP-ERROR in %s: %s: %s" %
(cleanup_func_name, exception.__class__.__name__, exception))
traceback.print_exc(file=sys.stdout)
# MAYBE: context._dump(pretty=True, prefix="Context: ")
# -- MARK: testrun as FAILED
# context._set_root_attribute("failed", True)
def _do_cleanups(self):
"""Execute optional cleanup functions when stack frame is popped.
A user can add a user-specified handler for cleanup errors.
.. code-block:: python
# -- FILE: features/environment.py
def cleanup_database(database):
pass
def handle_cleanup_error(context, cleanup_func, exception):
pass
def before_all(context):
context.on_cleanup_error = handle_cleanup_error
context.add_cleanup(cleanup_database, the_database)
"""
# -- BEST-EFFORT ALGORITHM: Tries to perform all cleanups.
assert self._stack, "REQUIRE: Non-empty stack"
current_layer = self._stack[0]
cleanup_funcs = current_layer.get("@cleanups", [])
on_cleanup_error = getattr(self, "on_cleanup_error",
self.print_cleanup_error)
context = self
cleanup_errors = []
for cleanup_func in reversed(cleanup_funcs):
try:
cleanup_func()
except Exception as e: # pylint: disable=broad-except
# pylint: disable=protected-access
context._root["cleanup_errors"] += 1
cleanup_errors.append(sys.exc_info())
on_cleanup_error(context, cleanup_func, e)
if self.fail_on_cleanup_errors and cleanup_errors:
            first_cleanup_error_info = cleanup_errors[0]
del cleanup_errors # -- ENSURE: Release other exception frames.
            six.reraise(*first_cleanup_error_info)
def _push(self, layer_name=None):
"""Push a new layer on the context stack.
HINT: Use layer_name values: "scenario", "feature", "testrun".
:param layer_name: Layer name to use (or None).
"""
initial_data = {"@cleanups": []}
if layer_name:
initial_data["@layer"] = layer_name
self._stack.insert(0, initial_data)
def _pop(self):
"""Pop the current layer from the context stack.
Performs any pending cleanups, registered for this layer.
"""
try:
self._do_cleanups()
finally:
# -- ENSURE: Layer is removed even if cleanup-errors occur.
self._stack.pop(0)
def _use_with_behave_mode(self):
"""Provides a context manager for using the context in BEHAVE mode."""
return use_context_with_mode(self, ContextMode.BEHAVE)
def use_with_user_mode(self):
"""Provides a context manager for using the context in USER mode."""
return use_context_with_mode(self, ContextMode.USER)
def user_mode(self):
warnings.warn("Use 'use_with_user_mode()' instead",
PendingDeprecationWarning, stacklevel=2)
return self.use_with_user_mode()
def _set_root_attribute(self, attr, value):
for frame in self.__dict__["_stack"]:
if frame is self.__dict__["_root"]:
continue
if attr in frame:
record = self.__dict__["_record"][attr]
params = {
"attr": attr,
"filename": record[0],
"line": record[1],
"function": record[3],
}
self._emit_warning(attr, params)
self.__dict__["_root"][attr] = value
if attr not in self._origin:
self._origin[attr] = self._mode
def _emit_warning(self, attr, params):
msg = ""
if self._mode is ContextMode.BEHAVE and self._origin[attr] is not ContextMode.BEHAVE:
msg = "behave runner is masking context attribute '%(attr)s' " \
"originally set in %(function)s (%(filename)s:%(line)s)"
elif self._mode is ContextMode.USER:
if self._origin[attr] is not ContextMode.USER:
msg = "user code is masking context attribute '%(attr)s' " \
"originally set by behave"
elif self._config.verbose:
msg = "user code is masking context attribute " \
"'%(attr)s'; see the tutorial for what this means"
if msg:
msg = msg % params
warnings.warn(msg, ContextMaskWarning, stacklevel=3)
def _dump(self, pretty=False, prefix=" "):
for level, frame in enumerate(self._stack):
print("%sLevel %d" % (prefix, level))
if pretty:
for name in sorted(frame.keys()):
value = frame[name]
print("%s %-15s = %r" % (prefix, name, value))
else:
print(prefix + repr(frame))
def __getattr__(self, attr):
if attr[0] == "_":
try:
return self.__dict__[attr]
except KeyError:
raise AttributeError(attr)
for frame in self._stack:
if attr in frame:
return frame[attr]
msg = "'{0}' object has no attribute '{1}'"
msg = msg.format(self.__class__.__name__, attr)
raise AttributeError(msg)
def __setattr__(self, attr, value):
if attr[0] == "_":
self.__dict__[attr] = value
return
for frame in self._stack[1:]:
if attr in frame:
record = self._record[attr]
params = {
"attr": attr,
"filename": record[0],
"line": record[1],
"function": record[3],
}
self._emit_warning(attr, params)
stack_limit = 2
if six.PY2:
stack_limit += 1 # Due to traceback2 usage.
stack_frame = traceback.extract_stack(limit=stack_limit)[0]
self._record[attr] = stack_frame
frame = self._stack[0]
frame[attr] = value
if attr not in self._origin:
self._origin[attr] = self._mode
def __delattr__(self, attr):
frame = self._stack[0]
if attr in frame:
del frame[attr]
del self._record[attr]
else:
msg = "'{0}' object has no attribute '{1}' at the current level"
msg = msg.format(self.__class__.__name__, attr)
raise AttributeError(msg)
def __contains__(self, attr):
if attr[0] == "_":
return attr in self.__dict__
for frame in self._stack:
if attr in frame:
return True
return False
def execute_steps(self, steps_text):
"""The steps identified in the "steps" text string will be parsed and
executed in turn just as though they were defined in a feature file.
If the execute_steps call fails (either through error or failure
assertion) then the step invoking it will need to catch the resulting
exceptions.
:param steps_text: Text with the Gherkin steps to execute (as string).
:returns: True, if the steps executed successfully.
:raises: AssertionError, if a step failure occurs.
:raises: ValueError, if invoked without a feature context.
"""
assert isinstance(steps_text, six.text_type), "Steps must be unicode."
if not self.feature:
raise ValueError("execute_steps() called outside of feature")
# -- PREPARE: Save original context data for current step.
# Needed if step definition that called this method uses .table/.text
original_table = getattr(self, "table", None)
original_text = getattr(self, "text", None)
self.feature.parser.variant = "steps"
steps = self.feature.parser.parse_steps(steps_text)
with self._use_with_behave_mode():
for step in steps:
passed = step.run(self._runner, quiet=True, capture=False)
if not passed:
# -- ISSUE #96: Provide more substep info to diagnose problem.
step_line = u"%s %s" % (step.keyword, step.name)
message = "%s SUB-STEP: %s" % \
(step.status.name.upper(), step_line)
if step.error_message:
message += "\nSubstep info: %s\n" % step.error_message
message += u"Traceback (of failed substep):\n"
message += u"".join(traceback.format_tb(step.exc_traceback))
# message += u"\nTraceback (of context.execute_steps()):"
assert False, message
# -- FINALLY: Restore original context data for current step.
self.table = original_table
self.text = original_text
return True
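    # -- EXAMPLE (sketch): calling execute_steps() from a step definition;
    #    the decorator, step text and names below are illustrative only.
    #
    #        @when(u'I set up a logged-in session')
    #        def step_impl(context):
    #            context.execute_steps(u'''
    #                Given I open the login page
    #                When I submit valid credentials
    #            ''')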
def add_cleanup(self, cleanup_func, *args, **kwargs):
"""Adds a cleanup function that is called when :meth:`Context._pop()`
is called. This is intended for user-cleanups.
:param cleanup_func: Callable function
:param args: Args for cleanup_func() call (optional).
:param kwargs: Kwargs for cleanup_func() call (optional).
"""
# MAYBE:
assert callable(cleanup_func), "REQUIRES: callable(cleanup_func)"
assert self._stack
if args or kwargs:
def internal_cleanup_func():
cleanup_func(*args, **kwargs)
else:
internal_cleanup_func = cleanup_func
current_frame = self._stack[0]
if cleanup_func not in current_frame["@cleanups"]:
# -- AVOID DUPLICATES:
current_frame["@cleanups"].append(internal_cleanup_func)
@contextlib.contextmanager
def use_context_with_mode(context, mode):
"""Switch context to ContextMode.BEHAVE or ContextMode.USER mode.
Provides a context manager for switching between the two context modes.
.. sourcecode:: python
context = Context()
with use_context_with_mode(context, ContextMode.BEHAVE):
... # Do something
# -- POSTCONDITION: Original context._mode is restored.
:param context: Context object to use.
:param mode: Mode to apply to context object.
"""
# pylint: disable=protected-access
assert mode in (ContextMode.BEHAVE, ContextMode.USER)
current_mode = context._mode
try:
context._mode = mode
yield
finally:
# -- RESTORE: Initial current_mode
# Even if an AssertionError/Exception is raised.
context._mode = current_mode
@contextlib.contextmanager
def scoped_context_layer(context, layer_name=None):
"""Provides context manager for context layer (push/do-something/pop cycle).
.. code-block::
with scoped_context_layer(context):
the_fixture = use_fixture(foo, context, name="foo_42")
"""
# pylint: disable=protected-access
try:
context._push(layer_name)
yield context
finally:
context._pop()
def path_getrootdir(path):
"""
Extract rootdir from path in a platform independent way.
POSIX-PATH EXAMPLE:
rootdir = path_getrootdir("/foo/bar/one.feature")
assert rootdir == "/"
WINDOWS-PATH EXAMPLE:
rootdir = path_getrootdir("D:\\foo\\bar\\one.feature")
assert rootdir == r"D:\"
"""
drive, _ = os.path.splitdrive(path)
if drive:
# -- WINDOWS:
return drive + os.path.sep
# -- POSIX:
return os.path.sep
class ModelRunner(object):
"""
Test runner for a behave model (features).
Provides the core functionality of a test runner and
the functional API needed by model elements.
.. attribute:: aborted
This is set to true when the user aborts a test run
(:exc:`KeyboardInterrupt` exception). Initially: False.
Stored as derived attribute in :attr:`Context.aborted`.
"""
# pylint: disable=too-many-instance-attributes
def __init__(self, config, features=None, step_registry=None):
self.config = config
self.features = features or []
self.hooks = {}
self.formatters = []
self.undefined_steps = []
self.step_registry = step_registry
self.capture_controller = CaptureController(config)
self.context = None
self.feature = None
self.hook_failures = 0
# @property
def _get_aborted(self):
value = False
if self.context:
value = self.context.aborted
return value
# @aborted.setter
def _set_aborted(self, value):
# pylint: disable=protected-access
assert self.context, "REQUIRE: context, but context=%r" % self.context
self.context._set_root_attribute("aborted", bool(value))
aborted = property(_get_aborted, _set_aborted,
doc="Indicates that test run is aborted by the user.")
def run_hook(self, name, context, *args):
if not self.config.dry_run and (name in self.hooks):
try:
with context.use_with_user_mode():
self.hooks[name](context, *args)
# except KeyboardInterrupt:
# self.aborted = True
# if name not in ("before_all", "after_all"):
# raise
except Exception as e: # pylint: disable=broad-except
# -- HANDLE HOOK ERRORS:
use_traceback = False
if self.config.verbose:
use_traceback = True
ExceptionUtil.set_traceback(e)
extra = u""
if "tag" in name:
extra = "(tag=%s)" % args[0]
error_text = ExceptionUtil.describe(e, use_traceback).rstrip()
error_message = u"HOOK-ERROR in %s%s: %s" % (name, extra, error_text)
print(error_message)
self.hook_failures += 1
if "tag" in name:
# -- SCENARIO or FEATURE
statement = getattr(context, "scenario", context.feature)
elif "all" in name:
# -- ABORT EXECUTION: For before_all/after_all
self.aborted = True
statement = None
else:
# -- CASE: feature, scenario, step
statement = args[0]
if statement:
# -- CASE: feature, scenario, step
statement.hook_failed = True
if statement.error_message:
# -- NOTE: One exception/failure is already stored.
# Append only error message.
statement.error_message += u"\n"+ error_message
else:
# -- FIRST EXCEPTION/FAILURE:
statement.store_exception_context(e)
statement.error_message = error_message
def setup_capture(self):
if not self.context:
self.context = Context(self)
self.capture_controller.setup_capture(self.context)
def start_capture(self):
self.capture_controller.start_capture()
def stop_capture(self):
self.capture_controller.stop_capture()
def teardown_capture(self):
self.capture_controller.teardown_capture()
def run_model(self, features=None):
# pylint: disable=too-many-branches
if not self.context:
self.context = Context(self)
if self.step_registry is None:
self.step_registry = the_step_registry
if features is None:
features = self.features
# -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)
context = self.context
self.hook_failures = 0
self.setup_capture()
self.run_hook("before_all", context)
run_feature = not self.aborted
failed_count = 0
undefined_steps_initial_size = len(self.undefined_steps)
for feature in features:
if run_feature:
try:
self.feature = feature
for formatter in self.formatters:
formatter.uri(feature.filename)
failed = feature.run(self)
if failed:
failed_count += 1
if self.config.stop or self.aborted:
# -- FAIL-EARLY: After first failure.
run_feature = False
except KeyboardInterrupt:
self.aborted = True
failed_count += 1
run_feature = False
# -- ALWAYS: Report run/not-run feature to reporters.
# REQUIRED-FOR: Summary to keep track of untested features.
for reporter in self.config.reporters:
reporter.feature(feature)
# -- AFTER-ALL:
# pylint: disable=protected-access, broad-except
cleanups_failed = False
self.run_hook("after_all", self.context)
try:
self.context._do_cleanups() # Without dropping the last context layer.
except Exception:
cleanups_failed = True
if self.aborted:
print("\nABORTED: By user.")
for formatter in self.formatters:
formatter.close()
for reporter in self.config.reporters:
reporter.end()
failed = ((failed_count > 0) or self.aborted or (self.hook_failures > 0)
or (len(self.undefined_steps) > undefined_steps_initial_size)
or cleanups_failed)
# XXX-MAYBE: or context.failed)
return failed
def run(self):
"""
Implements the run method by running the model.
"""
self.context = Context(self)
return self.run_model()
class Runner(ModelRunner):
"""
Standard test runner for behave:
* setup paths
* loads environment hooks
* loads step definitions
* select feature files, parses them and creates model (elements)
"""
def __init__(self, config):
super(Runner, self).__init__(config)
self.path_manager = PathManager()
self.base_dir = None
def setup_paths(self):
# pylint: disable=too-many-branches, too-many-statements
if self.config.paths:
if self.config.verbose:
print("Supplied path:", \
", ".join('"%s"' % path for path in self.config.paths))
first_path = self.config.paths[0]
if hasattr(first_path, "filename"):
# -- BETTER: isinstance(first_path, FileLocation):
first_path = first_path.filename
base_dir = first_path
if base_dir.startswith("@"):
# -- USE: behave @features.txt
base_dir = base_dir[1:]
file_locations = self.feature_locations()
if file_locations:
base_dir = os.path.dirname(file_locations[0].filename)
base_dir = os.path.abspath(base_dir)
# supplied path might be to a feature file
if os.path.isfile(base_dir):
if self.config.verbose:
print("Primary path is to a file so using its directory")
base_dir = os.path.dirname(base_dir)
else:
if self.config.verbose:
print('Using default path "./features"')
base_dir = os.path.abspath("features")
# Get the root. This is not guaranteed to be "/" because Windows.
root_dir = path_getrootdir(base_dir)
new_base_dir = base_dir
steps_dir = self.config.steps_dir
environment_file = self.config.environment_file
while True:
if self.config.verbose:
print("Trying base directory:", new_base_dir)
if os.path.isdir(os.path.join(new_base_dir, steps_dir)):
break
if os.path.isfile(os.path.join(new_base_dir, environment_file)):
break
if new_base_dir == root_dir:
break
new_base_dir = os.path.dirname(new_base_dir)
if new_base_dir == root_dir:
if self.config.verbose:
if not self.config.paths:
print('ERROR: Could not find "%s" directory. '\
'Please specify where to find your features.' % \
steps_dir)
else:
print('ERROR: Could not find "%s" directory in your '\
'specified path "%s"' % (steps_dir, base_dir))
message = 'No %s directory in %r' % (steps_dir, base_dir)
raise ConfigError(message)
base_dir = new_base_dir
self.config.base_dir = base_dir
for dirpath, dirnames, filenames in os.walk(base_dir, followlinks=True):
if [fn for fn in filenames if fn.endswith(".feature")]:
break
else:
if self.config.verbose:
if not self.config.paths:
print('ERROR: Could not find any "<name>.feature" files. '\
'Please specify where to find your features.')
else:
print('ERROR: Could not find any "<name>.feature" files '\
'in your specified path "%s"' % base_dir)
raise ConfigError('No feature files in %r' % base_dir)
self.base_dir = base_dir
self.path_manager.add(base_dir)
if not self.config.paths:
self.config.paths = [base_dir]
if base_dir != os.getcwd():
self.path_manager.add(os.getcwd())
def before_all_default_hook(self, context):
"""
Default implementation for :func:`before_all()` hook.
Setup the logging subsystem based on the configuration data.
"""
# pylint: disable=no-self-use
context.config.setup_logging()
def load_hooks(self, filename=None):
filename = filename or self.config.environment_file
hooks_path = os.path.join(self.base_dir, filename)
if os.path.exists(hooks_path):
exec_file(hooks_path, self.hooks)
if "before_all" not in self.hooks:
self.hooks["before_all"] = self.before_all_default_hook
def load_step_definitions(self, extra_step_paths=None):
if extra_step_paths is None:
extra_step_paths = []
# -- Allow steps to import other stuff from the steps dir
# NOTE: Default matcher can be overridden in "environment.py" hook.
steps_dir = os.path.join(self.base_dir, self.config.steps_dir)
step_paths = [steps_dir] + list(extra_step_paths)
load_step_modules(step_paths)
def feature_locations(self):
return collect_feature_locations(self.config.paths)
def run(self):
with self.path_manager:
self.setup_paths()
return self.run_with_paths()
def run_with_paths(self):
self.context = Context(self)
self.load_hooks()
self.load_step_definitions()
# -- ENSURE: context.execute_steps() works in weird cases (hooks, ...)
# self.setup_capture()
# self.run_hook("before_all", self.context)
# -- STEP: Parse all feature files (by using their file location).
feature_locations = [filename for filename in self.feature_locations()
if not self.config.exclude(filename)]
features = parse_features(feature_locations, language=self.config.lang)
self.features.extend(features)
# -- STEP: Run all features.
stream_openers = self.config.outputs
self.formatters = make_formatters(self.config, stream_openers)
return self.run_model()
| [
"behave.runner_util.parse_features",
"behave._types.ExceptionUtil.set_traceback",
"traceback.extract_stack",
"traceback.format_tb",
"behave.exception.ConfigError",
"behave.runner_util.PathManager",
"sys.exc_info",
"behave.runner_util.exec_file",
"weakref.proxy",
"behave._types.ExceptionUtil.describe",
"six.reraise",
"behave.capture.CaptureController",
"behave.runner_util.collect_feature_locations",
"warnings.warn",
"behave.formatter._registry.make_formatters",
"behave.runner_util.load_step_modules",
"traceback.print_exc"
] | [((5457, 5478), 'weakref.proxy', 'weakref.proxy', (['runner'], {}), '(runner)\n', (5470, 5478), False, 'import weakref\n'), ((6802, 6838), 'traceback.print_exc', 'traceback.print_exc', ([], {'file': 'sys.stdout'}), '(file=sys.stdout)\n', (6821, 6838), False, 'import traceback\n'), ((9702, 9798), 'warnings.warn', 'warnings.warn', (['"""Use \'use_with_user_mode()\' instead"""', 'PendingDeprecationWarning'], {'stacklevel': '(2)'}), '("Use \'use_with_user_mode()\' instead",\n PendingDeprecationWarning, stacklevel=2)\n', (9715, 9798), False, 'import warnings\n'), ((19252, 19277), 'behave.capture.CaptureController', 'CaptureController', (['config'], {}), '(config)\n', (19269, 19277), False, 'from behave.capture import CaptureController\n'), ((25266, 25279), 'behave.runner_util.PathManager', 'PathManager', ([], {}), '()\n', (25277, 25279), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((29873, 29902), 'behave.runner_util.load_step_modules', 'load_step_modules', (['step_paths'], {}), '(step_paths)\n', (29890, 29902), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((29952, 29996), 'behave.runner_util.collect_feature_locations', 'collect_feature_locations', (['self.config.paths'], {}), '(self.config.paths)\n', (29977, 29996), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((30656, 30716), 'behave.runner_util.parse_features', 'parse_features', (['feature_locations'], {'language': 'self.config.lang'}), '(feature_locations, language=self.config.lang)\n', (30670, 30716), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((30865, 30909), 'behave.formatter._registry.make_formatters', 'make_formatters', (['self.config', 'stream_openers'], {}), '(self.config, stream_openers)\n', (30880, 30909), False, 'from behave.formatter._registry import make_formatters\n'), ((8568, 8605), 'six.reraise', 'six.reraise', (['*first_cleanup_erro_info'], {}), '(*first_cleanup_erro_info)\n', (8579, 8605), False, 'import six\n'), ((11264, 11316), 'warnings.warn', 'warnings.warn', (['msg', 'ContextMaskWarning'], {'stacklevel': '(3)'}), '(msg, ContextMaskWarning, stacklevel=3)\n', (11277, 11316), False, 'import warnings\n'), ((12766, 12808), 'traceback.extract_stack', 'traceback.extract_stack', ([], {'limit': 'stack_limit'}), '(limit=stack_limit)\n', (12789, 12808), False, 'import traceback\n'), ((27845, 27865), 'behave.exception.ConfigError', 'ConfigError', (['message'], {}), '(message)\n', (27856, 27865), False, 'from behave.exception import ConfigError\n'), ((28539, 28587), 'behave.exception.ConfigError', 'ConfigError', (["('No feature files in %r' % base_dir)"], {}), "('No feature files in %r' % base_dir)\n", (28550, 28587), False, 'from behave.exception import ConfigError\n'), ((29316, 29349), 'behave.runner_util.exec_file', 'exec_file', (['hooks_path', 'self.hooks'], {}), '(hooks_path, self.hooks)\n', (29325, 29349), False, 'from behave.runner_util import collect_feature_locations, parse_features, exec_file, load_step_modules, PathManager\n'), ((8288, 8302), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (8300, 8302), False, 'import sys\n'), ((20539, 20569), 'behave._types.ExceptionUtil.set_traceback', 'ExceptionUtil.set_traceback', (['e'], {}), '(e)\n', (20566, 20569), False, 'from behave._types 
import ExceptionUtil\n'), ((15430, 15469), 'traceback.format_tb', 'traceback.format_tb', (['step.exc_traceback'], {}), '(step.exc_traceback)\n', (15449, 15469), False, 'import traceback\n'), ((20711, 20751), 'behave._types.ExceptionUtil.describe', 'ExceptionUtil.describe', (['e', 'use_traceback'], {}), '(e, use_traceback)\n', (20733, 20751), False, 'from behave._types import ExceptionUtil\n')] |
add_library('pdf')
import random
from datetime import datetime
tileCount = 20
def setup():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
savePDF = False
actStrokeCap = ROUND
actRandomSeed = 0
colorLeft = color(197, 0, 123)
colorRight = color(87, 35, 129)
alphaLeft = 100
alphaRight = 100
def draw():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
if savePDF:
beginRecord(PDF, datetime.now().strftime("%Y%m%d%H%M%S")+".pdf")
background(255)
smooth()
noFill()
strokeCap(actStrokeCap)
random.seed(actRandomSeed)
for gridY in range(tileCount):
for gridX in range(tileCount):
posX = int(width/tileCount*gridX)
posY = int(height/tileCount*gridY)
toggle = random.randint(0,1)
if (toggle == 0):
strokeWeight(mouseX/20)
stroke(colorLeft, alphaLeft)
line(posX, posY, posX+width/tileCount, posY+height/tileCount)
elif (toggle == 1):
strokeWeight(mouseY/20)
stroke(colorRight, alphaRight)
line(posX, posY+width/tileCount, posX+height/tileCount, posY)
if (savePDF):
savePDF = False
endRecord()
def mousePressed():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
actRandomSeed = random.randint(0, 100000)
def keyReleased():
global savePDF, actStrokeCap, actRandomSeed, colorLeft, colorRight, alphaLeft, alphaRight
if (key=='s' or key=='S'):
saveFrame(datetime.now().strftime("%Y%m%d%H%M%S")+".png")
if (key=='p' or key=='P'):
savePDF = True
if key == "1":
actStrokeCap = ROUND
elif key == "2":
actStrokeCap = SQUARE
elif key == "3":
actStrokeCap = PROJECT
elif (key == '4'):
if (colorLeft == color(0)):
colorLeft = color(323, 100, 77)
else:
colorLeft = color(0)
elif (key == '5'):
if (colorRight == color(0)):
colorRight = color(273, 73, 51)
else:
colorRight = color(0)
elif (key == '6'):
if (alphaLeft == 100):
alphaLeft = 50
else:
alphaLeft = 100
elif (key == '7'):
if (alphaRight == 100):
alphaRight = 50
else:
alphaRight = 100
if (key == '0'):
actStrokeCap = ROUND
colorLeft = color(0)
colorRight = color(0)
alphaLeft = 100
alphaRight = 100
| [
"datetime.datetime.now",
"random.randint",
"random.seed"
] | [((648, 674), 'random.seed', 'random.seed', (['actRandomSeed'], {}), '(actRandomSeed)\n', (659, 674), False, 'import random\n'), ((1487, 1512), 'random.randint', 'random.randint', (['(0)', '(100000)'], {}), '(0, 100000)\n', (1501, 1512), False, 'import random\n'), ((865, 885), 'random.randint', 'random.randint', (['(0)', '(1)'], {}), '(0, 1)\n', (879, 885), False, 'import random\n'), ((516, 530), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (528, 530), False, 'from datetime import datetime\n'), ((1677, 1691), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1689, 1691), False, 'from datetime import datetime\n')] |
import enum
from dataclasses import dataclass, field
from itertools import chain, islice
from mashumaro import DataClassMessagePackMixin
from multiprocessing.synchronize import Lock
from typing import (
Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple,
TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar
)
from typing_extensions import Protocol
from uuid import UUID
from dbt.contracts.graph.compiled import (
CompileResultNode, ManifestNode, NonSourceCompiledNode, GraphMemberNode
)
from dbt.contracts.graph.parsed import (
ParsedMacro, ParsedDocumentation, ParsedNodePatch, ParsedMacroPatch,
ParsedSourceDefinition, ParsedExposure, HasUniqueID,
UnpatchedSourceDefinition, ManifestNodes
)
from dbt.contracts.graph.unparsed import SourcePatch
from dbt.contracts.files import SourceFile, SchemaSourceFile, FileHash, AnySourceFile
from dbt.contracts.util import (
BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version
)
from dbt.dataclass_schema import dbtClassMixin
from dbt.exceptions import (
CompilationException,
raise_duplicate_resource_name, raise_compiler_error, warn_or_error,
raise_duplicate_patch_name,
raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name,
)
from dbt.helper_types import PathSet
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.node_types import NodeType
from dbt.ui import line_wrap_message
from dbt import flags
from dbt import tracking
import dbt.utils
NodeEdgeMap = Dict[str, List[str]]
PackageName = str
DocName = str
RefName = str
UniqueID = str
def find_unique_id_for_package(storage, key, package: Optional[PackageName]):
if key not in storage:
return None
pkg_dct: Mapping[PackageName, UniqueID] = storage[key]
if package is None:
if not pkg_dct:
return None
else:
return next(iter(pkg_dct.values()))
elif package in pkg_dct:
return pkg_dct[package]
else:
return None
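# Sketch of the expected lookup behaviour (storage contents are illustrative):
#
#     storage = {"my_doc": {"pkg_a": "doc.pkg_a.my_doc"}}
#     find_unique_id_for_package(storage, "my_doc", None)     -> "doc.pkg_a.my_doc"
#     find_unique_id_for_package(storage, "my_doc", "pkg_a")  -> "doc.pkg_a.my_doc"
#     find_unique_id_for_package(storage, "my_doc", "pkg_b")  -> None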
class DocLookup(dbtClassMixin):
def __init__(self, manifest: 'Manifest'):
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
self.populate(manifest)
def get_unique_id(self, key, package: Optional[PackageName]):
return find_unique_id_for_package(self.storage, key, package)
def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):
unique_id = self.get_unique_id(key, package)
if unique_id is not None:
return self.perform_lookup(unique_id, manifest)
return None
def add_doc(self, doc: ParsedDocumentation):
if doc.name not in self.storage:
self.storage[doc.name] = {}
self.storage[doc.name][doc.package_name] = doc.unique_id
def populate(self, manifest):
for doc in manifest.docs.values():
self.add_doc(doc)
def perform_lookup(
self, unique_id: UniqueID, manifest
) -> ParsedDocumentation:
if unique_id not in manifest.docs:
raise dbt.exceptions.InternalException(
f'Doc {unique_id} found in cache but not found in manifest'
)
return manifest.docs[unique_id]
class SourceLookup(dbtClassMixin):
def __init__(self, manifest: 'Manifest'):
self.storage: Dict[Tuple[str, str], Dict[PackageName, UniqueID]] = {}
self.populate(manifest)
def get_unique_id(self, key, package: Optional[PackageName]):
return find_unique_id_for_package(self.storage, key, package)
def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):
unique_id = self.get_unique_id(key, package)
if unique_id is not None:
return self.perform_lookup(unique_id, manifest)
return None
def add_source(self, source: ParsedSourceDefinition):
key = (source.source_name, source.name)
if key not in self.storage:
self.storage[key] = {}
self.storage[key][source.package_name] = source.unique_id
def populate(self, manifest):
for source in manifest.sources.values():
if hasattr(source, 'source_name'):
self.add_source(source)
def perform_lookup(
self, unique_id: UniqueID, manifest: 'Manifest'
) -> ParsedSourceDefinition:
if unique_id not in manifest.sources:
raise dbt.exceptions.InternalException(
f'Source {unique_id} found in cache but not found in manifest'
)
return manifest.sources[unique_id]
class RefableLookup(dbtClassMixin):
# model, seed, snapshot
_lookup_types: ClassVar[set] = set(NodeType.refable())
# refables are actually unique, so the Dict[PackageName, UniqueID] will
# only ever have exactly one value, but doing 3 dict lookups instead of 1
# is not a big deal at all and retains consistency
def __init__(self, manifest: 'Manifest'):
self.storage: Dict[str, Dict[PackageName, UniqueID]] = {}
self.populate(manifest)
def get_unique_id(self, key, package: Optional[PackageName]):
return find_unique_id_for_package(self.storage, key, package)
def find(self, key, package: Optional[PackageName], manifest: 'Manifest'):
unique_id = self.get_unique_id(key, package)
if unique_id is not None:
return self.perform_lookup(unique_id, manifest)
return None
def add_node(self, node: ManifestNode):
if node.resource_type in self._lookup_types:
if node.name not in self.storage:
self.storage[node.name] = {}
self.storage[node.name][node.package_name] = node.unique_id
def populate(self, manifest):
for node in manifest.nodes.values():
self.add_node(node)
def perform_lookup(
self, unique_id: UniqueID, manifest
) -> ManifestNode:
if unique_id not in manifest.nodes:
raise dbt.exceptions.InternalException(
f'Node {unique_id} found in cache but not found in manifest'
)
return manifest.nodes[unique_id]
class AnalysisLookup(RefableLookup):
    _lookup_types: ClassVar[set] = set([NodeType.Analysis])
def _search_packages(
current_project: str,
node_package: str,
target_package: Optional[str] = None,
) -> List[Optional[str]]:
if target_package is not None:
return [target_package]
elif current_project == node_package:
return [current_project, None]
else:
return [current_project, node_package, None]
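# Sketch of the candidate package order (project/package names are illustrative):
#
#     _search_packages("root_proj", "pkg_b")           -> ["root_proj", "pkg_b", None]
#     _search_packages("root_proj", "root_proj")       -> ["root_proj", None]
#     _search_packages("root_proj", "pkg_b", "pkg_c")  -> ["pkg_c"]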
@dataclass
class ManifestMetadata(BaseArtifactMetadata):
"""Metadata for the manifest."""
dbt_schema_version: str = field(
default_factory=lambda: str(WritableManifest.dbt_schema_version)
)
project_id: Optional[str] = field(
default=None,
metadata={
'description': 'A unique identifier for the project',
},
)
user_id: Optional[UUID] = field(
default=None,
metadata={
'description': 'A unique identifier for the user',
},
)
send_anonymous_usage_stats: Optional[bool] = field(
default=None,
metadata=dict(description=(
'Whether dbt is configured to send anonymous usage statistics'
)),
)
adapter_type: Optional[str] = field(
default=None,
metadata=dict(description='The type name of the adapter'),
)
def __post_init__(self):
if tracking.active_user is None:
return
if self.user_id is None:
self.user_id = tracking.active_user.id
if self.send_anonymous_usage_stats is None:
self.send_anonymous_usage_stats = (
not tracking.active_user.do_not_track
)
@classmethod
def default(cls):
return cls(
dbt_schema_version=str(WritableManifest.dbt_schema_version),
)
def _sort_values(dct):
"""Given a dictionary, sort each value. This makes output deterministic,
which helps for tests.
"""
return {k: sorted(v) for k, v in dct.items()}
def build_node_edges(nodes: List[ManifestNode]):
"""Build the forward and backward edges on the given list of ParsedNodes
and return them as two separate dictionaries, each mapping unique IDs to
lists of edges.
"""
backward_edges: Dict[str, List[str]] = {}
# pre-populate the forward edge dict for simplicity
forward_edges: Dict[str, List[str]] = {n.unique_id: [] for n in nodes}
for node in nodes:
backward_edges[node.unique_id] = node.depends_on_nodes[:]
for unique_id in node.depends_on_nodes:
if unique_id in forward_edges.keys():
forward_edges[unique_id].append(node.unique_id)
return _sort_values(forward_edges), _sort_values(backward_edges)
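# Sketch of build_node_edges() output for two nodes where B depends on A
# (node names are illustrative):
#
#     forward_edges  == {"A": ["B"], "B": []}
#     backward_edges == {"A": [],   "B": ["A"]}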
# Build a map of children of macros
def build_macro_edges(nodes: List[Any]):
forward_edges: Dict[str, List[str]] = {
n.unique_id: [] for n in nodes if n.unique_id.startswith('macro') or n.depends_on.macros
}
for node in nodes:
for unique_id in node.depends_on.macros:
if unique_id in forward_edges.keys():
forward_edges[unique_id].append(node.unique_id)
return _sort_values(forward_edges)
def _deepcopy(value):
return value.from_dict(value.to_dict(omit_none=True))
class Locality(enum.IntEnum):
Core = 1
Imported = 2
Root = 3
class Specificity(enum.IntEnum):
Default = 1
Adapter = 2
@dataclass
class MacroCandidate:
locality: Locality
macro: ParsedMacro
def __eq__(self, other: object) -> bool:
if not isinstance(other, MacroCandidate):
return NotImplemented
return self.locality == other.locality
def __lt__(self, other: object) -> bool:
if not isinstance(other, MacroCandidate):
return NotImplemented
if self.locality < other.locality:
return True
if self.locality > other.locality:
return False
return False
@dataclass
class MaterializationCandidate(MacroCandidate):
specificity: Specificity
@classmethod
def from_macro(
cls, candidate: MacroCandidate, specificity: Specificity
) -> 'MaterializationCandidate':
return cls(
locality=candidate.locality,
macro=candidate.macro,
specificity=specificity,
)
def __eq__(self, other: object) -> bool:
if not isinstance(other, MaterializationCandidate):
return NotImplemented
equal = (
self.specificity == other.specificity and
self.locality == other.locality
)
if equal:
raise_compiler_error(
'Found two materializations with the name {} (packages {} and '
'{}). dbt cannot resolve this ambiguity'
.format(self.macro.name, self.macro.package_name,
other.macro.package_name)
)
return equal
def __lt__(self, other: object) -> bool:
if not isinstance(other, MaterializationCandidate):
return NotImplemented
if self.specificity < other.specificity:
return True
if self.specificity > other.specificity:
return False
if self.locality < other.locality:
return True
if self.locality > other.locality:
return False
return False
M = TypeVar('M', bound=MacroCandidate)
class CandidateList(List[M]):
def last(self) -> Optional[ParsedMacro]:
if not self:
return None
self.sort()
return self[-1].macro
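# Sketch of how the candidate ordering above resolves macros: CandidateList.last()
# sorts the candidates and returns the macro of the highest-ranked one, so a
# Root-locality candidate wins over Imported, which wins over Core (internal).
#
#     candidates = CandidateList([core_candidate, root_candidate])  # illustrative
#     assert candidates.last() is root_candidate.macro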
def _get_locality(
macro: ParsedMacro, root_project_name: str, internal_packages: Set[str]
) -> Locality:
if macro.package_name == root_project_name:
return Locality.Root
elif macro.package_name in internal_packages:
return Locality.Core
else:
return Locality.Imported
class Searchable(Protocol):
resource_type: NodeType
package_name: str
@property
def search_name(self) -> str:
raise NotImplementedError('search_name not implemented')
N = TypeVar('N', bound=Searchable)
@dataclass
class NameSearcher(Generic[N]):
name: str
package: Optional[str]
nodetypes: List[NodeType]
def _matches(self, model: N) -> bool:
"""Return True if the model matches the given name, package, and type.
If package is None, any package is allowed.
nodetypes should be a container of NodeTypes that implements the 'in'
operator.
"""
if model.resource_type not in self.nodetypes:
return False
if self.name != model.search_name:
return False
return self.package is None or self.package == model.package_name
def search(self, haystack: Iterable[N]) -> Optional[N]:
"""Find an entry in the given iterable by name."""
for model in haystack:
if self._matches(model):
return model
return None
D = TypeVar('D')
@dataclass
class Disabled(Generic[D]):
target: D
MaybeDocumentation = Optional[ParsedDocumentation]
MaybeParsedSource = Optional[Union[
ParsedSourceDefinition,
Disabled[ParsedSourceDefinition],
]]
MaybeNonSource = Optional[Union[
ManifestNode,
Disabled[ManifestNode]
]]
T = TypeVar('T', bound=GraphMemberNode)
def _update_into(dest: MutableMapping[str, T], new_item: T):
"""Update dest to overwrite whatever is at dest[new_item.unique_id] with
    new_item. There must be an existing value to overwrite, and the two nodes
must have the same original file path.
"""
unique_id = new_item.unique_id
if unique_id not in dest:
raise dbt.exceptions.RuntimeException(
f'got an update_{new_item.resource_type} call with an '
f'unrecognized {new_item.resource_type}: {new_item.unique_id}'
)
existing = dest[unique_id]
if new_item.original_file_path != existing.original_file_path:
raise dbt.exceptions.RuntimeException(
f'cannot update a {new_item.resource_type} to have a new file '
f'path!'
)
dest[unique_id] = new_item
# This contains macro methods that are in both the Manifest
# and the MacroManifest
class MacroMethods:
# Just to make mypy happy. There must be a better way.
def __init__(self):
self.macros = []
self.metadata = {}
def find_macro_by_name(
self, name: str, root_project_name: str, package: Optional[str]
) -> Optional[ParsedMacro]:
"""Find a macro in the graph by its name and package name, or None for
any package. The root project name is used to determine priority:
- locally defined macros come first
- then imported macros
- then macros defined in the root project
"""
filter: Optional[Callable[[MacroCandidate], bool]] = None
if package is not None:
def filter(candidate: MacroCandidate) -> bool:
return package == candidate.macro.package_name
candidates: CandidateList = self._find_macros_by_name(
name=name,
root_project_name=root_project_name,
filter=filter,
)
return candidates.last()
def find_generate_macro_by_name(
self, component: str, root_project_name: str
) -> Optional[ParsedMacro]:
"""
The `generate_X_name` macros are similar to regular ones, but ignore
imported packages.
- if there is a `generate_{component}_name` macro in the root
project, return it
- return the `generate_{component}_name` macro from the 'dbt'
internal project
"""
def filter(candidate: MacroCandidate) -> bool:
return candidate.locality != Locality.Imported
candidates: CandidateList = self._find_macros_by_name(
name=f'generate_{component}_name',
root_project_name=root_project_name,
# filter out imported packages
filter=filter,
)
return candidates.last()
def _find_macros_by_name(
self,
name: str,
root_project_name: str,
filter: Optional[Callable[[MacroCandidate], bool]] = None
) -> CandidateList:
"""Find macros by their name.
"""
# avoid an import cycle
from dbt.adapters.factory import get_adapter_package_names
candidates: CandidateList = CandidateList()
packages = set(get_adapter_package_names(self.metadata.adapter_type))
for unique_id, macro in self.macros.items():
if macro.name != name:
continue
candidate = MacroCandidate(
locality=_get_locality(macro, root_project_name, packages),
macro=macro,
)
if filter is None or filter(candidate):
candidates.append(candidate)
return candidates
@dataclass
class ParsingInfo:
static_analysis_parsed_path_count: int = 0
static_analysis_path_count: int = 0
@dataclass
class ManifestStateCheck(dbtClassMixin):
vars_hash: FileHash = field(default_factory=FileHash.empty)
profile_hash: FileHash = field(default_factory=FileHash.empty)
project_hashes: MutableMapping[str, FileHash] = field(default_factory=dict)
@dataclass
class Manifest(MacroMethods, DataClassMessagePackMixin, dbtClassMixin):
"""The manifest for the full graph, after parsing and during compilation.
"""
# These attributes are both positional and by keyword. If an attribute
    # is added, it must also be added in the __reduce_ex__ method in the
# args tuple in the right position.
nodes: MutableMapping[str, ManifestNode] = field(default_factory=dict)
sources: MutableMapping[str, ParsedSourceDefinition] = field(default_factory=dict)
macros: MutableMapping[str, ParsedMacro] = field(default_factory=dict)
docs: MutableMapping[str, ParsedDocumentation] = field(default_factory=dict)
exposures: MutableMapping[str, ParsedExposure] = field(default_factory=dict)
selectors: MutableMapping[str, Any] = field(default_factory=dict)
disabled: List[CompileResultNode] = field(default_factory=list)
files: MutableMapping[str, AnySourceFile] = field(default_factory=dict)
metadata: ManifestMetadata = field(default_factory=ManifestMetadata)
flat_graph: Dict[str, Any] = field(default_factory=dict)
state_check: ManifestStateCheck = field(default_factory=ManifestStateCheck)
# Moved from the ParseResult object
source_patches: MutableMapping[SourceKey, SourcePatch] = field(default_factory=dict)
# following is from ParseResult
_disabled: MutableMapping[str, List[CompileResultNode]] = field(default_factory=dict)
_doc_lookup: Optional[DocLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_source_lookup: Optional[SourceLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_ref_lookup: Optional[RefableLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_analysis_lookup: Optional[AnalysisLookup] = field(
default=None, metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_parsing_info: ParsingInfo = field(
default_factory=ParsingInfo,
metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
_lock: Lock = field(
default_factory=flags.MP_CONTEXT.Lock,
metadata={'serialize': lambda x: None, 'deserialize': lambda x: None}
)
def __pre_serialize__(self):
# serialization won't work with anything except an empty source_patches because
# tuple keys are not supported, so ensure it's empty
self.source_patches = {}
return self
@classmethod
def __post_deserialize__(cls, obj):
obj._lock = flags.MP_CONTEXT.Lock()
return obj
def sync_update_node(
self, new_node: NonSourceCompiledNode
) -> NonSourceCompiledNode:
"""update the node with a lock. The only time we should want to lock is
when compiling an ephemeral ancestor of a node at runtime, because
multiple threads could be just-in-time compiling the same ephemeral
dependency, and we want them to have a consistent view of the manifest.
If the existing node is not compiled, update it with the new node and
return that. If the existing node is compiled, do not update the
manifest and return the existing node.
"""
with self._lock:
existing = self.nodes[new_node.unique_id]
if getattr(existing, 'compiled', False):
# already compiled -> must be a NonSourceCompiledNode
return cast(NonSourceCompiledNode, existing)
_update_into(self.nodes, new_node)
return new_node
def update_exposure(self, new_exposure: ParsedExposure):
_update_into(self.exposures, new_exposure)
def update_node(self, new_node: ManifestNode):
_update_into(self.nodes, new_node)
def update_source(self, new_source: ParsedSourceDefinition):
_update_into(self.sources, new_source)
def build_flat_graph(self):
"""This attribute is used in context.common by each node, so we want to
only build it once and avoid any concurrency issues around it.
Make sure you don't call this until you're done with building your
manifest!
"""
self.flat_graph = {
'exposures': {
k: v.to_dict(omit_none=False)
for k, v in self.exposures.items()
},
'nodes': {
k: v.to_dict(omit_none=False)
for k, v in self.nodes.items()
},
'sources': {
k: v.to_dict(omit_none=False)
for k, v in self.sources.items()
}
}
def find_disabled_by_name(
self, name: str, package: Optional[str] = None
) -> Optional[ManifestNode]:
searcher: NameSearcher = NameSearcher(
name, package, NodeType.refable()
)
result = searcher.search(self.disabled)
return result
def find_disabled_source_by_name(
self, source_name: str, table_name: str, package: Optional[str] = None
) -> Optional[ParsedSourceDefinition]:
search_name = f'{source_name}.{table_name}'
searcher: NameSearcher = NameSearcher(
search_name, package, [NodeType.Source]
)
result = searcher.search(self.disabled)
if result is not None:
assert isinstance(result, ParsedSourceDefinition)
return result
def _materialization_candidates_for(
self, project_name: str,
materialization_name: str,
adapter_type: Optional[str],
) -> CandidateList:
if adapter_type is None:
specificity = Specificity.Default
else:
specificity = Specificity.Adapter
full_name = dbt.utils.get_materialization_macro_name(
materialization_name=materialization_name,
adapter_type=adapter_type,
with_prefix=False,
)
return CandidateList(
MaterializationCandidate.from_macro(m, specificity)
for m in self._find_macros_by_name(full_name, project_name)
)
def find_materialization_macro_by_name(
self, project_name: str, materialization_name: str, adapter_type: str
) -> Optional[ParsedMacro]:
candidates: CandidateList = CandidateList(chain.from_iterable(
self._materialization_candidates_for(
project_name=project_name,
materialization_name=materialization_name,
adapter_type=atype,
) for atype in (adapter_type, None)
))
return candidates.last()
def get_resource_fqns(self) -> Mapping[str, PathSet]:
resource_fqns: Dict[str, Set[Tuple[str, ...]]] = {}
all_resources = chain(self.exposures.values(), self.nodes.values(), self.sources.values())
for resource in all_resources:
resource_type_plural = resource.resource_type.pluralize()
if resource_type_plural not in resource_fqns:
resource_fqns[resource_type_plural] = set()
resource_fqns[resource_type_plural].add(tuple(resource.fqn))
return resource_fqns
# This is called by 'parse_patch' in the NodePatchParser
def add_patch(
self, source_file: SchemaSourceFile, patch: ParsedNodePatch,
) -> None:
if patch.yaml_key in ['models', 'seeds', 'snapshots']:
unique_id = self.ref_lookup.get_unique_id(patch.name, None)
elif patch.yaml_key == 'analyses':
unique_id = self.analysis_lookup.get_unique_id(patch.name, None)
else:
raise dbt.exceptions.InternalException(
f'Unexpected yaml_key {patch.yaml_key} for patch in '
f'file {source_file.path.original_file_path}'
)
if unique_id is None:
# This will usually happen when a node is disabled
return
# patches can't be overwritten
node = self.nodes.get(unique_id)
if node:
if node.patch_path:
package_name, existing_file_path = node.patch_path.split('://')
raise_duplicate_patch_name(patch, existing_file_path)
source_file.append_patch(patch.yaml_key, unique_id)
node.patch(patch)
def add_macro_patch(
self, source_file: SchemaSourceFile, patch: ParsedMacroPatch,
) -> None:
# macros are fully namespaced
unique_id = f'macro.{patch.package_name}.{patch.name}'
macro = self.macros.get(unique_id)
if not macro:
warn_or_error(
f'WARNING: Found documentation for macro "{patch.name}" '
f'which was not found'
)
return
if macro.patch_path:
package_name, existing_file_path = macro.patch_path.split('://')
raise_duplicate_macro_patch_name(patch, existing_file_path)
source_file.macro_patches[patch.name] = unique_id
macro.patch(patch)
def add_source_patch(
self, source_file: SchemaSourceFile, patch: SourcePatch,
) -> None:
# source patches must be unique
key = (patch.overrides, patch.name)
if key in self.source_patches:
raise_duplicate_source_patch_name(patch, self.source_patches[key])
self.source_patches[key] = patch
source_file.source_patches.append(key)
def get_used_schemas(self, resource_types=None):
return frozenset({
(node.database, node.schema) for node in
chain(self.nodes.values(), self.sources.values())
if not resource_types or node.resource_type in resource_types
})
def get_used_databases(self):
return frozenset(
x.database for x in
chain(self.nodes.values(), self.sources.values())
)
# This is used in dbt.task.rpc.sql_commands 'add_new_refs'
def deepcopy(self):
return Manifest(
nodes={k: _deepcopy(v) for k, v in self.nodes.items()},
sources={k: _deepcopy(v) for k, v in self.sources.items()},
macros={k: _deepcopy(v) for k, v in self.macros.items()},
docs={k: _deepcopy(v) for k, v in self.docs.items()},
exposures={k: _deepcopy(v) for k, v in self.exposures.items()},
selectors={k: _deepcopy(v) for k, v in self.selectors.items()},
metadata=self.metadata,
disabled=[_deepcopy(n) for n in self.disabled],
files={k: _deepcopy(v) for k, v in self.files.items()},
state_check=_deepcopy(self.state_check),
)
def build_parent_and_child_maps(self):
edge_members = list(chain(
self.nodes.values(),
self.sources.values(),
self.exposures.values(),
))
forward_edges, backward_edges = build_node_edges(edge_members)
self.child_map = forward_edges
self.parent_map = backward_edges
def build_macro_child_map(self):
edge_members = list(chain(
self.nodes.values(),
self.macros.values(),
))
forward_edges = build_macro_edges(edge_members)
return forward_edges
def writable_manifest(self):
self.build_parent_and_child_maps()
return WritableManifest(
nodes=self.nodes,
sources=self.sources,
macros=self.macros,
docs=self.docs,
exposures=self.exposures,
selectors=self.selectors,
metadata=self.metadata,
disabled=self.disabled,
child_map=self.child_map,
parent_map=self.parent_map,
)
def write(self, path):
self.writable_manifest().write(path)
# Called in dbt.compilation.Linker.write_graph and
# dbt.graph.queue.get and ._include_in_cost
def expect(self, unique_id: str) -> GraphMemberNode:
if unique_id in self.nodes:
return self.nodes[unique_id]
elif unique_id in self.sources:
return self.sources[unique_id]
elif unique_id in self.exposures:
return self.exposures[unique_id]
else:
# something terrible has happened
raise dbt.exceptions.InternalException(
'Expected node {} not found in manifest'.format(unique_id)
)
@property
def doc_lookup(self) -> DocLookup:
if self._doc_lookup is None:
self._doc_lookup = DocLookup(self)
return self._doc_lookup
def rebuild_doc_lookup(self):
self._doc_lookup = DocLookup(self)
@property
def source_lookup(self) -> SourceLookup:
if self._source_lookup is None:
self._source_lookup = SourceLookup(self)
return self._source_lookup
def rebuild_source_lookup(self):
self._source_lookup = SourceLookup(self)
@property
def ref_lookup(self) -> RefableLookup:
if self._ref_lookup is None:
self._ref_lookup = RefableLookup(self)
return self._ref_lookup
def rebuild_ref_lookup(self):
self._ref_lookup = RefableLookup(self)
@property
def analysis_lookup(self) -> AnalysisLookup:
if self._analysis_lookup is None:
self._analysis_lookup = AnalysisLookup(self)
return self._analysis_lookup
# Called by dbt.parser.manifest._resolve_refs_for_exposure
# and dbt.parser.manifest._process_refs_for_node
def resolve_ref(
self,
target_model_name: str,
target_model_package: Optional[str],
current_project: str,
node_package: str,
) -> MaybeNonSource:
node: Optional[ManifestNode] = None
disabled: Optional[ManifestNode] = None
candidates = _search_packages(
current_project, node_package, target_model_package
)
for pkg in candidates:
node = self.ref_lookup.find(target_model_name, pkg, self)
if node is not None and node.config.enabled:
return node
# it's possible that the node is disabled
if disabled is None:
disabled = self.find_disabled_by_name(
target_model_name, pkg
)
if disabled is not None:
return Disabled(disabled)
return None
# Called by dbt.parser.manifest._resolve_sources_for_exposure
# and dbt.parser.manifest._process_source_for_node
def resolve_source(
self,
target_source_name: str,
target_table_name: str,
current_project: str,
node_package: str
) -> MaybeParsedSource:
key = (target_source_name, target_table_name)
candidates = _search_packages(current_project, node_package)
source: Optional[ParsedSourceDefinition] = None
disabled: Optional[ParsedSourceDefinition] = None
for pkg in candidates:
source = self.source_lookup.find(key, pkg, self)
if source is not None and source.config.enabled:
return source
if disabled is None:
disabled = self.find_disabled_source_by_name(
target_source_name, target_table_name, pkg
)
if disabled is not None:
return Disabled(disabled)
return None
# Called by DocsRuntimeContext.doc
def resolve_doc(
self,
name: str,
package: Optional[str],
current_project: str,
node_package: str,
) -> Optional[ParsedDocumentation]:
"""Resolve the given documentation. This follows the same algorithm as
resolve_ref except the is_enabled checks are unnecessary as docs are
always enabled.
"""
candidates = _search_packages(
current_project, node_package, package
)
for pkg in candidates:
result = self.doc_lookup.find(name, pkg, self)
if result is not None:
return result
return None
# Called by RunTask.defer_to_manifest
def merge_from_artifact(
self,
adapter,
other: 'WritableManifest',
selected: AbstractSet[UniqueID],
) -> None:
"""Given the selected unique IDs and a writable manifest, update this
manifest by replacing any unselected nodes with their counterpart.
Only non-ephemeral refable nodes are examined.
"""
refables = set(NodeType.refable())
merged = set()
for unique_id, node in other.nodes.items():
current = self.nodes.get(unique_id)
if current and (
node.resource_type in refables and
not node.is_ephemeral and
unique_id not in selected and
not adapter.get_relation(
current.database, current.schema, current.identifier
)
):
merged.add(unique_id)
self.nodes[unique_id] = node.replace(deferred=True)
# log up to 5 items
sample = list(islice(merged, 5))
logger.debug(
f'Merged {len(merged)} items from state (sample: {sample})'
)
# Methods that were formerly in ParseResult
def add_macro(self, source_file: SourceFile, macro: ParsedMacro):
if macro.unique_id in self.macros:
# detect that the macro exists and emit an error
other_path = self.macros[macro.unique_id].original_file_path
# subtract 2 for the "Compilation Error" indent
# note that the line wrap eats newlines, so if you want newlines,
# this is the result :(
msg = line_wrap_message(
f'''\
dbt found two macros named "{macro.name}" in the project
"{macro.package_name}".
To fix this error, rename or remove one of the following
macros:
- {macro.original_file_path}
- {other_path}
''',
subtract=2
)
raise_compiler_error(msg)
self.macros[macro.unique_id] = macro
source_file.macros.append(macro.unique_id)
def has_file(self, source_file: SourceFile) -> bool:
key = source_file.file_id
if key is None:
return False
if key not in self.files:
return False
my_checksum = self.files[key].checksum
return my_checksum == source_file.checksum
def add_source(
self, source_file: SchemaSourceFile, source: UnpatchedSourceDefinition
):
# sources can't be overwritten!
_check_duplicates(source, self.sources)
self.sources[source.unique_id] = source # type: ignore
source_file.sources.append(source.unique_id)
def add_node_nofile(self, node: ManifestNodes):
# nodes can't be overwritten!
_check_duplicates(node, self.nodes)
self.nodes[node.unique_id] = node
def add_node(self, source_file: AnySourceFile, node: ManifestNodes, test_from=None):
self.add_node_nofile(node)
if isinstance(source_file, SchemaSourceFile):
assert test_from
source_file.add_test(node.unique_id, test_from)
else:
source_file.nodes.append(node.unique_id)
def add_exposure(self, source_file: SchemaSourceFile, exposure: ParsedExposure):
_check_duplicates(exposure, self.exposures)
self.exposures[exposure.unique_id] = exposure
source_file.exposures.append(exposure.unique_id)
def add_disabled_nofile(self, node: CompileResultNode):
if node.unique_id in self._disabled:
self._disabled[node.unique_id].append(node)
else:
self._disabled[node.unique_id] = [node]
def add_disabled(self, source_file: AnySourceFile, node: CompileResultNode, test_from=None):
self.add_disabled_nofile(node)
if isinstance(source_file, SchemaSourceFile):
assert test_from
source_file.add_test(node.unique_id, test_from)
else:
source_file.nodes.append(node.unique_id)
def add_doc(self, source_file: SourceFile, doc: ParsedDocumentation):
_check_duplicates(doc, self.docs)
self.docs[doc.unique_id] = doc
source_file.docs.append(doc.unique_id)
# end of methods formerly in ParseResult
# Provide support for copy.deepcopy() - we just need to avoid the lock!
# pickle and deepcopy use this. It returns a callable object used to
# create the initial version of the object and a tuple of arguments
# for the object, i.e. the Manifest.
# The order of the arguments must match the order of the attributes
# in the Manifest class declaration, because they are used as
# positional arguments to construct a Manifest.
def __reduce_ex__(self, protocol):
args = (
self.nodes,
self.sources,
self.macros,
self.docs,
self.exposures,
self.selectors,
self.disabled,
self.files,
self.metadata,
self.flat_graph,
self.state_check,
self.source_patches,
self._disabled,
self._doc_lookup,
self._source_lookup,
self._ref_lookup,
)
return self.__class__, args
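# Illustrative sketch (assumed usage, not from dbt-core itself): because
# __reduce_ex__ rebuilds the Manifest from positional arguments and leaves
# _lock out of the args tuple, pickling and copy.deepcopy() construct a fresh
# multiprocessing lock instead of trying to copy one.
def _example_deepcopy_manifest() -> 'Manifest':
    import copy
    # All Manifest fields have defaults, so a bare Manifest() is valid here.
    return copy.deepcopy(Manifest())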
class MacroManifest(MacroMethods):
def __init__(self, macros):
self.macros = macros
self.metadata = ManifestMetadata()
# This is returned by the 'graph' context property
# in the ProviderContext class.
self.flat_graph = {}
AnyManifest = Union[Manifest, MacroManifest]
@dataclass
@schema_version('manifest', 2)
class WritableManifest(ArtifactMixin):
nodes: Mapping[UniqueID, ManifestNode] = field(
metadata=dict(description=(
'The nodes defined in the dbt project and its dependencies'
))
)
sources: Mapping[UniqueID, ParsedSourceDefinition] = field(
metadata=dict(description=(
'The sources defined in the dbt project and its dependencies'
))
)
macros: Mapping[UniqueID, ParsedMacro] = field(
metadata=dict(description=(
'The macros defined in the dbt project and its dependencies'
))
)
docs: Mapping[UniqueID, ParsedDocumentation] = field(
metadata=dict(description=(
'The docs defined in the dbt project and its dependencies'
))
)
exposures: Mapping[UniqueID, ParsedExposure] = field(
metadata=dict(description=(
'The exposures defined in the dbt project and its dependencies'
))
)
selectors: Mapping[UniqueID, Any] = field(
metadata=dict(description=(
'The selectors defined in selectors.yml'
))
)
disabled: Optional[List[CompileResultNode]] = field(metadata=dict(
description='A list of the disabled nodes in the target'
))
parent_map: Optional[NodeEdgeMap] = field(metadata=dict(
description='A mapping from child nodes to their dependencies',
))
child_map: Optional[NodeEdgeMap] = field(metadata=dict(
description='A mapping from parent nodes to their dependents',
))
metadata: ManifestMetadata = field(metadata=dict(
description='Metadata about the manifest',
))
def _check_duplicates(
value: HasUniqueID, src: Mapping[str, HasUniqueID]
):
if value.unique_id in src:
raise_duplicate_resource_name(value, src[value.unique_id])
K_T = TypeVar('K_T')
V_T = TypeVar('V_T')
def _expect_value(
key: K_T, src: Mapping[K_T, V_T], old_file: SourceFile, name: str
) -> V_T:
if key not in src:
raise CompilationException(
'Expected to find "{}" in cached "result.{}" based '
'on cached file information: {}!'
.format(key, name, old_file)
)
return src[key]
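# Illustrative sketch (assumed usage, not from dbt-core itself): _expect_value
# is meant for partial-parsing lookups — fetch an entry that the cached file
# information says must exist, and raise a descriptive CompilationException
# (rather than a bare KeyError) when the cache and the manifest disagree.
# `cached_macros` and `old_file` below are hypothetical stand-ins.
def _example_lookup_cached_macro(cached_macros, unique_id, old_file):
    return _expect_value(unique_id, cached_macros, old_file, "macros")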
| [
"itertools.islice",
"dbt.adapters.factory.get_adapter_package_names",
"dbt.ui.line_wrap_message",
"dbt.exceptions.raise_duplicate_source_patch_name",
"dbt.contracts.util.schema_version",
"dbt.exceptions.raise_compiler_error",
"dbt.exceptions.raise_duplicate_resource_name",
"dbt.exceptions.raise_duplicate_macro_patch_name",
"typing.cast",
"dbt.exceptions.raise_duplicate_patch_name",
"dbt.exceptions.warn_or_error",
"dbt.flags.MP_CONTEXT.Lock",
"dataclasses.field",
"dbt.node_types.NodeType.refable",
"typing.TypeVar"
] | [((11468, 11502), 'typing.TypeVar', 'TypeVar', (['"""M"""'], {'bound': 'MacroCandidate'}), "('M', bound=MacroCandidate)\n", (11475, 11502), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((12186, 12216), 'typing.TypeVar', 'TypeVar', (['"""N"""'], {'bound': 'Searchable'}), "('N', bound=Searchable)\n", (12193, 12216), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((13082, 13094), 'typing.TypeVar', 'TypeVar', (['"""D"""'], {}), "('D')\n", (13089, 13094), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((13399, 13434), 'typing.TypeVar', 'TypeVar', (['"""T"""'], {'bound': 'GraphMemberNode'}), "('T', bound=GraphMemberNode)\n", (13406, 13434), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((39262, 39291), 'dbt.contracts.util.schema_version', 'schema_version', (['"""manifest"""', '(2)'], {}), "('manifest', 2)\n", (39276, 39291), False, 'from dbt.contracts.util import BaseArtifactMetadata, SourceKey, ArtifactMixin, schema_version\n'), ((41121, 41135), 'typing.TypeVar', 'TypeVar', (['"""K_T"""'], {}), "('K_T')\n", (41128, 41135), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((41142, 41156), 'typing.TypeVar', 'TypeVar', (['"""V_T"""'], {}), "('V_T')\n", (41149, 41156), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((6784, 6872), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'description': 'A unique identifier for the project'}"}), "(default=None, metadata={'description':\n 'A unique identifier for the project'})\n", (6789, 6872), False, 'from dataclasses import dataclass, field\n'), ((6945, 7030), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'description': 'A unique identifier for the user'}"}), "(default=None, metadata={'description':\n 'A unique identifier for the user'})\n", (6950, 7030), False, 'from dataclasses import dataclass, field\n'), ((17257, 17294), 'dataclasses.field', 'field', ([], {'default_factory': 'FileHash.empty'}), '(default_factory=FileHash.empty)\n', (17262, 17294), False, 'from dataclasses import dataclass, field\n'), ((17324, 17361), 'dataclasses.field', 'field', ([], {'default_factory': 'FileHash.empty'}), '(default_factory=FileHash.empty)\n', (17329, 17361), False, 'from dataclasses import dataclass, field\n'), ((17414, 17441), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (17419, 17441), False, 'from dataclasses import dataclass, field\n'), ((17846, 17873), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (17851, 17873), False, 'from dataclasses import dataclass, field\n'), ((17933, 17960), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (17938, 17960), False, 'from dataclasses import dataclass, field\n'), ((18008, 18035), 'dataclasses.field', 'field', ([], 
{'default_factory': 'dict'}), '(default_factory=dict)\n', (18013, 18035), False, 'from dataclasses import dataclass, field\n'), ((18089, 18116), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18094, 18116), False, 'from dataclasses import dataclass, field\n'), ((18170, 18197), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18175, 18197), False, 'from dataclasses import dataclass, field\n'), ((18240, 18267), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18245, 18267), False, 'from dataclasses import dataclass, field\n'), ((18308, 18335), 'dataclasses.field', 'field', ([], {'default_factory': 'list'}), '(default_factory=list)\n', (18313, 18335), False, 'from dataclasses import dataclass, field\n'), ((18384, 18411), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18389, 18411), False, 'from dataclasses import dataclass, field\n'), ((18445, 18484), 'dataclasses.field', 'field', ([], {'default_factory': 'ManifestMetadata'}), '(default_factory=ManifestMetadata)\n', (18450, 18484), False, 'from dataclasses import dataclass, field\n'), ((18518, 18545), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18523, 18545), False, 'from dataclasses import dataclass, field\n'), ((18584, 18625), 'dataclasses.field', 'field', ([], {'default_factory': 'ManifestStateCheck'}), '(default_factory=ManifestStateCheck)\n', (18589, 18625), False, 'from dataclasses import dataclass, field\n'), ((18727, 18754), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18732, 18754), False, 'from dataclasses import dataclass, field\n'), ((18853, 18880), 'dataclasses.field', 'field', ([], {'default_factory': 'dict'}), '(default_factory=dict)\n', (18858, 18880), False, 'from dataclasses import dataclass, field\n'), ((18920, 19015), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (18925, 19015), False, 'from dataclasses import dataclass, field\n'), ((19070, 19165), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (19075, 19165), False, 'from dataclasses import dataclass, field\n'), ((19218, 19313), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (19223, 19313), False, 'from dataclasses import dataclass, field\n'), ((19372, 19467), 'dataclasses.field', 'field', ([], {'default': 'None', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default=None, metadata={'serialize': lambda x: None, 'deserialize': \n lambda x: None})\n", (19377, 19467), False, 'from dataclasses import dataclass, field\n'), ((19510, 19619), 'dataclasses.field', 'field', ([], {'default_factory': 'ParsingInfo', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default_factory=ParsingInfo, metadata={'serialize': lambda x: None,\n 'deserialize': lambda x: None})\n", (19515, 19619), False, 'from dataclasses 
import dataclass, field\n'), ((19656, 19775), 'dataclasses.field', 'field', ([], {'default_factory': 'flags.MP_CONTEXT.Lock', 'metadata': "{'serialize': lambda x: None, 'deserialize': lambda x: None}"}), "(default_factory=flags.MP_CONTEXT.Lock, metadata={'serialize': lambda\n x: None, 'deserialize': lambda x: None})\n", (19661, 19775), False, 'from dataclasses import dataclass, field\n'), ((4640, 4658), 'dbt.node_types.NodeType.refable', 'NodeType.refable', ([], {}), '()\n', (4656, 4658), False, 'from dbt.node_types import NodeType\n'), ((20108, 20131), 'dbt.flags.MP_CONTEXT.Lock', 'flags.MP_CONTEXT.Lock', ([], {}), '()\n', (20129, 20131), False, 'from dbt import flags\n'), ((41054, 41112), 'dbt.exceptions.raise_duplicate_resource_name', 'raise_duplicate_resource_name', (['value', 'src[value.unique_id]'], {}), '(value, src[value.unique_id])\n', (41083, 41112), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((16607, 16660), 'dbt.adapters.factory.get_adapter_package_names', 'get_adapter_package_names', (['self.metadata.adapter_type'], {}), '(self.metadata.adapter_type)\n', (16632, 16660), False, 'from dbt.adapters.factory import get_adapter_package_names\n'), ((22362, 22380), 'dbt.node_types.NodeType.refable', 'NodeType.refable', ([], {}), '()\n', (22378, 22380), False, 'from dbt.node_types import NodeType\n'), ((26091, 26192), 'dbt.exceptions.warn_or_error', 'warn_or_error', (['f"""WARNING: Found documentation for macro "{patch.name}" which was not found"""'], {}), '(\n f\'WARNING: Found documentation for macro "{patch.name}" which was not found\'\n )\n', (26104, 26192), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((26370, 26429), 'dbt.exceptions.raise_duplicate_macro_patch_name', 'raise_duplicate_macro_patch_name', (['patch', 'existing_file_path'], {}), '(patch, existing_file_path)\n', (26402, 26429), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((26757, 26823), 'dbt.exceptions.raise_duplicate_source_patch_name', 'raise_duplicate_source_patch_name', (['patch', 'self.source_patches[key]'], {}), '(patch, self.source_patches[key])\n', (26790, 26823), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((33973, 33991), 'dbt.node_types.NodeType.refable', 'NodeType.refable', ([], {}), '()\n', (33989, 33991), False, 'from dbt.node_types import NodeType\n'), ((34589, 34606), 'itertools.islice', 'islice', (['merged', '(5)'], {}), '(merged, 5)\n', (34595, 34606), False, 'from itertools import chain, islice\n'), ((35201, 35563), 'dbt.ui.line_wrap_message', 'line_wrap_message', (['f""" dbt found two macros named "{macro.name}" in the project\n "{macro.package_name}".\n\n\n To fix this error, rename or remove one of the following\n macros:\n\n - {macro.original_file_path}\n\n - {other_path}\n """'], {'subtract': '(2)'}), '(\n f""" dbt found two macros named "{macro.name}" in the project\n 
"{macro.package_name}".\n\n\n To fix this error, rename or remove one of the following\n macros:\n\n - {macro.original_file_path}\n\n - {other_path}\n """\n , subtract=2)\n', (35218, 35563), False, 'from dbt.ui import line_wrap_message\n'), ((35614, 35639), 'dbt.exceptions.raise_compiler_error', 'raise_compiler_error', (['msg'], {}), '(msg)\n', (35634, 35639), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n'), ((21003, 21040), 'typing.cast', 'cast', (['NonSourceCompiledNode', 'existing'], {}), '(NonSourceCompiledNode, existing)\n', (21007, 21040), False, 'from typing import Dict, List, Optional, Union, Mapping, MutableMapping, Any, Set, Tuple, TypeVar, Callable, Iterable, Generic, cast, AbstractSet, ClassVar\n'), ((25654, 25707), 'dbt.exceptions.raise_duplicate_patch_name', 'raise_duplicate_patch_name', (['patch', 'existing_file_path'], {}), '(patch, existing_file_path)\n', (25680, 25707), False, 'from dbt.exceptions import CompilationException, raise_duplicate_resource_name, raise_compiler_error, warn_or_error, raise_duplicate_patch_name, raise_duplicate_macro_patch_name, raise_duplicate_source_patch_name\n')] |
import argparse
from collections import defaultdict
import csv
from dataclasses import dataclass, field
from enum import Enum, unique, auto
import os
import sys
import tempfile
import yaml
import zipfile
import gffutils
from google.protobuf import json_format
from ncbi.datasets.v1alpha1 import dataset_catalog_pb2
from ncbi.datasets.v1alpha1.reports import assembly_pb2
from ncbi.datasets.reports.report_reader import DatasetsReportReader
def retrieve_assembly_report(zip_in, catalog, assm_acc: str) -> assembly_pb2.AssemblyDataReport:
report_files = get_catalog_files_for_assembly(catalog, dataset_catalog_pb2.File.FileType.DATA_REPORT, assm_acc)
for path in report_files:
yaml = zip_in.read(path)
rpt_rdr = DatasetsReportReader()
return rpt_rdr.assembly_report(yaml)
def retrieve_data_catalog(zip_in) -> dataset_catalog_pb2.Catalog:
catalog_json = zip_in.read('ncbi_dataset/data/dataset_catalog.json')
return json_format.Parse(catalog_json, dataset_catalog_pb2.Catalog())
def get_catalog_files_for_assembly(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str):
report_files = get_catalog_files(catalog, desired_filetype, assm_acc)
filepaths = []
for assm_acc, paths in report_files.items():
filepaths.extend(paths)
return filepaths
def get_catalog_files(catalog: dataset_catalog_pb2.Catalog, desired_filetype: dataset_catalog_pb2.File.FileType, assm_acc: str = None):
files = defaultdict(list)
for assm in catalog.assemblies:
acc = assm.accession
if assm_acc and assm_acc != acc:
continue
for f in assm.files:
filepath = os.path.join('ncbi_dataset', 'data', f.file_path)
if f.file_type == desired_filetype:
files[acc].append(filepath)
return files
def get_zip_file_for_acc(acc, path):
fname = os.path.join(path, f'{acc}.zip')
if os.path.isfile(fname):
return fname
return None
@dataclass
class Gene:
id: str
feat_type: str
name: str
chrom: str
strand: str
range_start: int
range_stop: int
protein_accession: str = ""
def get_fields(self):
return [self.feat_type, self.name, self.range_start, self.range_stop, self.protein_accession]
def name_val(self):
return self.protein_accession if self.protein_accession else self.name
def find_genes_by_loc(gff3_db, csvout, assm_acc, seq_acc, start, stop, extra_fields):
found_genes = []
feat_types = ('gene', 'pseudogene')
for gene in gff3_db.region(seqid=seq_acc, start=start, end=stop, featuretype=feat_types, completely_within=False):
gene_name = gene.attributes.get('Name', None)[0]
prot_acc = ""
if gene.attributes['gene_biotype'][0] == 'protein_coding':
cds = list(gff3_db.children(gene, featuretype='CDS'))
prot_acc = cds[0].attributes.get('protein_id', None)[0]
geneobj = Gene(
gene.id,
gene.featuretype,
gene_name,
gene.chrom,
gene.strand,
gene.start,
gene.stop,
prot_acc,
)
csvout.writerow([assm_acc, seq_acc, start, stop, *extra_fields, *geneobj.get_fields()])
found_genes.append(geneobj)
return found_genes
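# Illustrative sketch (assumed usage, not from the original script): driving
# find_genes_by_loc directly against a standalone GFF3 file instead of a
# datasets zip package. The accession and coordinates are made-up examples.
def example_find_genes_in_gff3(gff3_path):
    db = gffutils.create_db(
        gff3_path,
        dbfn=':memory:',
        force=True,
        keep_order=True,
        merge_strategy='merge',
        sort_attribute_values=True
    )
    writer = csv.writer(sys.stdout, dialect='excel-tab')
    return find_genes_by_loc(db, writer, 'GCF_000001405.39', 'NC_000001.11',
                             100000, 200000, [])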
class FindGenesByLoc:
default_packages_dir = os.path.join('var', 'data', 'packages')
def __init__(self):
parser = argparse.ArgumentParser()
parser.add_argument('--packages-dir', type=str, default=self.default_packages_dir,
help=f'root of input data directory [{self.default_packages_dir}]')
parser.add_argument('--locs', type=str, help='file containing genomic locations')
self.args = parser.parse_args()
self.writer = csv.writer(sys.stdout, dialect='excel-tab')
def read_data(self):
for row in csv.reader(iter(sys.stdin.readline, ''), dialect='excel-tab'):
yield row
def run(self):
for assm_acc, seq_acc, start, stop, *extra in self.read_data():
self.find_all_for_location(assm_acc, seq_acc, start, stop, extra)
def process_loc_for_gff(self, zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields):
with tempfile.NamedTemporaryFile() as tmpfile:
tmpfile.write(zin.read(gff_fname))
db = gffutils.create_db(
tmpfile.name,
dbfn=':memory:',
force=True,
keep_order=True,
merge_strategy='merge',
sort_attribute_values=True
)
find_genes_by_loc(db, self.writer, assm_acc, seq_acc, start, stop, extra_fields)
def find_all_for_location(self, assm_acc, seq_acc, start, stop, extra_fields):
zip_file = get_zip_file_for_acc(assm_acc, self.args.packages_dir)
try:
with zipfile.ZipFile(zip_file, 'r') as zin:
catalog = retrieve_data_catalog(zin)
gff_files = get_catalog_files(catalog, dataset_catalog_pb2.File.FileType.GFF3)
for assm_acc, gff_files in gff_files.items():
report = retrieve_assembly_report(zin, catalog, assm_acc)
for gff_fname in gff_files:
self.process_loc_for_gff(zin, gff_fname, assm_acc, seq_acc, start, stop, extra_fields)
except zipfile.BadZipFile:
print(f'{zip_file} is not a zip file')
if __name__ == '__main__':
FindGenesByLoc().run()
| [
"argparse.ArgumentParser",
"zipfile.ZipFile",
"ncbi.datasets.v1alpha1.dataset_catalog_pb2.Catalog",
"os.path.join",
"csv.writer",
"gffutils.create_db",
"os.path.isfile",
"collections.defaultdict",
"tempfile.NamedTemporaryFile",
"ncbi.datasets.reports.report_reader.DatasetsReportReader"
] | [((1509, 1526), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (1520, 1526), False, 'from collections import defaultdict\n'), ((1916, 1948), 'os.path.join', 'os.path.join', (['path', 'f"""{acc}.zip"""'], {}), "(path, f'{acc}.zip')\n", (1928, 1948), False, 'import os\n'), ((1956, 1977), 'os.path.isfile', 'os.path.isfile', (['fname'], {}), '(fname)\n', (1970, 1977), False, 'import os\n'), ((3403, 3442), 'os.path.join', 'os.path.join', (['"""var"""', '"""data"""', '"""packages"""'], {}), "('var', 'data', 'packages')\n", (3415, 3442), False, 'import os\n'), ((737, 759), 'ncbi.datasets.reports.report_reader.DatasetsReportReader', 'DatasetsReportReader', ([], {}), '()\n', (757, 759), False, 'from ncbi.datasets.reports.report_reader import DatasetsReportReader\n'), ((989, 1018), 'ncbi.datasets.v1alpha1.dataset_catalog_pb2.Catalog', 'dataset_catalog_pb2.Catalog', ([], {}), '()\n', (1016, 1018), False, 'from ncbi.datasets.v1alpha1 import dataset_catalog_pb2\n'), ((3485, 3510), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (3508, 3510), False, 'import argparse\n'), ((3850, 3893), 'csv.writer', 'csv.writer', (['sys.stdout'], {'dialect': '"""excel-tab"""'}), "(sys.stdout, dialect='excel-tab')\n", (3860, 3893), False, 'import csv\n'), ((1706, 1755), 'os.path.join', 'os.path.join', (['"""ncbi_dataset"""', '"""data"""', 'f.file_path'], {}), "('ncbi_dataset', 'data', f.file_path)\n", (1718, 1755), False, 'import os\n'), ((4305, 4334), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (4332, 4334), False, 'import tempfile\n'), ((4411, 4546), 'gffutils.create_db', 'gffutils.create_db', (['tmpfile.name'], {'dbfn': '""":memory:"""', 'force': '(True)', 'keep_order': '(True)', 'merge_strategy': '"""merge"""', 'sort_attribute_values': '(True)'}), "(tmpfile.name, dbfn=':memory:', force=True, keep_order=\n True, merge_strategy='merge', sort_attribute_values=True)\n", (4429, 4546), False, 'import gffutils\n'), ((4933, 4963), 'zipfile.ZipFile', 'zipfile.ZipFile', (['zip_file', '"""r"""'], {}), "(zip_file, 'r')\n", (4948, 4963), False, 'import zipfile\n')] |
# -*- coding: utf-8 -*-
from scrapy import Request
from scrapy_ddiy.utils.spiders.ddiy_base import DdiyBaseSpider
class GlidedSky001Spider(DdiyBaseSpider):
name = 'glided_sky_001'
    description = 'GlidedSky crawler - Basic 1'
start_url = 'http://www.glidedsky.com/level/web/crawler-basic-1'
custom_settings = {
'COOKIES_ENABLED': True,
'DOWNLOADER_MIDDLEWARES': {
'scrapy_ddiy.spiders.GlidedSky.glided_sky_downloadmiddleware.GlidedSkyMiddleware': 589,
},
}
def start_requests(self):
yield Request(url=self.start_url, callback=self.parse)
def parse(self, response, **kwargs):
all_number = [int(i) for i in
response.xpath('//div[@class="card-body"]//div[@class="col-md-1"]/text()').getall()]
        self.logger.info(f'Sum of the web page numbers is {sum(all_number)}')
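# Illustrative sketch (assumed launch, not from the original project): the
# spider is normally started with `scrapy crawl glided_sky_001`; a rough
# programmatic equivalent could look like this.
if __name__ == '__main__':
    from scrapy.crawler import CrawlerProcess
    process = CrawlerProcess()
    process.crawl(GlidedSky001Spider)
    process.start()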
| [
"scrapy.Request"
] | [((549, 597), 'scrapy.Request', 'Request', ([], {'url': 'self.start_url', 'callback': 'self.parse'}), '(url=self.start_url, callback=self.parse)\n', (556, 597), False, 'from scrapy import Request\n')] |
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Provides data for the mnist with attributes dataset.
Provide data loading utilities for an augmented version of the
MNIST dataset which contains the following attributes:
1. Location (digits are translated on a canvas and placed around
one of four locations/regions in the canvas). Each location
is a gaussian placed at four quadrants of the canvas.
2. Scale (We vary scale from 0.4 to 1.0), with two gaussians
   placed at 0.5 +- 0.1 and 0.9 +- 0.1 respectively.
3. Orientation: we vary orientation from -90 to +90 degrees,
sampling actual values from gaussians at +30 +- 10 and
   -30 +- 10. On a third of the occasions we don't orient the
digit at all which means a rotation of 0 degrees.
The original data after transformations is binarized as per the
procedure described in the following paper:
Salakhutdinov, Ruslan, and <NAME>. 2008. ``On the Quantitative Analysis of
Deep Belief Networks.'' In Proceedings of the 25th International Conference on
Machine Learning, 872-79.
Author: vrama@
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import tensorflow as tf
from tensorflow.contrib.slim.python.slim.data import dataset
from tensorflow.contrib.slim.python.slim.data import tfexample_decoder
from datasets.celeba.image_decoder import ImageDecodeProcess
# Only provides option to load the binarized version of the dataset.
_FILE_PATTERN = '%s-*'
_SPLIT_TYPE = 'iid'
_DATASET_DIR = '/srv/share/datasets/celeba_for_tf_ig'
_SPLITS_TO_SIZES = {'train': 162770, 'val': 19867, 'test': 19962}
_ITEMS_TO_DESCRIPTIONS = {
'image': 'A [218 x 178 x 3] RGB image.',
'labels': 'Attributes corresponding to the image.',
}
_NUM_CLASSES_PER_ATTRIBUTE = tuple([2]*18)
def get_split(split_name='train',
split_type="iid",
dataset_dir=None,
image_length=64,
num_classes_per_attribute=None):
"""Gets a dataset tuple with instructions for reading 2D shapes data.
Args:
split_name: A train/test split name.
split_type: str, type of split being loaded "iid" or "comp"
dataset_dir: The base directory of the dataset sources.
num_classes_per_attribute: The number of labels for the classfication
problem corresponding to each attribute. For example, if the first
attribute is "shape" and there are three possible shapes, then
then provide a value 3 in the first index, and so on.
Returns:
A `Dataset` namedtuple.
metadata: A dictionary with some metadata about the dataset we just
constructed.
Raises:
ValueError: if `split_name` is not a valid train/test split.
"""
if split_name not in _SPLITS_TO_SIZES:
raise ValueError('split name %s was not recognized.' % split_name)
  if split_type != "iid":
raise ValueError("Only IID split available for CelebA.")
if num_classes_per_attribute is None:
num_classes_per_attribute = _NUM_CLASSES_PER_ATTRIBUTE
if dataset_dir is None or dataset_dir == '':
dataset_dir = _DATASET_DIR
# Load attribute label map file.
label_map_json = os.path.join(dataset_dir,
'attribute_label_map.json')
file_pattern = os.path.join(dataset_dir, _FILE_PATTERN % split_name)
tf.logging.info('Loading from %s file.' % (file_pattern))
keys_to_features = {
'image/encoded': tf.FixedLenFeature((), tf.string, default_value=''),
'image/format': tf.FixedLenFeature((), tf.string, default_value='raw'),
'image/labels': tf.FixedLenFeature([len(num_classes_per_attribute)], tf.int64),
}
# TODO(vrama): See
# https://github.com/tensorflow/tensorflow/blob/master/tensorflow/contrib/slim/python/slim/data/tfexample_decoder.py#L270
# For where changes would need to be made to preprocess the images which
# get loaded.
items_to_handlers = {
'image': ImageDecodeProcess(shape=[218, 178, 3], image_length=64),
'labels': tfexample_decoder.Tensor('image/labels'),
}
decoder = tfexample_decoder.TFExampleDecoder(keys_to_features,
items_to_handlers)
metadata = {
'num_classes_per_attribute': num_classes_per_attribute,
'split_type': _SPLIT_TYPE,
'label_map_json': label_map_json,
}
return dataset.Dataset(
data_sources=file_pattern,
reader=tf.TFRecordReader,
decoder=decoder,
num_samples=_SPLITS_TO_SIZES[split_name],
items_to_descriptions=_ITEMS_TO_DESCRIPTIONS), metadata
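# Illustrative sketch (assumed usage, not from the original module): feeding
# the returned slim Dataset into a DatasetDataProvider, following the usual
# TF-Slim pattern. The split name is an example; dataset_dir falls back to the
# module's hard-coded _DATASET_DIR.
def _example_inputs():
  slim = tf.contrib.slim
  split, metadata = get_split(split_name='train')
  provider = slim.dataset_data_provider.DatasetDataProvider(split, shuffle=True)
  image, labels = provider.get(['image', 'labels'])
  return image, labels, metadata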
| [
"tensorflow.contrib.slim.python.slim.data.dataset.Dataset",
"tensorflow.contrib.slim.python.slim.data.tfexample_decoder.TFExampleDecoder",
"tensorflow.logging.info",
"datasets.celeba.image_decoder.ImageDecodeProcess",
"os.path.join",
"tensorflow.contrib.slim.python.slim.data.tfexample_decoder.Tensor",
"tensorflow.FixedLenFeature"
] | [((3716, 3769), 'os.path.join', 'os.path.join', (['dataset_dir', '"""attribute_label_map.json"""'], {}), "(dataset_dir, 'attribute_label_map.json')\n", (3728, 3769), False, 'import os\n'), ((3830, 3883), 'os.path.join', 'os.path.join', (['dataset_dir', '(_FILE_PATTERN % split_name)'], {}), '(dataset_dir, _FILE_PATTERN % split_name)\n', (3842, 3883), False, 'import os\n'), ((3886, 3941), 'tensorflow.logging.info', 'tf.logging.info', (["('Loading from %s file.' % file_pattern)"], {}), "('Loading from %s file.' % file_pattern)\n", (3901, 3941), True, 'import tensorflow as tf\n'), ((4621, 4692), 'tensorflow.contrib.slim.python.slim.data.tfexample_decoder.TFExampleDecoder', 'tfexample_decoder.TFExampleDecoder', (['keys_to_features', 'items_to_handlers'], {}), '(keys_to_features, items_to_handlers)\n', (4655, 4692), False, 'from tensorflow.contrib.slim.python.slim.data import tfexample_decoder\n'), ((3991, 4042), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '""""""'}), "((), tf.string, default_value='')\n", (4009, 4042), True, 'import tensorflow as tf\n'), ((4066, 4120), 'tensorflow.FixedLenFeature', 'tf.FixedLenFeature', (['()', 'tf.string'], {'default_value': '"""raw"""'}), "((), tf.string, default_value='raw')\n", (4084, 4120), True, 'import tensorflow as tf\n'), ((4488, 4544), 'datasets.celeba.image_decoder.ImageDecodeProcess', 'ImageDecodeProcess', ([], {'shape': '[218, 178, 3]', 'image_length': '(64)'}), '(shape=[218, 178, 3], image_length=64)\n', (4506, 4544), False, 'from datasets.celeba.image_decoder import ImageDecodeProcess\n'), ((4562, 4602), 'tensorflow.contrib.slim.python.slim.data.tfexample_decoder.Tensor', 'tfexample_decoder.Tensor', (['"""image/labels"""'], {}), "('image/labels')\n", (4586, 4602), False, 'from tensorflow.contrib.slim.python.slim.data import tfexample_decoder\n'), ((4905, 5086), 'tensorflow.contrib.slim.python.slim.data.dataset.Dataset', 'dataset.Dataset', ([], {'data_sources': 'file_pattern', 'reader': 'tf.TFRecordReader', 'decoder': 'decoder', 'num_samples': '_SPLITS_TO_SIZES[split_name]', 'items_to_descriptions': '_ITEMS_TO_DESCRIPTIONS'}), '(data_sources=file_pattern, reader=tf.TFRecordReader,\n decoder=decoder, num_samples=_SPLITS_TO_SIZES[split_name],\n items_to_descriptions=_ITEMS_TO_DESCRIPTIONS)\n', (4920, 5086), False, 'from tensorflow.contrib.slim.python.slim.data import dataset\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright(c) 2019 Nippon Telegraph and Telephone Corporation
# Filename: ASRDriverParts/UNIInterface.py
'''
Parts Module for ASR driver UNI interface configuraton
'''
import GlobalModule
from EmCommonLog import decorater_log
from ASRDriverParts.InterfaceBase import InterfaceBase
class UNIInterface(InterfaceBase):
'''
Parts class for ASR driver UNI interface configuraton
'''
@decorater_log
def __init__(self,
vrf_name=None,
if_name=None,
vlan_id=None,
ip_address=None,
subnet_mask=None,
vip_ip_address=None,
hsrp_id=None,
mtu=None,
is_active=True):
'''
        Constructor
'''
super(UNIInterface, self).__init__(vrf_name=vrf_name,
if_name=if_name)
self.vlan_id = vlan_id
self.ip_address = ip_address
self.subnet_mask = subnet_mask
self.vip_ip_address = vip_ip_address
self.hsrp_id = hsrp_id
self.mtu = mtu
self.is_active = is_active
@decorater_log
def output_add_command(self):
'''
        Output the command lines that add this configuration.
'''
parame = self._get_param()
self._interface_common_start()
self._append_add_command("standby version 2")
comm_txt = "standby %(hsrp_id)s ip %(vip_ip_address)s"
self._append_add_command(comm_txt, parame)
if self.is_active:
comm_txt = "standby %(hsrp_id)s priority 105"
self._append_add_command(comm_txt, parame)
comm_txt = "standby %(hsrp_id)s preempt"
self._append_add_command(comm_txt, parame)
comm_txt = "ip mtu %(mtu)s"
self._append_add_command(comm_txt, parame)
self._interface_common_end()
GlobalModule.EM_LOGGER.debug(
"uni if command = %s" % (self._tmp_add_command,))
return self._tmp_add_command
@decorater_log
def _get_param(self):
'''
        Acquire the parameters from the instance attributes (as a dict).
'''
tmp_param = super(UNIInterface, self)._get_param()
tmp_param.update(
{
"vlan_id": self.vlan_id,
"ip_address": self.ip_address,
"subnet_mask": self.subnet_mask,
"vip_ip_address": self.vip_ip_address,
"hsrp_id": self.hsrp_id,
"mtu": self.mtu,
}
)
return tmp_param
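# Illustrative sketch (assumed usage, not from the original driver): building
# the UNI interface command list for a hypothetical VLAN interface. Every
# parameter value below is a made-up example, not project data.
def _example_uni_interface_commands():
    uni = UNIInterface(vrf_name='VRF1',
                       if_name='GigabitEthernet0/0/1',
                       vlan_id=100,
                       ip_address='192.0.2.1',
                       subnet_mask='255.255.255.0',
                       vip_ip_address='192.0.2.3',
                       hsrp_id=1,
                       mtu=1500,
                       is_active=True)
    return uni.output_add_command()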
| [
"GlobalModule.EM_LOGGER.debug"
] | [((1985, 2063), 'GlobalModule.EM_LOGGER.debug', 'GlobalModule.EM_LOGGER.debug', (["('uni if command = %s' % (self._tmp_add_command,))"], {}), "('uni if command = %s' % (self._tmp_add_command,))\n", (2013, 2063), False, 'import GlobalModule\n')] |
"""
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from ..config import PathField, BoolField
from ..representation import ClassificationAnnotation
from ..utils import read_csv, check_file_existence, read_json
from .format_converter import BaseFormatConverter, ConverterReturn
try:
from PIL import Image
except ImportError:
Image = None
class MNISTCSVFormatConverter(BaseFormatConverter):
"""
MNIST CSV dataset converter. All annotation converters should be derived from BaseFormatConverter class.
"""
# register name for this converter
# this name will be used for converter class look up
__provider__ = 'mnist_csv'
annotation_types = (ClassificationAnnotation, )
@classmethod
def parameters(cls):
configuration_parameters = super().parameters()
configuration_parameters.update({
'annotation_file': PathField(description="Path to csv file which contain dataset."),
'convert_images': BoolField(
optional=True,
default=False,
description="Allows to convert images from pickle file to user specified directory."
),
'converted_images_dir': PathField(
optional=True, is_directory=True, check_exists=False, description="Path to converted images location."
),
'dataset_meta_file': PathField(
description='path to json file with dataset meta (e.g. label_map, color_encoding)', optional=True
)
})
return configuration_parameters
def configure(self):
"""
This method is responsible for obtaining the necessary parameters
for converting from the command line or config.
"""
self.test_csv_file = self.get_value_from_config('annotation_file')
self.converted_images_dir = self.get_value_from_config('converted_images_dir')
self.convert_images = self.get_value_from_config('convert_images')
if self.convert_images and not self.converted_images_dir:
self.converted_images_dir = self.test_csv_file.parent / 'converted_images'
if not self.converted_images_dir.exists():
self.converted_images_dir.mkdir(parents=True)
if self.convert_images and Image is None:
raise ValueError(
"conversion mnist images requires Pillow installation, please install it before usage"
)
self.dataset_meta = self.get_value_from_config('dataset_meta_file')
def convert(self, check_content=False, progress_callback=None, progress_interval=100, **kwargs):
"""
This method is executed automatically when convert.py is started.
All arguments are automatically got from command line arguments or config file in method configure
Returns:
annotations: list of annotation representation objects.
meta: dictionary with additional dataset level metadata.
"""
annotations = []
check_images = check_content and not self.convert_images
meta = self.generate_meta()
labels_to_id = meta['label_map']
content_errors = None
if check_content:
self.converted_images_dir = self.converted_images_dir or self.test_csv_file.parent / 'converted_images'
if self.converted_images_dir and check_content:
if not self.converted_images_dir.exists():
content_errors = ['{}: does not exist'.format(self.converted_images_dir)]
check_images = False
# read original dataset annotation
annotation_table = read_csv(self.test_csv_file)
num_iterations = len(annotation_table)
for index, annotation in enumerate(annotation_table):
identifier = '{}.png'.format(index)
label = labels_to_id.get(annotation['label'], int(annotation['label']))
if self.convert_images:
image = Image.fromarray(self.convert_image(annotation))
image = image.convert("L")
image.save(str(self.converted_images_dir / identifier))
annotations.append(ClassificationAnnotation(identifier, label))
if check_images:
if not check_file_existence(self.converted_images_dir / identifier):
# add error to errors list if file not found
content_errors.append('{}: does not exist'.format(self.converted_images_dir / identifier))
if progress_callback is not None and index % progress_interval == 0:
progress_callback(index / num_iterations * 100)
return ConverterReturn(annotations, meta, content_errors)
@staticmethod
def convert_image(features):
image = np.zeros((28, 28))
column_template = '{}x{}'
for x in range(28):
for y in range(28):
pixel = int(features[column_template.format(x+1, y+1)])
image[x, y] = pixel
return image
def generate_meta(self):
if not self.dataset_meta:
return {'label_map': {str(i): i for i in range(10)}}
dataset_meta = read_json(self.dataset_meta)
label_map = dataset_meta.get('label_map')
if 'labels' in dataset_meta:
label_map = dict(enumerate(dataset_meta['labels']))
dataset_meta['label_map'] = label_map or {str(i): i for i in range(10)}
return dataset_meta
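# Illustrative sketch (assumed usage, not from the original converter): the
# CSV layout convert_image expects is one row per digit with a 'label' column
# plus 784 pixel columns named '1x1' ... '28x28'. A tiny synthetic row:
def _example_convert_image():
    row = {'label': '7'}
    row.update({'{}x{}'.format(x + 1, y + 1): '0'
                for x in range(28) for y in range(28)})
    row['1x1'] = '255'  # light up the top-left pixel
    return MNISTCSVFormatConverter.convert_image(row)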
| [
"numpy.zeros"
] | [((5319, 5337), 'numpy.zeros', 'np.zeros', (['(28, 28)'], {}), '((28, 28))\n', (5327, 5337), True, 'import numpy as np\n')] |
from PyQt5 import QtWidgets, uic
from Factory import Factory
from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox
from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget
import Constant as c
class RegularPolygonDialog(QtWidgets.QDialog):
def __init__(self, scene, data):
"""Construct RegularPolygonDialog."""
super(RegularPolygonDialog, self).__init__()
self.ui = uic.loadUi('regularpolygon.ui', self)
self.scene = scene
self.sides = 3
self.free_point = False
self.data = data
self.ui.buttonBox.accepted.connect(self.accepted)
self.ui.buttonBox.rejected.connect(self.rejected)
self.ui.sides_slider.valueChanged.connect(self.hslider_sides_func)
self.ui.checkBox.stateChanged.connect(lambda x: free_point_checkbox(self, x))
def hslider_sides_func(self, value):
"""Be slider callback function to set sides."""
self.sides = value
self.ui.sides_spin.setValue(value)
def accepted(self):
"""Create new regular polygon with settings."""
A, B = self.data
angle = -(self.sides - 2) * 180 / self.sides
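        # Interior angle of a regular n-gon is (n - 2) * 180 / n degrees; the
        # sign chooses the direction in which the remaining vertices are
        # generated by the rotation-defined points below.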
polygon = [A, B]
for _ in range(self.sides - 2):
item = Factory.create_empty_item('point', c.Point.Definition.ROTATION)
definition = {'A': A, 'B': B, 'angle': angle}
id_ = Factory.next_id(item, definition, self.scene.project_data.items)
item.item["id"] = id_
item.item["definition"] = definition
if self.free_point:
item = turn_into_free_point(item, self.scene)
self.scene.project_data.add(item)
A = B
B = item.item["id"]
polygon.append(item.item["id"])
item = Factory.create_empty_item('polygon', None)
definition = polygon
item.item["id"] = Factory.next_id(item, definition, self.scene.project_data.items)
item.item["definition"] = definition
self.scene.project_data.add(item)
self.scene.project_data.recompute_canvas(*self.scene.init_canvas_dims)
current_row_old = self.scene.ui.listWidget.currentRow()
fill_listWidget_with_data(self.scene.project_data, self.scene.ui.listWidget, self.scene.current_tab_idx)
set_selected_id_in_listWidget(self.scene, current_row_old)
self.scene.edit.add_undo_item(self.scene)
def rejected(self):
"""Add no new regular polygon."""
pass
| [
"Factory.Factory.next_id",
"Fill.ListWidget.fill_listWidget_with_data",
"PyQt5.uic.loadUi",
"Fill.ListWidget.set_selected_id_in_listWidget",
"Dialogs.DialogMacros.turn_into_free_point",
"Factory.Factory.create_empty_item",
"Dialogs.DialogMacros.free_point_checkbox"
] | [((446, 483), 'PyQt5.uic.loadUi', 'uic.loadUi', (['"""regularpolygon.ui"""', 'self'], {}), "('regularpolygon.ui', self)\n", (456, 483), False, 'from PyQt5 import QtWidgets, uic\n'), ((1820, 1862), 'Factory.Factory.create_empty_item', 'Factory.create_empty_item', (['"""polygon"""', 'None'], {}), "('polygon', None)\n", (1845, 1862), False, 'from Factory import Factory\n'), ((1918, 1982), 'Factory.Factory.next_id', 'Factory.next_id', (['item', 'definition', 'self.scene.project_data.items'], {}), '(item, definition, self.scene.project_data.items)\n', (1933, 1982), False, 'from Factory import Factory\n'), ((2222, 2330), 'Fill.ListWidget.fill_listWidget_with_data', 'fill_listWidget_with_data', (['self.scene.project_data', 'self.scene.ui.listWidget', 'self.scene.current_tab_idx'], {}), '(self.scene.project_data, self.scene.ui.listWidget,\n self.scene.current_tab_idx)\n', (2247, 2330), False, 'from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget\n'), ((2335, 2393), 'Fill.ListWidget.set_selected_id_in_listWidget', 'set_selected_id_in_listWidget', (['self.scene', 'current_row_old'], {}), '(self.scene, current_row_old)\n', (2364, 2393), False, 'from Fill.ListWidget import fill_listWidget_with_data, set_selected_id_in_listWidget\n'), ((1282, 1345), 'Factory.Factory.create_empty_item', 'Factory.create_empty_item', (['"""point"""', 'c.Point.Definition.ROTATION'], {}), "('point', c.Point.Definition.ROTATION)\n", (1307, 1345), False, 'from Factory import Factory\n'), ((1422, 1486), 'Factory.Factory.next_id', 'Factory.next_id', (['item', 'definition', 'self.scene.project_data.items'], {}), '(item, definition, self.scene.project_data.items)\n', (1437, 1486), False, 'from Factory import Factory\n'), ((840, 868), 'Dialogs.DialogMacros.free_point_checkbox', 'free_point_checkbox', (['self', 'x'], {}), '(self, x)\n', (859, 868), False, 'from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox\n'), ((1625, 1663), 'Dialogs.DialogMacros.turn_into_free_point', 'turn_into_free_point', (['item', 'self.scene'], {}), '(item, self.scene)\n', (1645, 1663), False, 'from Dialogs.DialogMacros import turn_into_free_point, free_point_checkbox\n')] |
import unittest
from networks.QoS import QoS
from networks.connections.mathematical_connections import FunctionalDegradation
from networks.slicing import SliceConceptualGraph
from utils.location import Location
class TestBaseStationLinear(unittest.TestCase):
def setUp(self):
self.name = "network"
self.wireless_connection_type = "LinearDegradation"
self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.parameters = dict(
best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps',
'error_rate': '1.0%'},
worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps',
'error_rate': '2.0%'}, radius="5km")
self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters)
def test_creation(self):
self.assertEqual(self.network.get_name(), "network")
def test_get_empty_nodes(self):
self.assertEqual(self.network.get_nodes(), {})
def test_add_node(self):
name, lat, lon = 'node', 33, 40
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node(name, lat, lon)
self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node('node', 33, 40)
def test_get_empty_RUs(self):
self.assertEqual(self.network.get_RUs(), {})
def test_set_basetastion(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RU(lat, lon)
def test_constructor(self):
with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException):
SliceConceptualGraph('test', {}, {}, {})
SliceConceptualGraph('test', self.midhaul_qos, {}, {})
SliceConceptualGraph('test', {}, self.backhaul_qos, {})
SliceConceptualGraph('test', {}, {}, self.parameters)
def test_get_qos(self):
self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos))
def test_set_qos(self):
self.network.set_backhaul(QoS.minimum_qos_dict)
self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict))
def test_qos_from_distance(self):
self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos'))
self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos'))
def test_get_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('test', 10, 10)
self.assertEqual(self.network.get_node_location('test2'), None)
self.assertEqual(self.network.get_node_location('test'), Location(10, 10))
def test_has_to_pass_through_backhaul(self):
self.network.set_RU(10, 10)
self.network.set_RU(20, 20)
self.network.add_node('source1', 10, 10)
self.network.add_node('destination1', 10, 10)
self.network.add_node('destination2', 20, 20)
def test_set_RUs(self):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
self.assertEqual(self.network.get_RUs(),
{'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})})
lat, lon = 33, 40
self.network.set_RU(lat, lon)
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
def test_set_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('destination1', 10, 10)
self.network.set_node_location('destination1', 20, 20)
self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20))
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 'test', 20)
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 20, 'test')
class TestBaseLog2Degradation(unittest.TestCase):
def setUp(self):
self.name = "network"
self.wireless_connection_type = "Log2Degradation"
self.midhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.backhaul_qos = {'latency': {'delay': '3.0ms', 'deviation': '1.0ms'}, 'bandwidth': '100.0mbps',
'error_rate': '1.0%'}
self.parameters = dict(
best_qos={'latency': {'delay': '5.0ms', 'deviation': '2.0ms'}, 'bandwidth': '10.0mbps',
'error_rate': '1.0%'},
worst_qos={'latency': {'delay': '100.0ms', 'deviation': '20.0ms'}, 'bandwidth': '5.0mbps',
'error_rate': '2.0%'}, radius="5km")
self.network = SliceConceptualGraph(self.name, self.midhaul_qos, self.backhaul_qos, self.parameters)
def test_creation(self):
self.assertEqual(self.network.get_name(), "network")
def test_get_empty_nodes(self):
self.assertEqual(self.network.get_nodes(), {})
def test_add_node(self):
name, lat, lon = 'node', 33, 40
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node(name, lat, lon)
self.network.set_RU(33, 40, 0)
self.network.add_node(name, lat, lon)
self.assertEqual(self.network.get_nodes(), {'node': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.add_node('node', 33, 40)
def test_get_empty_RUs(self):
self.assertEqual(self.network.get_RUs(), {})
def test_set_basetastion(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.assertEqual(self.network.get_RUs(), {f'{lat}-{lon}': Location(lat, lon)})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RU(lat, lon)
def test_constructor(self):
with self.assertRaises(FunctionalDegradation.FunctionDegradationNetworkException):
            SliceConceptualGraph('test', {}, {}, {})
SliceConceptualGraph('test', self.midhaul_qos, {}, {})
SliceConceptualGraph('test', {}, self.backhaul_qos, {})
SliceConceptualGraph('test', {}, {}, self.parameters)
def test_get_qos(self):
self.assertEqual(self.network.get_backhaul(), QoS(self.backhaul_qos))
def test_set_qos(self):
self.network.set_backhaul(QoS.minimum_qos_dict)
self.assertEqual(self.network.get_backhaul(), QoS(QoS.minimum_qos_dict))
def test_qos_from_distance(self):
self.assertEqual(self.network.get_qos_from(5).get_formated_qos(), self.parameters.get('worst_qos'))
self.assertEqual(self.network.get_qos_from(0.0).get_formated_qos(), self.parameters.get('best_qos'))
def test_get_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('test', 10, 10)
self.assertEqual(self.network.get_node_location('test2'), None)
self.assertEqual(self.network.get_node_location('test'), Location(10, 10))
def test_set_RUs(self):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
self.assertEqual(self.network.get_RUs(),
{'10-10': Location(**{'lat': 10, 'lon': 10}), '5-5': Location(**{'lat': 5, 'lon': 5})})
with self.assertRaises(SliceConceptualGraph.NetworkSliceException):
self.network.set_RUs([{'lat': 10, 'lon': 10}, {'lat': 5, 'lon': 5}])
def test_set_node_location(self):
lat, lon = 33, 40
self.network.set_RU(lat, lon)
self.network.add_node('destination1', 10, 10)
self.network.set_node_location('destination1', 20, 20)
self.assertEqual(self.network.get_node_location('destination1'), Location(20, 20))
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 'test', 20)
with self.assertRaises(Location.LocationException):
self.network.set_node_location('destination1', 20, 'test')
| [
"networks.QoS.QoS",
"networks.slicing.SliceConceptualGraph",
"utils.location.Location"
] | [((1054, 1144), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['self.name', 'self.midhaul_qos', 'self.backhaul_qos', 'self.parameters'], {}), '(self.name, self.midhaul_qos, self.backhaul_qos, self.\n parameters)\n', (1074, 1144), False, 'from networks.slicing import SliceConceptualGraph\n'), ((5498, 5588), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['self.name', 'self.midhaul_qos', 'self.backhaul_qos', 'self.parameters'], {}), '(self.name, self.midhaul_qos, self.backhaul_qos, self.\n parameters)\n', (5518, 5588), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2240, 2280), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', '{}'], {}), "('test', {}, {}, {})\n", (2260, 2280), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2293, 2347), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', 'self.midhaul_qos', '{}', '{}'], {}), "('test', self.midhaul_qos, {}, {})\n", (2313, 2347), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2360, 2415), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', 'self.backhaul_qos', '{}'], {}), "('test', {}, self.backhaul_qos, {})\n", (2380, 2415), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2428, 2481), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', 'self.parameters'], {}), "('test', {}, {}, self.parameters)\n", (2448, 2481), False, 'from networks.slicing import SliceConceptualGraph\n'), ((2565, 2587), 'networks.QoS.QoS', 'QoS', (['self.backhaul_qos'], {}), '(self.backhaul_qos)\n', (2568, 2587), False, 'from networks.QoS import QoS\n'), ((2728, 2753), 'networks.QoS.QoS', 'QoS', (['QoS.minimum_qos_dict'], {}), '(QoS.minimum_qos_dict)\n', (2731, 2753), False, 'from networks.QoS import QoS\n'), ((3297, 3313), 'utils.location.Location', 'Location', (['(10)', '(10)'], {}), '(10, 10)\n', (3305, 3313), False, 'from utils.location import Location\n'), ((4377, 4393), 'utils.location.Location', 'Location', (['(20)', '(20)'], {}), '(20, 20)\n', (4385, 4393), False, 'from utils.location import Location\n'), ((6785, 6825), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', '{}'], {}), "('test', {}, {}, {})\n", (6805, 6825), False, 'from networks.slicing import SliceConceptualGraph\n'), ((6838, 6892), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', 'self.midhaul_qos', '{}', '{}'], {}), "('test', self.midhaul_qos, {}, {})\n", (6858, 6892), False, 'from networks.slicing import SliceConceptualGraph\n'), ((6905, 6960), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', 'self.backhaul_qos', '{}'], {}), "('test', {}, self.backhaul_qos, {})\n", (6925, 6960), False, 'from networks.slicing import SliceConceptualGraph\n'), ((6973, 7026), 'networks.slicing.SliceConceptualGraph', 'SliceConceptualGraph', (['"""test"""', '{}', '{}', 'self.parameters'], {}), "('test', {}, {}, self.parameters)\n", (6993, 7026), False, 'from networks.slicing import SliceConceptualGraph\n'), ((7110, 7132), 'networks.QoS.QoS', 'QoS', (['self.backhaul_qos'], {}), '(self.backhaul_qos)\n', (7113, 7132), False, 'from networks.QoS import QoS\n'), ((7273, 7298), 'networks.QoS.QoS', 'QoS', (['QoS.minimum_qos_dict'], {}), '(QoS.minimum_qos_dict)\n', (7276, 7298), False, 'from networks.QoS import QoS\n'), ((7842, 7858), 'utils.location.Location', 
'Location', (['(10)', '(10)'], {}), '(10, 10)\n', (7850, 7858), False, 'from utils.location import Location\n'), ((8578, 8594), 'utils.location.Location', 'Location', (['(20)', '(20)'], {}), '(20, 20)\n', (8586, 8594), False, 'from utils.location import Location\n'), ((1563, 1581), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (1571, 1581), False, 'from utils.location import Location\n'), ((1965, 1983), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (1973, 1983), False, 'from utils.location import Location\n'), ((3785, 3819), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 10, 'lon': 10})\n", (3793, 3819), False, 'from utils.location import Location\n'), ((3828, 3860), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 5, 'lon': 5})\n", (3836, 3860), False, 'from utils.location import Location\n'), ((6108, 6126), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (6116, 6126), False, 'from utils.location import Location\n'), ((6510, 6528), 'utils.location.Location', 'Location', (['lat', 'lon'], {}), '(lat, lon)\n', (6518, 6528), False, 'from utils.location import Location\n'), ((8050, 8084), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 10, 'lon': 10})\n", (8058, 8084), False, 'from utils.location import Location\n'), ((8093, 8125), 'utils.location.Location', 'Location', ([], {}), "(**{'lat': 5, 'lon': 5})\n", (8101, 8125), False, 'from utils.location import Location\n')] |
"""
Setup DB with example data for tests
"""
from django.contrib.auth.hashers import make_password
from django.contrib.auth.models import User, Group
from django.core.management.base import BaseCommand
from api import models
class Command(BaseCommand):
help = 'Setup DB with example data for tests'
def handle(self, *args, **options):
print('---- Creating Users ----')
User.objects.get_or_create(username='thomastest', password=make_password('<PASSWORD>'))
thomas = User.objects.get(username='thomastest')
User.objects.get_or_create(username='norbert', password=make_password('<PASSWORD>'))
norbert = User.objects.get(username='norbert')
User.objects.get_or_create(username='stefan', password=make_password('<PASSWORD>'))
stefan = User.objects.get(username='stefan')
superuser = Group.objects.get(name='superuser')
superuser.user_set.add(thomas)
netadmin = Group.objects.get(name='netadmin')
netadmin.user_set.add(norbert)
support = Group.objects.get(name='support')
support.user_set.add(stefan)
print('---- Creating Inventory ----')
models.Inventory.objects.create(name='Example', hosts_file='web_nornir/nornir_config/example_config/hosts.yaml',
groups_file='web_nornir/nornir_config/example_config/groups.yaml', type=1)
models.Inventory.objects.create(name='INS Lab', hosts_file='web_nornir/nornir_config/inslab_config/hosts.yaml',
groups_file='web_nornir/nornir_config/inslab_config/groups.yaml', type=1)
print('---- Creating Job Templates ----')
models.JobTemplate.objects.create(name='hello_world', description='This prints a hello world',
file_name='hello_world.py', created_by_id=1)
models.JobTemplate.objects.create(name='Get CDP Neighbors', description='Lists all CDP neighbors',
file_name='get_cdp_neighbors.py', created_by_id=1)
models.JobTemplate.objects.create(name='Get Interfaces',
description='Gets brief information about all interfaces, sh ip int br',
file_name='get_interfaces.py', created_by_id=1)
models.JobTemplate.objects.create(name='Ping Device',
description='Pings a chosen network device and reports if reachable',
file_name='ping.py', variables=['target'], created_by_id=1)
models.JobTemplate.objects.create(name='Get Configuration', description='Gets all configuration from device',
file_name='get_configuration.py', created_by_id=1)
print('---- Creating Tasks ----')
models.Task.objects.create(name='Get Hello World', created_by_id=1, template_id=1, inventory_id=1)
models.Task.objects.create(name='Get CDP neighbors of INS lab', created_by_id=2, template_id=2, inventory_id=2)
models.Task.objects.create(name='Get interfaces of INS lab', created_by_id=2, template_id=3, inventory_id=2)
print('---- ALL DONE!! ----')
| [
"django.contrib.auth.models.Group.objects.get",
"django.contrib.auth.hashers.make_password",
"api.models.JobTemplate.objects.create",
"api.models.Task.objects.create",
"api.models.Inventory.objects.create",
"django.contrib.auth.models.User.objects.get"
] | [((526, 565), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""thomastest"""'}), "(username='thomastest')\n", (542, 565), False, 'from django.contrib.auth.models import User, Group\n'), ((681, 717), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""norbert"""'}), "(username='norbert')\n", (697, 717), False, 'from django.contrib.auth.models import User, Group\n'), ((831, 866), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': '"""stefan"""'}), "(username='stefan')\n", (847, 866), False, 'from django.contrib.auth.models import User, Group\n'), ((890, 925), 'django.contrib.auth.models.Group.objects.get', 'Group.objects.get', ([], {'name': '"""superuser"""'}), "(name='superuser')\n", (907, 925), False, 'from django.contrib.auth.models import User, Group\n'), ((988, 1022), 'django.contrib.auth.models.Group.objects.get', 'Group.objects.get', ([], {'name': '"""netadmin"""'}), "(name='netadmin')\n", (1005, 1022), False, 'from django.contrib.auth.models import User, Group\n'), ((1084, 1117), 'django.contrib.auth.models.Group.objects.get', 'Group.objects.get', ([], {'name': '"""support"""'}), "(name='support')\n", (1101, 1117), False, 'from django.contrib.auth.models import User, Group\n'), ((1220, 1417), 'api.models.Inventory.objects.create', 'models.Inventory.objects.create', ([], {'name': '"""Example"""', 'hosts_file': '"""web_nornir/nornir_config/example_config/hosts.yaml"""', 'groups_file': '"""web_nornir/nornir_config/example_config/groups.yaml"""', 'type': '(1)'}), "(name='Example', hosts_file=\n 'web_nornir/nornir_config/example_config/hosts.yaml', groups_file=\n 'web_nornir/nornir_config/example_config/groups.yaml', type=1)\n", (1251, 1417), False, 'from api import models\n'), ((1458, 1653), 'api.models.Inventory.objects.create', 'models.Inventory.objects.create', ([], {'name': '"""INS Lab"""', 'hosts_file': '"""web_nornir/nornir_config/inslab_config/hosts.yaml"""', 'groups_file': '"""web_nornir/nornir_config/inslab_config/groups.yaml"""', 'type': '(1)'}), "(name='INS Lab', hosts_file=\n 'web_nornir/nornir_config/inslab_config/hosts.yaml', groups_file=\n 'web_nornir/nornir_config/inslab_config/groups.yaml', type=1)\n", (1489, 1653), False, 'from api import models\n'), ((1753, 1897), 'api.models.JobTemplate.objects.create', 'models.JobTemplate.objects.create', ([], {'name': '"""hello_world"""', 'description': '"""This prints a hello world"""', 'file_name': '"""hello_world.py"""', 'created_by_id': '(1)'}), "(name='hello_world', description=\n 'This prints a hello world', file_name='hello_world.py', created_by_id=1)\n", (1786, 1897), False, 'from api import models\n'), ((1945, 2103), 'api.models.JobTemplate.objects.create', 'models.JobTemplate.objects.create', ([], {'name': '"""Get CDP Neighbors"""', 'description': '"""Lists all CDP neighbors"""', 'file_name': '"""get_cdp_neighbors.py"""', 'created_by_id': '(1)'}), "(name='Get CDP Neighbors', description=\n 'Lists all CDP neighbors', file_name='get_cdp_neighbors.py',\n created_by_id=1)\n", (1978, 2103), False, 'from api import models\n'), ((2147, 2334), 'api.models.JobTemplate.objects.create', 'models.JobTemplate.objects.create', ([], {'name': '"""Get Interfaces"""', 'description': '"""Gets brief information about all interfaces, sh ip int br"""', 'file_name': '"""get_interfaces.py"""', 'created_by_id': '(1)'}), "(name='Get Interfaces', description=\n 'Gets brief information about all interfaces, sh ip int br', file_name=\n 
'get_interfaces.py', created_by_id=1)\n", (2180, 2334), False, 'from api import models\n'), ((2420, 2613), 'api.models.JobTemplate.objects.create', 'models.JobTemplate.objects.create', ([], {'name': '"""Ping Device"""', 'description': '"""Pings a chosen network device and reports if reachable"""', 'file_name': '"""ping.py"""', 'variables': "['target']", 'created_by_id': '(1)'}), "(name='Ping Device', description=\n 'Pings a chosen network device and reports if reachable', file_name=\n 'ping.py', variables=['target'], created_by_id=1)\n", (2453, 2613), False, 'from api import models\n'), ((2699, 2868), 'api.models.JobTemplate.objects.create', 'models.JobTemplate.objects.create', ([], {'name': '"""Get Configuration"""', 'description': '"""Gets all configuration from device"""', 'file_name': '"""get_configuration.py"""', 'created_by_id': '(1)'}), "(name='Get Configuration', description=\n 'Gets all configuration from device', file_name='get_configuration.py',\n created_by_id=1)\n", (2732, 2868), False, 'from api import models\n'), ((2963, 3065), 'api.models.Task.objects.create', 'models.Task.objects.create', ([], {'name': '"""Get Hello World"""', 'created_by_id': '(1)', 'template_id': '(1)', 'inventory_id': '(1)'}), "(name='Get Hello World', created_by_id=1,\n template_id=1, inventory_id=1)\n", (2989, 3065), False, 'from api import models\n'), ((3071, 3186), 'api.models.Task.objects.create', 'models.Task.objects.create', ([], {'name': '"""Get CDP neighbors of INS lab"""', 'created_by_id': '(2)', 'template_id': '(2)', 'inventory_id': '(2)'}), "(name='Get CDP neighbors of INS lab',\n created_by_id=2, template_id=2, inventory_id=2)\n", (3097, 3186), False, 'from api import models\n'), ((3192, 3305), 'api.models.Task.objects.create', 'models.Task.objects.create', ([], {'name': '"""Get interfaces of INS lab"""', 'created_by_id': '(2)', 'template_id': '(3)', 'inventory_id': '(2)'}), "(name='Get interfaces of INS lab', created_by_id=\n 2, template_id=3, inventory_id=2)\n", (3218, 3305), False, 'from api import models\n'), ((479, 506), 'django.contrib.auth.hashers.make_password', 'make_password', (['"""<PASSWORD>"""'], {}), "('<PASSWORD>')\n", (492, 506), False, 'from django.contrib.auth.hashers import make_password\n'), ((633, 660), 'django.contrib.auth.hashers.make_password', 'make_password', (['"""<PASSWORD>"""'], {}), "('<PASSWORD>')\n", (646, 660), False, 'from django.contrib.auth.hashers import make_password\n'), ((784, 811), 'django.contrib.auth.hashers.make_password', 'make_password', (['"""<PASSWORD>"""'], {}), "('<PASSWORD>')\n", (797, 811), False, 'from django.contrib.auth.hashers import make_password\n')] |
from __future__ import absolute_import, unicode_literals
import os
import distro
def get_distro_info(root_dir):
# We point _UNIXCONFDIR to root_dir
old_value = distro._UNIXCONFDIR
distro._UNIXCONFDIR = os.path.join(root_dir, 'etc')
obj = distro.LinuxDistribution(include_lsb=False, include_uname=False)
# NOTE: The parsing of LinuxDistribution distro information is done in a lazy way.
# This will force the parsing to happen before we restore the old value of _UNIXCONFDIR.
_ = obj.info()
distro._UNIXCONFDIR = old_value
return obj
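# Usage sketch (assuming root_dir points at an unpacked root filesystem, e.g. a chroot):
#   info = get_distro_info('/mnt/rootfs')
#   print(info.id(), info.version())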
| [
"os.path.join",
"distro.LinuxDistribution"
] | [((218, 247), 'os.path.join', 'os.path.join', (['root_dir', '"""etc"""'], {}), "(root_dir, 'etc')\n", (230, 247), False, 'import os\n'), ((259, 323), 'distro.LinuxDistribution', 'distro.LinuxDistribution', ([], {'include_lsb': '(False)', 'include_uname': '(False)'}), '(include_lsb=False, include_uname=False)\n', (283, 323), False, 'import distro\n')] |
import numpy as np
import sys
import os
from PIL import Image
from visu.helper_functions import save_image
from scipy.spatial.transform import Rotation as R
from helper import re_quat
import copy
import torch
from IPython.display import display  # display() is used below when jupyter=True
import k3d
class Visualizer():
def __init__(self, p_visu, writer=None):
if p_visu[-1] != '/':
p_visu = p_visu + '/'
self.p_visu = p_visu
self.writer = writer
if not os.path.exists(self.p_visu):
os.makedirs(self.p_visu)
def plot_estimated_pose(self, tag, epoch, img, points, trans=[[0, 0, 0]], rot_mat=[[1, 0, 0], [0, 1, 0], [0, 0, 1]], cam_cx=0, cam_cy=0, cam_fx=0, cam_fy=0, store=False, jupyter=False, w=2):
"""
tag := tensorboard tag
        epoch := tensorboard epoch
        store := true -> stores the image to standard path
        path := != None creates the path and stores to path/tag.png
        img := original_image, [width,height,RGB]
points:= points of the object model [length,x,y,z]
trans: [1,3]
rot: [3,3]
"""
img_d = copy.deepcopy(img)
points = np.dot(points, rot_mat.T)
points = np.add(points, trans[0, :])
for i in range(0, points.shape[0]):
p_x = points[i, 0]
p_y = points[i, 1]
p_z = points[i, 2]
u = int(((p_x / p_z) * cam_fx) + cam_cx)
v = int(((p_y / p_z) * cam_fy) + cam_cy)
try:
                # paint a green marker (R=0, G=255, B=0) around the projected pixel
                img_d[v - w:v + w + 1, u - w:u + w + 1, 0] = 0
                img_d[v - w:v + w + 1, u - w:u + w + 1, 1] = 255
                img_d[v - w:v + w + 1, u - w:u + w + 1, 2] = 0
            except:
                # print("out of bounds")
pass
if jupyter:
display(Image.fromarray(img_d))
if store:
#store_ar = (img_d* 255).round().astype(np.uint8)
#print("IMAGE D:" ,img_d,img_d.shape )
save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu)
if self.writer is not None:
self.writer.add_image(tag, img_d.astype(
np.uint8), global_step=epoch, dataformats='HWC')
def plot_bounding_box(self, tag, epoch, img, rmin=0, rmax=0, cmin=0, cmax=0, str_width=2, store=False, jupyter=False, b=None):
"""
tag := tensorboard tag
        epoch := tensorboard epoch
        store := true -> stores the image to standard path
        path := != None creates the path and stores to path/tag.png
        img := original_image, [width,height,RGB]
"""
if isinstance(b, dict):
rmin = b['rmin']
rmax = b['rmax']
cmin = b['cmin']
cmax = b['cmax']
# ToDo check Input data
img_d = np.array(copy.deepcopy(img))
c = [0, 0, 255]
rmin_mi = max(0, rmin - str_width)
rmin_ma = min(img_d.shape[0], rmin + str_width)
rmax_mi = max(0, rmax - str_width)
rmax_ma = min(img_d.shape[0], rmax + str_width)
cmin_mi = max(0, cmin - str_width)
cmin_ma = min(img_d.shape[1], cmin + str_width)
cmax_mi = max(0, cmax - str_width)
cmax_ma = min(img_d.shape[1], cmax + str_width)
img_d[rmin_mi:rmin_ma, cmin:cmax, :] = c
img_d[rmax_mi:rmax_ma, cmin:cmax, :] = c
img_d[rmin:rmax, cmin_mi:cmin_ma, :] = c
img_d[rmin:rmax, cmax_mi:cmax_ma, :] = c
print("STORE", store)
img_d = img_d.astype(np.uint8)
if store:
#store_ar = (img_d* 255).round().astype(np.uint8)
save_image(img_d, tag=str(epoch) + tag, p_store=self.p_visu)
if jupyter:
display(Image.fromarray(img_d))
if self.writer is not None:
self.writer.add_image(tag, img_d.astype(
np.uint8), global_step=epoch, dataformats='HWC')
def plot_pcd(x, point_size=0.005, c='g'):
"""
x: point_nr,3
"""
if c == 'b':
k = 245
elif c == 'g':
k = 25811000
elif c == 'r':
k = 11801000
elif c == 'black':
k = 2580
else:
k = 2580
colors = np.ones(x.shape[0]) * k
plot = k3d.plot(name='points')
plt_points = k3d.points(x, colors.astype(np.uint32), point_size=point_size)
plot += plt_points
plt_points.shader = '3d'
plot.display()
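# Usage sketch (in a Jupyter notebook, assuming pts is an (N, 3) float array):
#   plot_pcd(pts, point_size=0.01, c='r')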
def plot_two_pcd(x, y, point_size=0.005, c1='g', c2='r'):
if c1 == 'b':
k = 245
elif c1 == 'g':
k = 25811000
elif c1 == 'r':
k = 11801000
elif c1 == 'black':
k = 2580
else:
k = 2580
if c2 == 'b':
k2 = 245
elif c2 == 'g':
k2 = 25811000
elif c2 == 'r':
k2 = 11801000
elif c2 == 'black':
k2 = 2580
else:
k2 = 2580
col1 = np.ones(x.shape[0]) * k
col2 = np.ones(y.shape[0]) * k2
plot = k3d.plot(name='points')
plt_points = k3d.points(x, col1.astype(np.uint32), point_size=point_size)
plot += plt_points
plt_points = k3d.points(y, col2.astype(np.uint32), point_size=point_size)
plot += plt_points
plt_points.shader = '3d'
plot.display()
class SequenceVisualizer():
def __init__(self, seq_data, images_path, output_path=None):
self.seq_data = seq_data
self.images_path = images_path
self.output_path = output_path
def plot_points_on_image(self, seq_no, frame_no, jupyter=False, store=False, pose_type='filtered'):
seq_data = self.seq_data
images_path = self.images_path
output_path = self.output_path
frame = seq_data[seq_no][frame_no]
unique_desig = frame['dl_dict']['unique_desig'][0]
if pose_type == 'ground_truth':
# ground truth
t = frame['dl_dict']['gt_trans'].reshape(1, 3)
rot_quat = re_quat(copy.deepcopy(
frame['dl_dict']['gt_rot_wxyz'][0]), 'wxyz')
rot = R.from_quat(rot_quat).as_matrix()
elif pose_type == 'filtered':
# filter pred
t = np.array(frame['filter_pred']['t']).reshape(1, 3)
rot_quat = re_quat(copy.deepcopy(
frame['filter_pred']['r_wxyz']), 'wxyz')
rot = R.from_quat(rot_quat).as_matrix()
elif pose_type == 'final_pred_obs':
# final pred
t = np.array(frame['final_pred_obs']['t']).reshape(1, 3)
rot_quat = re_quat(copy.deepcopy(
frame['final_pred_obs']['r_wxyz']), 'wxyz')
rot = R.from_quat(rot_quat).as_matrix()
else:
raise Exception('Pose type not implemented.')
w = 2
if type(unique_desig) != str:
im = np.array(Image.open(
images_path + unique_desig[0] + '-color.png')) # ycb
else:
im = np.array(Image.open(
images_path + unique_desig + '.png')) # laval
img_d = copy.deepcopy(im)
dl_dict = frame['dl_dict']
points = copy.deepcopy(
seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :])
points = np.dot(points, rot.T)
points = np.add(points, t[0, :])
cam_cx = dl_dict['cam_cal'][0][0]
cam_cy = dl_dict['cam_cal'][0][1]
cam_fx = dl_dict['cam_cal'][0][2]
cam_fy = dl_dict['cam_cal'][0][3]
for i in range(0, points.shape[0]):
p_x = points[i, 0]
p_y = points[i, 1]
p_z = points[i, 2]
u = int(((p_x / p_z) * cam_fx) + cam_cx)
v = int(((p_y / p_z) * cam_fy) + cam_cy)
try:
                # paint a green marker (R=0, G=255, B=0) around the projected pixel
                img_d[v - w:v + w + 1, u - w:u + w + 1, 0] = 0
                img_d[v - w:v + w + 1, u - w:u + w + 1, 1] = 255
                img_d[v - w:v + w + 1, u - w:u + w + 1, 2] = 0
except:
#print("out of bounds")
pass
img_disp = Image.fromarray(img_d)
if jupyter:
display(img_disp)
if store:
outpath = output_path + \
'{}_{}_{}.png'.format(pose_type, seq_no, frame_no)
img_disp.save(outpath, "PNG", compress_level=1)
print("Saved image to {}".format(outpath))
def save_sequence(self, seq_no, pose_type='filtered', name=''):
        for fn in range(len(self.seq_data[seq_no])):
self.plot_points_on_image(seq_no, fn, False, True, pose_type)
if name:
video_name = '{}_{}_{}'.format(name, pose_type, seq_no)
else:
video_name = '{}_{}'.format(pose_type, seq_no)
cmd = "cd {} && ffmpeg -r 10 -i ./filtered_{}_%d.png -vcodec mpeg4 -y {}.mp4".format(
self.output_path, seq_no, video_name)
os.system(cmd)
| [
"k3d.plot",
"PIL.Image.fromarray",
"os.path.exists",
"PIL.Image.open",
"numpy.ones",
"numpy.add",
"os.makedirs",
"scipy.spatial.transform.Rotation.from_quat",
"numpy.array",
"numpy.dot",
"copy.deepcopy",
"os.system"
] | [((4150, 4173), 'k3d.plot', 'k3d.plot', ([], {'name': '"""points"""'}), "(name='points')\n", (4158, 4173), False, 'import k3d\n'), ((4842, 4865), 'k3d.plot', 'k3d.plot', ([], {'name': '"""points"""'}), "(name='points')\n", (4850, 4865), False, 'import k3d\n'), ((1091, 1109), 'copy.deepcopy', 'copy.deepcopy', (['img'], {}), '(img)\n', (1104, 1109), False, 'import copy\n'), ((1127, 1152), 'numpy.dot', 'np.dot', (['points', 'rot_mat.T'], {}), '(points, rot_mat.T)\n', (1133, 1152), True, 'import numpy as np\n'), ((1170, 1197), 'numpy.add', 'np.add', (['points', 'trans[0, :]'], {}), '(points, trans[0, :])\n', (1176, 1197), True, 'import numpy as np\n'), ((4115, 4134), 'numpy.ones', 'np.ones', (['x.shape[0]'], {}), '(x.shape[0])\n', (4122, 4134), True, 'import numpy as np\n'), ((4771, 4790), 'numpy.ones', 'np.ones', (['x.shape[0]'], {}), '(x.shape[0])\n', (4778, 4790), True, 'import numpy as np\n'), ((4806, 4825), 'numpy.ones', 'np.ones', (['y.shape[0]'], {}), '(y.shape[0])\n', (4813, 4825), True, 'import numpy as np\n'), ((6871, 6888), 'copy.deepcopy', 'copy.deepcopy', (['im'], {}), '(im)\n', (6884, 6888), False, 'import copy\n'), ((6942, 7012), 'copy.deepcopy', 'copy.deepcopy', (["seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :]"], {}), "(seq_data[seq_no][0]['dl_dict']['model_points'][0, :, :])\n", (6955, 7012), False, 'import copy\n'), ((7043, 7064), 'numpy.dot', 'np.dot', (['points', 'rot.T'], {}), '(points, rot.T)\n', (7049, 7064), True, 'import numpy as np\n'), ((7082, 7105), 'numpy.add', 'np.add', (['points', 't[0, :]'], {}), '(points, t[0, :])\n', (7088, 7105), True, 'import numpy as np\n'), ((7827, 7849), 'PIL.Image.fromarray', 'Image.fromarray', (['img_d'], {}), '(img_d)\n', (7842, 7849), False, 'from PIL import Image\n'), ((8637, 8651), 'os.system', 'os.system', (['cmd'], {}), '(cmd)\n', (8646, 8651), False, 'import os\n'), ((444, 471), 'os.path.exists', 'os.path.exists', (['self.p_visu'], {}), '(self.p_visu)\n', (458, 471), False, 'import os\n'), ((485, 509), 'os.makedirs', 'os.makedirs', (['self.p_visu'], {}), '(self.p_visu)\n', (496, 509), False, 'import os\n'), ((2763, 2781), 'copy.deepcopy', 'copy.deepcopy', (['img'], {}), '(img)\n', (2776, 2781), False, 'import copy\n'), ((1771, 1793), 'PIL.Image.fromarray', 'Image.fromarray', (['img_d'], {}), '(img_d)\n', (1786, 1793), False, 'from PIL import Image\n'), ((3666, 3688), 'PIL.Image.fromarray', 'Image.fromarray', (['img_d'], {}), '(img_d)\n', (3681, 3688), False, 'from PIL import Image\n'), ((5798, 5847), 'copy.deepcopy', 'copy.deepcopy', (["frame['dl_dict']['gt_rot_wxyz'][0]"], {}), "(frame['dl_dict']['gt_rot_wxyz'][0])\n", (5811, 5847), False, 'import copy\n'), ((6658, 6714), 'PIL.Image.open', 'Image.open', (["(images_path + unique_desig[0] + '-color.png')"], {}), "(images_path + unique_desig[0] + '-color.png')\n", (6668, 6714), False, 'from PIL import Image\n'), ((6780, 6827), 'PIL.Image.open', 'Image.open', (["(images_path + unique_desig + '.png')"], {}), "(images_path + unique_desig + '.png')\n", (6790, 6827), False, 'from PIL import Image\n'), ((5892, 5913), 'scipy.spatial.transform.Rotation.from_quat', 'R.from_quat', (['rot_quat'], {}), '(rot_quat)\n', (5903, 5913), True, 'from scipy.spatial.transform import Rotation as R\n'), ((6087, 6132), 'copy.deepcopy', 'copy.deepcopy', (["frame['filter_pred']['r_wxyz']"], {}), "(frame['filter_pred']['r_wxyz'])\n", (6100, 6132), False, 'import copy\n'), ((6006, 6041), 'numpy.array', 'np.array', (["frame['filter_pred']['t']"], {}), "(frame['filter_pred']['t'])\n", (6014, 
6041), True, 'import numpy as np\n'), ((6177, 6198), 'scipy.spatial.transform.Rotation.from_quat', 'R.from_quat', (['rot_quat'], {}), '(rot_quat)\n', (6188, 6198), True, 'from scipy.spatial.transform import Rotation as R\n'), ((6380, 6428), 'copy.deepcopy', 'copy.deepcopy', (["frame['final_pred_obs']['r_wxyz']"], {}), "(frame['final_pred_obs']['r_wxyz'])\n", (6393, 6428), False, 'import copy\n'), ((6296, 6334), 'numpy.array', 'np.array', (["frame['final_pred_obs']['t']"], {}), "(frame['final_pred_obs']['t'])\n", (6304, 6334), True, 'import numpy as np\n'), ((6473, 6494), 'scipy.spatial.transform.Rotation.from_quat', 'R.from_quat', (['rot_quat'], {}), '(rot_quat)\n', (6484, 6494), True, 'from scipy.spatial.transform import Rotation as R\n')] |
# Copyright (c) 2013 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
import json
# NOTE(kgriffs): http://tools.ietf.org/html/draft-nottingham-json-home-03
JSON_HOME = {
'resources': {
# -----------------------------------------------------------------
# Queues
# -----------------------------------------------------------------
'rel/queues': {
'href-template': '/v2/queues{?marker,limit,detailed}',
'href-vars': {
'marker': 'param/marker',
'limit': 'param/queue_limit',
'detailed': 'param/detailed',
},
'hints': {
'allow': ['GET'],
'formats': {
'application/json': {},
},
},
},
'rel/queue': {
'href-template': '/v2/queues/{queue_name}',
'href-vars': {
'queue_name': 'param/queue_name',
},
'hints': {
'allow': ['PUT', 'DELETE'],
'formats': {
'application/json': {},
},
},
},
'rel/queue_stats': {
'href-template': '/v2/queues/{queue_name}/stats',
'href-vars': {
'queue_name': 'param/queue_name',
},
'hints': {
'allow': ['GET'],
'formats': {
'application/json': {},
},
},
},
# -----------------------------------------------------------------
# Messages
# -----------------------------------------------------------------
'rel/messages': {
'href-template': ('/v2/queues/{queue_name}/messages'
'{?marker,limit,echo,include_claimed}'),
'href-vars': {
'queue_name': 'param/queue_name',
'marker': 'param/marker',
'limit': 'param/messages_limit',
'echo': 'param/echo',
'include_claimed': 'param/include_claimed',
},
'hints': {
'allow': ['GET'],
'formats': {
'application/json': {},
},
},
},
'rel/post_messages': {
'href-template': '/v2/queues/{queue_name}/messages',
'href-vars': {
'queue_name': 'param/queue_name',
},
'hints': {
'allow': ['POST'],
'formats': {
'application/json': {},
},
'accept-post': ['application/json'],
},
},
'rel/messages_delete': {
'href-template': '/v2/queues/{queue_name}/messages{?ids,pop}',
'href-vars': {
'queue_name': 'param/queue_name',
'ids': 'param/ids',
'pop': 'param/pop'
},
'hints': {
'allow': [
'DELETE'
],
'formats': {
'application/json': {}
}
}
},
'rel/message_delete': {
'href-template': '/v2/queues/{queue_name}/messages/{message_id}{?claim}', # noqa
'href-vars': {
'queue_name': 'param/queue_name',
'message_id': 'param/message_id',
'claim': 'param/claim_id'
},
'hints': {
'allow': [
'DELETE'
],
'formats': {
'application/json': {}
}
}
},
# -----------------------------------------------------------------
# Claims
# -----------------------------------------------------------------
'rel/claim': {
'href-template': '/v2/queues/{queue_name}/claims/{claim_id}',
'href-vars': {
'queue_name': 'param/queue_name',
'claim_id': 'param/claim_id',
},
'hints': {
'allow': ['GET'],
'formats': {
'application/json': {},
},
},
},
'rel/post_claim': {
'href-template': '/v2/queues/{queue_name}/claims{?limit}',
'href-vars': {
'queue_name': 'param/queue_name',
'limit': 'param/claim_limit',
},
'hints': {
'allow': ['POST'],
'formats': {
'application/json': {},
},
'accept-post': ['application/json']
},
},
'rel/patch_claim': {
'href-template': '/v2/queues/{queue_name}/claims/{claim_id}',
'href-vars': {
'queue_name': 'param/queue_name',
'claim_id': 'param/claim_id',
},
'hints': {
'allow': ['PATCH'],
'formats': {
'application/json': {},
},
'accept-post': ['application/json']
},
},
'rel/delete_claim': {
'href-template': '/v2/queues/{queue_name}/claims/{claim_id}',
'href-vars': {
'queue_name': 'param/queue_name',
'claim_id': 'param/claim_id',
},
'hints': {
'allow': ['DELETE'],
'formats': {
'application/json': {},
},
},
},
}
}
ADMIN_RESOURCES = {
# -----------------------------------------------------------------
# Pools
# -----------------------------------------------------------------
'rel/pools': {
'href-template': '/v2/pools{?detailed,limit,marker}',
'href-vars': {
'detailed': 'param/detailed',
'limit': 'param/pool_limit',
'marker': 'param/marker',
},
'hints': {
'allow': ['GET'],
'formats': {
'application/json': {},
},
},
},
'rel/pool': {
'href-template': '/v2/pools/{pool_name}',
'href-vars': {
'pool_name': 'param/pool_name',
},
'hints': {
'allow': ['GET', 'PUT', 'PATCH', 'DELETE'],
'formats': {
'application/json': {},
},
},
},
# -----------------------------------------------------------------
# Flavors
# -----------------------------------------------------------------
'rel/flavors': {
'href-template': '/v2/flavors{?detailed,limit,marker}',
'href-vars': {
'detailed': 'param/detailed',
'limit': 'param/flavor_limit',
'marker': 'param/marker',
},
'hints': {
'allow': ['GET'],
'formats': {
'application/json': {},
},
},
},
'rel/flavor': {
'href-template': '/v2/flavors/{flavor_name}',
'href-vars': {
'flavor_name': 'param/flavor_name',
},
'hints': {
'allow': ['GET', 'PUT', 'PATCH', 'DELETE'],
'formats': {
'application/json': {},
},
},
},
# -----------------------------------------------------------------
# Health
# -----------------------------------------------------------------
'rel/health': {
'href': '/v2/health',
'hints': {
'allow': ['GET'],
'formats': {
'application/json': {},
},
},
},
}
class Resource(object):
def __init__(self, conf):
if conf.admin_mode:
JSON_HOME['resources'].update(ADMIN_RESOURCES)
document = json.dumps(JSON_HOME, ensure_ascii=False, indent=4)
self.document_utf8 = document.encode('utf-8')
def on_get(self, req, resp, project_id):
resp.data = self.document_utf8
resp.content_type = 'application/json-home'
resp.cache_control = ['max-age=86400']
# status defaults to 200
| [
"json.dumps"
] | [((8466, 8517), 'json.dumps', 'json.dumps', (['JSON_HOME'], {'ensure_ascii': '(False)', 'indent': '(4)'}), '(JSON_HOME, ensure_ascii=False, indent=4)\n', (8476, 8517), False, 'import json\n')] |
import asyncio
import logging
import synapse.exc as s_exc
import synapse.lib.types as s_types
import synapse.lib.module as s_module
import synapse.lib.version as s_version
logger = logging.getLogger(__name__)
class Cpe23Str(s_types.Str):
'''
CPE 2.3 Formatted String
https://nvlpubs.nist.gov/nistpubs/Legacy/IR/nistir7695.pdf
(Section 6.2)
cpe:2.3: part : vendor : product : version : update : edition :
language : sw_edition : target_sw : target_hw : other
* = "any"
- = N/A
'''
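    # Illustrative example (not taken from the spec text above):
    #   'cpe:2.3:a:openssl:openssl:1.0.2:*:*:*:*:*:*:*'
    # would normalize with subs such as part='a', vendor='openssl', version='1.0.2'.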
def __init__(self, modl, name, info, opts):
opts['lower'] = True
s_types.Str.__init__(self, modl, name, info, opts)
def _splitCpe23(self, text):
part = ''
parts = []
genr = iter(text)
try:
while True:
c = next(genr)
if c == '\\':
c += next(genr)
if c == ':':
parts.append(part)
part = ''
continue
part += c
except StopIteration:
parts.append(part)
return parts
def _normPyStr(self, valu):
if not valu.startswith('cpe:2.3:'):
mesg = 'CPE 2.3 string is expected to start with "cpe:2.3:"'
raise s_exc.BadTypeValu(valu=valu, mesg=mesg)
text, info = s_types.Str._normPyStr(self, valu)
parts = self._splitCpe23(text)
if len(parts) != 13:
mesg = f'CPE 2.3 string has {len(parts)} parts, expected 13.'
raise s_exc.BadTypeValu(valu=valu, mesg=mesg)
subs = {
'part': parts[2],
'vendor': parts[3],
'product': parts[4],
'version': parts[5],
'update': parts[6],
'edition': parts[7],
'language': parts[8],
'sw_edition': parts[9],
'target_sw': parts[10],
'target_hw': parts[11],
'other': parts[12],
}
return ':'.join(parts), {'subs': subs}
class SemVer(s_types.Int):
'''
Provides support for parsing a semantic version string into its component
parts. This normalizes a version string into an integer to allow version
ordering. Prerelease information is disregarded for integer comparison
    purposes, as we cannot map an arbitrary pre-release version into an integer
value
Major, minor and patch levels are represented as integers, with a max
width of 20 bits. The comparable integer value representing the semver
is the bitwise concatenation of the major, minor and patch levels.
Prerelease and build information will be parsed out and available as
strings if that information is present.
'''
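    # Packing sketch (assuming the 20-bit fields described above):
    #   '1.2.3' -> (1 << 40) | (2 << 20) | 3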
def postTypeInit(self):
s_types.Int.postTypeInit(self)
self.setNormFunc(str, self._normPyStr)
self.setNormFunc(int, self._normPyInt)
def _normPyStr(self, valu):
valu = valu.strip()
if not valu:
raise s_exc.BadTypeValu(valu=valu, name=self.name,
mesg='No text left after stripping whitespace')
subs = s_version.parseSemver(valu)
if subs is None:
raise s_exc.BadTypeValu(valu=valu, name=self.name,
mesg='Unable to parse string as a semver.')
valu = s_version.packVersion(subs.get('major'), subs.get('minor'), subs.get('patch'))
return valu, {'subs': subs}
def _normPyInt(self, valu):
if valu < 0:
raise s_exc.BadTypeValu(valu=valu, name=self.name,
mesg='Cannot norm a negative integer as a semver.')
if valu > s_version.mask60:
raise s_exc.BadTypeValu(valu=valu, name=self.name,
mesg='Cannot norm a integer larger than 1152921504606846975 as a semver.')
major, minor, patch = s_version.unpackVersion(valu)
valu = s_version.packVersion(major, minor, patch)
subs = {'major': major,
'minor': minor,
'patch': patch}
return valu, {'subs': subs}
def repr(self, valu):
major, minor, patch = s_version.unpackVersion(valu)
valu = s_version.fmtVersion(major, minor, patch)
return valu
loglevels = (
(10, 'debug'),
(20, 'info'),
(30, 'notice'),
(40, 'warning'),
(50, 'err'),
(60, 'crit'),
(70, 'alert'),
(80, 'emerg'),
)
class ItModule(s_module.CoreModule):
async def initCoreModule(self):
self.model.form('it:dev:str').onAdd(self._onFormItDevStr)
self.model.form('it:dev:pipe').onAdd(self._onFormMakeDevStr)
self.model.form('it:dev:mutex').onAdd(self._onFormMakeDevStr)
self.model.form('it:dev:regkey').onAdd(self._onFormMakeDevStr)
self.model.prop('it:prod:softver:arch').onSet(self._onPropSoftverArch)
self.model.prop('it:prod:softver:vers').onSet(self._onPropSoftverVers)
self.model.prop('it:prod:softver:software').onSet(self._onPropSoftverSoft)
def bruteVersionStr(self, valu):
'''
Brute force the version out of a string.
Args:
valu (str): String to attempt to get version information for.
Notes:
This first attempts to parse strings using the it:semver normalization
before attempting to extract version parts out of the string.
Returns:
int, dict: The system normalized version integer and a subs dictionary.
'''
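        # e.g. '1.2.3' is expected to yield a packed semver integer with subs
        # {'major': 1, 'minor': 2, 'patch': 3}; messier strings fall through to
        # parseVersionParts below.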
try:
valu, info = self.core.model.type('it:semver').norm(valu)
subs = info.get('subs')
return valu, subs
except s_exc.BadTypeValu:
# Try doing version part extraction by noming through the string
subs = s_version.parseVersionParts(valu)
if subs is None:
raise s_exc.BadTypeValu(valu=valu, name='bruteVersionStr',
mesg='Unable to brute force version parts out of the string')
if subs:
valu = s_version.packVersion(subs.get('major'),
subs.get('minor', 0),
subs.get('patch', 0))
return valu, subs
async def _onFormItDevStr(self, node):
await node.set('norm', node.ndef[1])
async def _onFormMakeDevStr(self, node):
pprop = node.ndef[1]
await node.snap.addNode('it:dev:str', pprop)
async def _onPropSoftverSoft(self, node, oldv):
# Check to see if name is available and set it if possible
prop = node.get('software')
if prop:
opts = {'vars': {'soft': prop}}
nodes = await node.snap.nodes('it:prod:soft=$soft', opts=opts)
if nodes:
name = nodes[0].get('name')
if name:
await node.set('software:name', name)
async def _onPropSoftverArch(self, node, oldv):
# make it:dev:str for arch
prop = node.get('arch')
if prop:
await node.snap.addNode('it:dev:str', prop)
async def _onPropSoftverVers(self, node, oldv):
# Set vers:norm and make it's normed valu
prop = node.get('vers')
if not prop:
return
await node.set('vers:norm', prop)
# Make it:dev:str from version str
await node.snap.addNode('it:dev:str', prop)
# form the semver properly or bruteforce parts
try:
valu, subs = self.bruteVersionStr(prop)
await node.set('semver', valu)
for k, v in subs.items():
await node.set(f'semver:{k}', v)
except asyncio.CancelledError: # pragma: no cover
raise
except Exception:
logger.exception('Failed to brute force version string [%s]', prop)
def getModelDefs(self):
modl = {
'ctors': (
('it:semver', 'synapse.models.infotech.SemVer', {}, {
'doc': 'Semantic Version type.',
}),
('it:sec:cpe', 'synapse.models.infotech.Cpe23Str', {}, {
'doc': 'A NIST CPE 2.3 Formatted String',
}),
),
'types': (
('it:hostname', ('str', {'strip': True, 'lower': True}), {
'doc': 'The name of a host or system.',
}),
('it:host', ('guid', {}), {
'doc': 'A GUID that represents a host or system.'
}),
('it:log:event', ('guid', {}), {
'doc': 'A GUID representing an individual log event.',
'interfaces': ('it:host:activity',),
}),
('it:network', ('guid', {}), {
'doc': 'A GUID that represents a logical network.'
}),
('it:domain', ('guid', {}), {
'doc': 'A logical boundary of authentication and configuration such as a windows domain.'
}),
('it:account', ('guid', {}), {
'doc': 'A GUID that represents an account on a host or network.'
}),
('it:group', ('guid', {}), {
'doc': 'A GUID that represents a group on a host or network.'
}),
('it:logon', ('guid', {}), {
'doc': 'A GUID that represents an individual logon/logoff event.'
}),
('it:hosturl', ('comp', {'fields': (('host', 'it:host'), ('url', 'inet:url'))}), {
'doc': 'A url hosted on or served by a host or system.',
}),
('it:sec:cve', ('str', {'lower': True, 'regex': r'(?i)^CVE-[0-9]{4}-[0-9]{4,}$'}), {
'doc': 'A vulnerability as designated by a Common Vulnerabilities and Exposures (CVE) number.',
'ex': 'cve-2012-0158'
}),
('it:sec:cwe', ('str', {'regex': r'^CWE-[0-9]{1,8}$'}), {
'doc': 'NIST NVD Common Weaknesses Enumeration Specification',
'ex': 'CWE-120',
}),
('it:mitre:attack:status', ('str', {'enums': 'current,deprecated,withdrawn'}), {
'doc': 'A Mitre ATT&CK element status.',
'ex': 'current',
}),
('it:mitre:attack:group', ('str', {'regex': r'^G[0-9]{4}$'}), {
'doc': 'A Mitre ATT&CK Group ID.',
'ex': 'G0100',
}),
('it:mitre:attack:tactic', ('str', {'regex': r'^TA[0-9]{4}$'}), {
'doc': 'A Mitre ATT&CK Tactic ID.',
'ex': 'TA0040',
}),
('it:mitre:attack:technique', ('str', {'regex': r'^T[0-9]{4}(.[0-9]{3})?$'}), {
'doc': 'A Mitre ATT&CK Technique ID.',
'ex': 'T1548',
}),
('it:mitre:attack:mitigation', ('str', {'regex': r'^M[0-9]{4}$'}), {
'doc': 'A Mitre ATT&CK Mitigation ID.',
'ex': 'M1036',
}),
('it:mitre:attack:software', ('str', {'regex': r'^S[0-9]{4}$'}), {
'doc': 'A Mitre ATT&CK Software ID.',
'ex': 'S0154',
}),
('it:dev:str', ('str', {}), {
'doc': 'A developer-selected string.'
}),
('it:dev:pipe', ('str', {}), {
'doc': 'A string representing a named pipe.',
}),
('it:dev:mutex', ('str', {}), {
'doc': 'A string representing a mutex.',
}),
('it:dev:int', ('int', {}), {
'doc': 'A developer selected integer constant.',
}),
('it:dev:regkey', ('str', {}), {
'doc': 'A Windows registry key.',
'ex': 'HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Run',
}),
('it:dev:regval', ('guid', {}), {
'doc': 'A Windows registry key/value pair.',
}),
('it:prod:soft', ('guid', {}), {
'doc': 'A arbitrary, unversioned software product.',
}),
('it:adid', ('str', {'lower': True, 'strip': True}), {
'doc': 'An advertising identification string.'}),
('it:os:windows:sid', ('str', {'regex': r'^S-1-[0-59]-\d{2}-\d{8,10}-\d{8,10}-\d{8,10}-[1-9]\d{3}$'}), {
'doc': 'A Microsoft Windows Security Identifier.',
'ex': 'S-1-5-21-1220945662-1202665555-839525555-5555',
}),
('it:os:ios:idfa', ('it:adid', {}), {
'doc': 'An iOS advertising identification string.'}),
('it:os:android:aaid', ('it:adid', {}), {
'doc': 'An android advertising identification string.'}),
('it:os:android:perm', ('str', {}), {
'doc': 'An android permission string.'}),
('it:os:android:intent', ('str', {}), {
'doc': 'An android intent string.'}),
('it:os:android:reqperm', ('comp', {'fields': (
('app', 'it:prod:soft'),
('perm', 'it:os:android:perm'))}), {
'doc': 'The given software requests the android permission.'}),
('it:os:android:ilisten', ('comp', {'fields': (
('app', 'it:prod:soft'),
('intent', 'it:os:android:intent'))}), {
'doc': 'The given software listens for an android intent.'}),
('it:os:android:ibroadcast', ('comp', {'fields': (
('app', 'it:prod:soft'),
('intent', 'it:os:android:intent')
)}), {
'doc': 'The given software broadcasts the given Android intent.'}),
('it:prod:softver', ('guid', {}), {
'doc': 'A specific version of a software product.'}),
('it:prod:softfile', ('comp', {'fields': (
('soft', 'it:prod:softver'),
('file', 'file:bytes'))}), {
'doc': 'A file is distributed by a specific software version.'}),
('it:prod:softlib', ('comp', {'fields': (
('soft', 'it:prod:softver'),
('lib', 'it:prod:softver'))}), {
'doc': 'A software version contains a library software version.'}),
('it:prod:softos', ('comp', {'fields': (
('soft', 'it:prod:softver'),
('os', 'it:prod:softver'))}), {
'doc': 'The software version is known to be compatible with the given os software version.'}),
('it:hostsoft', ('comp', {'fields': (('host', 'it:host'), ('softver', 'it:prod:softver'))}), {
'doc': 'A version of a software product which is present on a given host.',
}),
('it:av:sig', ('comp', {'fields': (('soft', 'it:prod:soft'), ('name', ('str', {'lower': True})))}), {
'doc': 'A signature name within the namespace of an antivirus engine name.'
}),
('it:av:filehit', ('comp', {'fields': (('file', 'file:bytes'), ('sig', 'it:av:sig'))}), {
'doc': 'A file that triggered an alert on a specific antivirus signature.',
}),
('it:av:prochit', ('guid', {}), {
'doc': 'An instance of a process triggering an alert on a specific antivirus signature.'
}),
('it:auth:passwdhash', ('guid', {}), {
'doc': 'An instance of a password hash.',
}),
('it:exec:proc', ('guid', {}), {
'doc': 'A process executing on a host. May be an actual (e.g., endpoint) or virtual (e.g., malware sandbox) host.',
}),
('it:exec:thread', ('guid', {}), {
'doc': 'A thread executing in a process.',
}),
('it:exec:loadlib', ('guid', {}), {
'doc': 'A library load event in a process.',
}),
('it:exec:mmap', ('guid', {}), {
'doc': 'A memory mapped segment located in a process.',
}),
('it:cmd', ('str', {'strip': True}), {
'doc': 'A unique command-line string.',
'ex': 'foo.exe --dostuff bar',
}),
('it:exec:mutex', ('guid', {}), {
'doc': 'A mutex created by a process at runtime.',
}),
('it:exec:pipe', ('guid', {}), {
'doc': 'A named pipe created by a process at runtime.',
}),
('it:exec:url', ('guid', {}), {
'doc': 'An instance of a host requesting a URL.',
}),
('it:exec:bind', ('guid', {}), {
'doc': 'An instance of a host binding a listening port.',
}),
('it:fs:file', ('guid', {}), {
'doc': 'A file on a host.'
}),
('it:exec:file:add', ('guid', {}), {
'doc': 'An instance of a host adding a file to a filesystem.',
}),
('it:exec:file:del', ('guid', {}), {
'doc': 'An instance of a host deleting a file from a filesystem.',
}),
('it:exec:file:read', ('guid', {}), {
'doc': 'An instance of a host reading a file from a filesystem.',
}),
('it:exec:file:write', ('guid', {}), {
'doc': 'An instance of a host writing a file to a filesystem.',
}),
('it:exec:reg:get', ('guid', {}), {
'doc': 'An instance of a host getting a registry key.',
}),
('it:exec:reg:set', ('guid', {}), {
'doc': 'An instance of a host creating or setting a registry key.',
}),
('it:exec:reg:del', ('guid', {}), {
'doc': 'An instance of a host deleting a registry key.',
}),
('it:app:yara:rule', ('guid', {}), {
'doc': 'A YARA rule unique identifier.',
}),
('it:app:yara:match', ('comp', {'fields': (('rule', 'it:app:yara:rule'), ('file', 'file:bytes'))}), {
'doc': 'A YARA rule match to a file.',
}),
('it:app:yara:procmatch', ('guid', {}), {
'doc': 'An instance of a YARA rule match to a process.',
}),
('it:app:snort:rule', ('guid', {}), {
'doc': 'A snort rule unique identifier.',
}),
('it:app:snort:hit', ('guid', {}), {
'doc': 'An instance of a snort rule hit.',
}),
('it:reveng:function', ('guid', {}), {
'doc': 'A function inside an executable.',
}),
('it:reveng:filefunc', ('comp', {'fields': (('file', 'file:bytes'), ('function', 'it:reveng:function'))}), {
'doc': 'An instance of a function in an executable.',
}),
('it:reveng:funcstr', ('comp', {'fields': (('function', 'it:reveng:function'), ('string', 'str'))}), {
'deprecated': True,
'doc': 'A reference to a string inside a function.',
}),
('it:reveng:impfunc', ('str', {'lower': 1}), {
'doc': 'A function from an imported library.',
}),
),
'interfaces': (
('it:host:activity', {
'props': (
('exe', ('file:bytes', {}), {
'doc': 'The executable file which caused the activity.'}),
('proc', ('it:exec:proc', {}), {
'doc': 'The host process which caused the activity.'}),
('thread', ('it:exec:thread', {}), {
'doc': 'The host thread which caused the activity.'}),
('host', ('it:host', {}), {
'doc': 'The host on which the activity occurred.'}),
('time', ('time', {}), {
'doc': 'The time that the activity started.'}),
),
}),
),
'forms': (
('it:hostname', {}, ()),
('it:host', {}, (
('name', ('it:hostname', {}), {
'doc': 'The name of the host or system.',
}),
('desc', ('str', {}), {
'doc': 'A free-form description of the host.',
}),
('domain', ('it:domain', {}), {
'doc': 'The authentication domain that the host is a member of.',
}),
('ipv4', ('inet:ipv4', {}), {
'doc': 'The last known ipv4 address for the host.'
}),
('latlong', ('geo:latlong', {}), {
'doc': 'The last known location for the host.'
}),
('place', ('geo:place', {}), {
'doc': 'The place where the host resides.',
}),
('loc', ('loc', {}), {
'doc': 'The geo-political location string for the node.',
}),
('os', ('it:prod:softver', {}), {
'doc': 'The operating system of the host.'
}),
('manu', ('str', {}), {
'doc': 'The manufacturer of the host.',
}),
('model', ('str', {}), {
'doc': 'The product model of the host.',
}),
('serial', ('str', {}), {
'doc': 'The serial number of the host.',
}),
('operator', ('ps:contact', {}), {
'doc': 'The operator of the host.',
}),
('org', ('ou:org', {}), {
'doc': 'The org that operates the given host.',
}),
)),
('it:log:event', {}, (
('mesg', ('str', {}), {
                    'doc': 'The log message text.',
}),
('severity', ('int', {'enums': loglevels}), {
'doc': 'A log level integer that increases with severity.',
}),
('data', ('data', {}), {
'doc': 'A raw JSON record of the log event.',
}),
)),
('it:domain', {}, (
('name', ('str', {'lower': True, 'strip': True, 'onespace': True}), {
'doc': 'The name of the domain.',
}),
('desc', ('str', {}), {
'doc': 'A brief description of the domain.',
}),
('org', ('ou:org', {}), {
'doc': 'The org that operates the given domain.',
}),
)),
('it:network', {}, (
('name', ('str', {'lower': True, 'strip': True, 'onespace': True}), {
'doc': 'The name of the network.',
}),
('desc', ('str', {}), {
'doc': 'A brief description of the network.',
}),
('org', ('ou:org', {}), {
'doc': 'The org that owns/operates the network.',
}),
('net4', ('inet:net4', {}), {
'doc': 'The optional contiguous IPv4 address range of this network.',
}),
('net6', ('inet:net6', {}), {
'doc': 'The optional contiguous IPv6 address range of this network.',
}),
)),
('it:account', {}, (
('user', ('inet:user', {}), {
                    'doc': 'The username associated with the account.',
}),
('contact', ('ps:contact', {}), {
'doc': 'Additional contact information associated with this account.',
}),
('host', ('it:host', {}), {
'doc': 'The host where the account is registered.',
}),
('domain', ('it:domain', {}), {
'doc': 'The authentication domain where the account is registered.',
}),
('posix:uid', ('int', {}), {
'doc': 'The user ID of the account.',
'ex': '1001',
}),
('posix:gid', ('int', {}), {
'doc': 'The primary group ID of the account.',
'ex': '1001',
}),
('posix:gecos', ('int', {}), {
'doc': 'The GECOS field for the POSIX account.',
}),
('posix:home', ('file:path', {}), {
'doc': "The path to the POSIX account's home directory.",
'ex': '/home/visi',
}),
('posix:shell', ('file:path', {}), {
'doc': "The path to the POSIX account's default shell.",
'ex': '/bin/bash',
}),
('windows:sid', ('it:os:windows:sid', {}), {
'doc': 'The Microsoft Windows Security Identifier of the account.',
}),
('groups', ('array', {'type': 'it:group'}), {
'doc': 'An array of groups that the account is a member of.',
}),
)),
('it:group', {}, (
('name', ('str', {'lower': True, 'strip': True, 'onespace': True}), {
'doc': 'The name of the group.',
}),
('desc', ('str', {}), {
'doc': 'A brief description of the group.',
}),
('host', ('it:host', {}), {
'doc': 'The host where the group is registered.',
}),
('domain', ('it:domain', {}), {
'doc': 'The authentication domain where the group is registered.',
}),
('groups', ('array', {'type': 'it:group'}), {
'doc': 'Groups that are a member of this group.',
}),
('posix:gid', ('int', {}), {
                    'doc': 'The primary group ID of the group.',
'ex': '1001',
}),
('windows:sid', ('it:os:windows:sid', {}), {
'doc': 'The Microsoft Windows Security Identifier of the group.',
}),
)),
('it:logon', {}, (
('time', ('time', {}), {
                    'doc': 'The time the logon occurred.',
}),
('success', ('bool', {}), {
'doc': 'Set to false to indicate an unsuccessful logon attempt.',
}),
('logoff:time', ('time', {}), {
'doc': 'The time the logon session ended.',
}),
('host', ('it:host', {}), {
'doc': 'The host that the account logged in to.',
}),
('account', ('it:account', {}), {
'doc': 'The account that logged in.',
}),
('creds', ('auth:creds', {}), {
'doc': 'The credentials that were used for the logon.',
}),
('duration', ('duration', {}), {
'doc': 'The duration of the logon session.',
}),
('client:host', ('it:host', {}), {
'doc': 'The host where the logon originated.',
}),
('client:ipv4', ('inet:ipv4', {}), {
'doc': 'The IPv4 where the logon originated.',
}),
('client:ipv6', ('inet:ipv6', {}), {
'doc': 'The IPv6 where the logon originated.',
}),
)),
('it:hosturl', {}, (
('host', ('it:host', {}), {
'ro': True,
'doc': 'Host serving a url.',
}),
('url', ('inet:url', {}), {
'ro': True,
'doc': 'URL available on the host.',
}),
)),
('it:dev:str', {}, (
('norm', ('str', {'lower': True}), {
'doc': 'Lower case normalized version of the it:dev:str.',
}),
)),
('it:sec:cve', {}, (
('desc', ('str', {}), {
'doc': 'A free-form description of the CVE vulnerability.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'A URL linking this CVE to a full description.',
}),
('references', ('array', {'type': 'inet:url', 'uniq': True}), {
'doc': 'An array of URLs that document the CVE ID.',
}),
)),
('it:sec:cpe', {}, (
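                # The properties below mirror the colon separated fields of a CPE 2.3 string,
                # e.g. cpe:2.3:a:openbsd:openssh:7.4:*:*:*:*:*:*:* (part "a", vendor "openbsd", ...).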
('part', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "part" field from the CPE 2.3 string.'}),
('vendor', ('ou:name', {}), {
'ro': True,
'doc': 'The "vendor" field from the CPE 2.3 string.'}),
('product', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "product" field from the CPE 2.3 string.'}),
('version', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "version" field from the CPE 2.3 string.'}),
('update', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "update" field from the CPE 2.3 string.'}),
('edition', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "edition" field from the CPE 2.3 string.'}),
('language', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "language" field from the CPE 2.3 string.'}),
('sw_edition', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "sw_edition" field from the CPE 2.3 string.'}),
('target_sw', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "target_sw" field from the CPE 2.3 string.'}),
('target_hw', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "target_hw" field from the CPE 2.3 string.'}),
('other', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The "other" field from the CPE 2.3 string.'}),
)),
('it:sec:cwe', {}, (
('name', ('str', {}), {
                    'doc': 'The CWE name field.',
'ex': 'Buffer Copy without Checking Size of Input (Classic Buffer Overflow)',
}),
('desc', ('str', {}), {
'doc': 'The CWE description field.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'A URL linking this CWE to a full description.',
}),
('parents', ('array', {'type': 'it:sec:cwe',
'uniq': True, 'sorted': True, 'split': ','}), {
'doc': 'An array of ChildOf CWE Relationships.'
}),
)),
('it:mitre:attack:group', {}, (
('org', ('ou:org', {}), {
'doc': 'Used to map an ATT&CK group to a synapse ou:org.',
}),
('name', ('ou:name', {}), {
'doc': 'The primary name for the ATT&CK group.',
}),
('names', ('array', {'type': 'ou:name', 'uniq': True, 'sorted': True}), {
'doc': 'An array of alternate names for the ATT&CK group.',
}),
('desc', ('str', {}), {
'doc': 'A description of the ATT&CK group.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'The URL that documents the ATT&CK group.',
}),
('tag', ('syn:tag', {}), {
'doc': 'The synapse tag used to annotate nodes included in this ATT&CK group ID.',
'ex': 'cno.mitre.g0100',
}),
('references', ('array', {'type': 'inet:url', 'uniq': True}), {
'doc': 'An array of URLs that document the ATT&CK group.',
}),
('techniques', ('array', {'type': 'it:mitre:attack:technique',
'uniq': True, 'sorted': True, 'split': ','}), {
'doc': 'An array of ATT&CK technique IDs used by the group.',
}),
('software', ('array', {'type': 'it:mitre:attack:software',
'uniq': True, 'sorted': True, 'split': ','}), {
'doc': 'An array of ATT&CK software IDs used by the group.',
}),
)),
('it:mitre:attack:tactic', {}, (
('name', ('str', {'strip': True}), {
'doc': 'The primary name for the ATT&CK tactic.',
}),
('desc', ('str', {}), {
'doc': 'A description of the ATT&CK tactic.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'The URL that documents the ATT&CK tactic.',
}),
('tag', ('syn:tag', {}), {
'doc': 'The synapse tag used to annotate nodes included in this ATT&CK tactic.',
'ex': 'cno.mitre.ta0100',
}),
('references', ('array', {'type': 'inet:url', 'uniq': True}), {
'doc': 'An array of URLs that document the ATT&CK tactic.',
}),
)),
('it:mitre:attack:technique', {}, (
('name', ('str', {'strip': True}), {
'doc': 'The primary name for the ATT&CK technique.',
}),
('status', ('it:mitre:attack:status', {}), {
'doc': 'The status of this ATT&CK technique.',
}),
('isnow', ('it:mitre:attack:technique', {}), {
'doc': 'If deprecated, this field may contain the current value for the technique.',
}),
('desc', ('str', {'strip': True}), {
'doc': 'A description of the ATT&CK technique.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'The URL that documents the ATT&CK technique.',
}),
('tag', ('syn:tag', {}), {
'doc': 'The synapse tag used to annotate nodes included in this ATT&CK technique.',
'ex': 'cno.mitre.t0100',
}),
('references', ('array', {'type': 'inet:url', 'uniq': True}), {
'doc': 'An array of URLs that document the ATT&CK technique.',
}),
('parent', ('it:mitre:attack:technique', {}), {
'doc': 'The parent ATT&CK technique on this sub-technique.',
}),
('tactics', ('array', {'type': 'it:mitre:attack:tactic',
'uniq': True, 'sorted': True, 'split': ','}), {
'doc': 'An array of ATT&CK tactics that include this technique.',
}),
)),
('it:mitre:attack:software', {}, (
('software', ('it:prod:soft', {}), {
'doc': 'Used to map an ATT&CK software to a synapse it:prod:soft.',
}),
('name', ('str', {'strip': True}), {
'doc': 'The primary name for the ATT&CK software.',
}),
('names', ('array', {'type': 'str', 'uniq': True, 'sorted': True}), {
'doc': 'Associated names for the ATT&CK software.',
}),
('desc', ('str', {'strip': True}), {
'doc': 'A description of the ATT&CK software.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'The URL that documents the ATT&CK software.',
}),
('tag', ('syn:tag', {}), {
'doc': 'The synapse tag used to annotate nodes included in this ATT&CK software.',
'ex': 'cno.mitre.s0100',
}),
('references', ('array', {'type': 'inet:url', 'uniq': True}), {
'doc': 'An array of URLs that document the ATT&CK software.',
}),
('techniques', ('array', {'type': 'it:mitre:attack:technique',
'uniq': True, 'sorted': True, 'split': ','}), {
'doc': 'An array of techniques used by the software.',
}),
)),
('it:mitre:attack:mitigation', {}, (
# TODO map to an eventual risk:mitigation
('name', ('str', {'strip': True}), {
'doc': 'The primary name for the ATT&CK mitigation.',
}),
('desc', ('str', {'strip': True}), {
'doc': 'A description of the ATT&CK mitigation.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'The URL that documents the ATT&CK mitigation.',
}),
('tag', ('syn:tag', {}), {
'doc': 'The synapse tag used to annotate nodes included in this ATT&CK mitigation.',
'ex': 'cno.mitre.m0100',
}),
('references', ('array', {'type': 'inet:url', 'uniq': True}), {
'doc': 'An array of URLs that document the ATT&CK mitigation.',
}),
('addresses', ('array', {'type': 'it:mitre:attack:technique',
'uniq': True, 'sorted': True, 'split': ','}), {
'doc': 'An array of ATT&CK technique IDs addressed by the mitigation.',
}),
)),
('it:dev:int', {}, ()),
('it:dev:pipe', {}, ()),
('it:dev:mutex', {}, ()),
('it:dev:regkey', {}, ()),
('it:dev:regval', {}, (
('key', ('it:dev:regkey', {}), {
'doc': 'The Windows registry key.',
}),
('str', ('it:dev:str', {}), {
'doc': 'The value of the registry key, if the value is a string.',
}),
('int', ('it:dev:int', {}), {
'doc': 'The value of the registry key, if the value is an integer.',
}),
('bytes', ('file:bytes', {}), {
'doc': 'The file representing the value of the registry key, if the value is binary data.',
}),
)),
('it:prod:soft', {}, (
('name', ('str', {'lower': True, 'strip': True}), {
'doc': 'Name of the software.',
}),
('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted': True}), {
'doc': 'Observed/variant names for this software.',
}),
('desc', ('str', {}), {
'doc': 'A description of the software.',
'disp': {'hint': 'text'},
}),
('desc:short', ('str', {'lower': True}), {
'doc': 'A short description of the software.',
}),
('cpe', ('it:sec:cpe', {}), {
'doc': 'The NIST CPE 2.3 string specifying this software.',
}),
('author', ('ps:contact', {}), {
'doc': 'The contact information of the org or person who authored the software.',
}),
('author:org', ('ou:org', {}), {
'deprecated': True,
'doc': 'Organization which authored the software.',
}),
('author:acct', ('inet:web:acct', {}), {
'deprecated': True,
'doc': 'Web account of the software author.',
}),
('author:email', ('inet:email', {}), {
'deprecated': True,
                    'doc': 'Email address of the software author.',
}),
('author:person', ('ps:person', {}), {
'deprecated': True,
'doc': 'Person who authored the software.',
}),
('url', ('inet:url', {}), {
'doc': 'URL relevant for the software.',
}),
('isos', ('bool', {}), {
'doc': 'Set to True if the software is an operating system.'}),
('islib', ('bool', {}), {
'doc': 'Set to True if the software is a library.'}),
)),
('it:adid', {}, ()),
('it:os:ios:idfa', {}, ()),
('it:os:android:aaid', {}, ()),
('it:os:android:perm', {}, ()),
('it:os:android:intent', {}, ()),
('it:os:android:reqperm', {}, (
('app', ('it:prod:softver', {}), {'ro': True,
'doc': 'The android app which requests the permission.'}),
('perm', ('it:os:android:perm', {}), {'ro': True,
'doc': 'The android permission requested by the app.'}),
)),
('it:prod:softos', {}, (
('soft', ('it:prod:softver', {}), {'ro': True,
'doc': 'The software which can run on the operating system.'}),
('os', ('it:prod:softver', {}), {'ro': True,
'doc': 'The operating system which the software can run on.'}),
)),
('it:os:android:ilisten', {}, (
('app', ('it:prod:softver', {}), {'ro': True,
'doc': 'The app software which listens for the android intent.'}),
('intent', ('it:os:android:intent', {}), {'ro': True,
'doc': 'The android intent which is listened for by the app.'}),
)),
('it:os:android:ibroadcast', {}, (
('app', ('it:prod:softver', {}), {'ro': True,
'doc': 'The app software which broadcasts the android intent.'}),
('intent', ('it:os:android:intent', {}), {'ro': True,
'doc': 'The android intent which is broadcast by the app.'}),
)),
('it:prod:softver', {}, (
('software', ('it:prod:soft', {}), {
'doc': 'Software associated with this version instance.',
}),
('software:name', ('str', {'lower': True, 'strip': True}), {
'doc': 'The name of the software at a particular version.',
}),
('names', ('array', {'type': 'it:dev:str', 'uniq': True, 'sorted': True}), {
'doc': 'Observed/variant names for this software version.',
}),
('cpe', ('it:sec:cpe', {}), {
                    'doc': 'The NIST CPE 2.3 string specifying this software version.',
}),
('cves', ('array', {'type': 'it:sec:cve', 'uniq': True, 'sorted': True}), {
'doc': 'A list of CVEs that apply to this software version.',
}),
('vers', ('it:dev:str', {}), {
'doc': 'Version string associated with this version instance.',
}),
('vers:norm', ('str', {'lower': True}), {
'doc': 'Normalized version of the version string.',
}),
('arch', ('it:dev:str', {}), {
'doc': 'Software architecture.',
}),
('released', ('time', {}), {
'doc': 'Timestamp for when this version of the software was released.',
}),
('semver', ('it:semver', {}), {
'doc': 'System normalized semantic version number.',
}),
('semver:major', ('int', {}), {
'doc': 'Version major number.',
}),
('semver:minor', ('int', {}), {
'doc': 'Version minor number.',
}),
('semver:patch', ('int', {}), {
'doc': 'Version patch number.',
}),
('semver:pre', ('str', {}), {
'doc': 'Semver prerelease string.',
}),
('semver:build', ('str', {}), {
'doc': 'Semver build string.',
}),
('url', ('inet:url', {}), {
'doc': 'URL where a specific version of the software is available from.',
}),
)),
('it:prod:softlib', {}, (
('soft', ('it:prod:softver', {}), {'ro': True,
'doc': 'The software version that contains the library.'}),
('lib', ('it:prod:softver', {}), {'ro': True,
'doc': 'The library software version.'}),
)),
('it:prod:softfile', {}, (
('soft', ('it:prod:softver', {}), {'ro': True,
'doc': 'The software which distributes the file.'}),
('file', ('file:bytes', {}), {'ro': True,
'doc': 'The file distributed by the software.'}),
('path', ('file:path', {}), {
'doc': 'The default installation path of the file.'}),
)),
('it:hostsoft', {}, (
('host', ('it:host', {}), {'ro': True,
'doc': 'Host with the software.'}),
('softver', ('it:prod:softver', {}), {'ro': True,
'doc': 'Software on the host.'})
)),
('it:av:sig', {}, (
('soft', ('it:prod:soft', {}), {
'ro': True,
'doc': 'The anti-virus product which contains the signature.',
}),
('name', ('str', {'lower': True}), {
'ro': True,
'doc': 'The signature name.'
}),
('desc', ('str', {}), {
'doc': 'A free-form description of the signature.',
'disp': {'hint': 'text'},
}),
('url', ('inet:url', {}), {
'doc': 'A reference URL for information about the signature.',
})
)),
('it:av:filehit', {}, (
('file', ('file:bytes', {}), {
'ro': True,
'doc': 'The file that triggered the signature hit.',
}),
('sig', ('it:av:sig', {}), {
'ro': True,
'doc': 'The signature that the file triggered on.'
}),
('sig:name', ('str', {'lower': True}), {
'ro': True,
'doc': 'The signature name.',
}),
('sig:soft', ('it:prod:soft', {}), {
'ro': True,
'doc': 'The anti-virus product which contains the signature.',
}),
)),
('it:av:prochit', {}, (
('proc', ('it:exec:proc', {}), {
                    'doc': 'The process that triggered the signature hit.',
}),
('sig', ('it:av:sig', {}), {
                    'doc': 'The signature that the process triggered on.'
}),
('time', ('time', {}), {
'doc': 'The time that the AV engine detected the signature.'
}),
)),
('it:auth:passwdhash', {}, (
('salt', ('hex', {}), {
'doc': 'The (optional) hex encoded salt value used to calculate the password hash.',
}),
('hash:md5', ('hash:md5', {}), {
'doc': 'The MD5 password hash value.',
}),
('hash:sha1', ('hash:sha1', {}), {
'doc': 'The SHA1 password hash value.',
}),
('hash:sha256', ('hash:sha256', {}), {
'doc': 'The SHA256 password hash value.',
}),
('hash:sha512', ('hash:sha512', {}), {
'doc': 'The SHA512 password hash value.',
}),
('hash:lm', ('hash:lm', {}), {
'doc': 'The LM password hash value.',
}),
('hash:ntlm', ('hash:ntlm', {}), {
'doc': 'The NTLM password hash value.',
}),
('passwd', ('inet:passwd', {}), {
'doc': 'The (optional) clear text password for this password hash.',
}),
)),
('it:cmd', {}, ()),
('it:exec:proc', {}, (
('host', ('it:host', {}), {
'doc': 'The host that executed the process. May be an actual or a virtual / notional host.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The file considered the "main" executable for the process. For example, rundll32.exe may be considered the "main" executable for DLLs loaded by that program.',
}),
('cmd', ('it:cmd', {}), {
'doc': 'The command string used to launch the process, including any command line parameters.',
'disp': {'hint': 'text'},
}),
('pid', ('int', {}), {
'doc': 'The process ID.',
}),
('time', ('time', {}), {
'doc': 'The start time for the process.',
}),
('exited', ('time', {}), {
'doc': 'The time the process exited.',
}),
('exitcode', ('int', {}), {
'doc': 'The exit code for the process.',
}),
('user', ('inet:user', {}), {
'doc': 'The user name of the process owner.',
}),
('path', ('file:path', {}), {
'doc': 'The path to the executable of the process.',
}),
('src:exe', ('file:path', {}), {
'doc': 'The path to the executable which started the process.',
}),
('src:proc', ('it:exec:proc', {}), {
'doc': 'The process which created the process.'
}),
('killedby', ('it:exec:proc', {}), {
'doc': 'The process which killed this process.',
}),
)),
('it:exec:thread', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The process which contains the thread.',
}),
('created', ('time', {}), {
'doc': 'The time the thread was created.',
}),
('exited', ('time', {}), {
'doc': 'The time the thread exited.',
}),
('exitcode', ('int', {}), {
'doc': 'The exit code or return value for the thread.',
}),
('src:proc', ('it:exec:proc', {}), {
'doc': 'An external process which created the thread.',
}),
('src:thread', ('it:exec:thread', {}), {
'doc': 'The thread which created this thread.',
}),
)),
('it:exec:loadlib', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The process where the library was loaded.',
}),
('va', ('int', {}), {
'doc': 'The base memory address where the library was loaded in the process.',
}),
('loaded', ('time', {}), {
'doc': 'The time the library was loaded.',
}),
('unloaded', ('time', {}), {
'doc': 'The time the library was unloaded.',
}),
('path', ('file:path', {}), {
'doc': 'The path that the library was loaded from.',
}),
('file', ('file:bytes', {}), {
'doc': 'The library file that was loaded.',
}),
)),
('it:exec:mmap', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The process where the memory was mapped.',
}),
('va', ('int', {}), {
'doc': 'The base memory address where the map was created in the process.',
}),
('size', ('int', {}), {
'doc': 'The size of the memory map in bytes.',
}),
('perms:read', ('bool', {}), {
'doc': 'True if the mmap is mapped with read permissions.',
}),
('perms:write', ('bool', {}), {
'doc': 'True if the mmap is mapped with write permissions.',
}),
('perms:execute', ('bool', {}), {
'doc': 'True if the mmap is mapped with execute permissions.',
}),
('created', ('time', {}), {
'doc': 'The time the memory map was created.',
}),
('deleted', ('time', {}), {
'doc': 'The time the memory map was deleted.',
}),
('path', ('file:path', {}), {
'doc': 'The file path if the mmap is a mapped view of a file.',
}),
('hash:sha256', ('hash:sha256', {}), {
'doc': 'A SHA256 hash of the memory map. Bytes may optionally be present in the axon.',
}),
)),
('it:exec:mutex', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that created the mutex.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that created the mutex. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that created the mutex. May or may not be the same :exe specified in :proc, if present.',
}),
('time', ('time', {}), {
'doc': 'The time the mutex was created.',
}),
('name', ('it:dev:mutex', {}), {
'doc': 'The mutex string.',
}),
)),
('it:exec:pipe', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that created the named pipe.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that created the named pipe. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that created the named pipe. May or may not be the same :exe specified in :proc, if present.',
}),
('time', ('time', {}), {
'doc': 'The time the named pipe was created.',
}),
('name', ('it:dev:pipe', {}), {
'doc': 'The named pipe string.',
}),
)),
('it:exec:url', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that requested the URL.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that requested the URL. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that requested the URL. May or may not be the same :exe specified in :proc, if present.',
}),
('time', ('time', {}), {
'doc': 'The time the URL was requested.',
}),
('url', ('inet:url', {}), {
'doc': 'The URL that was requested.',
}),
('client', ('inet:client', {}), {
'doc': 'The address of the client during the URL retrieval.'
}),
('client:ipv4', ('inet:ipv4', {}), {
                    'doc': 'The IPv4 address of the client during the URL retrieval.'
}),
('client:ipv6', ('inet:ipv6', {}), {
                    'doc': 'The IPv6 address of the client during the URL retrieval.'
}),
('client:port', ('inet:port', {}), {
                    'doc': 'The client port during the URL retrieval.'
}),
)),
('it:exec:bind', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that bound the listening port.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that bound the listening port. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that bound the listening port. May or may not be the same :exe specified in :proc, if present.',
}),
('time', ('time', {}), {
'doc': 'The time the port was bound.',
}),
('server', ('inet:server', {}), {
'doc': 'The inet:addr of the server when binding the port.'
}),
('server:ipv4', ('inet:ipv4', {}), {
'doc': 'The IPv4 address specified to bind().'
}),
('server:ipv6', ('inet:ipv6', {}), {
'doc': 'The IPv6 address specified to bind().'
}),
('server:port', ('inet:port', {}), {
'doc': 'The bound (listening) TCP port.'
}),
)),
('it:fs:file', {}, (
('host', ('it:host', {}), {
'doc': 'The host containing the file.',
}),
('path', ('file:path', {}), {
'doc': 'The path for the file.',
}),
('path:dir', ('file:path', {}), {
'ro': True,
'doc': 'The parent directory of the file path (parsed from :path).',
}),
('path:ext', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The file extension of the file name (parsed from :path).',
}),
('path:base', ('file:base', {}), {
'ro': True,
'doc': 'The final component of the file path (parsed from :path).',
}),
('file', ('file:bytes', {}), {
'doc': 'The file on the host.',
}),
('ctime', ('time', {}), {
'doc': 'The file creation time.',
}),
('mtime', ('time', {}), {
'doc': 'The file modification time.',
}),
('atime', ('time', {}), {
'doc': 'The file access time.',
}),
('user', ('inet:user', {}), {
'doc': 'The owner of the file.',
}),
('group', ('inet:user', {}), {
'doc': 'The group owner of the file.',
}),
)),
('it:exec:file:add', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that created the new file.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that created the new file. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that created the new file. May or may not be the same :exe specified in :proc, if present.'}),
('time', ('time', {}), {
'doc': 'The time the file was created.',
}),
('path', ('file:path', {}), {
'doc': 'The path where the file was created.',
}),
('path:dir', ('file:path', {}), {
'ro': True,
'doc': 'The parent directory of the file path (parsed from :path).',
}),
('path:ext', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The file extension of the file name (parsed from :path).',
}),
('path:base', ('file:base', {}), {
'ro': True,
'doc': 'The final component of the file path (parsed from :path).',
}),
('file', ('file:bytes', {}), {
'doc': 'The file that was created.',
}),
)),
('it:exec:file:del', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that deleted the file.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that deleted the file. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that deleted the file. May or may not be the same :exe specified in :proc, if present.'}),
('time', ('time', {}), {
'doc': 'The time the file was deleted.',
}),
('path', ('file:path', {}), {
'doc': 'The path where the file was deleted.',
}),
('path:dir', ('file:path', {}), {
'ro': True,
'doc': 'The parent directory of the file path (parsed from :path).',
}),
('path:ext', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The file extension of the file name (parsed from :path).',
}),
('path:base', ('file:base', {}), {
'ro': True,
'doc': 'The final component of the file path (parsed from :path).',
}),
('file', ('file:bytes', {}), {
'doc': 'The file that was deleted.',
}),
)),
('it:exec:file:read', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that read the file.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that read the file. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that read the file. May or may not be the same :exe specified in :proc, if present.'}),
('time', ('time', {}), {
'doc': 'The time the file was read.',
}),
('path', ('file:path', {}), {
'doc': 'The path where the file was read.',
}),
('path:dir', ('file:path', {}), {
'ro': True,
'doc': 'The parent directory of the file path (parsed from :path).',
}),
('path:ext', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The file extension of the file name (parsed from :path).',
}),
('path:base', ('file:base', {}), {
'ro': True,
'doc': 'The final component of the file path (parsed from :path).',
}),
('file', ('file:bytes', {}), {
'doc': 'The file that was read.',
}),
)),
('it:exec:file:write', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that wrote to / modified the existing file.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that wrote to the file. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that wrote to the file. May or may not be the same :exe specified in :proc, if present.'}),
('time', ('time', {}), {
'doc': 'The time the file was written to/modified.',
}),
('path', ('file:path', {}), {
'doc': 'The path where the file was written to/modified.',
}),
('path:dir', ('file:path', {}), {
'ro': True,
'doc': 'The parent directory of the file path (parsed from :path).',
}),
('path:ext', ('str', {'lower': True, 'strip': True}), {
'ro': True,
'doc': 'The file extension of the file name (parsed from :path).',
}),
('path:base', ('file:base', {}), {
'ro': True,
'doc': 'The final component of the file path (parsed from :path).',
}),
('file', ('file:bytes', {}), {
'doc': 'The file that was modified.',
}),
)),
('it:exec:reg:get', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that read the registry.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that read the registry. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that read the registry. May or may not be the same :exe referenced in :proc, if present.',
}),
('time', ('time', {}), {
'doc': 'The time the registry was read.',
}),
('reg', ('it:dev:regval', {}), {
'doc': 'The registry key or value that was read.',
}),
)),
('it:exec:reg:set', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that wrote to the registry.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that wrote to the registry. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that wrote to the registry. May or may not be the same :exe referenced in :proc, if present.',
}),
('time', ('time', {}), {
'doc': 'The time the registry was written to.',
}),
('reg', ('it:dev:regval', {}), {
'doc': 'The registry key or value that was written to.',
}),
)),
('it:exec:reg:del', {}, (
('proc', ('it:exec:proc', {}), {
'doc': 'The main process executing code that deleted data from the registry.',
}),
('host', ('it:host', {}), {
'doc': 'The host running the process that deleted data from the registry. Typically the same host referenced in :proc, if present.',
}),
('exe', ('file:bytes', {}), {
'doc': 'The specific file containing code that deleted data from the registry. May or may not be the same :exe referenced in :proc, if present.',
}),
('time', ('time', {}), {
'doc': 'The time the data from the registry was deleted.',
}),
('reg', ('it:dev:regval', {}), {
'doc': 'The registry key or value that was deleted.',
}),
)),
('it:app:snort:rule', {}, (
('text', ('str', {}), {
'doc': 'The snort rule text.',
'disp': {'hint': 'text'},
}),
('name', ('str', {}), {
'doc': 'The name of the snort rule.'}),
('version', ('it:semver', {}), {
'doc': 'The current version of the rule.'}),
)),
('it:app:snort:hit', {}, (
('rule', ('it:app:snort:rule', {}), {
                    'doc': 'The snort rule that matched the flow.'}),
('flow', ('inet:flow', {}), {
'doc': 'The inet:flow that matched the snort rule.'}),
('src', ('inet:addr', {}), {
                    'doc': 'The source address of the flow that caused the hit.'}),
('src:ipv4', ('inet:ipv4', {}), {
'doc': 'The source IPv4 address of the flow that caused the hit.'}),
('src:ipv6', ('inet:ipv6', {}), {
'doc': 'The source IPv6 address of the flow that caused the hit.'}),
('src:port', ('inet:port', {}), {
'doc': 'The source port of the flow that caused the hit.'}),
('dst', ('inet:addr', {}), {
                    'doc': 'The destination address of the flow that caused the hit.'}),
('dst:ipv4', ('inet:ipv4', {}), {
'doc': 'The destination IPv4 address of the flow that caused the hit.'}),
('dst:ipv6', ('inet:ipv6', {}), {
                    'doc': 'The destination IPv6 address of the flow that caused the hit.'}),
('dst:port', ('inet:port', {}), {
'doc': 'The destination port of the flow that caused the hit.'}),
('time', ('time', {}), {
'doc': 'The time of the network flow that caused the hit.'}),
('sensor', ('it:host', {}), {
'doc': 'The sensor host node that produced the hit.'}),
('version', ('it:semver', {}), {
'doc': 'The version of the rule at the time of match.'}),
)),
('it:app:yara:rule', {}, (
('text', ('str', {}), {
'doc': 'The YARA rule text.',
'disp': {'hint': 'text'},
}),
('name', ('str', {}), {
'doc': 'The name of the YARA rule.'}),
('author', ('ps:contact', {}), {
'doc': 'Contact info for the author of the YARA rule.'}),
('version', ('it:semver', {}), {
'doc': 'The current version of the rule.'}),
('enabled', ('bool', {}), {
'doc': 'The rule enabled status to be used for YARA evaluation engines.'}),
)),
('it:app:yara:match', {}, (
('rule', ('it:app:yara:rule', {}), {
'ro': True,
'doc': 'The YARA rule that matched the file.'}),
('file', ('file:bytes', {}), {
'ro': True,
'doc': 'The file that matched the YARA rule.'}),
('version', ('it:semver', {}), {
'doc': 'The most recent version of the rule evaluated as a match.'}),
)),
('it:app:yara:procmatch', {}, (
('rule', ('it:app:yara:rule', {}), {
                    'doc': 'The YARA rule that matched the process.'}),
('proc', ('it:exec:proc', {}), {
'doc': 'The process that matched the YARA rule.'}),
('time', ('time', {}), {
'doc': 'The time that the YARA engine matched the process to the rule.'}),
('version', ('it:semver', {}), {
'doc': 'The most recent version of the rule evaluated as a match.'}),
)),
('it:reveng:function', {}, (
('name', ('str', {}), {
'doc': 'The name of the function.'}),
('description', ('str', {}), {
'doc': 'Notes concerning the function.'}),
('impcalls', ('array', {'type': 'it:reveng:impfunc'}), {
'doc': 'Calls to imported library functions within the scope of the function.',
}),
('strings', ('array', {'type': 'it:dev:str', 'uniq': True}), {
'doc': 'An array of strings referenced within the function.',
}),
)),
('it:reveng:filefunc', {}, (
('function', ('it:reveng:function', {}), {
'ro': True,
'doc': 'The guid matching the function.'}),
('file', ('file:bytes', {}), {
'ro': True,
'doc': 'The file that contains the function.'}),
('va', ('int', {}), {
'doc': 'The virtual address of the first codeblock of the function.'}),
('rank', ('int', {}), {
'doc': 'The function rank score used to evaluate if it exhibits interesting behavior.'}),
('complexity', ('int', {}), {
'doc': 'The complexity of the function.'}),
('funccalls', ('array', {'type': 'it:reveng:filefunc'}), {
'doc': 'Other function calls within the scope of the function.',
}),
)),
('it:reveng:funcstr', {}, (
('function', ('it:reveng:function', {}), {
'ro': True,
'doc': 'The guid matching the function.'}),
('string', ('str', {}), {
'ro': True,
'doc': 'The string that the function references.'}),
)),
('it:reveng:impfunc', {}, ()),
),
}
name = 'it'
return ((name, modl), )
| [
"logging.getLogger",
"synapse.lib.version.parseSemver",
"synapse.lib.version.unpackVersion",
"synapse.exc.BadTypeValu",
"synapse.lib.version.packVersion",
"synapse.lib.types.Str.__init__",
"synapse.lib.types.Str._normPyStr",
"synapse.lib.version.parseVersionParts",
"synapse.lib.types.Int.postTypeInit",
"synapse.lib.version.fmtVersion"
] | [((183, 210), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (200, 210), False, 'import logging\n'), ((612, 662), 'synapse.lib.types.Str.__init__', 's_types.Str.__init__', (['self', 'modl', 'name', 'info', 'opts'], {}), '(self, modl, name, info, opts)\n', (632, 662), True, 'import synapse.lib.types as s_types\n'), ((1368, 1402), 'synapse.lib.types.Str._normPyStr', 's_types.Str._normPyStr', (['self', 'valu'], {}), '(self, valu)\n', (1390, 1402), True, 'import synapse.lib.types as s_types\n'), ((2787, 2817), 'synapse.lib.types.Int.postTypeInit', 's_types.Int.postTypeInit', (['self'], {}), '(self)\n', (2811, 2817), True, 'import synapse.lib.types as s_types\n'), ((3157, 3184), 'synapse.lib.version.parseSemver', 's_version.parseSemver', (['valu'], {}), '(valu)\n', (3178, 3184), True, 'import synapse.lib.version as s_version\n'), ((3928, 3957), 'synapse.lib.version.unpackVersion', 's_version.unpackVersion', (['valu'], {}), '(valu)\n', (3951, 3957), True, 'import synapse.lib.version as s_version\n'), ((3973, 4015), 'synapse.lib.version.packVersion', 's_version.packVersion', (['major', 'minor', 'patch'], {}), '(major, minor, patch)\n', (3994, 4015), True, 'import synapse.lib.version as s_version\n'), ((4205, 4234), 'synapse.lib.version.unpackVersion', 's_version.unpackVersion', (['valu'], {}), '(valu)\n', (4228, 4234), True, 'import synapse.lib.version as s_version\n'), ((4250, 4291), 'synapse.lib.version.fmtVersion', 's_version.fmtVersion', (['major', 'minor', 'patch'], {}), '(major, minor, patch)\n', (4270, 4291), True, 'import synapse.lib.version as s_version\n'), ((1306, 1345), 'synapse.exc.BadTypeValu', 's_exc.BadTypeValu', ([], {'valu': 'valu', 'mesg': 'mesg'}), '(valu=valu, mesg=mesg)\n', (1323, 1345), True, 'import synapse.exc as s_exc\n'), ((1564, 1603), 'synapse.exc.BadTypeValu', 's_exc.BadTypeValu', ([], {'valu': 'valu', 'mesg': 'mesg'}), '(valu=valu, mesg=mesg)\n', (1581, 1603), True, 'import synapse.exc as s_exc\n'), ((3012, 3109), 'synapse.exc.BadTypeValu', 's_exc.BadTypeValu', ([], {'valu': 'valu', 'name': 'self.name', 'mesg': '"""No text left after stripping whitespace"""'}), "(valu=valu, name=self.name, mesg=\n 'No text left after stripping whitespace')\n", (3029, 3109), True, 'import synapse.exc as s_exc\n'), ((3228, 3321), 'synapse.exc.BadTypeValu', 's_exc.BadTypeValu', ([], {'valu': 'valu', 'name': 'self.name', 'mesg': '"""Unable to parse string as a semver."""'}), "(valu=valu, name=self.name, mesg=\n 'Unable to parse string as a semver.')\n", (3245, 3321), True, 'import synapse.exc as s_exc\n'), ((3555, 3656), 'synapse.exc.BadTypeValu', 's_exc.BadTypeValu', ([], {'valu': 'valu', 'name': 'self.name', 'mesg': '"""Cannot norm a negative integer as a semver."""'}), "(valu=valu, name=self.name, mesg=\n 'Cannot norm a negative integer as a semver.')\n", (3572, 3656), True, 'import synapse.exc as s_exc\n'), ((3742, 3866), 'synapse.exc.BadTypeValu', 's_exc.BadTypeValu', ([], {'valu': 'valu', 'name': 'self.name', 'mesg': '"""Cannot norm a integer larger than 1152921504606846975 as a semver."""'}), "(valu=valu, name=self.name, mesg=\n 'Cannot norm a integer larger than 1152921504606846975 as a semver.')\n", (3759, 3866), True, 'import synapse.exc as s_exc\n'), ((5825, 5858), 'synapse.lib.version.parseVersionParts', 's_version.parseVersionParts', (['valu'], {}), '(valu)\n', (5852, 5858), True, 'import synapse.lib.version as s_version\n'), ((5910, 6029), 'synapse.exc.BadTypeValu', 's_exc.BadTypeValu', ([], {'valu': 'valu', 'name': 
'"""bruteVersionStr"""', 'mesg': '"""Unable to brute force version parts out of the string"""'}), "(valu=valu, name='bruteVersionStr', mesg=\n 'Unable to brute force version parts out of the string')\n", (5927, 6029), True, 'import synapse.exc as s_exc\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""Unit tests."""
import os
import unittest
from copy import copy
from webui.app import create_app
class TestRoutes(unittest.TestCase):
"""Test routes."""
ignore_routes = ('/static/<path:filename>',)
ignore_end_patterns = ('>',)
def setUp(self):
"""Set up: Put Flask app in test mode."""
app = create_app()
self.initial_app = copy(app)
app.testing = True
self.app = app.test_client()
@staticmethod
def valid_route(route):
"""Validate route.
Args:
route (str): Route url pattern.
Returns:
bool: True if valid, else False.
"""
if route in TestRoutes.ignore_routes \
or route.endswith(TestRoutes.ignore_end_patterns):
return False
return True
def test_routes(self):
"""Smoke test routes to ensure no runtime errors.."""
routes = [route.rule for route in self.initial_app.url_map.iter_rules()
if self.valid_route(route.rule)]
for route in routes:
self.app.get(route)
if __name__ == '__main__':
from test.utils.doctest_unittest_runner import doctest_unittest_runner
TEST_DIR = os.path.dirname(os.path.realpath(__file__)) + '/'
doctest_unittest_runner(test_dir=TEST_DIR, relative_path_to_root='../',
package_names=['webui', 'test'])
| [
"os.path.realpath",
"copy.copy",
"test.utils.doctest_unittest_runner.doctest_unittest_runner",
"webui.app.create_app"
] | [((1314, 1422), 'test.utils.doctest_unittest_runner.doctest_unittest_runner', 'doctest_unittest_runner', ([], {'test_dir': 'TEST_DIR', 'relative_path_to_root': '"""../"""', 'package_names': "['webui', 'test']"}), "(test_dir=TEST_DIR, relative_path_to_root='../',\n package_names=['webui', 'test'])\n", (1337, 1422), False, 'from test.utils.doctest_unittest_runner import doctest_unittest_runner\n'), ((378, 390), 'webui.app.create_app', 'create_app', ([], {}), '()\n', (388, 390), False, 'from webui.app import create_app\n'), ((418, 427), 'copy.copy', 'copy', (['app'], {}), '(app)\n', (422, 427), False, 'from copy import copy\n'), ((1276, 1302), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (1292, 1302), False, 'import os\n')] |
#!/usr/bin/env python
# Copyright 1996-2021 Cyberbotics Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test that the C, C++ and shader source code is compliant with ClangFormat."""
import unittest
import difflib
import os
import subprocess
from io import open
from distutils.spawn import find_executable
class TestClangFormat(unittest.TestCase):
"""Unit test for ClangFormat compliance."""
def setUp(self):
"""Set up called before each test."""
self.WEBOTS_HOME = os.environ['WEBOTS_HOME']
def _runClangFormat(self, f):
"""Run clang format on 'f' file."""
return subprocess.check_output(['clang-format', '-style=file', f])
def test_clang_format_is_correctly_installed(self):
"""Test ClangFormat is correctly installed."""
self.assertTrue(
find_executable('clang-format') is not None,
msg='ClangFormat is not installed on this computer.'
)
clangFormatConfigFile = self.WEBOTS_HOME + os.sep + '.clang-format'
self.assertTrue(
os.path.exists(clangFormatConfigFile),
msg=clangFormatConfigFile + ' not found.'
)
def test_sources_are_clang_format_compliant(self):
"""Test that sources are ClangFormat compliant."""
directories = [
'include/controller',
'projects',
'resources/projects',
'resources/wren/shaders',
'tests',
'include/wren',
'src/controller/c',
'src/controller/cpp',
'src/license/sign',
'src/webots',
'src/wren'
]
skippedPaths = [
'projects/default/controllers/ros/include',
'projects/robots/gctronic/e-puck/transfer',
'projects/robots/mobsya/thymio/controllers/thymio2_aseba/aseba',
'projects/robots/mobsya/thymio/libraries/dashel',
'projects/robots/mobsya/thymio/libraries/dashel-src',
'projects/robots/robotis/darwin-op/libraries/libssh',
'projects/robots/robotis/darwin-op/libraries/libzip',
'projects/robots/robotis/darwin-op/libraries/robotis-op2/robotis',
'projects/robots/robotis/darwin-op/remote_control/libjpeg-turbo',
'projects/vehicles/controllers/ros_automobile/include',
'src/webots/external'
]
skippedFiles = [
'projects/robots/robotis/darwin-op/plugins/remote_controls/robotis-op2_tcpip/stb_image.h'
]
skippedDirectories = [
'build',
'python',
'java'
]
extensions = ['c', 'h', 'cpp', 'hpp', 'cc', 'hh', 'c++', 'h++', 'vert', 'frag']
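        # When tests/sources/modified_files.txt exists, only the files listed there are
        # checked; otherwise every matching source file under `directories` is walked.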
modified_files = os.path.join(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt')
sources = []
if os.path.isfile(modified_files):
with open(modified_files, 'r') as file:
for line in file:
line = line.strip()
extension = os.path.splitext(line)[1][1:].lower()
if extension not in extensions:
continue
found = False
for directory in directories:
if line.startswith(directory):
found = True
break
if not found:
continue
found = False
for directory in skippedPaths + skippedFiles:
if line.startswith(directory):
found = True
break
if found:
continue
for directory in skippedDirectories:
currentDirectories = line.split(os.sep)
if directory in currentDirectories:
found = True
if found:
continue
sources.append(line.replace('/', os.sep))
else:
for directory in directories:
path = self.WEBOTS_HOME + os.sep + directory.replace('/', os.sep)
for rootPath, dirNames, fileNames in os.walk(path):
shouldContinue = False
for path in skippedPaths:
if rootPath.startswith(self.WEBOTS_HOME + os.sep + path.replace('/', os.sep)):
shouldContinue = True
break
for directory in skippedDirectories:
currentDirectories = rootPath.replace(self.WEBOTS_HOME, '').split(os.sep)
if directory in currentDirectories:
shouldContinue = True
break
if shouldContinue:
continue
for fileName in fileNames:
extension = os.path.splitext(fileName)[1][1:].lower()
if extension not in extensions:
continue
path = os.path.normpath(os.path.join(rootPath, fileName))
skipFile = False
for file in skippedFiles:
if os.path.normpath((self.WEBOTS_HOME + os.sep + file.replace('/', os.sep))) == path:
skipFile = True
break
if not skipFile:
sources.append(path)
curdir = os.getcwd()
os.chdir(self.WEBOTS_HOME)
for source in sources:
diff = ''
with open(source, encoding='utf8') as file:
try:
for line in difflib.context_diff(self._runClangFormat(source).decode('utf-8').splitlines(),
file.read().splitlines()):
diff += line + '\n'
except UnicodeDecodeError:
self.assertTrue(False, msg='utf-8 decode problem in %s' % source)
self.assertTrue(
len(diff) == 0,
msg='Source file "%s" is not compliant with ClangFormat:\n\nDIFF:%s' % (source, diff)
)
os.chdir(curdir)
if __name__ == '__main__':
unittest.main()
| [
"subprocess.check_output",
"os.path.exists",
"distutils.spawn.find_executable",
"os.path.join",
"os.path.splitext",
"io.open",
"os.getcwd",
"os.path.isfile",
"os.chdir",
"unittest.main",
"os.walk"
] | [((6922, 6937), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6935, 6937), False, 'import unittest\n'), ((1123, 1182), 'subprocess.check_output', 'subprocess.check_output', (["['clang-format', '-style=file', f]"], {}), "(['clang-format', '-style=file', f])\n", (1146, 1182), False, 'import subprocess\n'), ((3239, 3311), 'os.path.join', 'os.path.join', (['self.WEBOTS_HOME', '"""tests"""', '"""sources"""', '"""modified_files.txt"""'], {}), "(self.WEBOTS_HOME, 'tests', 'sources', 'modified_files.txt')\n", (3251, 3311), False, 'import os\n'), ((3344, 3374), 'os.path.isfile', 'os.path.isfile', (['modified_files'], {}), '(modified_files)\n', (3358, 3374), False, 'import os\n'), ((6129, 6140), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (6138, 6140), False, 'import os\n'), ((6149, 6175), 'os.chdir', 'os.chdir', (['self.WEBOTS_HOME'], {}), '(self.WEBOTS_HOME)\n', (6157, 6175), False, 'import os\n'), ((6872, 6888), 'os.chdir', 'os.chdir', (['curdir'], {}), '(curdir)\n', (6880, 6888), False, 'import os\n'), ((1565, 1602), 'os.path.exists', 'os.path.exists', (['clangFormatConfigFile'], {}), '(clangFormatConfigFile)\n', (1579, 1602), False, 'import os\n'), ((1332, 1363), 'distutils.spawn.find_executable', 'find_executable', (['"""clang-format"""'], {}), "('clang-format')\n", (1347, 1363), False, 'from distutils.spawn import find_executable\n'), ((3393, 3418), 'io.open', 'open', (['modified_files', '"""r"""'], {}), "(modified_files, 'r')\n", (3397, 3418), False, 'from io import open\n'), ((4769, 4782), 'os.walk', 'os.walk', (['path'], {}), '(path)\n', (4776, 4782), False, 'import os\n'), ((6246, 6275), 'io.open', 'open', (['source'], {'encoding': '"""utf8"""'}), "(source, encoding='utf8')\n", (6250, 6275), False, 'from io import open\n'), ((5697, 5729), 'os.path.join', 'os.path.join', (['rootPath', 'fileName'], {}), '(rootPath, fileName)\n', (5709, 5729), False, 'import os\n'), ((3534, 3556), 'os.path.splitext', 'os.path.splitext', (['line'], {}), '(line)\n', (3550, 3556), False, 'import os\n'), ((5514, 5540), 'os.path.splitext', 'os.path.splitext', (['fileName'], {}), '(fileName)\n', (5530, 5540), False, 'import os\n')] |
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""shell tests."""
import mock
import os
import unittest
from pyfakefs import fake_filesystem_unittest
from system import environment
from system import shell
from tests.test_libs import helpers as test_helpers
from tests.test_libs import test_utils
class RemoveEmptyFilesTest(fake_filesystem_unittest.TestCase):
"""Tests for remove_empty_files."""
def setUp(self):
# FIXME: Add support for Windows.
if not environment.is_posix():
self.skipTest('Process tests are only applicable for posix platforms.')
test_utils.set_up_pyfakefs(self)
def test_remove(self):
"""Test remove."""
self.fs.CreateFile('/test/aa/bb.txt', contents='s')
self.fs.CreateFile('/test/aa/cc.txt', contents='')
self.fs.CreateFile('/test/aa/aa/dd.txt', contents='s')
self.fs.CreateFile('/test/aa/aa/aa.txt', contents='')
shell.remove_empty_files('/test')
self.assertTrue(os.path.exists('/test/aa/bb.txt'))
self.assertTrue(os.path.exists('/test/aa/aa/dd.txt'))
self.assertFalse(os.path.exists('/test/aa/cc.txt'))
self.assertFalse(os.path.exists('/test/aa/aa/aa.txt'))
def test_ignore_file(self):
self.fs.CreateFile('/test/aa/cc.txt', contents='')
shell.remove_empty_files('/test/aa/cc.txt')
self.assertTrue(os.path.exists('/test/aa/cc.txt'))
@mock.patch('os.remove', autospec=True)
def test_exception(self, mock_remove):
# bypass pyfakefs's os.remove.
os.remove = mock_remove
mock_remove.side_effect = OSError()
self.fs.CreateFile('/test/aa/cc.txt', contents='')
shell.remove_empty_files('/test')
self.assertTrue(os.path.exists('/test/aa/cc.txt'))
class RemoveDirectoryTest(unittest.TestCase):
"""Tests for remove_directory."""
def setUp(self):
test_helpers.patch(self, [
'os.chmod',
'os.mkdir',
'os.path.exists',
'os.system',
'system.environment.platform',
'metrics.logs.log_error',
'metrics.logs.log_warn',
'shutil.rmtree',
])
def _test_remove_os_specific(self, platform, recreate, raise_mkdir_error):
"""Helper for testing removing dir with os-specific command."""
self.mock.platform.return_value = platform
self.mock.exists.side_effect = [True, False, False]
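    # The directory reads as existing on the first check and as removed on the later checks.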
if raise_mkdir_error:
self.mock.mkdir.side_effect = OSError()
result = shell.remove_directory('dir', recreate=recreate)
if recreate:
self.assertEqual(not raise_mkdir_error, result)
else:
self.assertTrue(result)
self.mock.rmtree.assert_has_calls([])
if recreate:
self.mock.mkdir.assert_has_calls([mock.call('dir')])
else:
self.mock.mkdir.assert_has_calls([])
def test_remove_os_specific_windows(self):
"""Test remove with os-specific command on windows."""
self._test_remove_os_specific('WINDOWS', True, False)
self.mock.system.assert_has_calls([mock.call('rd /s /q "dir" > nul 2>&1')])
def test_remove_os_specific_non_windows(self):
"""Test remove with os-specific command on non-windows."""
self._test_remove_os_specific('LINUX', True, False)
self.mock.system.assert_has_calls(
[mock.call('rm -rf "dir" > /dev/null 2>&1')])
def test_remove_without_recreate(self):
"""Test remove without recreate."""
self._test_remove_os_specific('LINUX', False, True)
def test_remove_with_mkdir_error(self):
"""Test remove when mkdir errors."""
self._test_remove_os_specific('LINUX', True, True)
def test_remove_shutil_success(self):
"""Test remove with shutil."""
self.mock.exists.side_effect = [True, True, False]
self.assertTrue(shell.remove_directory('dir'))
self.mock.system.assert_has_calls(
[mock.call('rm -rf "dir" > /dev/null 2>&1')])
self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)])
def test_remove_shutil_failure(self):
"""Test remove with shutil but fails."""
self.mock.exists.side_effect = [True, True, True]
self.assertFalse(shell.remove_directory('dir'))
self.mock.log_error.assert_has_calls(
[mock.call('Failed to clear directory dir.')])
self.assertEqual(0, self.mock.log_warn.call_count)
self.mock.system.assert_has_calls(
[mock.call('rm -rf "dir" > /dev/null 2>&1')])
self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)])
def test_remove_shutil_failure_ignore_errors(self):
self.mock.exists.side_effect = [True, True, True]
self.assertFalse(shell.remove_directory('dir', ignore_errors=True))
self.mock.log_warn.assert_has_calls(
[mock.call('Failed to clear directory dir.')])
self.assertEqual(0, self.mock.log_error.call_count)
self.mock.system.assert_has_calls(
[mock.call('rm -rf "dir" > /dev/null 2>&1')])
self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)])
def test_remove_shutil_onerror(self):
"""Test shutil invoking onerror."""
self.mock.exists.side_effect = [True, True, False]
self.assertTrue(shell.remove_directory('dir'))
self.mock.system.assert_has_calls(
[mock.call('rm -rf "dir" > /dev/null 2>&1')])
self.mock.rmtree.assert_has_calls([mock.call('dir', onerror=mock.ANY)])
onerror = self.mock.rmtree.call_args[1]['onerror']
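    # Invoke the captured onerror handler directly: it should chmod the failing path
    # and then retry the original function.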
fake_fn = mock.MagicMock()
fake_fn.side_effect = OSError()
onerror(fake_fn, 'dir/child', ImportError())
self.mock.chmod.assert_has_calls([mock.call('dir/child', 0o750)])
fake_fn.assert_has_calls([mock.call('dir/child')])
class GetDirectoryFileCount(fake_filesystem_unittest.TestCase):
"""Tests for get_directory_file_count."""
def setUp(self):
test_utils.set_up_pyfakefs(self)
def test(self):
"""Test get_directory_file_count."""
self.fs.CreateFile('/test/aa/bb.txt', contents='abc')
self.fs.CreateFile('/test/aa/cc.txt', contents='def')
self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi')
self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t')
self.assertEqual(shell.get_directory_file_count('/test/aa'), 4)
class GetDirectorySizeTest(fake_filesystem_unittest.TestCase):
"""Tests for get_directory_size."""
def setUp(self):
test_utils.set_up_pyfakefs(self)
def test(self):
"""Test get_directory_size."""
self.fs.CreateFile('/test/aa/bb.txt', contents='abc')
self.fs.CreateFile('/test/aa/cc.txt', contents='def')
self.fs.CreateFile('/test/aa/aa/aa.txt', contents='ghi')
self.fs.CreateFile('/test/aa/aa/dd.txt', contents='t')
self.assertEqual(shell.get_directory_size('/test/aa'), 10)
class WhichTest(fake_filesystem_unittest.TestCase):
"""Tests for which (shutil.which)."""
def setUp(self):
# FIXME: Add support for Windows.
if not environment.is_posix():
self.skipTest('Which test is only supported on posix platforms.')
def test(self):
self.assertEqual('/bin/ls', shell.which('ls'))
class ClearSystemTempDirectoryTest(fake_filesystem_unittest.TestCase):
"""Tests for clear_system_temp_directory."""
def setUp(self):
test_helpers.patch(self, [
'tempfile.gettempdir',
])
self.mock.gettempdir.return_value = '/tmp'
test_utils.set_up_pyfakefs(self)
def test(self):
"""Test clear_system_temp_directory works as expected."""
self.fs.CreateFile('/tmp/aa/bb.txt', contents='abc')
self.fs.CreateFile('/tmp/cc/dd/ee.txt', contents='def')
self.fs.CreateDirectory('/tmp/ff/gg')
self.fs.CreateDirectory('/tmp/hh')
self.fs.CreateDirectory('/unrelated')
self.fs.CreateFile('/unrelated/zz.txt', contents='zzz')
os.symlink('/unrelated/zz.txt', '/tmp/hh/gg.txt')
os.symlink('/unrelated', '/tmp/ii')
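    # Clearing /tmp must delete its contents without following symlinks into /unrelated.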
shell.clear_system_temp_directory()
self.assertTrue(os.path.exists('/tmp'))
self.assertTrue(os.path.exists('/unrelated'))
self.assertEqual(shell.get_directory_file_count('/tmp'), 0)
self.assertEqual(shell.get_directory_file_count('/unrelated'), 1)
self.assertFalse(os.path.exists('/tmp/aa/bb.txt'))
self.assertFalse(os.path.exists('/tmp/cc/dd/ee.txt'))
self.assertFalse(os.path.exists('/tmp/ff/gg'))
self.assertFalse(os.path.exists('/tmp/hh'))
class GetExecuteCommand(unittest.TestCase):
"""Test that the correct commands to run files are returned."""
def call_and_assert_helper(self, expected_command, file_to_execute):
"""Call get_execute_command on |file_to_execute| and assert result equal to
|expected_command|."""
self.assertEqual(expected_command,
shell.get_execute_command(file_to_execute))
def test_standard_script(self):
"""Test correct command returned for python script."""
script_name = 'script.py'
expected_command = 'python %s' % script_name
self.call_and_assert_helper(expected_command, script_name)
def test_java(self):
"""Test correct launch command returned for Java class."""
script_name = 'javaclassfile.class'
expected_command = 'java javaclassfile'
self.call_and_assert_helper(expected_command, script_name)
def test_binary(self):
"""Test correct launch command returned for a binary (executable) file."""
executable_name = 'executable'
self.call_and_assert_helper(executable_name, executable_name)
executable_name += '.exe'
self.call_and_assert_helper(executable_name, executable_name)
class GetInterpreter(unittest.TestCase):
  """Test that the correct interpreters to execute a file are returned."""
  def test_get_interpreted_file(self):
    """Test correct interpreter is returned for a file that needs one."""
    self.assertEqual('python', shell.get_interpreter('run.py'))
  def test_get_non_interpreter_file(self):
    """Test that None is returned for a file that doesn't need one. We don't
    want an empty string, since None is easier to check for. """
    self.assertIsNone(shell.get_interpreter('executable'))
| [
"system.shell.get_directory_size",
"os.path.exists",
"system.shell.remove_empty_files",
"mock.patch",
"system.shell.which",
"tests.test_libs.helpers.patch",
"system.shell.clear_system_temp_directory",
"system.shell.get_execute_command",
"os.symlink",
"system.shell.get_interpreter",
"system.environment.is_posix",
"system.shell.get_directory_file_count",
"mock.call",
"system.shell.remove_directory",
"tests.test_libs.test_utils.set_up_pyfakefs",
"mock.MagicMock"
] | [((1877, 1915), 'mock.patch', 'mock.patch', (['"""os.remove"""'], {'autospec': '(True)'}), "('os.remove', autospec=True)\n", (1887, 1915), False, 'import mock\n'), ((1106, 1138), 'tests.test_libs.test_utils.set_up_pyfakefs', 'test_utils.set_up_pyfakefs', (['self'], {}), '(self)\n', (1132, 1138), False, 'from tests.test_libs import test_utils\n'), ((1421, 1454), 'system.shell.remove_empty_files', 'shell.remove_empty_files', (['"""/test"""'], {}), "('/test')\n", (1445, 1454), False, 'from system import shell\n'), ((1774, 1817), 'system.shell.remove_empty_files', 'shell.remove_empty_files', (['"""/test/aa/cc.txt"""'], {}), "('/test/aa/cc.txt')\n", (1798, 1817), False, 'from system import shell\n'), ((2120, 2153), 'system.shell.remove_empty_files', 'shell.remove_empty_files', (['"""/test"""'], {}), "('/test')\n", (2144, 2153), False, 'from system import shell\n'), ((2317, 2505), 'tests.test_libs.helpers.patch', 'test_helpers.patch', (['self', "['os.chmod', 'os.mkdir', 'os.path.exists', 'os.system',\n 'system.environment.platform', 'metrics.logs.log_error',\n 'metrics.logs.log_warn', 'shutil.rmtree']"], {}), "(self, ['os.chmod', 'os.mkdir', 'os.path.exists',\n 'os.system', 'system.environment.platform', 'metrics.logs.log_error',\n 'metrics.logs.log_warn', 'shutil.rmtree'])\n", (2335, 2505), True, 'from tests.test_libs import helpers as test_helpers\n'), ((2904, 2952), 'system.shell.remove_directory', 'shell.remove_directory', (['"""dir"""'], {'recreate': 'recreate'}), "('dir', recreate=recreate)\n", (2926, 2952), False, 'from system import shell\n'), ((5812, 5828), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (5826, 5828), False, 'import mock\n'), ((6175, 6207), 'tests.test_libs.test_utils.set_up_pyfakefs', 'test_utils.set_up_pyfakefs', (['self'], {}), '(self)\n', (6201, 6207), False, 'from tests.test_libs import test_utils\n'), ((6700, 6732), 'tests.test_libs.test_utils.set_up_pyfakefs', 'test_utils.set_up_pyfakefs', (['self'], {}), '(self)\n', (6726, 6732), False, 'from tests.test_libs import test_utils\n'), ((7560, 7609), 'tests.test_libs.helpers.patch', 'test_helpers.patch', (['self', "['tempfile.gettempdir']"], {}), "(self, ['tempfile.gettempdir'])\n", (7578, 7609), True, 'from tests.test_libs import helpers as test_helpers\n'), ((7677, 7709), 'tests.test_libs.test_utils.set_up_pyfakefs', 'test_utils.set_up_pyfakefs', (['self'], {}), '(self)\n', (7703, 7709), False, 'from tests.test_libs import test_utils\n'), ((8095, 8144), 'os.symlink', 'os.symlink', (['"""/unrelated/zz.txt"""', '"""/tmp/hh/gg.txt"""'], {}), "('/unrelated/zz.txt', '/tmp/hh/gg.txt')\n", (8105, 8144), False, 'import os\n'), ((8149, 8184), 'os.symlink', 'os.symlink', (['"""/unrelated"""', '"""/tmp/ii"""'], {}), "('/unrelated', '/tmp/ii')\n", (8159, 8184), False, 'import os\n'), ((8190, 8225), 'system.shell.clear_system_temp_directory', 'shell.clear_system_temp_directory', ([], {}), '()\n', (8223, 8225), False, 'from system import shell\n'), ((999, 1021), 'system.environment.is_posix', 'environment.is_posix', ([], {}), '()\n', (1019, 1021), False, 'from system import environment\n'), ((1476, 1509), 'os.path.exists', 'os.path.exists', (['"""/test/aa/bb.txt"""'], {}), "('/test/aa/bb.txt')\n", (1490, 1509), False, 'import os\n'), ((1531, 1567), 'os.path.exists', 'os.path.exists', (['"""/test/aa/aa/dd.txt"""'], {}), "('/test/aa/aa/dd.txt')\n", (1545, 1567), False, 'import os\n'), ((1590, 1623), 'os.path.exists', 'os.path.exists', (['"""/test/aa/cc.txt"""'], {}), "('/test/aa/cc.txt')\n", (1604, 1623), False, 'import 
os\n'), ((1646, 1682), 'os.path.exists', 'os.path.exists', (['"""/test/aa/aa/aa.txt"""'], {}), "('/test/aa/aa/aa.txt')\n", (1660, 1682), False, 'import os\n'), ((1838, 1871), 'os.path.exists', 'os.path.exists', (['"""/test/aa/cc.txt"""'], {}), "('/test/aa/cc.txt')\n", (1852, 1871), False, 'import os\n'), ((2174, 2207), 'os.path.exists', 'os.path.exists', (['"""/test/aa/cc.txt"""'], {}), "('/test/aa/cc.txt')\n", (2188, 2207), False, 'import os\n'), ((4171, 4200), 'system.shell.remove_directory', 'shell.remove_directory', (['"""dir"""'], {}), "('dir')\n", (4193, 4200), False, 'from system import shell\n'), ((4532, 4561), 'system.shell.remove_directory', 'shell.remove_directory', (['"""dir"""'], {}), "('dir')\n", (4554, 4561), False, 'from system import shell\n'), ((5014, 5063), 'system.shell.remove_directory', 'shell.remove_directory', (['"""dir"""'], {'ignore_errors': '(True)'}), "('dir', ignore_errors=True)\n", (5036, 5063), False, 'from system import shell\n'), ((5542, 5571), 'system.shell.remove_directory', 'shell.remove_directory', (['"""dir"""'], {}), "('dir')\n", (5564, 5571), False, 'from system import shell\n'), ((6526, 6568), 'system.shell.get_directory_file_count', 'shell.get_directory_file_count', (['"""/test/aa"""'], {}), "('/test/aa')\n", (6556, 6568), False, 'from system import shell\n'), ((7045, 7081), 'system.shell.get_directory_size', 'shell.get_directory_size', (['"""/test/aa"""'], {}), "('/test/aa')\n", (7069, 7081), False, 'from system import shell\n'), ((7250, 7272), 'system.environment.is_posix', 'environment.is_posix', ([], {}), '()\n', (7270, 7272), False, 'from system import environment\n'), ((7397, 7414), 'system.shell.which', 'shell.which', (['"""ls"""'], {}), "('ls')\n", (7408, 7414), False, 'from system import shell\n'), ((8247, 8269), 'os.path.exists', 'os.path.exists', (['"""/tmp"""'], {}), "('/tmp')\n", (8261, 8269), False, 'import os\n'), ((8291, 8319), 'os.path.exists', 'os.path.exists', (['"""/unrelated"""'], {}), "('/unrelated')\n", (8305, 8319), False, 'import os\n'), ((8342, 8380), 'system.shell.get_directory_file_count', 'shell.get_directory_file_count', (['"""/tmp"""'], {}), "('/tmp')\n", (8372, 8380), False, 'from system import shell\n'), ((8406, 8450), 'system.shell.get_directory_file_count', 'shell.get_directory_file_count', (['"""/unrelated"""'], {}), "('/unrelated')\n", (8436, 8450), False, 'from system import shell\n'), ((8476, 8508), 'os.path.exists', 'os.path.exists', (['"""/tmp/aa/bb.txt"""'], {}), "('/tmp/aa/bb.txt')\n", (8490, 8508), False, 'import os\n'), ((8531, 8566), 'os.path.exists', 'os.path.exists', (['"""/tmp/cc/dd/ee.txt"""'], {}), "('/tmp/cc/dd/ee.txt')\n", (8545, 8566), False, 'import os\n'), ((8589, 8617), 'os.path.exists', 'os.path.exists', (['"""/tmp/ff/gg"""'], {}), "('/tmp/ff/gg')\n", (8603, 8617), False, 'import os\n'), ((8640, 8665), 'os.path.exists', 'os.path.exists', (['"""/tmp/hh"""'], {}), "('/tmp/hh')\n", (8654, 8665), False, 'import os\n'), ((9018, 9060), 'system.shell.get_execute_command', 'shell.get_execute_command', (['file_to_execute'], {}), '(file_to_execute)\n', (9043, 9060), False, 'from system import shell\n'), ((10087, 10118), 'system.shell.get_interpreter', 'shell.get_interpreter', (['"""run.py"""'], {}), "('run.py')\n", (10108, 10118), False, 'from system import shell\n'), ((10324, 10359), 'system.shell.get_interpreter', 'shell.get_interpreter', (['"""executable"""'], {}), "('executable')\n", (10345, 10359), False, 'from system import shell\n'), ((3439, 3477), 'mock.call', 'mock.call', (['"""rd /s /q "dir" 
> nul 2>&1"""'], {}), '(\'rd /s /q "dir" > nul 2>&1\')\n', (3448, 3477), False, 'import mock\n'), ((3697, 3739), 'mock.call', 'mock.call', (['"""rm -rf "dir" > /dev/null 2>&1"""'], {}), '(\'rm -rf "dir" > /dev/null 2>&1\')\n', (3706, 3739), False, 'import mock\n'), ((4250, 4292), 'mock.call', 'mock.call', (['"""rm -rf "dir" > /dev/null 2>&1"""'], {}), '(\'rm -rf "dir" > /dev/null 2>&1\')\n', (4259, 4292), False, 'import mock\n'), ((4334, 4368), 'mock.call', 'mock.call', (['"""dir"""'], {'onerror': 'mock.ANY'}), "('dir', onerror=mock.ANY)\n", (4343, 4368), False, 'import mock\n'), ((4614, 4657), 'mock.call', 'mock.call', (['"""Failed to clear directory dir."""'], {}), "('Failed to clear directory dir.')\n", (4623, 4657), False, 'import mock\n'), ((4763, 4805), 'mock.call', 'mock.call', (['"""rm -rf "dir" > /dev/null 2>&1"""'], {}), '(\'rm -rf "dir" > /dev/null 2>&1\')\n', (4772, 4805), False, 'import mock\n'), ((4847, 4881), 'mock.call', 'mock.call', (['"""dir"""'], {'onerror': 'mock.ANY'}), "('dir', onerror=mock.ANY)\n", (4856, 4881), False, 'import mock\n'), ((5115, 5158), 'mock.call', 'mock.call', (['"""Failed to clear directory dir."""'], {}), "('Failed to clear directory dir.')\n", (5124, 5158), False, 'import mock\n'), ((5265, 5307), 'mock.call', 'mock.call', (['"""rm -rf "dir" > /dev/null 2>&1"""'], {}), '(\'rm -rf "dir" > /dev/null 2>&1\')\n', (5274, 5307), False, 'import mock\n'), ((5349, 5383), 'mock.call', 'mock.call', (['"""dir"""'], {'onerror': 'mock.ANY'}), "('dir', onerror=mock.ANY)\n", (5358, 5383), False, 'import mock\n'), ((5621, 5663), 'mock.call', 'mock.call', (['"""rm -rf "dir" > /dev/null 2>&1"""'], {}), '(\'rm -rf "dir" > /dev/null 2>&1\')\n', (5630, 5663), False, 'import mock\n'), ((5705, 5739), 'mock.call', 'mock.call', (['"""dir"""'], {'onerror': 'mock.ANY'}), "('dir', onerror=mock.ANY)\n", (5714, 5739), False, 'import mock\n'), ((5954, 5981), 'mock.call', 'mock.call', (['"""dir/child"""', '(488)'], {}), "('dir/child', 488)\n", (5963, 5981), False, 'import mock\n'), ((6016, 6038), 'mock.call', 'mock.call', (['"""dir/child"""'], {}), "('dir/child')\n", (6025, 6038), False, 'import mock\n'), ((3165, 3181), 'mock.call', 'mock.call', (['"""dir"""'], {}), "('dir')\n", (3174, 3181), False, 'import mock\n')] |
import os
from PIL import Image
import cv2
from os import listdir
from os.path import join
import matplotlib.pyplot as plt
import matplotlib
from matplotlib.colors import LogNorm
from io_utils.io_common import create_folder
from viz_utils.constants import PlotMode, BackgroundType
import pylab
import numpy as np
import cmocean
import shapely
import cartopy.crs as ccrs
import cartopy.feature as cfeature
import cartopy
def select_colormap(field_name):
'''
    Based on the name of the field it chooses a colormap from cmocean
Args:
field_name:
Returns:
'''
if np.any([field_name.find(x) != -1 for x in ('ssh', 'srfhgt', 'adt','surf_el')]):
# cmaps_fields.append(cmocean.cm.deep_r)
return cmocean.cm.curl
elif np.any([field_name.find(x) != -1 for x in ('temp', 'sst', 'temperature')]):
return cmocean.cm.thermal
elif np.any([field_name.find(x) != -1 for x in ('vorticity', 'vort')]):
return cmocean.cm.curl
elif np.any([field_name.find(x) != -1 for x in ('salin', 'sss', 'sal')]):
return cmocean.cm.haline
elif field_name.find('error') != -1:
return cmocean.cm.diff
elif field_name.find('binary') != -1:
return cmocean.cm.oxy
elif np.any([field_name.find(x) != -1 for x in ('u_', 'v_', 'u-vel.', 'v-vel.','velocity')]):
return cmocean.cm.speed
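# Illustrative usage (not part of the original module): the colormap is picked
# purely from keywords in the field name, e.g.
#   select_colormap('water_temp')  -> cmocean.cm.thermal
#   select_colormap('ssh')         -> cmocean.cm.curl
#   select_colormap('salinity')    -> cmocean.cm.haline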
class EOAImageVisualizer:
"""This class makes plenty of plots assuming we are plotting Geospatial data (maps).
It is made to read xarrays, numpy arrays, and numpy arrays in dictionaries
    vizobj = EOAImageVisualizer(disp_images=True, output_folder='output',
                                lats=[lats], lons=[lons])
"""
_COLORS = ['y', 'r', 'c', 'b', 'g', 'w', 'k', 'y', 'r', 'c', 'b', 'g', 'w', 'k']
_figsize = 8
_font_size = 30
_units = ''
_max_imgs_per_row = 4
_mincbar = np.nan # User can set a min and max colorbar values to 'force' same color bar to all plots
_maxcbar = np.nan
_flip_data = True
_eoas_pyutils_path = './eoas_pyutils'# This is the path where the eoas_utils folder is stored with respect to the main project
_contourf = False # When plotting non-regular grids and need precision
_background = BackgroundType.BLUE_MARBLE_LR # Select the background to use
_auto_colormap = True # Selects the colormap based on the name of the field
_show_var_names = False # Includes the name of the field name in the titles
_additional_polygons = [] # MUST BE SHAPELY GEOMETRIES In case we want to include additional polygons in the plots (all of them)
# If you want to add a streamplot of a vector field. It must be a dictionary with keys x,y,u,v
# and optional density, color, cmap, arrowsize, arrowstyle, minlength
_vector_field = None
_norm = None # Use to normalize the colormap. For example with LogNorm
# vizobj = EOAImageVisualizer(disp_images=True, output_folder='output',
# lats=[lats],lons=[lons])
def __init__(self, disp_images=True, output_folder='output',
lats=[-90,90], lons =[-180,180],
projection=ccrs.PlateCarree(), **kwargs):
# All the arguments that are passed to the constructor of the class MUST have its name on it.
self._disp_images = disp_images
self._output_folder = output_folder
self._projection = projection
bbox = self.getExtent(lats, lons)
self._extent = bbox
self._lats = lats
self._lons = lons
self._fig_prop = (bbox[1]-bbox[0])/(bbox[3]-bbox[2])
self._contour_labels = False
for arg_name, arg_value in kwargs.items():
self.__dict__["_" + arg_name] = arg_value
print(self.__dict__["_" + arg_name])
def __getattr__(self, attr):
'''Generic getter for all the properties of the class'''
return self.__dict__["_" + attr]
def __setattr__(self, attr, value):
'''Generic setter for all the properties of the class'''
self.__dict__["_" + attr] = value
def add_colorbar(self, fig, im, ax, show_color_bar, label=""):
# https://matplotlib.org/api/_as_gen/matplotlib.pyplot.colorbar.html
if show_color_bar:
font_size_cbar = self._font_size * .5
# TODO how to make this automatic and works always
cbar = fig.colorbar(im, ax=ax, shrink=.7)
cbar.ax.tick_params(labelsize=font_size_cbar)
if label != "":
cbar.set_label(label, fontsize=font_size_cbar*1.2)
else:
cbar.set_label(self._units, fontsize=font_size_cbar*1.2)
def plot_slice_eoa(self, c_img, ax, cmap='gray', mode=PlotMode.RASTER, mincbar=np.nan, maxcbar=np.nan) -> None:
"""
Plots a 2D img for EOA data.
:param c_img: 2D array
:param ax: geoaxes
:return:
"""
c_ax = ax
if self._flip_data:
origin = 'lower'
else:
origin = 'upper'
if self._background == BackgroundType.CARTO_DEF:
c_ax.stock_img()
else:
if self._background == BackgroundType.BLUE_MARBLE_LR:
img = plt.imread(join(self._eoas_pyutils_path,'viz_utils/imgs/bluemarble.png'))
if self._background == BackgroundType.BLUE_MARBLE_HR:
img = plt.imread(join(self._eoas_pyutils_path,'viz_utils/imgs/bluemarble_5400x2700.jpg'))
if self._background == BackgroundType.TOPO:
img = plt.imread(join(self._eoas_pyutils_path,'viz_utils/imgs/etopo.png'))
if self._background == BackgroundType.BATHYMETRY:
img = plt.imread(join(self._eoas_pyutils_path,'viz_utils/imgs/bathymetry_3600x1800.jpg'))
c_ax.imshow(img, origin='upper', extent=(-180,180,-90,90), transform=ccrs.PlateCarree())
if mode == PlotMode.RASTER or mode == PlotMode.MERGED:
if self._contourf:
im = c_ax.contourf(self._lons, self._lats, c_img, num_colors=255, cmap='inferno', extent=self._extent)
else:
if np.isnan(mincbar):
im = c_ax.imshow(c_img, extent=self._extent, origin=origin, cmap=cmap, transform=self._projection, norm=self._norm)
else:
im = c_ax.imshow(c_img, extent=self._extent, origin=origin, cmap=cmap, vmin=mincbar, vmax=maxcbar, transform=self._projection, norm=self._norm)
if mode == PlotMode.CONTOUR or mode == PlotMode.MERGED:
c_ax.set_extent(self.getExtent(list(self._lats), list(self._lons)))
if mode == PlotMode.CONTOUR:
im = c_ax.contour(c_img, extent=self._extent, transform=self._projection)
if mode == PlotMode.MERGED:
if self._contour_labels:
c_ax.contour(c_img, self._contour_labels, colors='r', extent=self._extent, transform=self._projection)
else:
c_ax.contour(c_img, extent=self._extent, transform=self._projection)
if len(self._additional_polygons) > 0:
pol_lats = []
pol_lons = []
for c_polygon in self._additional_polygons:
if isinstance(c_polygon, shapely.geometry.linestring.LineString):
x,y = c_polygon.xy
elif isinstance(c_polygon, shapely.geometry.polygon.Polygon):
x, y = c_polygon.exterior.xy
pol_lats += y
pol_lons += x
c_ax.plot(x,y, transform=self._projection, c='r')
# Adds a threshold to the plot to see the polygons
c_ax.set_extent(self.getExtent(list(self._lats) + pol_lats, list(self._lons) + pol_lons, 0.5))
        if self._vector_field is not None:
try:
u = self._vector_field['u']
v = self._vector_field['v']
x = self._vector_field['x']
y = self._vector_field['y']
vec_keys = self._vector_field.keys()
c = 'r'
density = 1
linewidth = 3
vec_cmap = cmocean.cm.solar
if 'color' in vec_keys:
c = self._vector_field['color']
if 'density' in vec_keys:
density = self._vector_field['density']
if 'linewidth' in vec_keys:
linewidth = self._vector_field['linewidth']
if 'cmap' in vec_keys:
vec_cmap = self._vector_field['cmap']
c_ax.set_extent(self.getExtent(list(self._lats), list(self._lons)))
c_ax.streamplot(x, y, u, v, transform=self._projection, density=density, color=c,
cmap=vec_cmap, linewidth=linewidth)
except Exception as e:
print(F"Couldn't add vector field e:{e}")
gl = c_ax.gridlines(draw_labels=True, color='grey', alpha=0.5, linestyle='--')
# gl.xlabel_style = {'size': self._font_size/2, 'color': '#aaaaaa', 'weight':'bold'}
font_coords = {'size': self._font_size*.6}
gl.xlabel_style = font_coords
gl.ylabel_style = font_coords
gl.top_labels = False
gl.right_labels = False
return im
def get_proper_size(self, rows, cols):
"""
Obtains the proper size for a figure.
:param rows: how many rows will the figure have
        :param cols: how many cols will the figure have
:param prop: Proportion is the proportion to use w/h
:return:
"""
if rows == 1:
return self._figsize * cols * self._fig_prop, self._figsize
else:
return self._figsize * cols * self._fig_prop, self._figsize * rows
def _close_figure(self):
"""Depending on what is disp_images, the figures are displayed or just closed"""
if self._disp_images:
plt.show()
else:
plt.close()
def getExtent(self, lats, lons, expand_ext=0.0):
'''
        Obtains the bbox of the coordinates. If expand_ext is given, the bbox is
        expanded by that amount in every direction.
        Args:
            lats:
            lons:
            expand_ext:
Returns:
'''
minLat = np.amin(lats) - expand_ext
maxLat = np.amax(lats) + expand_ext
minLon = np.amin(lons) - expand_ext
maxLon = np.amax(lons) + expand_ext
bbox = (minLon, maxLon, minLat, maxLat)
return bbox
def xr_summary(self, ds):
""" Prints a summary of the netcdf (global attributes, variables, etc)
:param ds:
:return:
"""
print("\n========== Global attributes =========")
for name in ds.attrs:
print(F"{name} = {getattr(ds, name)}")
print("\n========== Dimensions =========")
for name in ds.dims:
print(F"{name}: {ds[name].shape}")
print("\n========== Coordinates =========")
for name in ds.coords:
print(F"{name}: {ds[name].shape}")
print("\n========== Variables =========")
for cur_variable_name in ds.variables:
cur_var = ds[cur_variable_name]
print(F"{cur_variable_name}: {cur_var.dims} {cur_var.shape}")
def nc_summary(self, ds):
""" Prints a summary of the netcdf (global attributes, variables, etc)
:param ds:
:return:
"""
print("\n========== Global attributes =========")
for name in ds.ncattrs():
print(F"{name} = {getattr(ds, name)}")
print("\n========== Variables =========")
netCDFvars = ds.variables
for cur_variable_name in netCDFvars.keys():
cur_var = ds.variables[cur_variable_name]
print(F"Dimensions for {cur_variable_name}: {cur_var.dimensions} {cur_var.shape}")
def add_roads(self, ax):
# Names come from: https://www.naturalearthdata.com/features/
# -- Add states
roads = cfeature.NaturalEarthFeature(
category='cultural',
name='roads',
scale='10m',
facecolor='none')
ax.add_feature(roads, edgecolor='black')
return ax
def add_states(self, ax):
# Names come from: https://www.naturalearthdata.com/features/
# -- Add states
states_provinces = cfeature.NaturalEarthFeature(
category='cultural',
name='admin_1_states_provinces_lines',
scale='50m',
facecolor='none')
ax.add_feature(states_provinces, edgecolor='gray')
return ax
def plot_scatter_data(self, lats=None, lons=None, bbox=None, s=1, c='blue', cmap='plasma', title=''):
'''
This function plots points in a map
:param bbox:
:return:
'''
if bbox is None:
bbox = (-180, 180, -90, 90)
if lats is None:
lats = self.lats
if lons is None:
lons = self.lons
fig, ax = plt.subplots(1, 1, figsize=(self._figsize, self._figsize), subplot_kw={'projection': ccrs.PlateCarree()})
        ax.set_extent(bbox)  # If we do not set this, it will crop it to the limits of the locations
ax.gridlines()
im = ax.scatter(lons, lats, s=s, c=c, cmap=cmap)
fig.colorbar(im, ax=ax, shrink=0.7)
ax.coastlines()
plt.title(title)
plt.show()
def plot_3d_data_npdict(self, np_variables:list, var_names:list, z_levels= [], title='',
file_name_prefix='', cmap=None, z_names = [],
show_color_bar=True, plot_mode=PlotMode.RASTER, mincbar=np.nan, maxcbar=np.nan):
"""
Plots multiple z_levels for multiple fields.
It uses rows for each depth, and columns for each variable
"""
create_folder(self._output_folder)
orig_cmap = cmap
# If the user do not requires any z-leve, then all are plotted
if len(z_levels) == 0:
z_levels = range(np_variables[var_names[0]].shape[0])
cols = np.min((self._max_imgs_per_row, len(var_names)))
if cols == len(var_names):
rows = len(z_levels)
else:
rows = int(len(z_levels) * np.ceil(len(var_names)/cols))
fig, _axs = plt.subplots(rows, cols,
figsize=self.get_proper_size(rows, cols),
subplot_kw={'projection': self._projection})
for c_zlevel, c_slice in enumerate(z_levels): # Iterates over the z-levels
# Verify the index of the z_levels are the original ones.
if len(z_names) != 0:
c_slice_txt = z_names[c_slice]
else:
c_slice_txt = c_slice
c_mincbar = np.nan
c_maxcbar = np.nan
for idx_var, c_var in enumerate(var_names): # Iterate over the fields
if rows*cols == 1: # Single figure
ax = _axs
else:
ax = _axs.flatten()[c_zlevel*len(var_names) + idx_var]
# Here we chose the min and max colorbars for each field
if not(np.all(np.isnan(mincbar))):
if type(mincbar) is list:
c_mincbar = mincbar[idx_var]
else:
c_mincbar = mincbar
if not(np.all(np.isnan(maxcbar))):
if type(mincbar) is list:
c_maxcbar = maxcbar[idx_var]
else:
c_maxcbar = maxcbar
# By default we select the colorbar from the name of the variable
if self._auto_colormap and orig_cmap is None:
cmap = select_colormap(c_var)
else:
# If there is an array of colormaps we select the one for this field
if type(orig_cmap) is list:
cmap = orig_cmap[idx_var]
else:
# If it is just one cmap, then we use it for all the fields
cmap = orig_cmap
im = self.plot_slice_eoa(np_variables[c_var][c_slice,:,:], ax, cmap=cmap, mode=plot_mode,
mincbar=c_mincbar, maxcbar=c_maxcbar)
if self._show_var_names:
c_title = F'{var_names[idx_var]} {title}'
else:
c_title = F'{title}'
if len(z_levels) > 1:
c_title += F"Z - level: {c_slice_txt}"
ax.set_title(c_title, fontsize=self._font_size)
self.add_colorbar(fig, im, ax, show_color_bar)
plt.tight_layout(pad=.5)
file_name = F'{file_name_prefix}'
pylab.savefig(join(self._output_folder, F'{file_name}.png'), bbox_inches='tight')
self._close_figure()
def plot_2d_data_xr(self, np_variables:list, var_names:list, title='',
file_name_prefix='', cmap='viridis', show_color_bar=True, plot_mode=PlotMode.RASTER, mincbar=np.nan, maxcbar=np.nan):
'''
Wrapper function to receive raw 2D numpy data. It calls the 'main' function for 3D plotting
:param np_variables:
:param var_names:
:param title:
:param file_name_prefix:
:param cmap:
:param flip_data:
:param rot_90:
:param show_color_bar:
:param plot_mode:
:param mincbar:
:param maxcbar:
:return:
'''
npdict_3d = {}
for i, field_name in enumerate(var_names):
npdict_3d[field_name] = np.expand_dims(np_variables[field_name], axis=0)
self.plot_3d_data_npdict(npdict_3d, var_names, z_levels=[0], title=title,
file_name_prefix=file_name_prefix, cmap=cmap, z_names = [],
show_color_bar=show_color_bar, plot_mode=plot_mode, mincbar=mincbar, maxcbar=maxcbar)
def plot_2d_data_np(self, np_variables:list, var_names:list, title='',
file_name_prefix='', cmap=None, flip_data=False,
rot_90=False, show_color_bar=True, plot_mode=PlotMode.RASTER, mincbar=np.nan, maxcbar=np.nan):
'''
Wrapper function to receive raw 2D numpy data. It calls the 'main' function for 3D plotting
:param np_variables: Numpy variables. They can be with shape [fields, x, y] or just a single field with shape [x,y]
:param var_names:
:param title:
:param file_name_prefix:
:param cmap:
:param flip_data:
:param rot_90:
:param show_color_bar:
:param plot_mode:
:param mincbar:
:param maxcbar:
:return:
'''
npdict_3d = {}
for i, field_name in enumerate(var_names):
if len(np_variables.shape) == 3:
c_np_data = np_variables[i, :, :]
else:
c_np_data = np_variables # Single field
if rot_90:
c_np_data = np.rot90(c_np_data)
if flip_data:
c_np_data = np.flip(np.flip(c_np_data), axis=1)
npdict_3d[field_name] = np.expand_dims(c_np_data, axis=0)
self.plot_3d_data_npdict(npdict_3d, var_names, z_levels=[0], title=title,
file_name_prefix=file_name_prefix, cmap=cmap, z_names = [],
show_color_bar=show_color_bar, plot_mode=plot_mode, mincbar=mincbar, maxcbar=maxcbar)
def make_video_from_images(self, input_folder, output_file, fps=24):
files = listdir(input_folder)
files.sort()
print(F"Generating video file: {output_file}")
out_video = -1
for i, file_name in enumerate(files[0:36]):
if i % 10 == 0:
print(F"Adding file # {i}: {file_name}")
c_file = join(input_folder, file_name)
im = Image.open(c_file)
np_im = np.asarray(im)[:, :, :3]
if i == 0:
video_size = (np_im.shape[1], np_im.shape[0])
out_video = cv2.VideoWriter(output_file, cv2.VideoWriter_fourcc(*'mp4v'), fps, video_size, True)
out_video.write(np_im[:, :, ::-1])
out_video.release()
cv2.destroyAllWindows()
print("Done! yeah babe!") | [
"io_utils.io_common.create_folder",
"cv2.destroyAllWindows",
"numpy.rot90",
"numpy.flip",
"os.listdir",
"numpy.asarray",
"matplotlib.pyplot.close",
"cv2.VideoWriter_fourcc",
"numpy.amin",
"cartopy.crs.PlateCarree",
"numpy.isnan",
"matplotlib.pyplot.title",
"matplotlib.pyplot.show",
"PIL.Image.open",
"os.path.join",
"matplotlib.pyplot.tight_layout",
"numpy.expand_dims",
"numpy.amax",
"cartopy.feature.NaturalEarthFeature"
] | [((3165, 3183), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (3181, 3183), True, 'import cartopy.crs as ccrs\n'), ((12073, 12171), 'cartopy.feature.NaturalEarthFeature', 'cfeature.NaturalEarthFeature', ([], {'category': '"""cultural"""', 'name': '"""roads"""', 'scale': '"""10m"""', 'facecolor': '"""none"""'}), "(category='cultural', name='roads', scale='10m',\n facecolor='none')\n", (12101, 12171), True, 'import cartopy.feature as cfeature\n'), ((12437, 12561), 'cartopy.feature.NaturalEarthFeature', 'cfeature.NaturalEarthFeature', ([], {'category': '"""cultural"""', 'name': '"""admin_1_states_provinces_lines"""', 'scale': '"""50m"""', 'facecolor': '"""none"""'}), "(category='cultural', name=\n 'admin_1_states_provinces_lines', scale='50m', facecolor='none')\n", (12465, 12561), True, 'import cartopy.feature as cfeature\n'), ((13453, 13469), 'matplotlib.pyplot.title', 'plt.title', (['title'], {}), '(title)\n', (13462, 13469), True, 'import matplotlib.pyplot as plt\n'), ((13478, 13488), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (13486, 13488), True, 'import matplotlib.pyplot as plt\n'), ((13914, 13948), 'io_utils.io_common.create_folder', 'create_folder', (['self._output_folder'], {}), '(self._output_folder)\n', (13927, 13948), False, 'from io_utils.io_common import create_folder\n'), ((16832, 16857), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {'pad': '(0.5)'}), '(pad=0.5)\n', (16848, 16857), True, 'import matplotlib.pyplot as plt\n'), ((19744, 19765), 'os.listdir', 'listdir', (['input_folder'], {}), '(input_folder)\n', (19751, 19765), False, 'from os import listdir\n'), ((20417, 20440), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (20438, 20440), False, 'import cv2\n'), ((9968, 9978), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (9976, 9978), True, 'import matplotlib.pyplot as plt\n'), ((10005, 10016), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (10014, 10016), True, 'import matplotlib.pyplot as plt\n'), ((10333, 10346), 'numpy.amin', 'np.amin', (['lats'], {}), '(lats)\n', (10340, 10346), True, 'import numpy as np\n'), ((10377, 10390), 'numpy.amax', 'np.amax', (['lats'], {}), '(lats)\n', (10384, 10390), True, 'import numpy as np\n'), ((10421, 10434), 'numpy.amin', 'np.amin', (['lons'], {}), '(lons)\n', (10428, 10434), True, 'import numpy as np\n'), ((10465, 10478), 'numpy.amax', 'np.amax', (['lons'], {}), '(lons)\n', (10472, 10478), True, 'import numpy as np\n'), ((16921, 16966), 'os.path.join', 'join', (['self._output_folder', 'f"""{file_name}.png"""'], {}), "(self._output_folder, f'{file_name}.png')\n", (16925, 16966), False, 'from os.path import join\n'), ((17777, 17825), 'numpy.expand_dims', 'np.expand_dims', (['np_variables[field_name]'], {'axis': '(0)'}), '(np_variables[field_name], axis=0)\n', (17791, 17825), True, 'import numpy as np\n'), ((19343, 19376), 'numpy.expand_dims', 'np.expand_dims', (['c_np_data'], {'axis': '(0)'}), '(c_np_data, axis=0)\n', (19357, 19376), True, 'import numpy as np\n'), ((20024, 20053), 'os.path.join', 'join', (['input_folder', 'file_name'], {}), '(input_folder, file_name)\n', (20028, 20053), False, 'from os.path import join\n'), ((20071, 20089), 'PIL.Image.open', 'Image.open', (['c_file'], {}), '(c_file)\n', (20081, 20089), False, 'from PIL import Image\n'), ((6138, 6155), 'numpy.isnan', 'np.isnan', (['mincbar'], {}), '(mincbar)\n', (6146, 6155), True, 'import numpy as np\n'), ((19197, 19216), 'numpy.rot90', 'np.rot90', (['c_np_data'], {}), 
'(c_np_data)\n', (19205, 19216), True, 'import numpy as np\n'), ((20110, 20124), 'numpy.asarray', 'np.asarray', (['im'], {}), '(im)\n', (20120, 20124), True, 'import numpy as np\n'), ((5236, 5298), 'os.path.join', 'join', (['self._eoas_pyutils_path', '"""viz_utils/imgs/bluemarble.png"""'], {}), "(self._eoas_pyutils_path, 'viz_utils/imgs/bluemarble.png')\n", (5240, 5298), False, 'from os.path import join\n'), ((5398, 5470), 'os.path.join', 'join', (['self._eoas_pyutils_path', '"""viz_utils/imgs/bluemarble_5400x2700.jpg"""'], {}), "(self._eoas_pyutils_path, 'viz_utils/imgs/bluemarble_5400x2700.jpg')\n", (5402, 5470), False, 'from os.path import join\n'), ((5560, 5617), 'os.path.join', 'join', (['self._eoas_pyutils_path', '"""viz_utils/imgs/etopo.png"""'], {}), "(self._eoas_pyutils_path, 'viz_utils/imgs/etopo.png')\n", (5564, 5617), False, 'from os.path import join\n'), ((5713, 5785), 'os.path.join', 'join', (['self._eoas_pyutils_path', '"""viz_utils/imgs/bathymetry_3600x1800.jpg"""'], {}), "(self._eoas_pyutils_path, 'viz_utils/imgs/bathymetry_3600x1800.jpg')\n", (5717, 5785), False, 'from os.path import join\n'), ((5867, 5885), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (5883, 5885), True, 'import cartopy.crs as ccrs\n'), ((13174, 13192), 'cartopy.crs.PlateCarree', 'ccrs.PlateCarree', ([], {}), '()\n', (13190, 13192), True, 'import cartopy.crs as ccrs\n'), ((19279, 19297), 'numpy.flip', 'np.flip', (['c_np_data'], {}), '(c_np_data)\n', (19286, 19297), True, 'import numpy as np\n'), ((20277, 20308), 'cv2.VideoWriter_fourcc', 'cv2.VideoWriter_fourcc', (["*'mp4v'"], {}), "(*'mp4v')\n", (20299, 20308), False, 'import cv2\n'), ((15278, 15295), 'numpy.isnan', 'np.isnan', (['mincbar'], {}), '(mincbar)\n', (15286, 15295), True, 'import numpy as np\n'), ((15498, 15515), 'numpy.isnan', 'np.isnan', (['maxcbar'], {}), '(maxcbar)\n', (15506, 15515), True, 'import numpy as np\n')] |
from os import listdir
from os.path import isfile, join
from path import Path
import numpy as np
import cv2
# Dataset path
target_path = Path('target/')
annotation_images_path = Path('dataset/ade20k/annotations/training/').abspath()
dataset = [ f for f in listdir(annotation_images_path) if isfile(join(annotation_images_path,f))]
images = np.empty(len(dataset), dtype = object)
count = 1
# Iterate all Training Images
for n in range(0, len(dataset)):
# Read image
images[n] = cv2.imread(join(annotation_images_path,dataset[n]))
# Convert it to array
    # uint8 so that labels 0-150 and the 255 ignore value fit without overflow
    array = np.asarray(images[n], dtype=np.uint8)
    # Where the value is less than 1 (unlabeled), set it to 255;
    # if it is >= 1, shift the label down by 1
    arr = np.where(array < 1, 255, array - 1)
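    # e.g. 0 (unlabeled) -> 255 (ignore index), 1 -> 0, 150 -> 149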
    # Save it to another file, zero-padding the index to 8 digits
    cv2.imwrite(target_path + 'ADE_train_' + str(count).zfill(8) + ".png", arr)
print(str(count) + ".png is printed")
count += 1
| [
"os.listdir",
"numpy.where",
"numpy.asarray",
"os.path.join",
"path.Path"
] | [((138, 153), 'path.Path', 'Path', (['"""target/"""'], {}), "('target/')\n", (142, 153), False, 'from path import Path\n'), ((577, 613), 'numpy.asarray', 'np.asarray', (['images[n]'], {'dtype': 'np.int8'}), '(images[n], dtype=np.int8)\n', (587, 613), True, 'import numpy as np\n'), ((734, 769), 'numpy.where', 'np.where', (['(array < 1)', '(255)', '(array - 1)'], {}), '(array < 1, 255, array - 1)\n', (742, 769), True, 'import numpy as np\n'), ((179, 223), 'path.Path', 'Path', (['"""dataset/ade20k/annotations/training/"""'], {}), "('dataset/ade20k/annotations/training/')\n", (183, 223), False, 'from path import Path\n'), ((257, 288), 'os.listdir', 'listdir', (['annotation_images_path'], {}), '(annotation_images_path)\n', (264, 288), False, 'from os import listdir\n'), ((497, 537), 'os.path.join', 'join', (['annotation_images_path', 'dataset[n]'], {}), '(annotation_images_path, dataset[n])\n', (501, 537), False, 'from os.path import isfile, join\n'), ((299, 330), 'os.path.join', 'join', (['annotation_images_path', 'f'], {}), '(annotation_images_path, f)\n', (303, 330), False, 'from os.path import isfile, join\n')] |
# -*- coding: utf-8 -*
'''
Max-heap implementation
98
/ \
96 84
/ \ / \
92 82 78 47
/ \ / \ / \ / \
33 26 51 85 50 15 44 60
/ \ / \ / \ / \ / \ / \ / \ / \
40 51 98 51 7 17 94 82 32 21 64 60 7 44 63 63
'''
import random
class Maxheap(object):
    def __init__(self, capacity, arr=None):
        self.data = [None] * (capacity + 1)
        self.capacity = capacity
        if arr is None:
            self.count = 0
        else:
            for i in range(0, capacity):
                self.data[i + 1] = arr[i]
            self.count = capacity
            # Heapify: sift down every internal node (integer division for Python 3)
            for i in range(self.count // 2, 0, -1):
                self.__shiftDown(i)
def size(self):
return self.count
def isEmpty(self):
return self.count == 0
def __shiftUp(self,k):
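        # Bubble the element at index k up while it is larger than its parent (k // 2).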
        while k > 1 and self.data[k] > self.data[k // 2]:
            self.data[k], self.data[k // 2] = self.data[k // 2], self.data[k]
            k = k // 2
def insert(self,data):
self.data[self.count + 1] = data
self.count += 1
self.__shiftUp(self.count)
    def __shiftDown(self, k):
while k * 2 <= self.count:
j = k * 2
if self.count >= j + 1 and self.data[j + 1] > self.data[j]:
j += 1
if self.data[k] > self.data[j]:
break
self.data[k], self.data[j] = self.data[j],self.data[k]
k = j
def extractMax(self):
ret = self.data[1]
self.data[1], self.data[self.count] = self.data[self.count], self.data[1]
self.count -= 1
        self.__shiftDown(1)
return ret
if __name__ == '__main__':
N = 31
M = 100
heap = Maxheap(N)
for i in range(0,N):
k = random.randint(1, M)
heap.insert(k)
# arr = [random.randint(1,M) for i in range(N)]
# heap = Maxheap(len(arr),arr)
print(heap.size())
print(heap.data)
print(heap.extractMax())
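    # Illustrative check (not in the original script): repeatedly extracting the
    # max yields the inserted values in descending order, e.g.
    # sorted_desc = [heap.extractMax() for _ in range(heap.size())]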
| [
"random.randint"
] | [((1986, 2006), 'random.randint', 'random.randint', (['(1)', 'M'], {}), '(1, M)\n', (2000, 2006), False, 'import random\n')] |
import mxnet as mx
def slice_symbol_to_seq_symobls(net, seq_len, axis=1, squeeze_axis=True):
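    """Split `net` into `seq_len` separate symbols along `axis` (one per time
    step) and return them as a list, squeezing the sliced axis if requested."""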
net = mx.sym.SliceChannel(data=net, num_outputs=seq_len, axis=axis, squeeze_axis=squeeze_axis)
hidden_all = []
for seq_index in range(seq_len):
hidden_all.append(net[seq_index])
net = hidden_all
return net
| [
"mxnet.sym.SliceChannel"
] | [((105, 198), 'mxnet.sym.SliceChannel', 'mx.sym.SliceChannel', ([], {'data': 'net', 'num_outputs': 'seq_len', 'axis': 'axis', 'squeeze_axis': 'squeeze_axis'}), '(data=net, num_outputs=seq_len, axis=axis, squeeze_axis=\n squeeze_axis)\n', (124, 198), True, 'import mxnet as mx\n')] |
import datetime
import json
from calendar import timegm
from urllib.parse import parse_qsl
import requests
from allauth.socialaccount import models as aamodels
from requests_oauthlib import OAuth1
from rest_framework import parsers, renderers
from rest_framework import status
from rest_framework.authtoken.models import Token
from rest_framework.authtoken.serializers import AuthTokenSerializer
from rest_framework.permissions import AllowAny
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework_jwt.authentication import JSONWebTokenAuthentication
from rest_framework_jwt.settings import api_settings
from rest_framework_jwt.utils import jwt_payload_handler, jwt_encode_handler
from dss import settings
from spa.models import UserProfile
from spa.models.socialaccountlink import SocialAccountLink
def _temp_reverse_user(uid, provider, access_token, access_token_secret, payload):
"""
    Find the user account for a social uid (and deprecate psa):
        1. Look for an existing SocialAccountLink and refresh its tokens.
        2. Otherwise fall back to allauth's SocialAccount and create the missing link.
"""
user = None
try:
sa = SocialAccountLink.objects.get(social_id=uid)
sa.type = provider
sa.social_id = uid
sa.access_token = access_token
sa.access_token_secret = access_token_secret
sa.provider_data = payload
sa.save()
user = UserProfile.objects.get(id=sa.user.id)
except SocialAccountLink.DoesNotExist:
# try allauth
try:
aa = aamodels.SocialAccount.objects.get(uid=uid)
try:
user = UserProfile.objects.get(user__id=aa.user_id)
except UserProfile.DoesNotExist:
print('Need to create UserProfile')
# we got an allauth, create the SocialAccountLink
sa = SocialAccountLink()
sa.user = user
sa.social_id = aa.uid
sa.type = aa.provider
sa.access_token = access_token
sa.access_token_secret = access_token_secret
sa.provider_data = payload
sa.save()
except aamodels.SocialAccount.DoesNotExist:
print('Need to create social model')
return user if user else None
class SocialLoginHandler(APIView):
"""View to authenticate users through social media."""
permission_classes = (AllowAny,)
def post(self, request):
uid = None
backend = request.query_params.get('backend')
user = None
if backend in ['twitter']:
request_token_url = 'https://api.twitter.com/oauth/request_token'
access_token_url = 'https://api.twitter.com/oauth/access_token'
access_token = ""
access_token_secret = ""
if request.data.get('oauth_token') and request.data.get('oauth_verifier'):
auth = OAuth1(settings.SOCIAL_AUTH_TWITTER_KEY,
client_secret=settings.SOCIAL_AUTH_TWITTER_SECRET,
resource_owner_key=request.data.get('oauth_token'),
verifier=request.data.get('oauth_verifier'))
r = requests.post(access_token_url, auth=auth)
profile = dict(parse_qsl(r.text))
payload = json.dumps(profile)
uid = profile.get('user_id')
access_token = profile.get('oauth_token')
access_token_secret = profile.get('oauth_token_secret')
user = _temp_reverse_user(uid, 'twitter', access_token, access_token_secret, payload)
else:
oauth = OAuth1(settings.SOCIAL_AUTH_TWITTER_KEY,
client_secret=settings.SOCIAL_AUTH_TWITTER_SECRET,
callback_uri=settings.TWITTER_CALLBACK_URL)
r = requests.post(request_token_url, auth=oauth)
access_token = dict(parse_qsl(r.text))
return Response(access_token)
elif backend in ['facebook']:
access_token_url = 'https://graph.facebook.com/v2.3/oauth/access_token'
graph_api_url = 'https://graph.facebook.com/v2.3/me'
access_token = ""
access_token_secret = ""
params = {
'client_id': request.data.get('clientId'),
'redirect_uri': request.data.get('redirectUri'),
'client_secret': settings.SOCIAL_AUTH_FACEBOOK_SECRET,
'code': request.data.get('code')
}
# Step 1. Exchange authorization code for access token.
r = requests.get(access_token_url, params=params)
token = json.loads(r.text)
# Step 2. Retrieve information about the current user.
r = requests.get(graph_api_url, params=token)
profile = json.loads(r.text)
access_token = token.get('access_token')
uid = profile.get('id')
user = _temp_reverse_user(uid, 'facebook', access_token, access_token_secret, r.text)
elif backend in ['google']:
access_token_url = 'https://accounts.google.com/o/oauth2/token'
people_api_url = 'https://www.googleapis.com/plus/v1/people/me/openIdConnect'
access_token = ""
access_token_secret = ""
payload = dict(client_id=request.data.get('clientId'),
redirect_uri=request.data.get('redirectUri'),
client_secret=settings.SOCIAL_AUTH_GOOGLE_OAUTH_SECRET,
code=request.data.get('code'),
grant_type='authorization_code')
# Step 1. Exchange authorization code for access token.
r = requests.post(access_token_url, data=payload)
token = json.loads(r.text)
headers = {'Authorization': 'Bearer {0}'.format(token['access_token'])}
# Step 2. Retrieve information about the current user.
r = requests.get(people_api_url, headers=headers)
profile = json.loads(r.text)
uid = profile.get('sub')
user = _temp_reverse_user(uid, 'google', access_token, access_token_secret, r.text)
if uid is not None and user is not None:
if not user.user.is_active:
return Response({
'status': 'Unauthorized',
'message': 'User account disabled'
}, status=status.HTTP_401_UNAUTHORIZED)
payload = jwt_payload_handler(user.user)
if api_settings.JWT_ALLOW_REFRESH:
payload['orig_iat'] = timegm(
datetime.datetime.utcnow().utctimetuple()
)
response_data = {
'token': jwt_encode_handler(payload),
'session': user.get_session_id()
}
return Response(response_data)
return Response({
'status': 'Bad request',
'message': 'Authentication could not be performed with received data.'
}, status=status.HTTP_400_BAD_REQUEST)
class ObtainUser(APIView):
throttle_classes = ()
permission_classes = ()
parser_classes = (parsers.FormParser, parsers.MultiPartParser, parsers.JSONParser,)
renderer_classes = (renderers.JSONRenderer,)
serializer_class = AuthTokenSerializer
model = Token
def post(self, request):
return self.get(request)
def get(self, request):
if request.user.is_authenticated():
return Response(
status=status.HTTP_200_OK, data={
'id': request.user.id,
'name': request.user.username,
'session': request.user.userprofile.get_session_id(),
'slug': request.user.userprofile.slug,
'session': request.user.userprofile.get_session_id(),
'userRole': 'user',
})
else:
return Response(status=status.HTTP_401_UNAUTHORIZED)
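# Client flow sketch (illustrative; the endpoint paths depend on the URL conf):
#   Twitter: POST ...?backend=twitter without oauth_verifier to obtain a request
#   token, then POST again with oauth_token/oauth_verifier to receive the JWT.
#   Facebook/Google: POST ...?backend=facebook|google with clientId, redirectUri
#   and the OAuth code; the response contains 'token' and 'session'.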
| [
"json.loads",
"requests.post",
"datetime.datetime.utcnow",
"spa.models.socialaccountlink.SocialAccountLink.objects.get",
"rest_framework_jwt.utils.jwt_payload_handler",
"json.dumps",
"rest_framework_jwt.utils.jwt_encode_handler",
"requests.get",
"spa.models.UserProfile.objects.get",
"rest_framework.response.Response",
"spa.models.socialaccountlink.SocialAccountLink",
"urllib.parse.parse_qsl",
"requests_oauthlib.OAuth1",
"allauth.socialaccount.models.SocialAccount.objects.get"
] | [((1090, 1134), 'spa.models.socialaccountlink.SocialAccountLink.objects.get', 'SocialAccountLink.objects.get', ([], {'social_id': 'uid'}), '(social_id=uid)\n', (1119, 1134), False, 'from spa.models.socialaccountlink import SocialAccountLink\n'), ((1349, 1387), 'spa.models.UserProfile.objects.get', 'UserProfile.objects.get', ([], {'id': 'sa.user.id'}), '(id=sa.user.id)\n', (1372, 1387), False, 'from spa.models import UserProfile\n'), ((6887, 7039), 'rest_framework.response.Response', 'Response', (["{'status': 'Bad request', 'message':\n 'Authentication could not be performed with received data.'}"], {'status': 'status.HTTP_400_BAD_REQUEST'}), "({'status': 'Bad request', 'message':\n 'Authentication could not be performed with received data.'}, status=\n status.HTTP_400_BAD_REQUEST)\n", (6895, 7039), False, 'from rest_framework.response import Response\n'), ((6475, 6505), 'rest_framework_jwt.utils.jwt_payload_handler', 'jwt_payload_handler', (['user.user'], {}), '(user.user)\n', (6494, 6505), False, 'from rest_framework_jwt.utils import jwt_payload_handler, jwt_encode_handler\n'), ((6847, 6870), 'rest_framework.response.Response', 'Response', (['response_data'], {}), '(response_data)\n', (6855, 6870), False, 'from rest_framework.response import Response\n'), ((7954, 7999), 'rest_framework.response.Response', 'Response', ([], {'status': 'status.HTTP_401_UNAUTHORIZED'}), '(status=status.HTTP_401_UNAUTHORIZED)\n', (7962, 7999), False, 'from rest_framework.response import Response\n'), ((1483, 1526), 'allauth.socialaccount.models.SocialAccount.objects.get', 'aamodels.SocialAccount.objects.get', ([], {'uid': 'uid'}), '(uid=uid)\n', (1517, 1526), True, 'from allauth.socialaccount import models as aamodels\n'), ((1788, 1807), 'spa.models.socialaccountlink.SocialAccountLink', 'SocialAccountLink', ([], {}), '()\n', (1805, 1807), False, 'from spa.models.socialaccountlink import SocialAccountLink\n'), ((3121, 3163), 'requests.post', 'requests.post', (['access_token_url'], {'auth': 'auth'}), '(access_token_url, auth=auth)\n', (3134, 3163), False, 'import requests\n'), ((3240, 3259), 'json.dumps', 'json.dumps', (['profile'], {}), '(profile)\n', (3250, 3259), False, 'import json\n'), ((3579, 3719), 'requests_oauthlib.OAuth1', 'OAuth1', (['settings.SOCIAL_AUTH_TWITTER_KEY'], {'client_secret': 'settings.SOCIAL_AUTH_TWITTER_SECRET', 'callback_uri': 'settings.TWITTER_CALLBACK_URL'}), '(settings.SOCIAL_AUTH_TWITTER_KEY, client_secret=settings.\n SOCIAL_AUTH_TWITTER_SECRET, callback_uri=settings.TWITTER_CALLBACK_URL)\n', (3585, 3719), False, 'from requests_oauthlib import OAuth1\n'), ((3797, 3841), 'requests.post', 'requests.post', (['request_token_url'], {'auth': 'oauth'}), '(request_token_url, auth=oauth)\n', (3810, 3841), False, 'import requests\n'), ((3920, 3942), 'rest_framework.response.Response', 'Response', (['access_token'], {}), '(access_token)\n', (3928, 3942), False, 'from rest_framework.response import Response\n'), ((4564, 4609), 'requests.get', 'requests.get', (['access_token_url'], {'params': 'params'}), '(access_token_url, params=params)\n', (4576, 4609), False, 'import requests\n'), ((4630, 4648), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (4640, 4648), False, 'import json\n'), ((4733, 4774), 'requests.get', 'requests.get', (['graph_api_url'], {'params': 'token'}), '(graph_api_url, params=token)\n', (4745, 4774), False, 'import requests\n'), ((4797, 4815), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (4807, 4815), False, 'import json\n'), ((6284, 6397), 
'rest_framework.response.Response', 'Response', (["{'status': 'Unauthorized', 'message': 'User account disabled'}"], {'status': 'status.HTTP_401_UNAUTHORIZED'}), "({'status': 'Unauthorized', 'message': 'User account disabled'},\n status=status.HTTP_401_UNAUTHORIZED)\n", (6292, 6397), False, 'from rest_framework.response import Response\n'), ((6735, 6762), 'rest_framework_jwt.utils.jwt_encode_handler', 'jwt_encode_handler', (['payload'], {}), '(payload)\n', (6753, 6762), False, 'from rest_framework_jwt.utils import jwt_payload_handler, jwt_encode_handler\n'), ((1567, 1611), 'spa.models.UserProfile.objects.get', 'UserProfile.objects.get', ([], {'user__id': 'aa.user_id'}), '(user__id=aa.user_id)\n', (1590, 1611), False, 'from spa.models import UserProfile\n'), ((3195, 3212), 'urllib.parse.parse_qsl', 'parse_qsl', (['r.text'], {}), '(r.text)\n', (3204, 3212), False, 'from urllib.parse import parse_qsl\n'), ((3878, 3895), 'urllib.parse.parse_qsl', 'parse_qsl', (['r.text'], {}), '(r.text)\n', (3887, 3895), False, 'from urllib.parse import parse_qsl\n'), ((5698, 5743), 'requests.post', 'requests.post', (['access_token_url'], {'data': 'payload'}), '(access_token_url, data=payload)\n', (5711, 5743), False, 'import requests\n'), ((5764, 5782), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (5774, 5782), False, 'import json\n'), ((5951, 5996), 'requests.get', 'requests.get', (['people_api_url'], {'headers': 'headers'}), '(people_api_url, headers=headers)\n', (5963, 5996), False, 'import requests\n'), ((6019, 6037), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (6029, 6037), False, 'import json\n'), ((6619, 6645), 'datetime.datetime.utcnow', 'datetime.datetime.utcnow', ([], {}), '()\n', (6643, 6645), False, 'import datetime\n')] |
import requests
import aiohttp
from constants import API_KEY
class User(object):
def __init__(self, author_info):
# "author": {
# "about": "",
# "avatar": {
# "cache": "//a.disquscdn.com/1519942534/images/noavatar92.png",
# "isCustom": false,
# "large": {
# "cache": "//a.disquscdn.com/1519942534/images/noavatar92.png",
# "permalink": "https://disqus.com/api/users/avatars/felix1999.jpg"
# },
# "permalink": "https://disqus.com/api/users/avatars/felix1999.jpg",
# "small": {
# "cache": "//a.disquscdn.com/1519942534/images/noavatar32.png",
# "permalink": "https://disqus.com/api/users/avatars/felix1999.jpg"
# }
# },
# "disable3rdPartyTrackers": false,
# "id": "5472588",
# "isAnonymous": false,
# "isPowerContributor": false,
# "isPrimary": true,
# "isPrivate": true,
# "joinedAt": "2010-11-20T04:45:33",
# "location": "",
# "name": "felix1999",
# "profileUrl": "https://disqus.com/by/felix1999/",
# "signedUrl": "",
# "url": "",
# "username": "felix1999"
# },
self._basic_info = author_info
self._detailed_info = None
async def load(self):
async with aiohttp.ClientSession(connector=aiohttp.TCPConnector(verify_ssl=False)) as session:
user_info = await session.get(
'https://disqus.com/api/3.0/users/details.json',
params={'user': self.id, 'api_key': API_KEY}
)
detail_json = await user_info.json()
if detail_json['code'] != 0:
print(f'Problem with getting user details from user {self.id}')
print(detail_json)
self._detailed_info = detail_json['response']
def _get_detailed_info(self):
# https://disqus.com/api/3.0/users/details.json?user=137780765&api_key=<KEY>
# {
# "code": 0,
# "response": {
# "about": "",
# "avatar": {
# "cache": "https://c.disquscdn.com/uploads/users/13778/765/avatar92.jpg?1433896551",
# "isCustom": true,
# "large": {
# "cache": "https://c.disquscdn.com/uploads/users/13778/765/avatar92.jpg?1433896551",
# "permalink": "https://disqus.com/api/users/avatars/disqus_FqhLpDGmTT.jpg"
# },
# "permalink": "https://disqus.com/api/users/avatars/disqus_FqhLpDGmTT.jpg",
# "small": {
# "cache": "https://c.disquscdn.com/uploads/users/13778/765/avatar32.jpg?1433896551",
# "permalink": "https://disqus.com/api/users/avatars/disqus_FqhLpDGmTT.jpg"
# }
# },
# "disable3rdPartyTrackers": false,
# "id": "137780765",
# "isAnonymous": false,
# "isPowerContributor": false,
# "isPrimary": true,
# "isPrivate": false,
# "joinedAt": "2015-01-02T18:40:14",
# "location": "",
# "name": "Bob",
# "numFollowers": 2,
# "numFollowing": 0,
# "numForumsFollowing": 0,
# "numLikesReceived": 8967,
# "numPosts": 4147,
# "profileUrl": "https://disqus.com/by/disqus_FqhLpDGmTT/",
# "rep": 3.5297520000000002,
# "reputation": 3.5297520000000002,
# "reputationLabel": "High",
# "signedUrl": "",
# "url": "",
# "username": "disqus_FqhLpDGmTT"
# }
# }
print("WARNING: auto-loading user in async version of code!!!!")
details = requests.get(
'https://disqus.com/api/3.0/users/details.json',
{'user': self.id, 'api_key': API_KEY}
)
detail_json = details.json()
if detail_json['code'] != 0:
print(f'Problem with getting user details from user {self.id}')
print(detail_json)
self._detailed_info = detail_json['response']
@property
def anonymous(self):
return 'id' not in self._basic_info
@property
def private(self):
return self.anonymous or self._basic_info.get('isPrivate')
@property
def id(self):
if self.private:
return 'Private'
return self._basic_info.get('id', 'Anonymous')
@property
def name(self):
return self._basic_info.get('name')
@property
def username(self):
return self._basic_info.get('username')
@property
def location(self):
return self._basic_info.get('location')
@property
def joined_at(self):
return self._basic_info.get('joinedAt')
@property
def profile_url(self):
return self._basic_info.get('profileUrl')
@property
def total_posts(self):
if self._detailed_info is None:
self._get_detailed_info()
return self._detailed_info.get('numPosts')
@property
def total_likes(self):
if self._detailed_info is None:
self._get_detailed_info()
return self._detailed_info.get('numLikesReceived')
@property
def user_info_row(self):
return [
self.id,
self.name,
self.username,
self.total_posts,
self.total_likes,
self.location,
self.joined_at,
self.profile_url
]
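# Usage sketch (illustrative; `author_info` is a dict taken from a Disqus posts
# API response):
#   user = User(author_info)
#   await user.load()          # inside an asyncio event loop
#   row = user.user_info_row   # [id, name, username, posts, likes, ...]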
| [
"aiohttp.TCPConnector",
"requests.get"
] | [((4315, 4420), 'requests.get', 'requests.get', (['"""https://disqus.com/api/3.0/users/details.json"""', "{'user': self.id, 'api_key': API_KEY}"], {}), "('https://disqus.com/api/3.0/users/details.json', {'user': self\n .id, 'api_key': API_KEY})\n", (4327, 4420), False, 'import requests\n'), ((1751, 1789), 'aiohttp.TCPConnector', 'aiohttp.TCPConnector', ([], {'verify_ssl': '(False)'}), '(verify_ssl=False)\n', (1771, 1789), False, 'import aiohttp\n')] |
from flask_restful import Resource, reqparse
from firebase_admin import auth as firebase_auth
from dbcls.models import User
parser = reqparse.RequestParser()
parser.add_argument('token', type=str, required=True, nullable=False)
class Authenticate(Resource):
def post(self):
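        """Verify a Firebase ID token, look up the matching User, and return a
        custom token plus basic profile data."""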
try:
args = parser.parse_args()
decoded_token = firebase_auth.verify_id_token(args['token'])
except (ValueError, firebase_auth.AuthError) as e:
return {'message': f'{e}'}, 400
firebase_uid = decoded_token['uid']
user = User.query.filter_by(firebase_uid=firebase_uid).first()
if not user:
return {'message': 'user not found. You have to sign up.'}, 400
custom_token = firebase_auth.create_custom_token(firebase_uid)
return {
'custom_token': custom_token.decode(),
'display_name': user.display_name,
'contact_uri': user.contact_uri,
'roles': [role.role_type for role in user.user_roles],
}
| [
"firebase_admin.auth.verify_id_token",
"firebase_admin.auth.create_custom_token",
"flask_restful.reqparse.RequestParser",
"dbcls.models.User.query.filter_by"
] | [((136, 160), 'flask_restful.reqparse.RequestParser', 'reqparse.RequestParser', ([], {}), '()\n', (158, 160), False, 'from flask_restful import Resource, reqparse\n'), ((748, 795), 'firebase_admin.auth.create_custom_token', 'firebase_auth.create_custom_token', (['firebase_uid'], {}), '(firebase_uid)\n', (781, 795), True, 'from firebase_admin import auth as firebase_auth\n'), ((363, 407), 'firebase_admin.auth.verify_id_token', 'firebase_auth.verify_id_token', (["args['token']"], {}), "(args['token'])\n", (392, 407), True, 'from firebase_admin import auth as firebase_auth\n'), ((571, 618), 'dbcls.models.User.query.filter_by', 'User.query.filter_by', ([], {'firebase_uid': 'firebase_uid'}), '(firebase_uid=firebase_uid)\n', (591, 618), False, 'from dbcls.models import User\n')] |
import csv
import logging
import os
import shutil
from csv import DictWriter # noqa: F401
from pyhocon import ConfigTree, ConfigFactory # noqa: F401
from typing import Dict, Any # noqa: F401
from databuilder.job.base_job import Job
from databuilder.loader.base_loader import Loader
from databuilder.models.neo4j_csv_serde import NODE_LABEL, \
RELATION_START_LABEL, RELATION_END_LABEL, RELATION_TYPE
from databuilder.models.neo4j_csv_serde import Neo4jCsvSerializable # noqa: F401
from databuilder.utils.closer import Closer
LOGGER = logging.getLogger(__name__)
class FsNeo4jCSVLoader(Loader):
"""
Write node and relationship CSV file(s) that can be consumed by
Neo4jCsvPublisher.
It assumes that the record it consumes is instance of Neo4jCsvSerializable
"""
# Config keys
NODE_DIR_PATH = 'node_dir_path'
RELATION_DIR_PATH = 'relationship_dir_path'
FORCE_CREATE_DIR = 'force_create_directory'
SHOULD_DELETE_CREATED_DIR = 'delete_created_directories'
_DEFAULT_CONFIG = ConfigFactory.from_dict({
SHOULD_DELETE_CREATED_DIR: True,
FORCE_CREATE_DIR: False
})
def __init__(self):
# type: () -> None
self._node_file_mapping = {} # type: Dict[Any, DictWriter]
self._relation_file_mapping = {} # type: Dict[Any, DictWriter]
self._closer = Closer()
def init(self, conf):
# type: (ConfigTree) -> None
"""
Initializing FsNeo4jCsvLoader by creating directory for node files
and relationship files. Note that the directory defined in
configuration should not exist.
:param conf:
:return:
"""
conf = conf.with_fallback(FsNeo4jCSVLoader._DEFAULT_CONFIG)
self._node_dir = conf.get_string(FsNeo4jCSVLoader.NODE_DIR_PATH)
self._relation_dir = \
conf.get_string(FsNeo4jCSVLoader.RELATION_DIR_PATH)
self._delete_created_dir = \
conf.get_bool(FsNeo4jCSVLoader.SHOULD_DELETE_CREATED_DIR)
self._force_create_dir = conf.get_bool(FsNeo4jCSVLoader.FORCE_CREATE_DIR)
self._create_directory(self._node_dir)
self._create_directory(self._relation_dir)
def _create_directory(self, path):
# type: (str) -> None
"""
        Validates that the directory does not exist, creates it, and registers
        a function that deletes the created directory with Job.closer.
:param path:
:return:
"""
if os.path.exists(path):
if self._force_create_dir:
LOGGER.info('Directory exist. Deleting directory {}'.format(path))
shutil.rmtree(path)
else:
raise RuntimeError('Directory should not exist: {}'.format(path))
os.makedirs(path)
def _delete_dir():
# type: () -> None
if not self._delete_created_dir:
LOGGER.warn('Skip Deleting directory {}'.format(path))
return
LOGGER.info('Deleting directory {}'.format(path))
shutil.rmtree(path)
# Directory should be deleted after publish is finished
Job.closer.register(_delete_dir)
def load(self, csv_serializable):
# type: (Neo4jCsvSerializable) -> None
"""
Writes Neo4jCsvSerializable into CSV files.
There are multiple CSV files that this method writes.
This is because there're not only node and relationship, but also it
can also have different nodes, and relationships.
Common pattern for both nodes and relations:
1. retrieve csv row (a dict where keys represent a header,
values represent a row)
        2. use this dict to get an appropriate csv writer and write to it.
3. repeat 1 and 2
:param csv_serializable:
:return:
"""
node_dict = csv_serializable.next_node()
while node_dict:
key = (node_dict[NODE_LABEL], len(node_dict))
file_suffix = '{}_{}'.format(*key)
node_writer = self._get_writer(node_dict,
self._node_file_mapping,
key,
self._node_dir,
file_suffix)
node_writer.writerow(node_dict)
node_dict = csv_serializable.next_node()
relation_dict = csv_serializable.next_relation()
while relation_dict:
key2 = (relation_dict[RELATION_START_LABEL],
relation_dict[RELATION_END_LABEL],
relation_dict[RELATION_TYPE],
len(relation_dict))
file_suffix = '{}_{}_{}'.format(key2[0], key2[1], key2[2])
relation_writer = self._get_writer(relation_dict,
self._relation_file_mapping,
key2,
self._relation_dir,
file_suffix)
relation_writer.writerow(relation_dict)
relation_dict = csv_serializable.next_relation()
def _get_writer(self,
csv_record_dict, # type: Dict[str, Any]
file_mapping, # type: Dict[Any, DictWriter]
key, # type: Any
dir_path, # type: str
file_suffix # type: str
):
# type: (...) -> DictWriter
"""
        Finds a writer based on the csv record and key.
        If the writer does not exist, it creates a csv writer and updates the
        mapping.
:param csv_record_dict:
:param file_mapping:
:param key:
:param file_suffix:
:return:
"""
writer = file_mapping.get(key)
if writer:
return writer
LOGGER.info('Creating file for {}'.format(key))
file_out = open('{}/{}.csv'.format(dir_path, file_suffix), 'w')
def file_out_close():
# type: () -> None
LOGGER.info('Closing file IO {}'.format(file_out))
file_out.close()
self._closer.register(file_out_close)
writer = csv.DictWriter(file_out, fieldnames=csv_record_dict.keys(),
quoting=csv.QUOTE_NONNUMERIC)
writer.writeheader()
file_mapping[key] = writer
return writer
def close(self):
# type: () -> None
"""
        Closes any closeable callable registered in _closer.
:return:
"""
self._closer.close()
def get_scope(self):
# type: () -> str
return "loader.filesystem_csv_neo4j"
| [
"logging.getLogger",
"os.path.exists",
"databuilder.utils.closer.Closer",
"os.makedirs",
"databuilder.job.base_job.Job.closer.register",
"shutil.rmtree",
"pyhocon.ConfigFactory.from_dict"
] | [((544, 571), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (561, 571), False, 'import logging\n'), ((1026, 1113), 'pyhocon.ConfigFactory.from_dict', 'ConfigFactory.from_dict', (['{SHOULD_DELETE_CREATED_DIR: True, FORCE_CREATE_DIR: False}'], {}), '({SHOULD_DELETE_CREATED_DIR: True, FORCE_CREATE_DIR:\n False})\n', (1049, 1113), False, 'from pyhocon import ConfigTree, ConfigFactory\n'), ((1347, 1355), 'databuilder.utils.closer.Closer', 'Closer', ([], {}), '()\n', (1353, 1355), False, 'from databuilder.utils.closer import Closer\n'), ((2458, 2478), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (2472, 2478), False, 'import os\n'), ((2747, 2764), 'os.makedirs', 'os.makedirs', (['path'], {}), '(path)\n', (2758, 2764), False, 'import os\n'), ((3131, 3163), 'databuilder.job.base_job.Job.closer.register', 'Job.closer.register', (['_delete_dir'], {}), '(_delete_dir)\n', (3150, 3163), False, 'from databuilder.job.base_job import Job\n'), ((3038, 3057), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (3051, 3057), False, 'import shutil\n'), ((2618, 2637), 'shutil.rmtree', 'shutil.rmtree', (['path'], {}), '(path)\n', (2631, 2637), False, 'import shutil\n')] |
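A stand-alone usage sketch for FsNeo4jCSVLoader above; the directory paths and the upstream `record` object are assumptions, and in practice the loader is usually driven by a databuilder job rather than called by hand.
# Illustrative direct use of FsNeo4jCSVLoader; paths and `record` are assumptions.
from pyhocon import ConfigFactory
loader = FsNeo4jCSVLoader()
loader.init(ConfigFactory.from_dict({
    FsNeo4jCSVLoader.NODE_DIR_PATH: '/tmp/amundsen/nodes',
    FsNeo4jCSVLoader.RELATION_DIR_PATH: '/tmp/amundsen/relationships',
    FsNeo4jCSVLoader.FORCE_CREATE_DIR: True,           # recreate the directories if they already exist
    FsNeo4jCSVLoader.SHOULD_DELETE_CREATED_DIR: True,   # clean up once the job closes them
}))
loader.load(record)  # `record` is assumed to be any Neo4jCsvSerializable produced upstream
loader.close()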
# -*- coding: utf-8 -*-
"""
@author: <NAME>
"""
import pandas as pd
from sklearn.neighbors import NearestNeighbors # k-NN
k_in_knn = 5 # k in k-NN
rate_of_training_samples_inside_ad = 0.96 # fraction of the training samples that should fall inside the AD; used to set the AD threshold
dataset = pd.read_csv('resin.csv', index_col=0, header=0)
x_prediction = pd.read_csv('resin_prediction.csv', index_col=0, header=0)
# Split the data
y = dataset.iloc[:, 0] # objective variable (target)
x = dataset.iloc[:, 1:] # explanatory variables (features)
# Remove features whose standard deviation is 0
deleting_variables = x.columns[x.std() == 0]
x = x.drop(deleting_variables, axis=1)
x_prediction = x_prediction.drop(deleting_variables, axis=1)
# Autoscaling (standardization)
autoscaled_x = (x - x.mean()) / x.std()
autoscaled_x_prediction = (x_prediction - x.mean()) / x.std()
# AD based on k-NN
ad_model = NearestNeighbors(n_neighbors=k_in_knn, metric='euclidean') # declare the AD model
ad_model.fit(autoscaled_x) # for k-NN-based AD this amounts to storing the training x in the AD model
# Besides the distance of each sample to its k nearest neighbors, the indices of those neighbors are also returned, so two output variables are used
# For the training data the k nearest neighbors include the sample itself, whose zero distance must be excluded, so k_in_knn + 1 neighbors are requested
knn_distance_train, knn_index_train = ad_model.kneighbors(autoscaled_x, n_neighbors=k_in_knn + 1)
knn_distance_train = pd.DataFrame(knn_distance_train, index=autoscaled_x.index) # convert to a DataFrame
mean_of_knn_distance_train = pd.DataFrame(knn_distance_train.iloc[:, 1:].mean(axis=1),
                                         columns=['mean_of_knn_distance']) # mean of the k_in_knn distances, excluding the sample itself
mean_of_knn_distance_train.to_csv('mean_of_knn_distance_train.csv') # save to csv; note that an existing file with the same name is overwritten
# Set the threshold so that rate_of_training_samples_inside_ad * 100 % of the training samples fall inside the AD
sorted_mean_of_knn_distance_train = mean_of_knn_distance_train.iloc[:, 0].sort_values(ascending=True) # sort the mean distances in ascending order
ad_threshold = sorted_mean_of_knn_distance_train.iloc[
round(autoscaled_x.shape[0] * rate_of_training_samples_inside_ad) - 1]
# Determine, for the training data, whether each sample is inside or outside the AD
inside_ad_flag_train = mean_of_knn_distance_train <= ad_threshold # TRUE only for samples inside the AD
inside_ad_flag_train.columns=['inside_ad_flag']
inside_ad_flag_train.to_csv('inside_ad_flag_train_knn.csv') # save to csv; note that an existing file with the same name is overwritten
# Compute the k-NN distances for the prediction data
knn_distance_prediction, knn_index_prediction = ad_model.kneighbors(autoscaled_x_prediction)
knn_distance_prediction = pd.DataFrame(knn_distance_prediction, index=x_prediction.index) # convert to a DataFrame
mean_of_knn_distance_prediction = pd.DataFrame(knn_distance_prediction.mean(axis=1),
                                              columns=['mean_of_knn_distance']) # mean of the k_in_knn distances
mean_of_knn_distance_prediction.to_csv('mean_of_knn_distance_prediction.csv') # save to csv; note that an existing file with the same name is overwritten
# Determine, for the prediction data, whether each sample is inside or outside the AD
inside_ad_flag_prediction = mean_of_knn_distance_prediction <= ad_threshold # TRUE only for samples inside the AD
inside_ad_flag_prediction.columns=['inside_ad_flag']
inside_ad_flag_prediction.to_csv('inside_ad_flag_prediction_knn.csv') # save to csv; note that an existing file with the same name is overwritten
| [
"pandas.DataFrame",
"sklearn.neighbors.NearestNeighbors",
"pandas.read_csv"
] | [((258, 305), 'pandas.read_csv', 'pd.read_csv', (['"""resin.csv"""'], {'index_col': '(0)', 'header': '(0)'}), "('resin.csv', index_col=0, header=0)\n", (269, 305), True, 'import pandas as pd\n'), ((322, 380), 'pandas.read_csv', 'pd.read_csv', (['"""resin_prediction.csv"""'], {'index_col': '(0)', 'header': '(0)'}), "('resin_prediction.csv', index_col=0, header=0)\n", (333, 380), True, 'import pandas as pd\n'), ((774, 832), 'sklearn.neighbors.NearestNeighbors', 'NearestNeighbors', ([], {'n_neighbors': 'k_in_knn', 'metric': '"""euclidean"""'}), "(n_neighbors=k_in_knn, metric='euclidean')\n", (790, 832), False, 'from sklearn.neighbors import NearestNeighbors\n'), ((1202, 1260), 'pandas.DataFrame', 'pd.DataFrame', (['knn_distance_train'], {'index': 'autoscaled_x.index'}), '(knn_distance_train, index=autoscaled_x.index)\n', (1214, 1260), True, 'import pandas as pd\n'), ((2331, 2394), 'pandas.DataFrame', 'pd.DataFrame', (['knn_distance_prediction'], {'index': 'x_prediction.index'}), '(knn_distance_prediction, index=x_prediction.index)\n', (2343, 2394), True, 'import pandas as pd\n')] |
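The script above reduces to a single rule: a sample lies inside the applicability domain when its mean distance to the k nearest training samples does not exceed a threshold chosen so that roughly 96 % of the training set falls inside. A small helper restating that rule, as an illustration rather than part of the original script:
# Illustrative helpers equivalent to the threshold selection and membership test above.
import numpy as np
def knn_ad_threshold(mean_knn_distance_train, rate_inside=0.96):
    """Distance below which `rate_inside` of the training samples fall."""
    sorted_distances = np.sort(np.asarray(mean_knn_distance_train).ravel())
    return sorted_distances[round(len(sorted_distances) * rate_inside) - 1]
def inside_ad(mean_knn_distance, ad_threshold):
    """True where the mean k-NN distance does not exceed the threshold."""
    return np.asarray(mean_knn_distance) <= ad_threshold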
"""
Migration script to add 'ldda_id' column to the implicitly_converted_dataset_association table.
"""
from __future__ import print_function
import logging
from sqlalchemy import (
Column,
ForeignKey,
Integer,
MetaData
)
from galaxy.model.migrate.versions.util import (
add_column,
drop_column
)
log = logging.getLogger(__name__)
metadata = MetaData()
def upgrade(migrate_engine):
print(__doc__)
metadata.bind = migrate_engine
metadata.reflect()
# SQLAlchemy Migrate has a bug when adding a column with both a ForeignKey and a index in SQLite
if migrate_engine.name != 'sqlite':
c = Column("ldda_id", Integer, ForeignKey("library_dataset_dataset_association.id"), index=True, nullable=True)
else:
c = Column("ldda_id", Integer, index=True, nullable=True)
add_column(c, 'implicitly_converted_dataset_association', metadata, index_name='ix_implicitly_converted_ds_assoc_ldda_id')
def downgrade(migrate_engine):
metadata.bind = migrate_engine
metadata.reflect()
drop_column('ldda_id', 'implicitly_converted_dataset_association', metadata)
| [
"logging.getLogger",
"galaxy.model.migrate.versions.util.drop_column",
"galaxy.model.migrate.versions.util.add_column",
"sqlalchemy.ForeignKey",
"sqlalchemy.MetaData",
"sqlalchemy.Column"
] | [((331, 358), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (348, 358), False, 'import logging\n'), ((370, 380), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (378, 380), False, 'from sqlalchemy import Column, ForeignKey, Integer, MetaData\n'), ((831, 957), 'galaxy.model.migrate.versions.util.add_column', 'add_column', (['c', '"""implicitly_converted_dataset_association"""', 'metadata'], {'index_name': '"""ix_implicitly_converted_ds_assoc_ldda_id"""'}), "(c, 'implicitly_converted_dataset_association', metadata,\n index_name='ix_implicitly_converted_ds_assoc_ldda_id')\n", (841, 957), False, 'from galaxy.model.migrate.versions.util import add_column, drop_column\n'), ((1050, 1126), 'galaxy.model.migrate.versions.util.drop_column', 'drop_column', (['"""ldda_id"""', '"""implicitly_converted_dataset_association"""', 'metadata'], {}), "('ldda_id', 'implicitly_converted_dataset_association', metadata)\n", (1061, 1126), False, 'from galaxy.model.migrate.versions.util import add_column, drop_column\n'), ((773, 826), 'sqlalchemy.Column', 'Column', (['"""ldda_id"""', 'Integer'], {'index': '(True)', 'nullable': '(True)'}), "('ldda_id', Integer, index=True, nullable=True)\n", (779, 826), False, 'from sqlalchemy import Column, ForeignKey, Integer, MetaData\n'), ((670, 722), 'sqlalchemy.ForeignKey', 'ForeignKey', (['"""library_dataset_dataset_association.id"""'], {}), "('library_dataset_dataset_association.id')\n", (680, 722), False, 'from sqlalchemy import Column, ForeignKey, Integer, MetaData\n')] |
# The Topical Guide
# Copyright 2010-2011 Brigham Young University
#
# This file is part of the Topical Guide <http://nlp.cs.byu.edu/topic_browser>.
#
# The Topical Guide is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by the
# Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# The Topical Guide is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License
# for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with the Topical Guide. If not, see <http://www.gnu.org/licenses/>.
#
# If you have inquiries regarding any further use of the Topical Guide, please
# contact the Copyright Licensing Office, Brigham Young University, 3760 HBLL,
# Provo, UT 84602, (801) 422-9339 or 422-3821, e-mail <EMAIL>.
from __future__ import print_function
from django.shortcuts import render, redirect
from django.http import HttpResponse
import abtest
from abtest.settings import TEST_LIST
from visualize import root
# Create your views here.
def test(request, arg, *args, **kwargs):
if arg not in TEST_LIST:
print("Error! Unknown view should have been hit instead")
package_list = TEST_LIST[arg]['VIEW_PACKAGE'].split('.')
view_package = package_list.pop()
package = ".".join(package_list)
view = getattr(__import__(package, fromlist=[view_package]), view_package)
return view(request, args, kwargs)
# This view is called when the given url does not match anything
def unknown(request, arg, *args, **kwargs):
# redirect to the root view
return redirect('/')
| [
"django.shortcuts.redirect"
] | [((1820, 1833), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (1828, 1833), False, 'from django.shortcuts import render, redirect\n')] |
"""Tests for merge.py."""
from __future__ import absolute_import, division, print_function
from glob import glob
import os
import tarfile
import tempfile
from neurodocker.docker import client
from neurodocker.reprozip.trace import ReproZipMinimizer
from neurodocker.reprozip.merge import merge_pack_files
def _create_packfile(commands, dir):
"""Create packfile from list `commands` in debian:stretch container."""
container = client.containers.run('debian:stretch', detach=True, tty=True,
security_opt=['seccomp:unconfined'])
try:
minimizer = ReproZipMinimizer(container.id, commands,
packfile_save_dir=dir)
packfile_path = minimizer.run()
except:
raise
finally:
container.stop()
container.remove()
return packfile_path
def test_merge_pack_files():
tmpdir = tempfile.mkdtemp()
cmd = ["du -sh /usr", "rm --help"]
packpath = _create_packfile(cmd, tmpdir)
new_name = "first-pack.rpz"
os.rename(packpath, os.path.join(tmpdir, new_name))
cmd = ["ls -l /", "grep --help"]
_create_packfile(cmd, tmpdir)
pattern = os.path.join(tmpdir, '*.rpz')
packfiles = glob(pattern)
assert packfiles, "packfiles not found"
outfile = os.path.join(tmpdir, 'merged.rpz')
merge_pack_files(outfile=outfile, packfiles=packfiles)
with tarfile.open(outfile) as tar:
tar.extractall(path=tmpdir)
datafile = os.path.join(tmpdir, 'DATA.tar.gz')
with tarfile.open(datafile) as tardata:
tardata.extractall(path=tmpdir)
usr_path = os.path.join(tmpdir, 'DATA', 'usr', 'bin')
assert os.path.isfile(os.path.join(usr_path, 'du'))
assert os.path.isfile(os.path.join(usr_path, 'grep'))
assert os.path.isfile(os.path.join(usr_path, 'ls'))
assert os.path.isfile(os.path.join(usr_path, 'rm'))
assert not os.path.isfile(os.path.join(usr_path, 'sed'))
assert not os.path.isfile(os.path.join(usr_path, 'tar'))
| [
"tarfile.open",
"neurodocker.reprozip.trace.ReproZipMinimizer",
"os.path.join",
"neurodocker.docker.client.containers.run",
"tempfile.mkdtemp",
"glob.glob",
"neurodocker.reprozip.merge.merge_pack_files"
] | [((439, 543), 'neurodocker.docker.client.containers.run', 'client.containers.run', (['"""debian:stretch"""'], {'detach': '(True)', 'tty': '(True)', 'security_opt': "['seccomp:unconfined']"}), "('debian:stretch', detach=True, tty=True, security_opt\n =['seccomp:unconfined'])\n", (460, 543), False, 'from neurodocker.docker import client\n'), ((909, 927), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (925, 927), False, 'import tempfile\n'), ((1188, 1217), 'os.path.join', 'os.path.join', (['tmpdir', '"""*.rpz"""'], {}), "(tmpdir, '*.rpz')\n", (1200, 1217), False, 'import os\n'), ((1234, 1247), 'glob.glob', 'glob', (['pattern'], {}), '(pattern)\n', (1238, 1247), False, 'from glob import glob\n'), ((1307, 1341), 'os.path.join', 'os.path.join', (['tmpdir', '"""merged.rpz"""'], {}), "(tmpdir, 'merged.rpz')\n", (1319, 1341), False, 'import os\n'), ((1346, 1400), 'neurodocker.reprozip.merge.merge_pack_files', 'merge_pack_files', ([], {'outfile': 'outfile', 'packfiles': 'packfiles'}), '(outfile=outfile, packfiles=packfiles)\n', (1362, 1400), False, 'from neurodocker.reprozip.merge import merge_pack_files\n'), ((606, 670), 'neurodocker.reprozip.trace.ReproZipMinimizer', 'ReproZipMinimizer', (['container.id', 'commands'], {'packfile_save_dir': 'dir'}), '(container.id, commands, packfile_save_dir=dir)\n', (623, 670), False, 'from neurodocker.reprozip.trace import ReproZipMinimizer\n'), ((1069, 1099), 'os.path.join', 'os.path.join', (['tmpdir', 'new_name'], {}), '(tmpdir, new_name)\n', (1081, 1099), False, 'import os\n'), ((1411, 1432), 'tarfile.open', 'tarfile.open', (['outfile'], {}), '(outfile)\n', (1423, 1432), False, 'import tarfile\n'), ((1496, 1531), 'os.path.join', 'os.path.join', (['tmpdir', '"""DATA.tar.gz"""'], {}), "(tmpdir, 'DATA.tar.gz')\n", (1508, 1531), False, 'import os\n'), ((1545, 1567), 'tarfile.open', 'tarfile.open', (['datafile'], {}), '(datafile)\n', (1557, 1567), False, 'import tarfile\n'), ((1647, 1689), 'os.path.join', 'os.path.join', (['tmpdir', '"""DATA"""', '"""usr"""', '"""bin"""'], {}), "(tmpdir, 'DATA', 'usr', 'bin')\n", (1659, 1689), False, 'import os\n'), ((1724, 1752), 'os.path.join', 'os.path.join', (['usr_path', '"""du"""'], {}), "(usr_path, 'du')\n", (1736, 1752), False, 'import os\n'), ((1788, 1818), 'os.path.join', 'os.path.join', (['usr_path', '"""grep"""'], {}), "(usr_path, 'grep')\n", (1800, 1818), False, 'import os\n'), ((1854, 1882), 'os.path.join', 'os.path.join', (['usr_path', '"""ls"""'], {}), "(usr_path, 'ls')\n", (1866, 1882), False, 'import os\n'), ((1918, 1946), 'os.path.join', 'os.path.join', (['usr_path', '"""rm"""'], {}), "(usr_path, 'rm')\n", (1930, 1946), False, 'import os\n'), ((1986, 2015), 'os.path.join', 'os.path.join', (['usr_path', '"""sed"""'], {}), "(usr_path, 'sed')\n", (1998, 2015), False, 'import os\n'), ((2055, 2084), 'os.path.join', 'os.path.join', (['usr_path', '"""tar"""'], {}), "(usr_path, 'tar')\n", (2067, 2084), False, 'import os\n')] |
"""
HTML5 contexts.
:author: <NAME>
:license: MIT
"""
import contextlib
import io
import sys
__all__ = ['create_document', 'tag', 'write_link']
class create_document(contextlib.redirect_stdout):
"""Redirect output to an HTML5 document specified by new_target.
A HTML document title can be specified, but should not consist of
whitespace only. Default is a dash.
For serialisation, an encoding is included and defaults to UTF-8.
Make sure the output (likely ``new_target``) uses the correct one.
Arguments are not checked for validity.
"""
def __init__(self, new_target, *, title='-', encoding='utf-8'):
super().__init__(new_target)
self._title = str(title)
self._encoding = encoding
def __enter__(self):
new_target = contextlib.redirect_stdout.__enter__(self)
html5 = ('<!DOCTYPE html>\n'
'<html>\n'
'<title>{}</title>\n'
'<meta charset="{}">'.format(self._title, self._encoding))
print(html5)
return new_target
@contextlib.contextmanager
def tag(name):
"""Enclose output in an HTML tag denoted by the name."""
print('<{}>'.format(name))
yield
print('</{}>'.format(name))
class LinkStringIO(io.StringIO):
def __init__(self):
super().__init__()
self._write_text = False # switch between link href="..." and text
def write(self, s):
if not s:
return
# else:
if s.isspace():
return super().write(s)
# else:
if self._write_text:
count = super().write('<a href="')
count += super().write(s)
count += super().write('">')
else:
count = super().write(s)
count += super().write('</a>')
self._write_text = not self._write_text
return count
class write_link(contextlib.redirect_stdout):
"""Combine any two subsequent non-empty writes into an HTML link."""
def __init__(self):
super().__init__(LinkStringIO())
def __exit__(self, exctype, excinst, exctb):
super().__exit__(exctype, excinst, exctb)
with contextlib.closing(self._new_target):
self._new_target.seek(0)
sys.stdout.write(self._new_target.read())
| [
"contextlib.redirect_stdout.__enter__",
"contextlib.closing"
] | [((847, 889), 'contextlib.redirect_stdout.__enter__', 'contextlib.redirect_stdout.__enter__', (['self'], {}), '(self)\n', (883, 889), False, 'import contextlib\n'), ((2291, 2327), 'contextlib.closing', 'contextlib.closing', (['self._new_target'], {}), '(self._new_target)\n', (2309, 2327), False, 'import contextlib\n')] |
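A usage sketch for the create_document and tag context managers above; the output filename and the chosen elements are assumptions.
# Illustrative use; 'report.html' is an assumed filename.
with open('report.html', 'w', encoding='utf-8') as fh:
    with create_document(fh, title='Report'):
        with tag('h1'):
            print('Results')
        with tag('p'):
            print('Everything printed inside this block lands in the <p> element.')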
import torch
import torch.nn as nn
import numpy as np
import matplotlib
matplotlib.use("Agg")
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.metrics import *
from sklearn.metrics import precision_recall_fscore_support as prfs
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
def degrading_model_perf(data, model, save_path, data_size, largest = True):
print("\n--- Degrading Model Performance \n")
modulo = round(len(data) / 10) + 1
model.embedding.weight.requires_grad_(True)
actual = []
results = {}
results["random"] = []
results["attention"]= []
results["gradient"] = []
results["grad_attention"] = []
results["grad*attention"] = []
_, _, lengths, _ = next(iter(data))
maximum = max(lengths)
if max(lengths) <= 10 :
maximum = max(lengths) - 1
elif max(lengths) > 10 :
maximum = 10
print(maximum)
grad_set = torch.zeros([data_size, maximum]).long().to(device)
att_set = torch.zeros([data_size, maximum]).long().to(device)
rand_set = torch.zeros([data_size, maximum]).long().to(device)
att_grad_set = torch.zeros([data_size, maximum]).long().to(device)
att_x_grad_set = torch.zeros([data_size, maximum]).long().to(device)
actual_set = torch.zeros([data_size, 1]).long().to(device)
docs = []
for batchi, (doc_id, sentences, lengths, labels) in enumerate(data):
model.train()
torch.cuda.empty_cache()
model.zero_grad()
sentences, lengths, labels = sentences.to(device), lengths.to(device), labels.to(device)
yhat, weights_or = model(sentences, lengths, retain_gradient = True)
masking = yhat.max(-1)[1] == labels
if largest == False:
masking = yhat.max(-1)[1] != labels
yhat.max(-1)[0].sum().backward(retain_graph = True)
maxi = max(lengths)
doc_id = doc_id[masking]
yhat = yhat[masking]
sentences = sentences[masking]
labels = labels[masking]
lengths = lengths[masking]
weights_or = weights_or[masking]
docs.extend(doc_id)
g = model.embed.grad[masking]
weights_def_grad = model.weights.grad[masking]
max_lengths = max(max(lengths), maxi)
model_masks = model.masks[masking]
with torch.no_grad():
weights = weights_or.clone()
weight_mul_grad = weights_or * weights_def_grad
weight_mul_grad[model_masks[:,:max_lengths]] = float("-inf")
weights_def_grad_soft = weights_def_grad.clone()
weights_def_grad_soft[model_masks[:,:max_lengths]] = float("-inf")
em = model.embed[masking]
g1 = (g* em).sum(-1)[:,:max_lengths]
g1[model_masks[:,:max_lengths]] = float("-inf")
sentence_att = sentences.clone()[:,:max_lengths]
sentence_grad = sentences.clone()[:,:max_lengths]
sentence_rand = sentences.clone()[:,:max_lengths]
sentence_att_grad = sentences.clone()[:,:max_lengths]
sentence_att_mul_grad = sentences.clone()[:,:max_lengths]
g1[model_masks[:,:max_lengths]] = float("-inf")
top_grad = torch.topk(g1, k = g1.size(1), largest = largest)[1]
top_att = torch.topk(weights, k = weights.size(1),
largest = largest)[1]
top_rand = torch.randn(top_att.shape)
top_rand = torch.topk(top_rand, k = weights.size(1),
largest = largest)[1]
top_att_grad = torch.topk(weights_def_grad_soft,
k = weights.size(1),
largest = largest)[1]
top_att_mul_grad = torch.topk(weight_mul_grad,
k = weights.size(1),
largest = largest)[1]
temp_pred = []
temp_act = []
temp_act.append(labels.cpu().data.numpy())
temp_pred.append(yhat.max(-1)[1].cpu().data.numpy())
model.eval()
actual_set[doc_id] = labels.unsqueeze(-1)
rand_set[doc_id, 0] = yhat.max(-1)[1]
att_set[doc_id, 0] = yhat.max(-1)[1]
grad_set[doc_id, 0] = yhat.max(-1)[1]
att_grad_set[doc_id, 0] = yhat.max(-1)[1]
att_x_grad_set[doc_id, 0] = yhat.max(-1)[1]
rows = torch.arange(sentences.size(0))
for _j_ in range(1,maximum):
sentence_grad[rows, top_grad[:,_j_]] = 0
sentence_att[rows, top_att[:,_j_]] = 0
sentence_att_grad[rows, top_att_grad[:,_j_]] = 0
sentence_att_mul_grad[rows, top_att_mul_grad[:,_j_]] = 0
sentence_rand[rows, top_rand[:,_j_]] = 0
yhat_rand, _ = model(sentence_rand,lengths)
rand_set[doc_id, _j_] = yhat_rand.max(-1)[1]
yhat_att, _ = model(sentence_att,lengths)
att_set[doc_id, _j_] = yhat_att.max(-1)[1]
yhat_grad, _ = model(sentence_grad,lengths)
grad_set[doc_id, _j_] = yhat_grad.max(-1)[1]
yhat_att_grad, _ = model(sentence_att_grad,lengths)
att_grad_set[doc_id, _j_] = yhat_att_grad.max(-1)[1]
yhat_att_x_grad, _ = model(sentence_att_mul_grad,lengths)
att_x_grad_set[doc_id, _j_] = yhat_att_x_grad.max(-1)[1]
if batchi % modulo == 0 :
print("Remaining: ", len(data)- batchi)
docs = torch.LongTensor(docs)
rand_set = rand_set[docs]
att_set = att_set[docs]
grad_set = grad_set[docs]
att_grad_set = att_grad_set[docs]
att_x_grad_set = att_x_grad_set[docs]
actual_set = actual_set[docs]
for _k_ in range(0,maximum):
actual = actual_set.flatten().cpu().data.numpy()
rand_pred = classification_report(actual,
rand_set[:,_k_].cpu().data.numpy(),
output_dict = True)["macro avg"]["f1-score"]
att_pred = classification_report(actual,
att_set[:,_k_].cpu().data.numpy(),
output_dict = True)["macro avg"]["f1-score"]
grad_pred = classification_report(actual,
grad_set[:,_k_].cpu().data.numpy(),
output_dict = True)["macro avg"]["f1-score"]
att_grad_pred = classification_report(actual,
att_grad_set[:,_k_].cpu().data.numpy(),
output_dict = True)["macro avg"]["f1-score"]
att_x_grad_pred = classification_report(actual,
att_x_grad_set[:,_k_].cpu().data.numpy(),
output_dict = True)["macro avg"]["f1-score"]
results["random"].append(rand_pred)
results["attention"].append(att_pred)
results["gradient"].append(grad_pred)
results["grad_attention"].append(att_grad_pred)
results["grad*attention"].append(att_x_grad_pred)
results = pd.DataFrame.from_dict(results)
results.plot(kind = "line", figsize = (18,10))
ordering = "ascending"
if largest:
ordering = "descending"
plt.savefig(save_path + "_correct_classified_" + ordering + ".png")
results.to_csv(save_path + "_correct_classified_" + ordering + ".csv")
| [
"matplotlib.pyplot.savefig",
"matplotlib.use",
"torch.LongTensor",
"pandas.DataFrame.from_dict",
"torch.zeros",
"torch.cuda.is_available",
"torch.no_grad",
"torch.cuda.empty_cache",
"torch.randn"
] | [((72, 93), 'matplotlib.use', 'matplotlib.use', (['"""Agg"""'], {}), "('Agg')\n", (86, 93), False, 'import matplotlib\n'), ((6334, 6356), 'torch.LongTensor', 'torch.LongTensor', (['docs'], {}), '(docs)\n', (6350, 6356), False, 'import torch\n'), ((8097, 8128), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['results'], {}), '(results)\n', (8119, 8128), True, 'import pandas as pd\n'), ((8288, 8355), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(save_path + '_correct_classified_' + ordering + '.png')"], {}), "(save_path + '_correct_classified_' + ordering + '.png')\n", (8299, 8355), True, 'import matplotlib.pyplot as plt\n'), ((279, 304), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (302, 304), False, 'import torch\n'), ((1540, 1564), 'torch.cuda.empty_cache', 'torch.cuda.empty_cache', ([], {}), '()\n', (1562, 1564), False, 'import torch\n'), ((2533, 2548), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (2546, 2548), False, 'import torch\n'), ((3750, 3776), 'torch.randn', 'torch.randn', (['top_att.shape'], {}), '(top_att.shape)\n', (3761, 3776), False, 'import torch\n'), ((1026, 1059), 'torch.zeros', 'torch.zeros', (['[data_size, maximum]'], {}), '([data_size, maximum])\n', (1037, 1059), False, 'import torch\n'), ((1092, 1125), 'torch.zeros', 'torch.zeros', (['[data_size, maximum]'], {}), '([data_size, maximum])\n', (1103, 1125), False, 'import torch\n'), ((1159, 1192), 'torch.zeros', 'torch.zeros', (['[data_size, maximum]'], {}), '([data_size, maximum])\n', (1170, 1192), False, 'import torch\n'), ((1230, 1263), 'torch.zeros', 'torch.zeros', (['[data_size, maximum]'], {}), '([data_size, maximum])\n', (1241, 1263), False, 'import torch\n'), ((1303, 1336), 'torch.zeros', 'torch.zeros', (['[data_size, maximum]'], {}), '([data_size, maximum])\n', (1314, 1336), False, 'import torch\n'), ((1372, 1399), 'torch.zeros', 'torch.zeros', (['[data_size, 1]'], {}), '([data_size, 1])\n', (1383, 1399), False, 'import torch\n')] |
from xml.etree import ElementTree
from casexml.apps.case.tests.util import check_xml_line_by_line
from casexml.apps.case.xml import V2
from corehq.apps.fixtures import fixturegenerators
from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, \
FixtureItemField, FieldList
from corehq.apps.fixtures.views import update_tables
from corehq.apps.fixtures.exceptions import FixtureVersionError
from corehq.apps.users.models import CommCareUser
from django.test import TestCase
class FixtureDataTest(TestCase):
def setUp(self):
self.domain = 'qwerty'
self.tag = "district"
self.data_type = FixtureDataType(
domain=self.domain,
tag=self.tag,
name="Districts",
fields=[
FixtureTypeField(
field_name="state_name",
properties=[]
),
FixtureTypeField(
field_name="district_name",
properties=["lang"]
),
FixtureTypeField(
field_name="district_id",
properties=[]
)
],
item_attributes=[],
)
self.data_type.save()
self.data_item = FixtureDataItem(
domain=self.domain,
data_type_id=self.data_type.get_id,
fields= {
"state_name": FieldList(
field_list=[
FixtureItemField(
field_value="Delhi_state",
properties={}
)
]
),
"district_name": FieldList(
field_list=[
FixtureItemField(
field_value="Delhi_in_HIN",
properties={"lang": "hin"}
),
FixtureItemField(
field_value="Delhi_in_ENG",
properties={"lang": "eng"}
)
]
),
"district_id": FieldList(
field_list=[
FixtureItemField(
field_value="Delhi_id",
properties={}
)
]
)
},
item_attributes={},
)
self.data_item.save()
self.user = CommCareUser.create(self.domain, 'to_delete', '***')
self.fixture_ownership = FixtureOwnership(
domain=self.domain,
owner_id=self.user.get_id,
owner_type='user',
data_item_id=self.data_item.get_id
)
self.fixture_ownership.save()
def tearDown(self):
self.data_type.delete()
self.data_item.delete()
self.user.delete()
self.fixture_ownership.delete()
def test_xml(self):
check_xml_line_by_line(self, """
<district>
<state_name>Delhi_state</state_name>
<district_name lang="hin">Delhi_in_HIN</district_name>
<district_name lang="eng">Delhi_in_ENG</district_name>
<district_id>Delhi_id</district_id>
</district>
""", ElementTree.tostring(self.data_item.to_xml()))
def test_ownership(self):
self.assertItemsEqual([self.data_item.get_id], FixtureDataItem.by_user(self.user, wrap=False))
self.assertItemsEqual([self.user.get_id], self.data_item.get_all_users(wrap=False))
fixture, = fixturegenerators.item_lists(self.user, V2)
check_xml_line_by_line(self, """
<fixture id="item-list:district" user_id="%s">
<district_list>
<district>
<state_name>Delhi_state</state_name>
<district_name lang="hin">Delhi_in_HIN</district_name>
<district_name lang="eng">Delhi_in_ENG</district_name>
<district_id>Delhi_id</district_id>
</district>
</district_list>
</fixture>
""" % self.user.user_id, ElementTree.tostring(fixture))
self.data_item.remove_user(self.user)
self.assertItemsEqual([], self.data_item.get_all_users())
self.fixture_ownership = self.data_item.add_user(self.user)
self.assertItemsEqual([self.user.get_id], self.data_item.get_all_users(wrap=False))
def test_get_indexed_items(self):
with self.assertRaises(FixtureVersionError):
fixtures = FixtureDataItem.get_indexed_items(self.domain,
self.tag, 'state_name')
delhi_id = fixtures['Delhi_state']['district_id']
self.assertEqual(delhi_id, 'Delhi_id')
| [
"corehq.apps.users.models.CommCareUser.create",
"xml.etree.ElementTree.tostring",
"corehq.apps.fixtures.fixturegenerators.item_lists",
"corehq.apps.fixtures.models.FixtureDataItem.by_user",
"corehq.apps.fixtures.models.FixtureOwnership",
"corehq.apps.fixtures.models.FixtureDataItem.get_indexed_items",
"corehq.apps.fixtures.models.FixtureTypeField",
"corehq.apps.fixtures.models.FixtureItemField"
] | [((2589, 2641), 'corehq.apps.users.models.CommCareUser.create', 'CommCareUser.create', (['self.domain', '"""to_delete"""', '"""***"""'], {}), "(self.domain, 'to_delete', '***')\n", (2608, 2641), False, 'from corehq.apps.users.models import CommCareUser\n'), ((2676, 2799), 'corehq.apps.fixtures.models.FixtureOwnership', 'FixtureOwnership', ([], {'domain': 'self.domain', 'owner_id': 'self.user.get_id', 'owner_type': '"""user"""', 'data_item_id': 'self.data_item.get_id'}), "(domain=self.domain, owner_id=self.user.get_id, owner_type=\n 'user', data_item_id=self.data_item.get_id)\n", (2692, 2799), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((3689, 3732), 'corehq.apps.fixtures.fixturegenerators.item_lists', 'fixturegenerators.item_lists', (['self.user', 'V2'], {}), '(self.user, V2)\n', (3717, 3732), False, 'from corehq.apps.fixtures import fixturegenerators\n'), ((3529, 3575), 'corehq.apps.fixtures.models.FixtureDataItem.by_user', 'FixtureDataItem.by_user', (['self.user'], {'wrap': '(False)'}), '(self.user, wrap=False)\n', (3552, 3575), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((4269, 4298), 'xml.etree.ElementTree.tostring', 'ElementTree.tostring', (['fixture'], {}), '(fixture)\n', (4289, 4298), False, 'from xml.etree import ElementTree\n'), ((4689, 4759), 'corehq.apps.fixtures.models.FixtureDataItem.get_indexed_items', 'FixtureDataItem.get_indexed_items', (['self.domain', 'self.tag', '"""state_name"""'], {}), "(self.domain, self.tag, 'state_name')\n", (4722, 4759), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((814, 870), 'corehq.apps.fixtures.models.FixtureTypeField', 'FixtureTypeField', ([], {'field_name': '"""state_name"""', 'properties': '[]'}), "(field_name='state_name', properties=[])\n", (830, 870), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((946, 1011), 'corehq.apps.fixtures.models.FixtureTypeField', 'FixtureTypeField', ([], {'field_name': '"""district_name"""', 'properties': "['lang']"}), "(field_name='district_name', properties=['lang'])\n", (962, 1011), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((1087, 1144), 'corehq.apps.fixtures.models.FixtureTypeField', 'FixtureTypeField', ([], {'field_name': '"""district_id"""', 'properties': '[]'}), "(field_name='district_id', properties=[])\n", (1103, 1144), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((1545, 1603), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'field_value': '"""Delhi_state"""', 'properties': '{}'}), "(field_value='Delhi_state', properties={})\n", (1561, 1603), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((1828, 1900), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'field_value': '"""Delhi_in_HIN"""', 'properties': "{'lang': 'hin'}"}), "(field_value='Delhi_in_HIN', properties={'lang': 'hin'})\n", (1844, 1900), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((2008, 2080), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'field_value': '"""Delhi_in_ENG"""', 'properties': "{'lang': 'eng'}"}), "(field_value='Delhi_in_ENG', properties={'lang': 'eng'})\n", (2024, 2080), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n'), ((2303, 2358), 'corehq.apps.fixtures.models.FixtureItemField', 'FixtureItemField', ([], {'field_value': '"""Delhi_id"""', 'properties': '{}'}), "(field_value='Delhi_id', properties={})\n", (2319, 2358), False, 'from corehq.apps.fixtures.models import FixtureDataItem, FixtureDataType, FixtureOwnership, FixtureTypeField, FixtureItemField, FieldList\n')]
# -*- coding: utf-8 -*-
"""We define custom Django signals to trigger before executing searches."""
from django.db.models.signals import post_save, pre_delete
from django.dispatch import receiver
from django_elasticsearch_dsl.apps import DEDConfig
from readthedocs.projects.models import HTMLFile, Project
from readthedocs.projects.signals import bulk_post_create, bulk_post_delete
from readthedocs.search.tasks import delete_objects_in_es, index_objects_to_es
@receiver(bulk_post_create, sender=HTMLFile)
def index_html_file(instance_list, **_):
"""Handle indexing from the build process."""
from readthedocs.search.documents import PageDocument
kwargs = {
'app_label': HTMLFile._meta.app_label,
'model_name': HTMLFile.__name__,
'document_class': str(PageDocument),
'objects_id': [obj.id for obj in instance_list],
}
# Do not index if autosync is disabled globally
if DEDConfig.autosync_enabled():
index_objects_to_es(**kwargs)
@receiver(bulk_post_delete, sender=HTMLFile)
def remove_html_file(instance_list, **_):
"""Remove deleted files from the build process."""
from readthedocs.search.documents import PageDocument
kwargs = {
'app_label': HTMLFile._meta.app_label,
'model_name': HTMLFile.__name__,
'document_class': str(PageDocument),
'objects_id': [obj.id for obj in instance_list],
}
# Do not index if autosync is disabled globally
if DEDConfig.autosync_enabled():
delete_objects_in_es(**kwargs)
@receiver(post_save, sender=Project)
def index_project_save(instance, *args, **kwargs):
"""
Save a Project instance based on the post_save signal.post_save.
This uses Celery to do it async, replacing how django-elasticsearch-dsl does
it.
"""
from readthedocs.search.documents import ProjectDocument
kwargs = {
'app_label': Project._meta.app_label,
'model_name': Project.__name__,
'document_class': str(ProjectDocument),
'objects_id': [instance.id],
}
# Do not index if autosync is disabled globally
if DEDConfig.autosync_enabled():
index_objects_to_es.delay(**kwargs)
@receiver(pre_delete, sender=Project)
def remove_project_delete(instance, *args, **kwargs):
from readthedocs.search.documents import ProjectDocument
kwargs = {
'app_label': Project._meta.app_label,
'model_name': Project.__name__,
'document_class': str(ProjectDocument),
'objects_id': [instance.id],
}
# Don't `delay` this because the objects will be deleted already
if DEDConfig.autosync_enabled():
delete_objects_in_es(**kwargs)
| [
"readthedocs.search.tasks.delete_objects_in_es",
"django_elasticsearch_dsl.apps.DEDConfig.autosync_enabled",
"readthedocs.search.tasks.index_objects_to_es.delay",
"django.dispatch.receiver",
"readthedocs.search.tasks.index_objects_to_es"
] | [((466, 509), 'django.dispatch.receiver', 'receiver', (['bulk_post_create'], {'sender': 'HTMLFile'}), '(bulk_post_create, sender=HTMLFile)\n', (474, 509), False, 'from django.dispatch import receiver\n'), ((1001, 1044), 'django.dispatch.receiver', 'receiver', (['bulk_post_delete'], {'sender': 'HTMLFile'}), '(bulk_post_delete, sender=HTMLFile)\n', (1009, 1044), False, 'from django.dispatch import receiver\n'), ((1543, 1578), 'django.dispatch.receiver', 'receiver', (['post_save'], {'sender': 'Project'}), '(post_save, sender=Project)\n', (1551, 1578), False, 'from django.dispatch import receiver\n'), ((2195, 2231), 'django.dispatch.receiver', 'receiver', (['pre_delete'], {'sender': 'Project'}), '(pre_delete, sender=Project)\n', (2203, 2231), False, 'from django.dispatch import receiver\n'), ((930, 958), 'django_elasticsearch_dsl.apps.DEDConfig.autosync_enabled', 'DEDConfig.autosync_enabled', ([], {}), '()\n', (956, 958), False, 'from django_elasticsearch_dsl.apps import DEDConfig\n'), ((1471, 1499), 'django_elasticsearch_dsl.apps.DEDConfig.autosync_enabled', 'DEDConfig.autosync_enabled', ([], {}), '()\n', (1497, 1499), False, 'from django_elasticsearch_dsl.apps import DEDConfig\n'), ((2118, 2146), 'django_elasticsearch_dsl.apps.DEDConfig.autosync_enabled', 'DEDConfig.autosync_enabled', ([], {}), '()\n', (2144, 2146), False, 'from django_elasticsearch_dsl.apps import DEDConfig\n'), ((2616, 2644), 'django_elasticsearch_dsl.apps.DEDConfig.autosync_enabled', 'DEDConfig.autosync_enabled', ([], {}), '()\n', (2642, 2644), False, 'from django_elasticsearch_dsl.apps import DEDConfig\n'), ((968, 997), 'readthedocs.search.tasks.index_objects_to_es', 'index_objects_to_es', ([], {}), '(**kwargs)\n', (987, 997), False, 'from readthedocs.search.tasks import delete_objects_in_es, index_objects_to_es\n'), ((1509, 1539), 'readthedocs.search.tasks.delete_objects_in_es', 'delete_objects_in_es', ([], {}), '(**kwargs)\n', (1529, 1539), False, 'from readthedocs.search.tasks import delete_objects_in_es, index_objects_to_es\n'), ((2156, 2191), 'readthedocs.search.tasks.index_objects_to_es.delay', 'index_objects_to_es.delay', ([], {}), '(**kwargs)\n', (2181, 2191), False, 'from readthedocs.search.tasks import delete_objects_in_es, index_objects_to_es\n'), ((2654, 2684), 'readthedocs.search.tasks.delete_objects_in_es', 'delete_objects_in_es', ([], {}), '(**kwargs)\n', (2674, 2684), False, 'from readthedocs.search.tasks import delete_objects_in_es, index_objects_to_es\n')] |
import itertools
import sys
from signal import SIGINT, default_int_handler, signal
from typing import Any, Dict, List
from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
from pip._vendor.progress.spinner import Spinner
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
from pip._internal.utils.misc import format_size
try:
from pip._vendor import colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
def _select_progress_class(preferred, fallback):
# type: (Bar, Bar) -> Bar
encoding = getattr(preferred.file, "encoding", None)
# If we don't know what encoding this file is in, then we'll just assume
# that it doesn't support unicode and use the ASCII bar.
if not encoding:
return fallback
# Collect all of the possible characters we want to use with the preferred
# bar.
characters = [
getattr(preferred, "empty_fill", ""),
getattr(preferred, "fill", ""),
]
characters += list(getattr(preferred, "phases", []))
# Try to decode the characters we're using for the bar using the encoding
# of the given file, if this works then we'll assume that we can use the
# fancier bar and if not we'll fall back to the plaintext bar.
try:
"".join(characters).encode(encoding)
except UnicodeEncodeError:
return fallback
else:
return preferred
_BaseBar = _select_progress_class(IncrementalBar, Bar) # type: Any
class InterruptibleMixin:
"""
Helper to ensure that self.finish() gets called on keyboard interrupt.
This allows downloads to be interrupted without leaving temporary state
(like hidden cursors) behind.
This class is similar to the progress library's existing SigIntMixin
helper, but as of version 1.2, that helper has the following problems:
1. It calls sys.exit().
2. It discards the existing SIGINT handler completely.
3. It leaves its own handler in place even after an uninterrupted finish,
which will have unexpected delayed effects if the user triggers an
unrelated keyboard interrupt some time after a progress-displaying
download has already completed, for example.
"""
def __init__(self, *args, **kwargs):
# type: (List[Any], Dict[Any, Any]) -> None
"""
Save the original SIGINT handler for later.
"""
# https://github.com/python/mypy/issues/5887
super().__init__(*args, **kwargs) # type: ignore
self.original_handler = signal(SIGINT, self.handle_sigint)
# If signal() returns None, the previous handler was not installed from
# Python, and we cannot restore it. This probably should not happen,
# but if it does, we must restore something sensible instead, at least.
# The least bad option should be Python's default SIGINT handler, which
# just raises KeyboardInterrupt.
if self.original_handler is None:
self.original_handler = default_int_handler
def finish(self):
# type: () -> None
"""
Restore the original SIGINT handler after finishing.
This should happen regardless of whether the progress display finishes
normally, or gets interrupted.
"""
super().finish() # type: ignore
signal(SIGINT, self.original_handler)
def handle_sigint(self, signum, frame): # type: ignore
"""
Call self.finish() before delegating to the original SIGINT handler.
This handler should only be in place while the progress display is
active.
"""
self.finish()
self.original_handler(signum, frame)
class SilentBar(Bar):
def update(self):
# type: () -> None
pass
class BlueEmojiBar(IncrementalBar):
suffix = "%(percent)d%%"
bar_prefix = " "
bar_suffix = " "
phases = ("\U0001F539", "\U0001F537", "\U0001F535")
class DownloadProgressMixin:
def __init__(self, *args, **kwargs):
# type: (List[Any], Dict[Any, Any]) -> None
# https://github.com/python/mypy/issues/5887
super().__init__(*args, **kwargs) # type: ignore
self.message = (" " * (get_indentation() + 2)) + self.message # type: str
@property
def downloaded(self):
# type: () -> str
return format_size(self.index) # type: ignore
@property
def download_speed(self):
# type: () -> str
# Avoid zero division errors...
if self.avg == 0.0: # type: ignore
return "..."
return format_size(1 / self.avg) + "/s" # type: ignore
@property
def pretty_eta(self):
# type: () -> str
if self.eta: # type: ignore
return f"eta {self.eta_td}" # type: ignore
return ""
def iter(self, it): # type: ignore
for x in it:
yield x
# B305 is incorrectly raised here
# https://github.com/PyCQA/flake8-bugbear/issues/59
self.next(len(x)) # noqa: B305
self.finish()
class WindowsMixin:
def __init__(self, *args, **kwargs):
# type: (List[Any], Dict[Any, Any]) -> None
# The Windows terminal does not support the hide/show cursor ANSI codes
# even with colorama. So we'll ensure that hide_cursor is False on
# Windows.
# This call needs to go before the super() call, so that hide_cursor
# is set in time. The base progress bar class writes the "hide cursor"
# code to the terminal in its init, so if we don't set this soon
# enough, we get a "hide" with no corresponding "show"...
if WINDOWS and self.hide_cursor: # type: ignore
self.hide_cursor = False
# https://github.com/python/mypy/issues/5887
super().__init__(*args, **kwargs) # type: ignore
# Check if we are running on Windows and we have the colorama module,
# if we do then wrap our file with it.
if WINDOWS and colorama:
self.file = colorama.AnsiToWin32(self.file) # type: ignore
# The progress code expects to be able to call self.file.isatty()
# but the colorama.AnsiToWin32() object doesn't have that, so we'll
# add it.
self.file.isatty = lambda: self.file.wrapped.isatty()
# The progress code expects to be able to call self.file.flush()
# but the colorama.AnsiToWin32() object doesn't have that, so we'll
# add it.
self.file.flush = lambda: self.file.wrapped.flush()
class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
file = sys.stdout
message = "%(percent)d%%"
suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
pass
class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
pass
class DownloadBar(BaseDownloadProgressBar, Bar):
pass
class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
pass
class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
pass
class DownloadProgressSpinner(
WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
):
file = sys.stdout
suffix = "%(downloaded)s %(download_speed)s"
def next_phase(self):
# type: () -> str
if not hasattr(self, "_phaser"):
self._phaser = itertools.cycle(self.phases)
return next(self._phaser)
def update(self):
# type: () -> None
message = self.message % self
phase = self.next_phase()
suffix = self.suffix % self
line = "".join(
[
message,
" " if message else "",
phase,
" " if suffix else "",
suffix,
]
)
self.writeln(line)
BAR_TYPES = {
"off": (DownloadSilentBar, DownloadSilentBar),
"on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
"ascii": (DownloadBar, DownloadProgressSpinner),
"pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
"emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
}
def DownloadProgressProvider(progress_bar, max=None): # type: ignore
if max is None or max == 0:
return BAR_TYPES[progress_bar][1]().iter
else:
return BAR_TYPES[progress_bar][0](max=max).iter
| [
"pip._internal.utils.logging.get_indentation",
"itertools.cycle",
"signal.signal",
"pip._vendor.colorama.AnsiToWin32",
"pip._internal.utils.misc.format_size"
] | [((2720, 2754), 'signal.signal', 'signal', (['SIGINT', 'self.handle_sigint'], {}), '(SIGINT, self.handle_sigint)\n', (2726, 2754), False, 'from signal import SIGINT, default_int_handler, signal\n'), ((3534, 3571), 'signal.signal', 'signal', (['SIGINT', 'self.original_handler'], {}), '(SIGINT, self.original_handler)\n', (3540, 3571), False, 'from signal import SIGINT, default_int_handler, signal\n'), ((4582, 4605), 'pip._internal.utils.misc.format_size', 'format_size', (['self.index'], {}), '(self.index)\n', (4593, 4605), False, 'from pip._internal.utils.misc import format_size\n'), ((4825, 4850), 'pip._internal.utils.misc.format_size', 'format_size', (['(1 / self.avg)'], {}), '(1 / self.avg)\n', (4836, 4850), False, 'from pip._internal.utils.misc import format_size\n'), ((6320, 6351), 'pip._vendor.colorama.AnsiToWin32', 'colorama.AnsiToWin32', (['self.file'], {}), '(self.file)\n', (6340, 6351), False, 'from pip._vendor import colorama\n'), ((7796, 7824), 'itertools.cycle', 'itertools.cycle', (['self.phases'], {}), '(self.phases)\n', (7811, 7824), False, 'import itertools\n'), ((4443, 4460), 'pip._internal.utils.logging.get_indentation', 'get_indentation', ([], {}), '()\n', (4458, 4460), False, 'from pip._internal.utils.logging import get_indentation\n')] |
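A usage sketch for DownloadProgressProvider above; the chunk generator and total size are assumptions, and since these classes are pip-internal the snippet only illustrates the call pattern.
# Illustrative call pattern; `chunks` and the total size are assumptions.
total_bytes = 100 * 1024
chunks = (b'x' * 1024 for _ in range(100))
progress_iter = DownloadProgressProvider('on', max=total_bytes)
for chunk in progress_iter(chunks):
    pass  # e.g. write the chunk to disk and update a running hash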
import unittest
import datetime
from dida import schemas, triggers
from marshmallow import ValidationError
class TestTriggerSchema(unittest.TestCase):
def test_dump_trigger(self):
result = schemas.TriggerSchema().dump(triggers.IntervalTrigger())
print('IntervalTrigger dump:', result)
result = schemas.TriggerSchema().dump(triggers.DateTrigger())
print('DateTrigger dump:', result)
def test_load_trigger(self):
self.assertRaises(ValidationError, schemas.TriggerSchema().load, {"type": "unknown"})
obj = schemas.TriggerSchema().load({'type': "interval"})
self.assertIsInstance(obj, triggers.IntervalTrigger)
obj = schemas.TriggerSchema().load({'type': 'date', "params": {'run_date': "2020-01-01 00:00:00"}})
self.assertEqual(obj.run_date, datetime.datetime(2020, 1, 1).astimezone())
| [
"dida.triggers.IntervalTrigger",
"datetime.datetime",
"dida.triggers.DateTrigger",
"dida.schemas.TriggerSchema"
] | [((233, 259), 'dida.triggers.IntervalTrigger', 'triggers.IntervalTrigger', ([], {}), '()\n', (257, 259), False, 'from dida import schemas, triggers\n'), ((355, 377), 'dida.triggers.DateTrigger', 'triggers.DateTrigger', ([], {}), '()\n', (375, 377), False, 'from dida import schemas, triggers\n'), ((204, 227), 'dida.schemas.TriggerSchema', 'schemas.TriggerSchema', ([], {}), '()\n', (225, 227), False, 'from dida import schemas, triggers\n'), ((326, 349), 'dida.schemas.TriggerSchema', 'schemas.TriggerSchema', ([], {}), '()\n', (347, 349), False, 'from dida import schemas, triggers\n'), ((499, 522), 'dida.schemas.TriggerSchema', 'schemas.TriggerSchema', ([], {}), '()\n', (520, 522), False, 'from dida import schemas, triggers\n'), ((565, 588), 'dida.schemas.TriggerSchema', 'schemas.TriggerSchema', ([], {}), '()\n', (586, 588), False, 'from dida import schemas, triggers\n'), ((692, 715), 'dida.schemas.TriggerSchema', 'schemas.TriggerSchema', ([], {}), '()\n', (713, 715), False, 'from dida import schemas, triggers\n'), ((825, 854), 'datetime.datetime', 'datetime.datetime', (['(2020)', '(1)', '(1)'], {}), '(2020, 1, 1)\n', (842, 854), False, 'import datetime\n')] |
# -*- coding: utf-8 -*-
"""
Example to train and evaluate a model with given data
@author: <NAME> <<EMAIL>>
"""
from cornac.data import Reader
from cornac.eval_methods import BaseMethod
from cornac.models import MF
from cornac.metrics import MAE, RMSE
from cornac.utils import cache
# Download the provided MovieLens 100K training and test splits
reader = Reader()
train_data = reader.read(cache(url='http://files.grouplens.org/datasets/movielens/ml-100k/u1.base'))
test_data = reader.read(cache(url='http://files.grouplens.org/datasets/movielens/ml-100k/u1.test'))
eval_method = BaseMethod.from_splits(train_data=train_data, test_data=test_data,
exclude_unknowns=False, verbose=True)
mf = MF(k=10, max_iter=25, learning_rate=0.01, lambda_reg=0.02,
use_bias=True, early_stop=True, verbose=True)
# Evaluation
result = eval_method.evaluate(model=mf, metrics=[MAE(), RMSE()], user_based=True)
print(result)
| [
"cornac.utils.cache",
"cornac.models.MF",
"cornac.metrics.MAE",
"cornac.eval_methods.BaseMethod.from_splits",
"cornac.data.Reader",
"cornac.metrics.RMSE"
] | [((356, 364), 'cornac.data.Reader', 'Reader', ([], {}), '()\n', (362, 364), False, 'from cornac.data import Reader\n'), ((581, 689), 'cornac.eval_methods.BaseMethod.from_splits', 'BaseMethod.from_splits', ([], {'train_data': 'train_data', 'test_data': 'test_data', 'exclude_unknowns': '(False)', 'verbose': '(True)'}), '(train_data=train_data, test_data=test_data,\n exclude_unknowns=False, verbose=True)\n', (603, 689), False, 'from cornac.eval_methods import BaseMethod\n'), ((729, 837), 'cornac.models.MF', 'MF', ([], {'k': '(10)', 'max_iter': '(25)', 'learning_rate': '(0.01)', 'lambda_reg': '(0.02)', 'use_bias': '(True)', 'early_stop': '(True)', 'verbose': '(True)'}), '(k=10, max_iter=25, learning_rate=0.01, lambda_reg=0.02, use_bias=True,\n early_stop=True, verbose=True)\n', (731, 837), False, 'from cornac.models import MF\n'), ((390, 464), 'cornac.utils.cache', 'cache', ([], {'url': '"""http://files.grouplens.org/datasets/movielens/ml-100k/u1.base"""'}), "(url='http://files.grouplens.org/datasets/movielens/ml-100k/u1.base')\n", (395, 464), False, 'from cornac.utils import cache\n'), ((490, 564), 'cornac.utils.cache', 'cache', ([], {'url': '"""http://files.grouplens.org/datasets/movielens/ml-100k/u1.test"""'}), "(url='http://files.grouplens.org/datasets/movielens/ml-100k/u1.test')\n", (495, 564), False, 'from cornac.utils import cache\n'), ((905, 910), 'cornac.metrics.MAE', 'MAE', ([], {}), '()\n', (908, 910), False, 'from cornac.metrics import MAE, RMSE\n'), ((912, 918), 'cornac.metrics.RMSE', 'RMSE', ([], {}), '()\n', (916, 918), False, 'from cornac.metrics import MAE, RMSE\n')] |
import os
import numpy as np
import cv2
import albumentations
from PIL import Image
from torch.utils.data import Dataset
from taming.data.sflckr import SegmentationBase # for examples included in repo
class Examples(SegmentationBase):
def __init__(self, size=256, random_crop=False, interpolation="bicubic"):
super().__init__(data_csv="data/ade20k_examples.txt",
data_root="data/ade20k_images",
segmentation_root="data/ade20k_segmentations",
size=size, random_crop=random_crop,
interpolation=interpolation,
n_labels=151, shift_segmentation=False)
# With semantic map and scene label
class ADE20kBase(Dataset):
def __init__(self, config=None, size=None, random_crop=False, interpolation="bicubic", crop_size=None):
self.split = self.get_split()
self.n_labels = 151 # unknown + 150
self.data_csv = {"train": "data/ade20k_train.txt",
"validation": "data/ade20k_test.txt"}[self.split]
self.data_root = "./data/ade20k_root"
with open(os.path.join(self.data_root, "sceneCategories.txt"), "r") as f:
self.scene_categories = f.read().splitlines()
self.scene_categories = dict(line.split() for line in self.scene_categories)
with open(self.data_csv, "r") as f:
self.image_paths = f.read().splitlines()
self._length = len(self.image_paths)
ss = self.split
if ss=='train':
ss='training'
self.labels = {
"relative_file_path_": [l for l in self.image_paths],
"file_path_": [os.path.join(self.data_root, "images",ss, l)
for l in self.image_paths],
"relative_segmentation_path_": [l.replace(".jpg", ".png")
for l in self.image_paths],
"segmentation_path_": [os.path.join(self.data_root, "annotations",ss,
l.replace(".jpg", ".png"))
for l in self.image_paths],
"scene_category": [self.scene_categories[l.replace(".jpg", "")]
for l in self.image_paths],
}
size = None if size is not None and size<=0 else size
self.size = size
if crop_size is None:
self.crop_size = size if size is not None else None
else:
self.crop_size = crop_size
if self.size is not None:
self.interpolation = interpolation
self.interpolation = {
"nearest": cv2.INTER_NEAREST,
"bilinear": cv2.INTER_LINEAR,
"bicubic": cv2.INTER_CUBIC,
"area": cv2.INTER_AREA,
"lanczos": cv2.INTER_LANCZOS4}[self.interpolation]
self.image_rescaler = albumentations.SmallestMaxSize(max_size=self.size,
interpolation=self.interpolation)
self.segmentation_rescaler = albumentations.SmallestMaxSize(max_size=self.size,
interpolation=cv2.INTER_NEAREST)
if crop_size is not None:
self.center_crop = not random_crop
if self.center_crop:
self.cropper = albumentations.CenterCrop(height=self.crop_size, width=self.crop_size)
else:
self.cropper = albumentations.RandomCrop(height=self.crop_size, width=self.crop_size)
self.preprocessor = self.cropper
def __len__(self):
return self._length
def __getitem__(self, i):
example = dict((k, self.labels[k][i]) for k in self.labels)
image = Image.open(example["file_path_"])
if not image.mode == "RGB":
image = image.convert("RGB")
image = np.array(image).astype(np.uint8)
if self.size is not None:
image = self.image_rescaler(image=image)["image"]
segmentation = Image.open(example["segmentation_path_"])
segmentation = np.array(segmentation).astype(np.uint8)
if self.size is not None:
segmentation = self.segmentation_rescaler(image=segmentation)["image"]
if self.size is not None:
processed = self.preprocessor(image=image, mask=segmentation)
else:
processed = {"image": image, "mask": segmentation}
example["image"] = (processed["image"]/127.5 - 1.0).astype(np.float32)
segmentation = processed["mask"]
onehot = np.eye(self.n_labels)[segmentation]
example["segmentation"] = onehot
return example
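    # Rough sketch of what one returned item looks like (H/W depend on the
    # configured size/crop, so the exact shapes are illustrative only):
    #   example["image"]        -> float32 array, H x W x 3, values in [-1, 1]
    #   example["segmentation"] -> one-hot array, H x W x 151, built by indexing
    #                              np.eye(self.n_labels) with the integer mask
    #   plus the path/scene_category metadata copied from self.labels.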
class ADE20kTrain(ADE20kBase):
# default to random_crop=True
def __init__(self, config=None, size=None, random_crop=True, interpolation="bicubic", crop_size=None):
super().__init__(config=config, size=size, random_crop=random_crop,
interpolation=interpolation, crop_size=crop_size)
def get_split(self):
return "train"
class ADE20kValidation(ADE20kBase):
def get_split(self):
return "validation"
if __name__ == "__main__":
dset = ADE20kValidation()
ex = dset[0]
for k in ["image", "scene_category", "segmentation"]:
print(type(ex[k]))
try:
print(ex[k].shape)
except:
print(ex[k])
| [
"numpy.eye",
"PIL.Image.open",
"os.path.join",
"albumentations.RandomCrop",
"numpy.array",
"albumentations.CenterCrop",
"albumentations.SmallestMaxSize"
] | [((3811, 3844), 'PIL.Image.open', 'Image.open', (["example['file_path_']"], {}), "(example['file_path_'])\n", (3821, 3844), False, 'from PIL import Image\n'), ((4090, 4131), 'PIL.Image.open', 'Image.open', (["example['segmentation_path_']"], {}), "(example['segmentation_path_'])\n", (4100, 4131), False, 'from PIL import Image\n'), ((2915, 3004), 'albumentations.SmallestMaxSize', 'albumentations.SmallestMaxSize', ([], {'max_size': 'self.size', 'interpolation': 'self.interpolation'}), '(max_size=self.size, interpolation=self.\n interpolation)\n', (2945, 3004), False, 'import albumentations\n'), ((3106, 3194), 'albumentations.SmallestMaxSize', 'albumentations.SmallestMaxSize', ([], {'max_size': 'self.size', 'interpolation': 'cv2.INTER_NEAREST'}), '(max_size=self.size, interpolation=cv2.\n INTER_NEAREST)\n', (3136, 3194), False, 'import albumentations\n'), ((4634, 4655), 'numpy.eye', 'np.eye', (['self.n_labels'], {}), '(self.n_labels)\n', (4640, 4655), True, 'import numpy as np\n'), ((1140, 1191), 'os.path.join', 'os.path.join', (['self.data_root', '"""sceneCategories.txt"""'], {}), "(self.data_root, 'sceneCategories.txt')\n", (1152, 1191), False, 'import os\n'), ((1680, 1725), 'os.path.join', 'os.path.join', (['self.data_root', '"""images"""', 'ss', 'l'], {}), "(self.data_root, 'images', ss, l)\n", (1692, 1725), False, 'import os\n'), ((3408, 3478), 'albumentations.CenterCrop', 'albumentations.CenterCrop', ([], {'height': 'self.crop_size', 'width': 'self.crop_size'}), '(height=self.crop_size, width=self.crop_size)\n', (3433, 3478), False, 'import albumentations\n'), ((3528, 3598), 'albumentations.RandomCrop', 'albumentations.RandomCrop', ([], {'height': 'self.crop_size', 'width': 'self.crop_size'}), '(height=self.crop_size, width=self.crop_size)\n', (3553, 3598), False, 'import albumentations\n'), ((3938, 3953), 'numpy.array', 'np.array', (['image'], {}), '(image)\n', (3946, 3953), True, 'import numpy as np\n'), ((4155, 4177), 'numpy.array', 'np.array', (['segmentation'], {}), '(segmentation)\n', (4163, 4177), True, 'import numpy as np\n')] |
import re
from model.contact import Contact
def test_all_contacts(app, db):
contacts_from_db = db.get_contact_list()
phone_list_from_db = db.phones_from_db()
    #email_list_from_db = db.emails_from_db()
phone_list = []
for phone in phone_list_from_db:
phone_list.append(merge_phones_like_on_home_page(phone))
email_list = []
    #for email in email_list_from_db:
    #    email_list.append(merge_email_like_on_home_page(email))
contacts_from_home_page = sorted(app.contact.get_contact_list(), key=Contact.id_or_max)
phones_from_home_page = [con.all_phones_from_home_page for con in contacts_from_home_page]
#emails_from_home_page = [con.all_mail_from_home_page for con in contacts_from_home_page]
assert phone_list == phones_from_home_page
#assert email_list == emails_from_home_page
assert contacts_from_db == contacts_from_home_page
def clear(s):
return re.sub("[() -]", "", s)
def remove_spaces(s):
return re.sub(' +', ' ', s).rstrip()
def merge_phones_like_on_home_page(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: clear(x),
filter(lambda x: x is not None,
[contact.home_phone, contact.mobile_phone,
contact.work_phone, contact.secondary_phone]))))
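# Small worked example (phone values invented for illustration): for a contact
# with home_phone="+7 (926) 111-22-33", mobile_phone="", work_phone=None and
# secondary_phone=None, clear() strips "(", ")", spaces and "-", so the merge
# returns "+79261112233"; non-empty phones are cleaned and joined with newlines,
# matching how the home page renders them.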
def merge_email_like_on_home_page(contact):
return "\n".join(filter(lambda x: x != "",
map(lambda x: remove_spaces(x),
filter(lambda x: x is not None,
[contact.email, contact.email2, contact.email3]))))
| [
"re.sub"
] | [((918, 941), 're.sub', 're.sub', (['"""[() -]"""', '""""""', 's'], {}), "('[() -]', '', s)\n", (924, 941), False, 'import re\n'), ((977, 997), 're.sub', 're.sub', (['""" +"""', '""" """', 's'], {}), "(' +', ' ', s)\n", (983, 997), False, 'import re\n')] |
from discord.ext import commands
import json
import random
with open("assets/json/questions.json") as data:
data = json.load(data)
dares = data["dares"]
class Dare(commands.Cog):
def __init__(self, client):
self.client = client
@commands.command(aliases=["d"])
async def dare(self, ctx):
dare = random.choice(dares)
await ctx.send(dare)
def setup(client):
client.add_cog(Dare(client))
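# Usage sketch (the extension path is an assumption, not defined in this file):
# a bot would enable this cog via `bot.load_extension("cogs.dare")`, and
# assets/json/questions.json is expected to look like {"dares": ["<dare text>", ...]}.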
| [
"json.load",
"random.choice",
"discord.ext.commands.command"
] | [((121, 136), 'json.load', 'json.load', (['data'], {}), '(data)\n', (130, 136), False, 'import json\n'), ((258, 289), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['d']"}), "(aliases=['d'])\n", (274, 289), False, 'from discord.ext import commands\n'), ((336, 356), 'random.choice', 'random.choice', (['dares'], {}), '(dares)\n', (349, 356), False, 'import random\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2008-2013 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:<EMAIL>
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""Utilities for creating VCG and Dot diagrams"""
from logilab.common.vcgutils import VCGPrinter
from logilab.common.graph import DotBackend
from pylint.pyreverse.utils import is_exception
class DiagramWriter(object):
"""base class for writing project diagrams
"""
def __init__(self, config, styles):
self.config = config
self.pkg_edges, self.inh_edges, self.imp_edges, self.ass_edges = styles
self.printer = None # defined in set_printer
def write(self, diadefs):
"""write files for <project> according to <diadefs>
"""
for diagram in diadefs:
basename = diagram.title.strip().replace(' ', '_')
file_name = '%s.%s' % (basename, self.config.output_format)
self.set_printer(file_name, basename)
if diagram.TYPE == 'class':
self.write_classes(diagram)
else:
self.write_packages(diagram)
self.close_graph()
def write_packages(self, diagram):
"""write a package diagram"""
# sorted to get predictable (hence testable) results
for i, obj in enumerate(sorted(diagram.modules(), key=lambda x: x.title)):
self.printer.emit_node(i, label=self.get_title(obj), shape='box')
obj.fig_id = i
# package dependencies
for rel in diagram.get_relationships('depends'):
self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
**self.pkg_edges)
def write_classes(self, diagram):
"""write a class diagram"""
# sorted to get predictable (hence testable) results
for i, obj in enumerate(sorted(diagram.objects, key=lambda x: x.title)):
self.printer.emit_node(i, **self.get_values(obj))
obj.fig_id = i
# inheritance links
for rel in diagram.get_relationships('specialization'):
self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
**self.inh_edges)
# implementation links
for rel in diagram.get_relationships('implements'):
self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
**self.imp_edges)
# generate associations
for rel in diagram.get_relationships('association'):
self.printer.emit_edge(rel.from_object.fig_id, rel.to_object.fig_id,
label=rel.name, **self.ass_edges)
def set_printer(self, file_name, basename):
"""set printer"""
raise NotImplementedError
def get_title(self, obj):
"""get project title"""
raise NotImplementedError
def get_values(self, obj):
"""get label and shape for classes."""
raise NotImplementedError
def close_graph(self):
"""finalize the graph"""
raise NotImplementedError
class DotWriter(DiagramWriter):
"""write dot graphs from a diagram definition and a project
"""
def __init__(self, config):
styles = [dict(arrowtail='none', arrowhead="open"),
dict(arrowtail='none', arrowhead='empty'),
dict(arrowtail='node', arrowhead='empty', style='dashed'),
dict(fontcolor='green', arrowtail='none',
arrowhead='diamond', style='solid'),
]
DiagramWriter.__init__(self, config, styles)
def set_printer(self, file_name, basename):
"""initialize DotWriter and add options for layout.
"""
layout = dict(rankdir="BT")
self.printer = DotBackend(basename, additionnal_param=layout)
self.file_name = file_name
def get_title(self, obj):
"""get project title"""
return obj.title
def get_values(self, obj):
"""get label and shape for classes.
The label contains all attributes and methods
"""
label = obj.title
if obj.shape == 'interface':
label = u'«interface»\\n%s' % label
if not self.config.only_classnames:
label = r'%s|%s\l|' % (label, r'\l'.join(obj.attrs))
for func in obj.methods:
label = r'%s%s()\l' % (label, func.name)
label = '{%s}' % label
if is_exception(obj.node):
return dict(fontcolor='red', label=label, shape='record')
return dict(label=label, shape='record')
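    # Worked example of the record label built above (names invented): a class
    # "Foo" with attribute "bar" and method "baz" produces the label
    #   {Foo|bar\l|baz()\l}
    # which graphviz renders as a UML-style box; exception classes get the same
    # label with fontcolor='red'.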
def close_graph(self):
"""print the dot graph into <file_name>"""
self.printer.generate(self.file_name)
class VCGWriter(DiagramWriter):
"""write vcg graphs from a diagram definition and a project
"""
def __init__(self, config):
styles = [dict(arrowstyle='solid', backarrowstyle='none',
backarrowsize=0),
dict(arrowstyle='solid', backarrowstyle='none',
backarrowsize=10),
dict(arrowstyle='solid', backarrowstyle='none',
linestyle='dotted', backarrowsize=10),
dict(arrowstyle='solid', backarrowstyle='none',
textcolor='green'),
]
DiagramWriter.__init__(self, config, styles)
def set_printer(self, file_name, basename):
"""initialize VCGWriter for a UML graph"""
self.graph_file = open(file_name, 'w+')
self.printer = VCGPrinter(self.graph_file)
self.printer.open_graph(title=basename, layoutalgorithm='dfs',
late_edge_labels='yes', port_sharing='no',
manhattan_edges='yes')
self.printer.emit_node = self.printer.node
self.printer.emit_edge = self.printer.edge
def get_title(self, obj):
"""get project title in vcg format"""
return r'\fb%s\fn' % obj.title
def get_values(self, obj):
"""get label and shape for classes.
The label contains all attributes and methods
"""
if is_exception(obj.node):
label = r'\fb\f09%s\fn' % obj.title
else:
label = r'\fb%s\fn' % obj.title
if obj.shape == 'interface':
shape = 'ellipse'
else:
shape = 'box'
if not self.config.only_classnames:
attrs = obj.attrs
methods = [func.name for func in obj.methods]
# box width for UML like diagram
maxlen = max(len(name) for name in [obj.title] + methods + attrs)
line = '_' * (maxlen + 2)
label = r'%s\n\f%s' % (label, line)
for attr in attrs:
label = r'%s\n\f08%s' % (label, attr)
if attrs:
label = r'%s\n\f%s' % (label, line)
for func in methods:
label = r'%s\n\f10%s()' % (label, func)
return dict(label=label, shape=shape)
def close_graph(self):
"""close graph and file"""
self.printer.close_graph()
self.graph_file.close()
| [
"logilab.common.vcgutils.VCGPrinter",
"logilab.common.graph.DotBackend",
"pylint.pyreverse.utils.is_exception"
] | [((4475, 4521), 'logilab.common.graph.DotBackend', 'DotBackend', (['basename'], {'additionnal_param': 'layout'}), '(basename, additionnal_param=layout)\n', (4485, 4521), False, 'from logilab.common.graph import DotBackend\n'), ((5148, 5170), 'pylint.pyreverse.utils.is_exception', 'is_exception', (['obj.node'], {}), '(obj.node)\n', (5160, 5170), False, 'from pylint.pyreverse.utils import is_exception\n'), ((6249, 6276), 'logilab.common.vcgutils.VCGPrinter', 'VCGPrinter', (['self.graph_file'], {}), '(self.graph_file)\n', (6259, 6276), False, 'from logilab.common.vcgutils import VCGPrinter\n'), ((6850, 6872), 'pylint.pyreverse.utils.is_exception', 'is_exception', (['obj.node'], {}), '(obj.node)\n', (6862, 6872), False, 'from pylint.pyreverse.utils import is_exception\n')] |
import logging
from ariadne import MutationType, convert_kwargs_to_snake_case
from config import clients, messages, queue
mutation = MutationType()
@mutation.field("createMessage")
@convert_kwargs_to_snake_case
async def resolve_create_message(obj, info, content, client_id):
try:
message = {"content": content, "client_id": client_id}
messages.append(message)
await queue.put(message)
return {"success": True, "message": message}
except Exception as error:
return {"success": False, "errors": [str(error)]}
@mutation.field("createClient")
@convert_kwargs_to_snake_case
async def resolve_create_client(obj, info, client_id):
try:
logging.info(f"Client id: {client_id}")
if not clients.get(client_id):
client = {"client_id": client_id}
clients[client_id] = client
return {"success": True, "client": client}
return {"success": False, "errors": ["Client is taken"]}
except Exception as error:
return {"success": False, "errors": [str(error)]}
| [
"config.messages.append",
"ariadne.MutationType",
"config.clients.get",
"config.queue.put",
"logging.info"
] | [((134, 148), 'ariadne.MutationType', 'MutationType', ([], {}), '()\n', (146, 148), False, 'from ariadne import MutationType, convert_kwargs_to_snake_case\n'), ((359, 383), 'config.messages.append', 'messages.append', (['message'], {}), '(message)\n', (374, 383), False, 'from config import clients, messages, queue\n'), ((695, 734), 'logging.info', 'logging.info', (['f"""Client id: {client_id}"""'], {}), "(f'Client id: {client_id}')\n", (707, 734), False, 'import logging\n'), ((398, 416), 'config.queue.put', 'queue.put', (['message'], {}), '(message)\n', (407, 416), False, 'from config import clients, messages, queue\n'), ((750, 772), 'config.clients.get', 'clients.get', (['client_id'], {}), '(client_id)\n', (761, 772), False, 'from config import clients, messages, queue\n')] |
import time
import sys
import pkg_resources
import os
import retrying
from sqlalchemy.exc import IntegrityError
# anchore modules
import anchore_engine.clients.services.common
import anchore_engine.subsys.servicestatus
import anchore_engine.subsys.metrics
from anchore_engine.subsys import logger
from anchore_engine.configuration import localconfig
from anchore_engine.clients.services import simplequeue, internal_client_for
from anchore_engine.clients.services.simplequeue import SimpleQueueClient
from anchore_engine.service import ApiService, LifeCycleStages
from anchore_engine.services.policy_engine.engine.feeds.feeds import (
VulnerabilityFeed,
NvdV2Feed,
PackagesFeed,
VulnDBFeed,
GithubFeed,
feed_registry,
NvdFeed,
)
# from anchore_engine.subsys.logger import enable_bootstrap_logging
# enable_bootstrap_logging()
from anchore_engine.utils import timer
feed_sync_queuename = "feed_sync_tasks"
system_user_auth = None
feed_sync_msg = {"task_type": "feed_sync", "enabled": True}
# These are user-configurable but mostly for debugging and testing purposes
try:
FEED_SYNC_RETRIES = int(os.getenv("ANCHORE_FEED_SYNC_CHECK_RETRIES", 5))
except ValueError:
logger.exception(
"Error parsing env value ANCHORE_FEED_SYNC_CHECK_RETRIES into int, using default value of 5"
)
FEED_SYNC_RETRIES = 5
try:
FEED_SYNC_RETRY_BACKOFF = int(
os.getenv("ANCHORE_FEED_SYNC_CHECK_FAILURE_BACKOFF", 5)
)
except ValueError:
logger.exception(
"Error parsing env value ANCHORE_FEED_SYNC_CHECK_FAILURE_BACKOFF into int, using default value of 5"
)
FEED_SYNC_RETRY_BACKOFF = 5
try:
feed_config_check_retries = int(os.getenv("FEED_CLIENT_CHECK_RETRIES", 3))
except ValueError:
logger.exception(
"Error parsing env value FEED_CLIENT_CHECK_RETRIES into int, using default value of 3"
)
feed_config_check_retries = 3
try:
feed_config_check_backoff = int(os.getenv("FEED_CLIENT_CHECK_BACKOFF", 5))
except ValueError:
logger.exception(
"Error parsing env FEED_CLIENT_CHECK_BACKOFF value into int, using default value of 5"
)
feed_config_check_backoff = 5
# service funcs (must be here)
def _check_feed_client_credentials():
from anchore_engine.services.policy_engine.engine.feeds.client import get_client
sleep_time = feed_config_check_backoff
last_ex = None
for i in range(feed_config_check_retries):
if i > 0:
logger.info(
"Waiting for {} seconds to try feeds client config check again".format(
sleep_time
)
)
time.sleep(sleep_time)
sleep_time += feed_config_check_backoff
try:
logger.info(
"Checking feeds client credentials. Attempt {} of {}".format(
i + 1, feed_config_check_retries
)
)
client = get_client()
client = None
logger.info("Feeds client credentials ok")
return True
except Exception as e:
logger.warn(
"Could not verify feeds endpoint and/or config. Got exception: {}".format(
e
)
)
last_ex = e
else:
if last_ex:
raise last_ex
else:
raise Exception(
"Exceeded retries for feeds client config check. Failing check"
)
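# With the module defaults (feed_config_check_retries=3, feed_config_check_backoff=5)
# this check makes up to three attempts, sleeping 5s before the second try and 10s
# before the third, since sleep_time grows by the backoff after every wait.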
def _system_creds():
global system_user_auth
if not system_user_auth:
config = localconfig.get_config()
system_user_auth = config["system_user_auth"]
return system_user_auth
def process_preflight():
"""
Execute the preflight functions, aborting service startup if any throw uncaught exceptions or return False return value
:return:
"""
preflight_check_functions = [init_db_content, init_feed_registry]
for fn in preflight_check_functions:
try:
fn()
except Exception as e:
logger.exception(
"Preflight checks failed with error: {}. Aborting service startup".format(
e
)
)
sys.exit(1)
def _init_distro_mappings():
from anchore_engine.db import session_scope, DistroMapping
initial_mappings = [
DistroMapping(from_distro="alpine", to_distro="alpine", flavor="ALPINE"),
DistroMapping(from_distro="busybox", to_distro="busybox", flavor="BUSYB"),
DistroMapping(from_distro="centos", to_distro="rhel", flavor="RHEL"),
DistroMapping(from_distro="debian", to_distro="debian", flavor="DEB"),
DistroMapping(from_distro="fedora", to_distro="rhel", flavor="RHEL"),
DistroMapping(from_distro="ol", to_distro="ol", flavor="RHEL"),
DistroMapping(from_distro="rhel", to_distro="rhel", flavor="RHEL"),
DistroMapping(from_distro="ubuntu", to_distro="ubuntu", flavor="DEB"),
DistroMapping(from_distro="amzn", to_distro="amzn", flavor="RHEL"),
DistroMapping(from_distro="redhat", to_distro="rhel", flavor="RHEL"),
]
# set up any data necessary at system init
try:
logger.info(
"Checking policy engine db initialization. Checking initial set of distro mappings"
)
with session_scope() as dbsession:
distro_mappings = dbsession.query(DistroMapping).all()
for i in initial_mappings:
if not [x for x in distro_mappings if x.from_distro == i.from_distro]:
logger.info("Adding missing mapping: {}".format(i))
dbsession.add(i)
logger.info("Distro mapping initialization complete")
except Exception as err:
if isinstance(err, IntegrityError):
logger.warn("another process has already initialized, continuing")
else:
raise Exception(
"unable to initialize default distro mappings - exception: " + str(err)
)
return True
def init_db_content():
"""
Initialize the policy engine db with any data necessary at startup.
:return:
"""
return _init_distro_mappings()
def init_feed_registry():
# Register feeds, the tuple is the class and bool if feed is a distro vulnerability feed or not
for cls_tuple in [
(NvdV2Feed, False),
(VulnDBFeed, False),
(VulnerabilityFeed, True),
(PackagesFeed, False),
(GithubFeed, False),
(NvdFeed, False),
]:
logger.info("Registering feed handler {}".format(cls_tuple[0].__feed_name__))
feed_registry.register(cls_tuple[0], is_vulnerability_feed=cls_tuple[1])
def do_feed_sync(msg):
if "FeedsUpdateTask" not in locals():
from anchore_engine.services.policy_engine.engine.tasks import FeedsUpdateTask
if "get_selected_feeds_to_sync" not in locals():
from anchore_engine.services.policy_engine.engine.feeds.sync import (
get_selected_feeds_to_sync,
)
handler_success = False
timer = time.time()
logger.info("FIRING: feed syncer")
try:
feeds = get_selected_feeds_to_sync(localconfig.get_config())
logger.info("Syncing configured feeds: {}".format(feeds))
result = FeedsUpdateTask.run_feeds_update(json_obj=msg.get("data"))
if result is not None:
handler_success = True
else:
logger.warn("Feed sync task marked as disabled, so skipping")
except ValueError as e:
logger.warn("Received msg of wrong type")
except Exception as err:
logger.warn("failure in feed sync handler - exception: " + str(err))
if handler_success:
anchore_engine.subsys.metrics.summary_observe(
"anchore_monitor_runtime_seconds",
time.time() - timer,
function="do_feed_sync",
status="success",
)
else:
anchore_engine.subsys.metrics.summary_observe(
"anchore_monitor_runtime_seconds",
time.time() - timer,
function="do_feed_sync",
status="fail",
)
def handle_feed_sync(*args, **kwargs):
"""
Initiates a feed sync in the system in response to a message from the queue
:param args:
:param kwargs:
:return:
"""
system_user = _system_creds()
logger.info("init args: {}".format(kwargs))
cycle_time = kwargs["mythread"]["cycle_timer"]
while True:
config = localconfig.get_config()
feed_sync_enabled = config.get("feeds", {}).get("sync_enabled", True)
if feed_sync_enabled:
logger.info("Feed sync task executor activated")
try:
run_feed_sync(system_user)
except Exception as e:
logger.error("Caught escaped error in feed sync handler: {}".format(e))
finally:
logger.info("Feed sync task executor complete")
else:
logger.info("sync_enabled is set to false in config - skipping feed sync")
time.sleep(cycle_time)
return True
@retrying.retry(
stop_max_attempt_number=FEED_SYNC_RETRIES,
wait_incrementing_start=FEED_SYNC_RETRY_BACKOFF * 1000,
wait_incrementing_increment=FEED_SYNC_RETRY_BACKOFF * 1000,
)
def run_feed_sync(system_user):
all_ready = anchore_engine.clients.services.common.check_services_ready(
["simplequeue"]
)
if not all_ready:
logger.info("simplequeue service not yet ready, will retry")
raise Exception("Simplequeue service not yet ready")
else:
try:
# This has its own retry on the queue fetch, so wrap with catch block to ensure we don't double-retry on task exec
simplequeue.run_target_with_queue_ttl(
None,
queue=feed_sync_queuename,
target=do_feed_sync,
max_wait_seconds=30,
visibility_timeout=180,
retries=FEED_SYNC_RETRIES,
backoff_time=FEED_SYNC_RETRY_BACKOFF,
)
except Exception as err:
logger.warn("failed to process task this cycle: " + str(err))
def handle_feed_sync_trigger(*args, **kwargs):
"""
Checks to see if there is a task for a feed sync in the queue and if not, adds one.
Interval for firing this should be longer than the expected feed sync duration.
:param args:
:param kwargs:
:return:
"""
system_user = _system_creds()
logger.info("init args: {}".format(kwargs))
cycle_time = kwargs["mythread"]["cycle_timer"]
while True:
config = localconfig.get_config()
feed_sync_enabled = config.get("feeds", {}).get("sync_enabled", True)
if feed_sync_enabled:
logger.info("Feed Sync task creator activated")
try:
push_sync_task(system_user)
logger.info("Feed Sync Trigger done, waiting for next cycle.")
except Exception as e:
logger.error(
"Error caught in feed sync trigger handler after all retries. Will wait for next cycle"
)
finally:
logger.info("Feed Sync task creator complete")
else:
logger.info(
"sync_enabled is set to false in config - skipping feed sync trigger"
)
time.sleep(cycle_time)
return True
@retrying.retry(
stop_max_attempt_number=FEED_SYNC_RETRIES,
wait_incrementing_start=FEED_SYNC_RETRY_BACKOFF * 1000,
wait_incrementing_increment=FEED_SYNC_RETRY_BACKOFF * 1000,
)
def push_sync_task(system_user):
all_ready = anchore_engine.clients.services.common.check_services_ready(
["simplequeue"]
)
if not all_ready:
logger.info("simplequeue service not yet ready, will retry")
raise Exception("Simplequeue service not yet ready")
else:
# q_client = SimpleQueueClient(user=system_user[0], password=<PASSWORD>[1])
q_client = internal_client_for(SimpleQueueClient, userId=None)
if not q_client.is_inqueue(name=feed_sync_queuename, inobj=feed_sync_msg):
try:
q_client.enqueue(name=feed_sync_queuename, inobj=feed_sync_msg)
except:
logger.error("Could not enqueue message for a feed sync")
raise
class PolicyEngineService(ApiService):
__service_name__ = "policy_engine"
__spec_dir__ = pkg_resources.resource_filename(__name__, "swagger")
__monitors__ = {
"service_heartbeat": {
"handler": anchore_engine.subsys.servicestatus.handle_service_heartbeat,
"taskType": "handle_service_heartbeat",
"args": [__service_name__],
"cycle_timer": 60,
"min_cycle_timer": 60,
"max_cycle_timer": 60,
"last_queued": 0,
"last_return": False,
"initialized": False,
},
"feed_sync_checker": {
"handler": handle_feed_sync_trigger,
"taskType": "handle_feed_sync_trigger",
"args": [],
"cycle_timer": 600,
"min_cycle_timer": 300,
"max_cycle_timer": 100000,
"last_queued": 0,
"last_return": False,
"initialized": False,
},
"feed_sync": {
"handler": handle_feed_sync,
"taskType": "handle_feed_sync",
"args": [],
"cycle_timer": 3600,
"min_cycle_timer": 1800,
"max_cycle_timer": 100000,
"last_queued": 0,
"last_return": False,
"initialized": False,
},
}
__lifecycle_handlers__ = {
LifeCycleStages.pre_register: [
(process_preflight, None),
]
}
| [
"anchore_engine.db.DistroMapping",
"anchore_engine.subsys.logger.exception",
"anchore_engine.clients.services.internal_client_for",
"os.getenv",
"anchore_engine.subsys.logger.info",
"anchore_engine.clients.services.simplequeue.run_target_with_queue_ttl",
"sys.exit",
"anchore_engine.subsys.logger.error",
"pkg_resources.resource_filename",
"time.sleep",
"anchore_engine.configuration.localconfig.get_config",
"retrying.retry",
"anchore_engine.db.session_scope",
"time.time",
"anchore_engine.subsys.logger.warn",
"anchore_engine.services.policy_engine.engine.feeds.feeds.feed_registry.register",
"anchore_engine.services.policy_engine.engine.feeds.client.get_client"
] | [((9137, 9318), 'retrying.retry', 'retrying.retry', ([], {'stop_max_attempt_number': 'FEED_SYNC_RETRIES', 'wait_incrementing_start': '(FEED_SYNC_RETRY_BACKOFF * 1000)', 'wait_incrementing_increment': '(FEED_SYNC_RETRY_BACKOFF * 1000)'}), '(stop_max_attempt_number=FEED_SYNC_RETRIES,\n wait_incrementing_start=FEED_SYNC_RETRY_BACKOFF * 1000,\n wait_incrementing_increment=FEED_SYNC_RETRY_BACKOFF * 1000)\n', (9151, 9318), False, 'import retrying\n'), ((11469, 11650), 'retrying.retry', 'retrying.retry', ([], {'stop_max_attempt_number': 'FEED_SYNC_RETRIES', 'wait_incrementing_start': '(FEED_SYNC_RETRY_BACKOFF * 1000)', 'wait_incrementing_increment': '(FEED_SYNC_RETRY_BACKOFF * 1000)'}), '(stop_max_attempt_number=FEED_SYNC_RETRIES,\n wait_incrementing_start=FEED_SYNC_RETRY_BACKOFF * 1000,\n wait_incrementing_increment=FEED_SYNC_RETRY_BACKOFF * 1000)\n', (11483, 11650), False, 'import retrying\n'), ((7101, 7112), 'time.time', 'time.time', ([], {}), '()\n', (7110, 7112), False, 'import time\n'), ((7117, 7151), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""FIRING: feed syncer"""'], {}), "('FIRING: feed syncer')\n", (7128, 7151), False, 'from anchore_engine.subsys import logger\n'), ((12511, 12563), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['__name__', '"""swagger"""'], {}), "(__name__, 'swagger')\n", (12542, 12563), False, 'import pkg_resources\n'), ((1131, 1178), 'os.getenv', 'os.getenv', (['"""ANCHORE_FEED_SYNC_CHECK_RETRIES"""', '(5)'], {}), "('ANCHORE_FEED_SYNC_CHECK_RETRIES', 5)\n", (1140, 1178), False, 'import os\n'), ((1203, 1323), 'anchore_engine.subsys.logger.exception', 'logger.exception', (['"""Error parsing env value ANCHORE_FEED_SYNC_CHECK_RETRIES into int, using default value of 5"""'], {}), "(\n 'Error parsing env value ANCHORE_FEED_SYNC_CHECK_RETRIES into int, using default value of 5'\n )\n", (1219, 1323), False, 'from anchore_engine.subsys import logger\n'), ((1403, 1458), 'os.getenv', 'os.getenv', (['"""ANCHORE_FEED_SYNC_CHECK_FAILURE_BACKOFF"""', '(5)'], {}), "('ANCHORE_FEED_SYNC_CHECK_FAILURE_BACKOFF', 5)\n", (1412, 1458), False, 'import os\n'), ((1488, 1616), 'anchore_engine.subsys.logger.exception', 'logger.exception', (['"""Error parsing env value ANCHORE_FEED_SYNC_CHECK_FAILURE_BACKOFF into int, using default value of 5"""'], {}), "(\n 'Error parsing env value ANCHORE_FEED_SYNC_CHECK_FAILURE_BACKOFF into int, using default value of 5'\n )\n", (1504, 1616), False, 'from anchore_engine.subsys import logger\n'), ((1695, 1736), 'os.getenv', 'os.getenv', (['"""FEED_CLIENT_CHECK_RETRIES"""', '(3)'], {}), "('FEED_CLIENT_CHECK_RETRIES', 3)\n", (1704, 1736), False, 'import os\n'), ((1761, 1875), 'anchore_engine.subsys.logger.exception', 'logger.exception', (['"""Error parsing env value FEED_CLIENT_CHECK_RETRIES into int, using default value of 3"""'], {}), "(\n 'Error parsing env value FEED_CLIENT_CHECK_RETRIES into int, using default value of 3'\n )\n", (1777, 1875), False, 'from anchore_engine.subsys import logger\n'), ((1956, 1997), 'os.getenv', 'os.getenv', (['"""FEED_CLIENT_CHECK_BACKOFF"""', '(5)'], {}), "('FEED_CLIENT_CHECK_BACKOFF', 5)\n", (1965, 1997), False, 'import os\n'), ((2022, 2136), 'anchore_engine.subsys.logger.exception', 'logger.exception', (['"""Error parsing env FEED_CLIENT_CHECK_BACKOFF value into int, using default value of 5"""'], {}), "(\n 'Error parsing env FEED_CLIENT_CHECK_BACKOFF value into int, using default value of 5'\n )\n", (2038, 2136), False, 'from anchore_engine.subsys import logger\n'), ((3581, 3605), 
'anchore_engine.configuration.localconfig.get_config', 'localconfig.get_config', ([], {}), '()\n', (3603, 3605), False, 'from anchore_engine.configuration import localconfig\n'), ((4371, 4443), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""alpine"""', 'to_distro': '"""alpine"""', 'flavor': '"""ALPINE"""'}), "(from_distro='alpine', to_distro='alpine', flavor='ALPINE')\n", (4384, 4443), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4453, 4526), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""busybox"""', 'to_distro': '"""busybox"""', 'flavor': '"""BUSYB"""'}), "(from_distro='busybox', to_distro='busybox', flavor='BUSYB')\n", (4466, 4526), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4536, 4604), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""centos"""', 'to_distro': '"""rhel"""', 'flavor': '"""RHEL"""'}), "(from_distro='centos', to_distro='rhel', flavor='RHEL')\n", (4549, 4604), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4614, 4683), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""debian"""', 'to_distro': '"""debian"""', 'flavor': '"""DEB"""'}), "(from_distro='debian', to_distro='debian', flavor='DEB')\n", (4627, 4683), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4693, 4761), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""fedora"""', 'to_distro': '"""rhel"""', 'flavor': '"""RHEL"""'}), "(from_distro='fedora', to_distro='rhel', flavor='RHEL')\n", (4706, 4761), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4771, 4833), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""ol"""', 'to_distro': '"""ol"""', 'flavor': '"""RHEL"""'}), "(from_distro='ol', to_distro='ol', flavor='RHEL')\n", (4784, 4833), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4843, 4909), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""rhel"""', 'to_distro': '"""rhel"""', 'flavor': '"""RHEL"""'}), "(from_distro='rhel', to_distro='rhel', flavor='RHEL')\n", (4856, 4909), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4919, 4988), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""ubuntu"""', 'to_distro': '"""ubuntu"""', 'flavor': '"""DEB"""'}), "(from_distro='ubuntu', to_distro='ubuntu', flavor='DEB')\n", (4932, 4988), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((4998, 5064), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""amzn"""', 'to_distro': '"""amzn"""', 'flavor': '"""RHEL"""'}), "(from_distro='amzn', to_distro='amzn', flavor='RHEL')\n", (5011, 5064), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((5074, 5142), 'anchore_engine.db.DistroMapping', 'DistroMapping', ([], {'from_distro': '"""redhat"""', 'to_distro': '"""rhel"""', 'flavor': '"""RHEL"""'}), "(from_distro='redhat', to_distro='rhel', flavor='RHEL')\n", (5087, 5142), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((5215, 5321), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Checking policy engine db initialization. Checking initial set of distro mappings"""'], {}), "(\n 'Checking policy engine db initialization. 
Checking initial set of distro mappings'\n )\n", (5226, 5321), False, 'from anchore_engine.subsys import logger\n'), ((5690, 5743), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Distro mapping initialization complete"""'], {}), "('Distro mapping initialization complete')\n", (5701, 5743), False, 'from anchore_engine.subsys import logger\n'), ((6651, 6723), 'anchore_engine.services.policy_engine.engine.feeds.feeds.feed_registry.register', 'feed_registry.register', (['cls_tuple[0]'], {'is_vulnerability_feed': 'cls_tuple[1]'}), '(cls_tuple[0], is_vulnerability_feed=cls_tuple[1])\n', (6673, 6723), False, 'from anchore_engine.services.policy_engine.engine.feeds.feeds import VulnerabilityFeed, NvdV2Feed, PackagesFeed, VulnDBFeed, GithubFeed, feed_registry, NvdFeed\n'), ((8522, 8546), 'anchore_engine.configuration.localconfig.get_config', 'localconfig.get_config', ([], {}), '()\n', (8544, 8546), False, 'from anchore_engine.configuration import localconfig\n'), ((9094, 9116), 'time.sleep', 'time.sleep', (['cycle_time'], {}), '(cycle_time)\n', (9104, 9116), False, 'import time\n'), ((9495, 9555), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""simplequeue service not yet ready, will retry"""'], {}), "('simplequeue service not yet ready, will retry')\n", (9506, 9555), False, 'from anchore_engine.subsys import logger\n'), ((10670, 10694), 'anchore_engine.configuration.localconfig.get_config', 'localconfig.get_config', ([], {}), '()\n', (10692, 10694), False, 'from anchore_engine.configuration import localconfig\n'), ((11426, 11448), 'time.sleep', 'time.sleep', (['cycle_time'], {}), '(cycle_time)\n', (11436, 11448), False, 'import time\n'), ((11829, 11889), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""simplequeue service not yet ready, will retry"""'], {}), "('simplequeue service not yet ready, will retry')\n", (11840, 11889), False, 'from anchore_engine.subsys import logger\n'), ((12064, 12115), 'anchore_engine.clients.services.internal_client_for', 'internal_client_for', (['SimpleQueueClient'], {'userId': 'None'}), '(SimpleQueueClient, userId=None)\n', (12083, 12115), False, 'from anchore_engine.clients.services import simplequeue, internal_client_for\n'), ((2649, 2671), 'time.sleep', 'time.sleep', (['sleep_time'], {}), '(sleep_time)\n', (2659, 2671), False, 'import time\n'), ((2947, 2959), 'anchore_engine.services.policy_engine.engine.feeds.client.get_client', 'get_client', ([], {}), '()\n', (2957, 2959), False, 'from anchore_engine.services.policy_engine.engine.feeds.client import get_client\n'), ((2998, 3040), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Feeds client credentials ok"""'], {}), "('Feeds client credentials ok')\n", (3009, 3040), False, 'from anchore_engine.subsys import logger\n'), ((5348, 5363), 'anchore_engine.db.session_scope', 'session_scope', ([], {}), '()\n', (5361, 5363), False, 'from anchore_engine.db import session_scope, DistroMapping\n'), ((7204, 7228), 'anchore_engine.configuration.localconfig.get_config', 'localconfig.get_config', ([], {}), '()\n', (7226, 7228), False, 'from anchore_engine.configuration import localconfig\n'), ((7465, 7526), 'anchore_engine.subsys.logger.warn', 'logger.warn', (['"""Feed sync task marked as disabled, so skipping"""'], {}), "('Feed sync task marked as disabled, so skipping')\n", (7476, 7526), False, 'from anchore_engine.subsys import logger\n'), ((7563, 7604), 'anchore_engine.subsys.logger.warn', 'logger.warn', (['"""Received msg of wrong type"""'], {}), "('Received msg of wrong type')\n", 
(7574, 7604), False, 'from anchore_engine.subsys import logger\n'), ((8667, 8715), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Feed sync task executor activated"""'], {}), "('Feed sync task executor activated')\n", (8678, 8715), False, 'from anchore_engine.subsys import logger\n'), ((9010, 9084), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""sync_enabled is set to false in config - skipping feed sync"""'], {}), "('sync_enabled is set to false in config - skipping feed sync')\n", (9021, 9084), False, 'from anchore_engine.subsys import logger\n'), ((9779, 9988), 'anchore_engine.clients.services.simplequeue.run_target_with_queue_ttl', 'simplequeue.run_target_with_queue_ttl', (['None'], {'queue': 'feed_sync_queuename', 'target': 'do_feed_sync', 'max_wait_seconds': '(30)', 'visibility_timeout': '(180)', 'retries': 'FEED_SYNC_RETRIES', 'backoff_time': 'FEED_SYNC_RETRY_BACKOFF'}), '(None, queue=feed_sync_queuename,\n target=do_feed_sync, max_wait_seconds=30, visibility_timeout=180,\n retries=FEED_SYNC_RETRIES, backoff_time=FEED_SYNC_RETRY_BACKOFF)\n', (9816, 9988), False, 'from anchore_engine.clients.services import simplequeue, internal_client_for\n'), ((10815, 10862), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Feed Sync task creator activated"""'], {}), "('Feed Sync task creator activated')\n", (10826, 10862), False, 'from anchore_engine.subsys import logger\n'), ((11304, 11391), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""sync_enabled is set to false in config - skipping feed sync trigger"""'], {}), "(\n 'sync_enabled is set to false in config - skipping feed sync trigger')\n", (11315, 11391), False, 'from anchore_engine.subsys import logger\n'), ((4231, 4242), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (4239, 4242), False, 'import sys\n'), ((5830, 5896), 'anchore_engine.subsys.logger.warn', 'logger.warn', (['"""another process has already initialized, continuing"""'], {}), "('another process has already initialized, continuing')\n", (5841, 5896), False, 'from anchore_engine.subsys import logger\n'), ((7850, 7861), 'time.time', 'time.time', ([], {}), '()\n', (7859, 7861), False, 'import time\n'), ((8072, 8083), 'time.time', 'time.time', ([], {}), '()\n', (8081, 8083), False, 'import time\n'), ((8936, 8983), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Feed sync task executor complete"""'], {}), "('Feed sync task executor complete')\n", (8947, 8983), False, 'from anchore_engine.subsys import logger\n'), ((10940, 11002), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Feed Sync Trigger done, waiting for next cycle."""'], {}), "('Feed Sync Trigger done, waiting for next cycle.')\n", (10951, 11002), False, 'from anchore_engine.subsys import logger\n'), ((11231, 11277), 'anchore_engine.subsys.logger.info', 'logger.info', (['"""Feed Sync task creator complete"""'], {}), "('Feed Sync task creator complete')\n", (11242, 11277), False, 'from anchore_engine.subsys import logger\n'), ((11054, 11165), 'anchore_engine.subsys.logger.error', 'logger.error', (['"""Error caught in feed sync trigger handler after all retries. Will wait for next cycle"""'], {}), "(\n 'Error caught in feed sync trigger handler after all retries. 
Will wait for next cycle'\n )\n", (11066, 11165), False, 'from anchore_engine.subsys import logger\n'), ((12332, 12389), 'anchore_engine.subsys.logger.error', 'logger.error', (['"""Could not enqueue message for a feed sync"""'], {}), "('Could not enqueue message for a feed sync')\n", (12344, 12389), False, 'from anchore_engine.subsys import logger\n')] |
# -*- coding: utf-8 -*-
import argparse
import os
import shutil
import time
import numpy as np
import random
from collections import OrderedDict
import torch
import torch.backends.cudnn as cudnn
from callbacks import AverageMeter
from data_utils.causal_data_loader_frames import VideoFolder
from utils import save_results
from tqdm import tqdm
parser = argparse.ArgumentParser(description='Counterfactual CAR')
# Path, dataset and log related arguments
parser.add_argument('--root_frames', type=str, default='/mnt/data1/home/sunpengzhan/sth-sth-v2/',
help='path to the folder with frames')
parser.add_argument('--json_data_train', type=str, default='../data/dataset_splits/compositional/train.json',
help='path to the json file with train video meta data')
parser.add_argument('--json_data_val', type=str, default='../data/dataset_splits/compositional/validation.json',
help='path to the json file with validation video meta data')
parser.add_argument('--json_file_labels', type=str, default='../data/dataset_splits/compositional/labels.json',
help='path to the json file with ground truth labels')
parser.add_argument('--dataset', default='smth_smth',
help='which dataset to train')
parser.add_argument('--logname', default='my_method',
help='name of the experiment for checkpoints and logs')
parser.add_argument('--print_freq', '-p', default=20, type=int,
metavar='N', help='print frequency (default: 20)')
parser.add_argument('--ckpt', default='./ckpt',
help='folder to output checkpoints')
parser.add_argument('--resume_vision', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--resume_coord', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
parser.add_argument('--resume_fusion', default='', type=str, metavar='PATH',
help='path to latest checkpoint (default: none)')
# model, image&feature dim and training related arguments
parser.add_argument('--model_vision', default='rgb_roi')
parser.add_argument('--model_coord', default='interaction')
parser.add_argument('--model_fusion', default='concat_fusion')
parser.add_argument('--fusion_function', default='fused_sum', type=str,
help='function for fusing activations from each branch')
parser.add_argument('--img_feature_dim', default=512, type=int, metavar='N',
help='intermediate feature dimension for image-based features')
parser.add_argument('--coord_feature_dim', default=512, type=int, metavar='N',
help='intermediate feature dimension for coord-based features')
parser.add_argument('--size', default=224, type=int, metavar='N',
help='primary image input size')
parser.add_argument('--num_boxes', default=4, type=int,
help='num of boxes for each image')
parser.add_argument('--num_frames', default=16, type=int,
help='num of frames for the model')
parser.add_argument('--num_classes', default=174, type=int,
help='num of class in the model')
parser.add_argument('--epochs', default=30, type=int, metavar='N',
help='number of total epochs to run')
parser.add_argument('--start_epoch', default=None, type=int, metavar='N',
help='manual epoch number (useful on restarts)')
parser.add_argument('--batch_size', '-b', default=16, type=int,
metavar='N', help='mini-batch size')
parser.add_argument('--lr', '--learning-rate', default=0.01, type=float,
metavar='LR', help='initial learning rate')
parser.add_argument('--lr_steps', default=[24, 35, 45], type=float, nargs="+",
metavar='LRSteps', help='epochs to decay learning rate by 10')
parser.add_argument('--momentum', default=0.9, type=float, metavar='M',
help='momentum')
parser.add_argument('--weight_decay', '--wd', default=0.0001, type=float,
metavar='W', help='weight decay (default: 1e-4)')
parser.add_argument('--clip_gradient', '-cg', default=5, type=float,
metavar='W', help='gradient norm clipping (default: 5)')
parser.add_argument('--search_stride', type=int, default=5, help='test performance every n strides')
# train mode, hardware setting and others related arguments
parser.add_argument('-j', '--workers', default=4, type=int, metavar='N',
help='number of data loading workers (default: 4)')
parser.add_argument('-e', '--evaluate', dest='evaluate', action='store_true',
help='evaluate model on validation set')
parser.add_argument('--cf_inference_group', action='store_true', help='counterfactual inference model on validation set')
parser.add_argument('--parallel', default=True, type=bool,
help='whether or not train with multi GPUs')
parser.add_argument('--gpu_index', type=str, default='0, 1, 2, 3', help='the index of gpu you want to use')
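# Example invocation (the script/file paths are placeholders; all flags are defined
# above):
#   python train.py --root_frames /path/to/frames --json_data_train train.json \
#       --json_data_val validation.json --json_file_labels labels.json \
#       --batch_size 16 --gpu_index '0, 1'
# Caveat: argparse's type=bool makes `--parallel False` still truthy (any non-empty
# string converts to True), so edit the default instead to disable DataParallel.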
best_loss = 1000000
def main():
global args, best_loss
args = parser.parse_args()
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_index
print(args)
# create vision model
if args.model_vision == 'global_i3d':
from model.model_lib import VideoGlobalModel as RGBModel
print('global_i3d loaded!!')
elif args.model_vision == 'rgb_roi':
from model.model_lib import BboxVisualModel as RGBModel
print('rgb_roi loaded!!')
else:
print("no such a vision model!")
# create coord model
if args.model_coord == 'interaction':
from model.model_lib import BboxInteractionLatentModel as BboxModel
print('interaction loaded!!')
else:
print("no such a coordinate model!")
# create fusion model
if args.model_fusion == 'concat_fusion':
from model.model_lib import ConcatFusionModel as FusionModel
print('concat_fusion loaded!!')
else:
print('no such a fusion model!')
# load model branch
vision_model = RGBModel(args)
coord_model = BboxModel(args)
fusion_model = FusionModel(args)
# create the fusion function for the activation of three branches
if args.fusion_function == 'fused_sum':
from fusion_function import logsigsum as fusion_func
print('fused_sum loaded!!')
elif args.fusion_function == 'naive_sum':
from fusion_function import naivesum as fusion_func
print('naive_sum loaded!!')
else:
print('no such a fusion function!')
fusion_function = fusion_func()
if args.parallel:
vision_model = torch.nn.DataParallel(vision_model).cuda()
coord_model = torch.nn.DataParallel(coord_model).cuda()
fusion_model = torch.nn.DataParallel(fusion_model).cuda()
else:
vision_model = vision_model.cuda()
coord_model = coord_model.cuda()
fusion_model = fusion_model.cuda()
# optionally resume vision model from a checkpoint
if args.resume_vision:
assert os.path.isfile(args.resume_vision), "No checkpoint found at '{}'".format(args.resume_vision)
print("=> loading checkpoint '{}'".format(args.resume_vision))
checkpoint = torch.load(args.resume_vision)
if args.start_epoch is None:
args.start_epoch = checkpoint['epoch']
best_loss = checkpoint['best_loss']
vision_model.load_state_dict(checkpoint['state_dict'])
print("=> loaded checkpoint '{}' (epoch {})"
.format(args.resume_vision, checkpoint['epoch']))
# optionally resume coord model from a checkpoint
if args.resume_coord:
assert os.path.isfile(args.resume_coord), "No checkpoint found at '{}'".format(args.resume_coord)
print("=> loading checkpoint '{}'".format(args.resume_coord))
checkpoint = torch.load(args.resume_coord)
if args.start_epoch is None:
args.start_epoch = checkpoint['epoch']
best_loss = checkpoint['best_loss']
coord_model.load_state_dict(checkpoint['state_dict'])
print("=> loaded checkpoint '{}' (epoch {})"
.format(args.resume_coord, checkpoint['epoch']))
if args.resume_fusion:
assert os.path.isfile(args.resume_fusion), "No checkpoint found at '{}'".format(args.resume_fusion)
print("=> loading checkpoint '{}'".format(args.resume_fusion))
checkpoint = torch.load(args.resume_fusion)
if args.start_epoch is None:
args.start_epoch = checkpoint['epoch']
best_loss = checkpoint['best_loss']
fusion_model.load_state_dict(checkpoint['state_dict'])
print("=> loaded checkpoint '{}' (epoch {})"
.format(args.resume_fusion, checkpoint['epoch']))
if args.start_epoch is None:
args.start_epoch = 0
cudnn.benchmark = True
# create training and validation dataset
dataset_train = VideoFolder(root=args.root_frames,
num_boxes=args.num_boxes,
file_input=args.json_data_train,
file_labels=args.json_file_labels,
frames_duration=args.num_frames,
args=args,
is_val=False,
if_augment=True,
)
dataset_val = VideoFolder(root=args.root_frames,
num_boxes=args.num_boxes,
file_input=args.json_data_val,
file_labels=args.json_file_labels,
frames_duration=args.num_frames,
args=args,
is_val=True,
if_augment=True,
)
# create training and validation loader
train_loader = torch.utils.data.DataLoader(
dataset_train,
batch_size=args.batch_size, shuffle=True,
num_workers=args.workers, drop_last=True,
pin_memory=True
)
val_loader = torch.utils.data.DataLoader(
dataset_val, drop_last=True,
batch_size=args.batch_size, shuffle=False,
num_workers=args.workers, pin_memory=False
)
model_list = [vision_model, coord_model, fusion_model]
optimizer_vision = torch.optim.SGD(filter(lambda p: p.requires_grad, vision_model.parameters()),
momentum=args.momentum, lr=args.lr, weight_decay=args.weight_decay)
optimizer_coord = torch.optim.SGD(filter(lambda p: p.requires_grad, coord_model.parameters()),
momentum=args.momentum, lr=args.lr, weight_decay=args.weight_decay)
optimizer_fusion = torch.optim.SGD(filter(lambda p: p.requires_grad, fusion_model.parameters()),
momentum=args.momentum, lr=args.lr, weight_decay=args.weight_decay)
optimizer_list = [optimizer_vision, optimizer_coord, optimizer_fusion]
criterion = torch.nn.CrossEntropyLoss()
search_list = np.linspace(0.0, 1.0, 11)
# factual inference (vanilla test stage)
if args.evaluate:
validate(val_loader, model_list, fusion_function, criterion, class_to_idx=dataset_val.classes_dict)
return
    # Counterfactual inference, sweeping over a list of fusion hyperparameters
if args.cf_inference_group:
cf_inference_group(val_loader, model_list, fusion_function, search_list,
class_to_idx=dataset_val.classes_dict)
return
print('training begin...')
for epoch in tqdm(range(args.start_epoch, args.epochs)):
adjust_learning_rate(optimizer_vision, epoch, args.lr_steps, 'vision')
adjust_learning_rate(optimizer_coord, epoch, args.lr_steps, 'coord')
adjust_learning_rate(optimizer_fusion, epoch, args.lr_steps, 'fusion')
# train for one epoch
train(train_loader, model_list, fusion_function, optimizer_list, epoch, criterion)
if (epoch+1) >= 30 and (epoch + 1) % args.search_stride == 0:
loss = validate(val_loader, model_list, fusion_function, criterion,
epoch=epoch, class_to_idx=dataset_val.classes_dict)
else:
loss = 100
# remember best loss and save checkpoint
is_best = loss < best_loss
best_loss = min(loss, best_loss)
save_checkpoint(
{
'epoch': epoch + 1,
'state_dict': vision_model.state_dict(),
'best_loss': best_loss,
},
is_best,
os.path.join(args.ckpt, '{}_{}'.format(args.model_vision, args.logname)))
save_checkpoint(
{
'epoch': epoch + 1,
'state_dict': coord_model.state_dict(),
'best_loss': best_loss,
},
is_best,
os.path.join(args.ckpt, '{}_{}'.format(args.model_coord, args.logname)))
save_checkpoint(
{
'epoch': epoch + 1,
'state_dict': fusion_model.state_dict(),
'best_loss': best_loss,
},
is_best,
os.path.join(args.ckpt, '{}_{}'.format(args.model_fusion, args.logname)))
def train(train_loader, model_list, fusion_function,
optimizer_list, epoch, criterion):
global args
batch_time = AverageMeter()
data_time = AverageMeter()
losses = AverageMeter()
acc_top1 = AverageMeter()
acc_top5 = AverageMeter()
# load three model branches
[vision_model, coord_model, fusion_model] = model_list
# load four optimizers, including the one designed for uniform assumption
[optimizer_vision, optimizer_coord, optimizer_fusion] = optimizer_list
# switch to train mode
vision_model.train()
coord_model.train()
fusion_model.train()
end = time.time()
for i, (global_img_tensors, box_tensors, box_categories, video_label) in enumerate(train_loader):
data_time.update(time.time() - end)
# obtain the activation and vision features from vision branch
output_vision, feature_vision = vision_model(global_img_tensors.cuda(), box_categories, box_tensors.cuda(), video_label)
output_vision = output_vision.view((-1, len(train_loader.dataset.classes)))
# obtain the activation and coordinate features from coordinate branch
output_coord, feature_coord = coord_model(global_img_tensors, box_categories.cuda(), box_tensors.cuda(), video_label)
output_coord = output_coord.view((-1, len(train_loader.dataset.classes)))
        # detach from the computation graph so fusion-branch gradients do not flow back into the vision/coord branches
feature_vision_detached = feature_vision.detach()
feature_coord_detached = feature_coord.detach()
# obtain the activation of fusion branch
output_fusion = fusion_model(feature_vision_detached.cuda(), feature_coord_detached.cuda())
output_fusion = output_fusion.view((-1, len(train_loader.dataset.classes)))
output_factual = fusion_function(output_vision, output_coord, output_fusion)
        # note: loss_factual (below) is the loss on the fused activation from fusion_function; loss_fusion is on the fusion branch's own activation
loss_vision = criterion(output_vision, video_label.long().cuda())
loss_coord = criterion(output_coord, video_label.long().cuda())
loss_fusion = criterion(output_fusion, video_label.long().cuda())
loss_factual = criterion(output_factual, video_label.long().cuda())
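        # Gradient-flow note: output_fusion is computed from detached features, so
        # loss_factual trains the fusion branch through output_fusion while still
        # reaching the vision/coord branches through output_vision and output_coord;
        # loss_fusion is computed but not included in the total loss below.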
# Measure the accuracy of the sum of three branch activation results
acc1, acc5 = accuracy(output_factual.cpu(), video_label, topk=(1, 5))
# record the accuracy and loss
losses.update(loss_factual.item(), global_img_tensors.size(0))
acc_top1.update(acc1.item(), global_img_tensors.size(0))
acc_top5.update(acc5.item(), global_img_tensors.size(0))
# refresh the optimizer
optimizer_vision.zero_grad()
optimizer_coord.zero_grad()
optimizer_fusion.zero_grad()
loss = loss_vision + loss_coord + loss_factual
loss.backward()
if args.clip_gradient is not None:
torch.nn.utils.clip_grad_norm_(vision_model.parameters(), args.clip_gradient)
# update the parameter
optimizer_vision.step()
optimizer_coord.step()
optimizer_fusion.step()
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0:
print('Epoch: [{0}][{1}/{2}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Acc1 {acc_top1.val:.1f} ({acc_top1.avg:.1f})\t'
'Acc5 {acc_top5.val:.1f} ({acc_top5.avg:.1f})'.format(
epoch, i, len(train_loader), batch_time=batch_time,
data_time=data_time, loss=losses,
acc_top1=acc_top1, acc_top5=acc_top5))
def validate(val_loader, model_list, fusion_function, criterion,
epoch=None, class_to_idx=None):
batch_time = AverageMeter()
losses = AverageMeter()
acc_top1 = AverageMeter()
acc_top5 = AverageMeter()
logits_matrix = []
targets_list = []
# unpack three models
[vision_model, coord_model, fusion_model] = model_list
# switch to evaluate mode
vision_model.eval()
coord_model.eval()
fusion_model.eval()
end = time.time()
for i, (global_img_tensors, box_tensors, box_categories, video_label) in enumerate(val_loader):
# compute output
with torch.no_grad():
output_vision, feature_vision = vision_model(global_img_tensors.cuda(), box_categories, box_tensors.cuda(), video_label)
output_vision = output_vision.view((-1, len(val_loader.dataset.classes)))
output_coord, feature_coord = coord_model(global_img_tensors, box_categories.cuda(), box_tensors.cuda(), video_label)
output_coord = output_coord.view((-1, len(val_loader.dataset.classes)))
            # detach from the computation graph so fusion-branch gradients do not flow back into the vision/coordinate branches
feature_vision_detached = feature_vision.detach()
feature_coord_detached = feature_coord.detach()
# obtain the activation of fusion branch
output_fusion = fusion_model(feature_vision_detached.cuda(), feature_coord_detached.cuda())
output_fusion = output_fusion.view((-1, len(val_loader.dataset.classes)))
# fuse three outputs
output_factual = fusion_function(output_vision, output_coord, output_fusion)
            # note: here loss_fusion is computed on the fused (factual) output rather than on output_fusion alone
loss_vision = criterion(output_vision, video_label.long().cuda())
loss_coord = criterion(output_coord, video_label.long().cuda())
loss_fusion = criterion(output_factual, video_label.long().cuda())
            # report statistics for the fused output (the value after the fusion function)
output = output_factual
loss = loss_vision
acc1, acc5 = accuracy(output.cpu(), video_label, topk=(1, 5))
if args.evaluate:
logits_matrix.append(output.cpu().data.numpy())
targets_list.append(video_label.cpu().numpy())
# measure accuracy and record loss
losses.update(loss.item(), global_img_tensors.size(0))
acc_top1.update(acc1.item(), global_img_tensors.size(0))
acc_top5.update(acc5.item(), global_img_tensors.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0 or i + 1 == len(val_loader):
print('Test: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Loss {loss.val:.4f} ({loss.avg:.4f})\t'
'Acc1 {acc_top1.val:.1f} ({acc_top1.avg:.1f})\t'
'Acc5 {acc_top5.val:.1f} ({acc_top5.avg:.1f})\t'.format(
i, len(val_loader), batch_time=batch_time, loss=losses,
acc_top1=acc_top1, acc_top5=acc_top5,
))
if args.evaluate:
logits_matrix = np.concatenate(logits_matrix)
targets_list = np.concatenate(targets_list)
save_results(logits_matrix, targets_list, class_to_idx, args)
return losses.avg
def cf_inference_group(val_loader, model_list, fusion_function, search_list, class_to_idx=None):
batch_time = AverageMeter()
search_length = len(search_list)
search_dict = {}
for i in range(search_length):
search_dict['acc_1_alpha_{}'.format(round(search_list[i], 1))] = AverageMeter()
search_dict['acc_5_alpha_{}'.format(round(search_list[i], 1))] = AverageMeter()
[vision_model, coord_model, fusion_model] = model_list
# switch to evaluate mode
vision_model.eval()
coord_model.eval()
fusion_model.eval()
end = time.time()
for i, (global_img_tensors, box_tensors, box_categories, video_label) in enumerate(val_loader):
# compute output
with torch.no_grad():
# factual inference
output_vision, feature_vision = vision_model(global_img_tensors.cuda(), box_categories, box_tensors.cuda(),
video_label)
output_vision = output_vision.view((-1, len(val_loader.dataset.classes)))
output_coord, feature_coord = coord_model(global_img_tensors, box_categories.cuda(), box_tensors.cuda(),
video_label)
output_coord = output_coord.view((-1, len(val_loader.dataset.classes)))
# obtain the activation of fusion branch
output_fusion = fusion_model(feature_vision.cuda(), feature_coord.cuda())
output_fusion = output_fusion.view((-1, len(val_loader.dataset.classes)))
# fuse three outputs
output_factual = fusion_function(output_vision, output_coord, output_fusion)
# counterfactual inference
output_vision_subtrahend = output_vision
output_counterfactual = fusion_function(output_vision_subtrahend, torch.tensor(0.0), torch.tensor(0.0))
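            # counterfactual pass: keep only the vision logits and zero out the coordinate and fusion contributions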
for j in range(search_length):
weight = search_list[j]
output_debiased = output_factual - output_counterfactual * weight
acc1, acc5 = accuracy(output_debiased.cpu(), video_label, topk=(1, 5))
search_dict['acc_1_alpha_{}'.format(round(search_list[j], 1))].update(acc1.item(), global_img_tensors.size(0))
search_dict['acc_5_alpha_{}'.format(round(search_list[j], 1))].update(acc5.item(), global_img_tensors.size(0))
# measure elapsed time
batch_time.update(time.time() - end)
end = time.time()
if i % args.print_freq == 0 or i + 1 == len(val_loader):
print('Cf-Inference: [{0}/{1}]\t'
'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
'Acc1_0.0 {acc_top1_00.val:.1f} ({acc_top1_00.avg:.1f})\t'
'Acc1_0.2 {acc_top1_02.val:.1f} ({acc_top1_02.avg:.1f})\t'
'Acc1_0.5 {acc_top1_05.val:.1f} ({acc_top1_05.avg:.1f})\t'
'Acc1_0.8 {acc_top1_08.val:.1f} ({acc_top1_08.avg:.1f})\t'
'Acc1_1.0 {acc_top1_10.val:.1f} ({acc_top1_10.avg:.1f})'.format(
i, len(val_loader), batch_time=batch_time, acc_top1_00=search_dict['acc_1_alpha_0.0'],
acc_top1_02=search_dict['acc_1_alpha_0.2'], acc_top1_05=search_dict['acc_1_alpha_0.5'],
acc_top1_08=search_dict['acc_1_alpha_0.8'], acc_top1_10=search_dict['acc_1_alpha_1.0']))
for k in range(search_length):
print(search_list[k], search_dict['acc_1_alpha_{}'.format(round(search_list[k], 1))].avg,
search_dict['acc_5_alpha_{}'.format(round(search_list[k], 1))].avg)
return
def save_checkpoint(state, is_best, filename):
torch.save(state, filename + '_latest.pth.tar')
if is_best:
shutil.copyfile(filename + '_latest.pth.tar', filename + '_best.pth.tar')
def adjust_learning_rate(optimizer, epoch, lr_steps, branch_name=None):
"""Sets the learning rate to the initial LR decayed by 10"""
decay = 0.1 ** (sum(epoch >= np.array(lr_steps)))
lr = args.lr * decay
if branch_name == 'vision':
for param_group in optimizer.param_groups:
param_group['lr'] = lr * 0.8
elif branch_name == 'coord':
for param_group in optimizer.param_groups:
param_group['lr'] = lr
elif branch_name == 'fusion':
for param_group in optimizer.param_groups:
param_group['lr'] = lr
else:
for param_group in optimizer.param_groups:
param_group['lr'] = lr
def accuracy(output, target, topk=(1,)):
"""Computes the accuracy over the k top predictions for the specified values of k"""
with torch.no_grad():
maxk = max(topk)
batch_size = target.size(0)
_, pred = output.topk(maxk, 1, True, True)
pred = pred.t()
correct = pred.eq(target.view(1, -1).expand_as(pred))
res = []
for k in topk:
correct_k = correct[:k].contiguous().view(-1).float().sum(0, keepdim=True)
res.append(correct_k.mul_(100.0 / batch_size))
return res
if __name__ == '__main__':
main()
| [
"torch.nn.CrossEntropyLoss",
"utils.save_results",
"numpy.array",
"model.model_lib.BboxInteractionLatentModel",
"argparse.ArgumentParser",
"model.model_lib.BboxVisualModel",
"numpy.linspace",
"numpy.concatenate",
"model.model_lib.ConcatFusionModel",
"callbacks.AverageMeter",
"os.path.isfile",
"shutil.copyfile",
"torch.save",
"time.time",
"torch.load",
"fusion_function.naivesum",
"torch.nn.DataParallel",
"torch.tensor",
"data_utils.causal_data_loader_frames.VideoFolder",
"torch.utils.data.DataLoader",
"torch.no_grad"
] | [((371, 428), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Counterfactual CAR"""'}), "(description='Counterfactual CAR')\n", (394, 428), False, 'import argparse\n'), ((6328, 6342), 'model.model_lib.BboxVisualModel', 'RGBModel', (['args'], {}), '(args)\n', (6336, 6342), True, 'from model.model_lib import BboxVisualModel as RGBModel\n'), ((6362, 6377), 'model.model_lib.BboxInteractionLatentModel', 'BboxModel', (['args'], {}), '(args)\n', (6371, 6377), True, 'from model.model_lib import BboxInteractionLatentModel as BboxModel\n'), ((6398, 6415), 'model.model_lib.ConcatFusionModel', 'FusionModel', (['args'], {}), '(args)\n', (6409, 6415), True, 'from model.model_lib import ConcatFusionModel as FusionModel\n'), ((6859, 6872), 'fusion_function.naivesum', 'fusion_func', ([], {}), '()\n', (6870, 6872), True, 'from fusion_function import naivesum as fusion_func\n'), ((9249, 9461), 'data_utils.causal_data_loader_frames.VideoFolder', 'VideoFolder', ([], {'root': 'args.root_frames', 'num_boxes': 'args.num_boxes', 'file_input': 'args.json_data_train', 'file_labels': 'args.json_file_labels', 'frames_duration': 'args.num_frames', 'args': 'args', 'is_val': '(False)', 'if_augment': '(True)'}), '(root=args.root_frames, num_boxes=args.num_boxes, file_input=\n args.json_data_train, file_labels=args.json_file_labels,\n frames_duration=args.num_frames, args=args, is_val=False, if_augment=True)\n', (9260, 9461), False, 'from data_utils.causal_data_loader_frames import VideoFolder\n'), ((9738, 9948), 'data_utils.causal_data_loader_frames.VideoFolder', 'VideoFolder', ([], {'root': 'args.root_frames', 'num_boxes': 'args.num_boxes', 'file_input': 'args.json_data_val', 'file_labels': 'args.json_file_labels', 'frames_duration': 'args.num_frames', 'args': 'args', 'is_val': '(True)', 'if_augment': '(True)'}), '(root=args.root_frames, num_boxes=args.num_boxes, file_input=\n args.json_data_val, file_labels=args.json_file_labels, frames_duration=\n args.num_frames, args=args, is_val=True, if_augment=True)\n', (9749, 9948), False, 'from data_utils.causal_data_loader_frames import VideoFolder\n'), ((10256, 10403), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset_train'], {'batch_size': 'args.batch_size', 'shuffle': '(True)', 'num_workers': 'args.workers', 'drop_last': '(True)', 'pin_memory': '(True)'}), '(dataset_train, batch_size=args.batch_size,\n shuffle=True, num_workers=args.workers, drop_last=True, pin_memory=True)\n', (10283, 10403), False, 'import torch\n'), ((10461, 10609), 'torch.utils.data.DataLoader', 'torch.utils.data.DataLoader', (['dataset_val'], {'drop_last': '(True)', 'batch_size': 'args.batch_size', 'shuffle': '(False)', 'num_workers': 'args.workers', 'pin_memory': '(False)'}), '(dataset_val, drop_last=True, batch_size=args.\n batch_size, shuffle=False, num_workers=args.workers, pin_memory=False)\n', (10488, 10609), False, 'import torch\n'), ((11422, 11449), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (11447, 11449), False, 'import torch\n'), ((11469, 11494), 'numpy.linspace', 'np.linspace', (['(0.0)', '(1.0)', '(11)'], {}), '(0.0, 1.0, 11)\n', (11480, 11494), True, 'import numpy as np\n'), ((13865, 13879), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (13877, 13879), False, 'from callbacks import AverageMeter\n'), ((13897, 13911), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (13909, 13911), False, 'from callbacks import AverageMeter\n'), ((13928, 13942), 'callbacks.AverageMeter', 
'AverageMeter', ([], {}), '()\n', (13940, 13942), False, 'from callbacks import AverageMeter\n'), ((13961, 13975), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (13973, 13975), False, 'from callbacks import AverageMeter\n'), ((13992, 14006), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (14004, 14006), False, 'from callbacks import AverageMeter\n'), ((14379, 14390), 'time.time', 'time.time', ([], {}), '()\n', (14388, 14390), False, 'import time\n'), ((17766, 17780), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (17778, 17780), False, 'from callbacks import AverageMeter\n'), ((17795, 17809), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (17807, 17809), False, 'from callbacks import AverageMeter\n'), ((17826, 17840), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (17838, 17840), False, 'from callbacks import AverageMeter\n'), ((17857, 17871), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (17869, 17871), False, 'from callbacks import AverageMeter\n'), ((18128, 18139), 'time.time', 'time.time', ([], {}), '()\n', (18137, 18139), False, 'import time\n'), ((21275, 21289), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (21287, 21289), False, 'from callbacks import AverageMeter\n'), ((21746, 21757), 'time.time', 'time.time', ([], {}), '()\n', (21755, 21757), False, 'import time\n'), ((24887, 24934), 'torch.save', 'torch.save', (['state', "(filename + '_latest.pth.tar')"], {}), "(state, filename + '_latest.pth.tar')\n", (24897, 24934), False, 'import torch\n'), ((7340, 7374), 'os.path.isfile', 'os.path.isfile', (['args.resume_vision'], {}), '(args.resume_vision)\n', (7354, 7374), False, 'import os\n'), ((7527, 7557), 'torch.load', 'torch.load', (['args.resume_vision'], {}), '(args.resume_vision)\n', (7537, 7557), False, 'import torch\n'), ((7974, 8007), 'os.path.isfile', 'os.path.isfile', (['args.resume_coord'], {}), '(args.resume_coord)\n', (7988, 8007), False, 'import os\n'), ((8158, 8187), 'torch.load', 'torch.load', (['args.resume_coord'], {}), '(args.resume_coord)\n', (8168, 8187), False, 'import torch\n'), ((8548, 8582), 'os.path.isfile', 'os.path.isfile', (['args.resume_fusion'], {}), '(args.resume_fusion)\n', (8562, 8582), False, 'import os\n'), ((8735, 8765), 'torch.load', 'torch.load', (['args.resume_fusion'], {}), '(args.resume_fusion)\n', (8745, 8765), False, 'import torch\n'), ((17006, 17017), 'time.time', 'time.time', ([], {}), '()\n', (17015, 17017), False, 'import time\n'), ((20387, 20398), 'time.time', 'time.time', ([], {}), '()\n', (20396, 20398), False, 'import time\n'), ((20976, 21005), 'numpy.concatenate', 'np.concatenate', (['logits_matrix'], {}), '(logits_matrix)\n', (20990, 21005), True, 'import numpy as np\n'), ((21030, 21058), 'numpy.concatenate', 'np.concatenate', (['targets_list'], {}), '(targets_list)\n', (21044, 21058), True, 'import numpy as np\n'), ((21068, 21129), 'utils.save_results', 'save_results', (['logits_matrix', 'targets_list', 'class_to_idx', 'args'], {}), '(logits_matrix, targets_list, class_to_idx, args)\n', (21080, 21129), False, 'from utils import save_results\n'), ((21460, 21474), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (21472, 21474), False, 'from callbacks import AverageMeter\n'), ((21549, 21563), 'callbacks.AverageMeter', 'AverageMeter', ([], {}), '()\n', (21561, 21563), False, 'from callbacks import AverageMeter\n'), ((23686, 23697), 'time.time', 'time.time', ([], {}), '()\n', (23695, 23697), False, 'import time\n'), ((24961, 25034), 
'shutil.copyfile', 'shutil.copyfile', (["(filename + '_latest.pth.tar')", "(filename + '_best.pth.tar')"], {}), "(filename + '_latest.pth.tar', filename + '_best.pth.tar')\n", (24976, 25034), False, 'import shutil\n'), ((25878, 25893), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (25891, 25893), False, 'import torch\n'), ((18281, 18296), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (18294, 18296), False, 'import torch\n'), ((21899, 21914), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (21912, 21914), False, 'import torch\n'), ((6922, 6957), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['vision_model'], {}), '(vision_model)\n', (6943, 6957), False, 'import torch\n'), ((6988, 7022), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['coord_model'], {}), '(coord_model)\n', (7009, 7022), False, 'import torch\n'), ((7054, 7089), 'torch.nn.DataParallel', 'torch.nn.DataParallel', (['fusion_model'], {}), '(fusion_model)\n', (7075, 7089), False, 'import torch\n'), ((14522, 14533), 'time.time', 'time.time', ([], {}), '()\n', (14531, 14533), False, 'import time\n'), ((16972, 16983), 'time.time', 'time.time', ([], {}), '()\n', (16981, 16983), False, 'import time\n'), ((20353, 20364), 'time.time', 'time.time', ([], {}), '()\n', (20362, 20364), False, 'import time\n'), ((23037, 23054), 'torch.tensor', 'torch.tensor', (['(0.0)'], {}), '(0.0)\n', (23049, 23054), False, 'import torch\n'), ((23056, 23073), 'torch.tensor', 'torch.tensor', (['(0.0)'], {}), '(0.0)\n', (23068, 23073), False, 'import torch\n'), ((23652, 23663), 'time.time', 'time.time', ([], {}), '()\n', (23661, 23663), False, 'import time\n'), ((25214, 25232), 'numpy.array', 'np.array', (['lr_steps'], {}), '(lr_steps)\n', (25222, 25232), True, 'import numpy as np\n')] |
# encoding: utf-8
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_cors import CORS
import logging
app = Flask(__name__)
CORS(app, resources={r"/*": {"origins": "*"}})
app.config.from_object('config.current')
db = SQLAlchemy(app)
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# The modules below are imported for their side effects: they register the JWT handlers and route blueprints on the app.
import application.jwt
import application.routes.config
import application.routes.user
import application.routes.permission
import application.routes.role
import application.routes.access
# create the tables once all models have been defined (via the imports above)
db.create_all() | [
"logging.getLogger",
"flask_sqlalchemy.SQLAlchemy",
"flask_cors.CORS",
"flask.Flask"
] | [((144, 159), 'flask.Flask', 'Flask', (['__name__'], {}), '(__name__)\n', (149, 159), False, 'from flask import Flask\n'), ((161, 206), 'flask_cors.CORS', 'CORS', (['app'], {'resources': "{'/*': {'origins': '*'}}"}), "(app, resources={'/*': {'origins': '*'}})\n", (165, 206), False, 'from flask_cors import CORS\n'), ((260, 275), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', (['app'], {}), '(app)\n', (270, 275), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((288, 315), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (305, 315), False, 'import logging\n')] |
from Module import AbstractModule
class Module(AbstractModule):
def __init__(self):
AbstractModule.__init__(self)
def run(
self, network, antecedents, out_attributes, user_options, num_cores,
outfile):
import os
import shutil
from genomicode import filelib
in_data = antecedents
result_files = os.listdir(in_data.identifier)
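        # copy the Illumina control file (any result file whose name contains '-controls') to the output path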
for result_file in result_files:
if '-controls' in result_file:
goal_file = os.path.join(in_data.identifier, result_file)
shutil.copyfile(goal_file, outfile)
assert filelib.exists_nz(outfile), (
            'the output file %s for illu_control does not exist or is empty' % outfile
)
def name_outfile(self, antecedents, user_options):
from Betsy import module_utils
original_file = module_utils.get_inputid(antecedents.identifier)
filename = 'control_illumina_' + original_file + '.gct'
return filename
| [
"Betsy.module_utils.get_inputid",
"os.listdir",
"os.path.join",
"shutil.copyfile",
"genomicode.filelib.exists_nz",
"Module.AbstractModule.__init__"
] | [((97, 126), 'Module.AbstractModule.__init__', 'AbstractModule.__init__', (['self'], {}), '(self)\n', (120, 126), False, 'from Module import AbstractModule\n'), ((368, 398), 'os.listdir', 'os.listdir', (['in_data.identifier'], {}), '(in_data.identifier)\n', (378, 398), False, 'import os\n'), ((638, 664), 'genomicode.filelib.exists_nz', 'filelib.exists_nz', (['outfile'], {}), '(outfile)\n', (655, 664), False, 'from genomicode import filelib\n'), ((864, 912), 'Betsy.module_utils.get_inputid', 'module_utils.get_inputid', (['antecedents.identifier'], {}), '(antecedents.identifier)\n', (888, 912), False, 'from Betsy import module_utils\n'), ((511, 556), 'os.path.join', 'os.path.join', (['in_data.identifier', 'result_file'], {}), '(in_data.identifier, result_file)\n', (523, 556), False, 'import os\n'), ((573, 608), 'shutil.copyfile', 'shutil.copyfile', (['goal_file', 'outfile'], {}), '(goal_file, outfile)\n', (588, 608), False, 'import shutil\n')] |
import unittest
import base
class Test(base.BaseScriptTest, unittest.TestCase):
command_line = "./scripts/maf_extract_ranges_indexed.py ./test_data/maf_tests/mm8_chr7_tiny.maf -c -m 5 -p mm8."
input_stdin = base.TestFile(filename="./test_data/maf_tests/dcking_ghp074.bed")
output_stdout = base.TestFile(filename="./test_data/maf_tests/dcking_ghp074.maf")
| [
"base.TestFile"
] | [((218, 283), 'base.TestFile', 'base.TestFile', ([], {'filename': '"""./test_data/maf_tests/dcking_ghp074.bed"""'}), "(filename='./test_data/maf_tests/dcking_ghp074.bed')\n", (231, 283), False, 'import base\n'), ((304, 369), 'base.TestFile', 'base.TestFile', ([], {'filename': '"""./test_data/maf_tests/dcking_ghp074.maf"""'}), "(filename='./test_data/maf_tests/dcking_ghp074.maf')\n", (317, 369), False, 'import base\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneauth1 import loading as ks_loading
from keystoneauth1 import service_token
import mock
import nova.conf
from nova import context
from nova import service_auth
from nova import test
CONF = nova.conf.CONF
class ServiceAuthTestCase(test.NoDBTestCase):
def setUp(self):
super(ServiceAuthTestCase, self).setUp()
self.ctx = context.RequestContext('fake', 'fake')
self.addCleanup(service_auth.reset_globals)
@mock.patch.object(ks_loading, 'load_auth_from_conf_options')
def test_get_auth_plugin_no_wraps(self, mock_load):
context = mock.MagicMock()
context.get_auth_plugin.return_value = "fake"
result = service_auth.get_auth_plugin(context)
self.assertEqual("fake", result)
mock_load.assert_not_called()
@mock.patch.object(ks_loading, 'load_auth_from_conf_options')
def test_get_auth_plugin_wraps(self, mock_load):
self.flags(send_service_user_token=True, group='service_user')
result = service_auth.get_auth_plugin(self.ctx)
self.assertIsInstance(result, service_token.ServiceTokenAuthWrapper)
@mock.patch.object(ks_loading, 'load_auth_from_conf_options',
return_value=None)
def test_get_auth_plugin_wraps_bad_config(self, mock_load):
"""Tests the case that send_service_user_token is True but there
is some misconfiguration with the [service_user] section which makes
KSA return None for the service user auth.
"""
self.flags(send_service_user_token=True, group='service_user')
result = service_auth.get_auth_plugin(self.ctx)
self.assertEqual(1, mock_load.call_count)
self.assertNotIsInstance(result, service_token.ServiceTokenAuthWrapper)
| [
"mock.patch.object",
"mock.MagicMock",
"nova.context.RequestContext",
"nova.service_auth.get_auth_plugin"
] | [((1027, 1087), 'mock.patch.object', 'mock.patch.object', (['ks_loading', '"""load_auth_from_conf_options"""'], {}), "(ks_loading, 'load_auth_from_conf_options')\n", (1044, 1087), False, 'import mock\n'), ((1375, 1435), 'mock.patch.object', 'mock.patch.object', (['ks_loading', '"""load_auth_from_conf_options"""'], {}), "(ks_loading, 'load_auth_from_conf_options')\n", (1392, 1435), False, 'import mock\n'), ((1701, 1780), 'mock.patch.object', 'mock.patch.object', (['ks_loading', '"""load_auth_from_conf_options"""'], {'return_value': 'None'}), "(ks_loading, 'load_auth_from_conf_options', return_value=None)\n", (1718, 1780), False, 'import mock\n'), ((930, 968), 'nova.context.RequestContext', 'context.RequestContext', (['"""fake"""', '"""fake"""'], {}), "('fake', 'fake')\n", (952, 968), False, 'from nova import context\n'), ((1162, 1178), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (1176, 1178), False, 'import mock\n'), ((1251, 1288), 'nova.service_auth.get_auth_plugin', 'service_auth.get_auth_plugin', (['context'], {}), '(context)\n', (1279, 1288), False, 'from nova import service_auth\n'), ((1578, 1616), 'nova.service_auth.get_auth_plugin', 'service_auth.get_auth_plugin', (['self.ctx'], {}), '(self.ctx)\n', (1606, 1616), False, 'from nova import service_auth\n'), ((2169, 2207), 'nova.service_auth.get_auth_plugin', 'service_auth.get_auth_plugin', (['self.ctx'], {}), '(self.ctx)\n', (2197, 2207), False, 'from nova import service_auth\n')] |
import svhn2mnist
import usps
import syn2gtrsb
import syndig2svhn
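# Select the feature extractor (Generator) and classifier (Predictor) matching the source/target domain pair.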
def Generator(source, target, pixelda=False):
if source == 'usps' or target == 'usps':
return usps.Feature()
elif source == 'svhn':
return svhn2mnist.Feature()
elif source == 'synth':
return syn2gtrsb.Feature()
def Classifier(source, target):
if source == 'usps' or target == 'usps':
return usps.Predictor()
if source == 'svhn':
return svhn2mnist.Predictor()
if source == 'synth':
return syn2gtrsb.Predictor()
| [
"syn2gtrsb.Feature",
"svhn2mnist.Feature",
"syn2gtrsb.Predictor",
"usps.Predictor",
"svhn2mnist.Predictor",
"usps.Feature"
] | [((173, 187), 'usps.Feature', 'usps.Feature', ([], {}), '()\n', (185, 187), False, 'import usps\n'), ((408, 424), 'usps.Predictor', 'usps.Predictor', ([], {}), '()\n', (422, 424), False, 'import usps\n'), ((465, 487), 'svhn2mnist.Predictor', 'svhn2mnist.Predictor', ([], {}), '()\n', (485, 487), False, 'import svhn2mnist\n'), ((529, 550), 'syn2gtrsb.Predictor', 'syn2gtrsb.Predictor', ([], {}), '()\n', (548, 550), False, 'import syn2gtrsb\n'), ((230, 250), 'svhn2mnist.Feature', 'svhn2mnist.Feature', ([], {}), '()\n', (248, 250), False, 'import svhn2mnist\n'), ((294, 313), 'syn2gtrsb.Feature', 'syn2gtrsb.Feature', ([], {}), '()\n', (311, 313), False, 'import syn2gtrsb\n')] |
##############################################################################
#
# Copyright (c) 2002 Zope Foundation and Contributors.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
""" Unit tests for DirectoryView module.
"""
import sys
import unittest
import warnings
from os import mkdir
from os import remove
from os.path import join
from tempfile import mktemp
from App.config import getConfiguration
from . import _globals
from .base.dummy import DummyFolder
from .base.testcase import FSDVTest
from .base.testcase import WritableFSDVTest
class DirectoryViewPathTests(unittest.TestCase):
"""
    These tests verify that, no matter what is stored in their dirpath,
    FSDVs will do their best to find an appropriate skin
    and only do nothing in the case where an appropriate skin
    can't be found.
"""
def setUp(self):
from Products.CMFCore.DirectoryView import addDirectoryViews
from Products.CMFCore.DirectoryView import registerDirectory
registerDirectory('fake_skins', _globals)
self.ob = DummyFolder()
addDirectoryViews(self.ob, 'fake_skins', _globals)
def test__generateKey(self):
from Products.CMFCore.DirectoryView import _generateKey
key = _generateKey('Products.CMFCore', 'tests')
self.assertEqual(key.split(':')[0], 'Products.CMFCore')
subkey = _generateKey('Products.CMFCore', 'tests\foo')
self.assertTrue(subkey.startswith(key))
def test__findProductForPath(self):
from Products.CMFCore.DirectoryView import _findProductForPath
cmfpath = sys.modules['Products.CMFCore'].__path__[0]
self.assertEqual(_findProductForPath(cmfpath),
('Products.CMFCore', ''))
cmfpath = join(cmfpath, 'tests')
self.assertEqual(_findProductForPath(cmfpath),
('Products.CMFCore', 'tests'))
def test_getDirectoryInfo(self):
skin = self.ob.fake_skin
skin.manage_properties('Products.CMFCore.tests:fake_skins/fake_skin')
self.assertTrue(hasattr(self.ob.fake_skin, 'test1'),
self.ob.fake_skin.getDirPath())
    # Test that we do nothing if given a non-existing path
def test_UnhandleableExpandPath(self):
file = mktemp()
with warnings.catch_warnings(record=True) as w:
warnings.simplefilter('always')
self.ob.fake_skin.manage_properties(file)
self.assertEqual(self.ob.fake_skin.objectIds(), [])
# Check that a warning was raised.
self.assertEqual(len(w), 1)
self.assertTrue(issubclass(w[-1].category, UserWarning))
text = ('DirectoryView fake_skin refers to a non-existing path %r'
% file)
self.assertTrue(text in str(w[-1].message))
# this test tests that registerDirectory creates keys in the right format.
def test_registerDirectoryKeys(self):
from Products.CMFCore.DirectoryView import _dirreg
dirs = _dirreg._directories
self.assertTrue('Products.CMFCore.tests:fake_skins/fake_skin' in dirs,
dirs.keys())
self.assertEqual(self.ob.fake_skin.getDirPath(),
'Products.CMFCore.tests:fake_skins/fake_skin')
class DirectoryViewTests(FSDVTest):
def setUp(self):
FSDVTest.setUp(self)
self._registerDirectory(self)
def test_addDirectoryViews(self):
# Test addDirectoryViews
# also test registration of directory views doesn't barf
pass
def test_DirectoryViewExists(self):
# Check DirectoryView added by addDirectoryViews
# appears as a DirectoryViewSurrogate due
# to Acquisition hackery.
from Products.CMFCore.DirectoryView import DirectoryViewSurrogate
self.assertTrue(isinstance(self.ob.fake_skin, DirectoryViewSurrogate))
def test_DirectoryViewMethod(self):
# Check if DirectoryView method works
self.assertEqual(self.ob.fake_skin.test1(), 'test1')
def test_properties(self):
# Make sure the directory view is reading properties
self.assertEqual(self.ob.fake_skin.testPT.title, 'Zope Pope')
def test_ignored(self):
# Test that "artifact" files and dirs are ignored
for name in '#test1', 'CVS', '.test1', 'test1~':
self.assertTrue(name not in self.ob.fake_skin.objectIds(),
'%s not ignored' % name)
def test_surrogate_writethrough(self):
# CMF Collector 316: It is possible to cause ZODB writes because
# setting attributes on the non-persistent surrogate writes them
# into the persistent DirectoryView as well. This is bad in situations
# where you only want to store markers and remove them before the
# transaction has ended - they never got removed because there was
# no equivalent __delattr__ on the surrogate that would clean up
# the persistent DirectoryView as well.
fs = self.ob.fake_skin
test_foo = 'My Foovalue'
fs.foo = test_foo
self.assertEqual(fs.foo, test_foo)
self.assertEqual(fs.__dict__['_real'].foo, test_foo)
del fs.foo
self.assertRaises(AttributeError, getattr, fs, 'foo')
self.assertRaises(AttributeError, getattr, fs.__dict__['_real'], 'foo')
class DirectoryViewIgnoreTests(FSDVTest):
def setUp(self):
FSDVTest.setUp(self)
self.manual_ign = ('CVS', 'SVN', 'test_manual_ignore.py')
self._registerDirectory(self, ignore=self.manual_ign)
def test_ignored(self):
# Test that "artifact" files and dirs are ignored,
# even when a custom ignore list is used; and that the
# custom ignore list is also honored
auto_ign = ('#test1', '.test1', 'test1~')
must_ignore = self.manual_ign + auto_ign + ('test_manual_ignore',)
visible = self.ob.fake_skin.objectIds()
for name in must_ignore:
self.assertFalse(name in visible)
class DirectoryViewFolderTests(FSDVTest):
def setUp(self):
FSDVTest.setUp(self)
self._registerDirectory(self)
def tearDown(self):
from Products.CMFCore import DirectoryView
# This is nasty, but there is no way to unregister anything
# right now...
metatype_registry = DirectoryView._dirreg._meta_types
if 'FOLDER' in metatype_registry:
del metatype_registry['FOLDER']
FSDVTest.tearDown(self)
def test_DirectoryViewMetadata(self):
# Test to determine if metadata shows up correctly on a
# FSDV that has a corresponding .metadata file
testfolder = self.ob.fake_skin.test_directory
self.assertEqual(testfolder.title, 'test_directory Title')
def test_DirectoryViewMetadataOnPropertyManager(self):
# Test to determine if metadata shows up correctly on a
# FSDV that has a corresponding .metadata file
testfolder = self.ob.fake_skin.test_directory
self.assertEqual(testfolder.getProperty('title'),
'test_directory Title')
def test_DirectoryViewFolderDefault(self):
# Test that a folder inside the fake skin really is of type
# DirectoryViewSurrogate
from Products.CMFCore.DirectoryView import DirectoryViewSurrogate
testfolder = self.ob.fake_skin.test_directory
self.assertTrue(isinstance(testfolder, DirectoryViewSurrogate))
def test_DirectoryViewFolderCustom(self):
# Now we register a different class under the fake meta_type
# "FOLDER" and test again...
from Products.CMFCore.DirectoryView import DirectoryView
from Products.CMFCore.DirectoryView import registerMetaType
class DummyDirectoryViewSurrogate:
pass
class DummyDirectoryView(DirectoryView):
def __of__(self, parent):
return DummyDirectoryViewSurrogate()
registerMetaType('FOLDER', DummyDirectoryView)
# In order to regenerate the FSDV data we need to remove and
# register again, that way the newly registered meta_type is used
self.ob._delObject('fake_skin')
self._registerDirectory(self)
testfolder = self.ob.fake_skin.test_directory
self.assertTrue(isinstance(testfolder, DummyDirectoryViewSurrogate))
class DebugModeTests(WritableFSDVTest):
def setUp(self):
from Products.CMFCore.DirectoryView import _dirreg
WritableFSDVTest.setUp(self)
self.saved_cfg_debug_mode = getConfiguration().debug_mode
getConfiguration().debug_mode = True
# initialise skins
self._registerDirectory(self)
# add a method to the fake skin folder
self._writeFile('test2.py', "return 'test2'")
# edit the test1 method
self._writeFile('test1.py', "return 'new test1'")
# add a new folder
mkdir(join(self.skin_path_name, 'test3'))
info = _dirreg.getDirectoryInfo(self.ob.fake_skin._dirpath)
info.reload()
self.use_dir_mtime = info.use_dir_mtime
def tearDown(self):
getConfiguration().debug_mode = self.saved_cfg_debug_mode
WritableFSDVTest.tearDown(self)
def test_AddNewMethod(self):
# See if a method added to the skin folder can be found
self.assertEqual(self.ob.fake_skin.test2(), 'test2')
def test_EditMethod(self):
# See if an edited method exhibits its new behaviour
self.assertEqual(self.ob.fake_skin.test1(), 'new test1')
def test_DeleteMethod(self):
# Make sure a deleted method goes away
remove(join(self.skin_path_name, 'test2.py'))
self.assertFalse(hasattr(self.ob.fake_skin, 'test2'))
def test_DeleteAddEditMethod(self):
# Check that if we delete a method, then add it back,
# then edit it, the DirectoryView notices.
# This exercises yet another Win32 mtime weirdity.
remove(join(self.skin_path_name, 'test2.py'))
self.assertFalse(hasattr(self.ob.fake_skin, 'test2'))
# add method back to the fake skin folder
self._writeFile('test2.py', "return 'test2.2'",
self.use_dir_mtime)
# check
self.assertEqual(self.ob.fake_skin.test2(), 'test2.2')
# edit method
self._writeFile('test2.py', "return 'test2.3'",
self.use_dir_mtime)
# check
self.assertEqual(self.ob.fake_skin.test2(), 'test2.3')
def test_NewFolder(self):
# See if a new folder shows up
self.assertFalse(hasattr(self.ob.fake_skin, 'test3'))
def test_DeleteFolder(self):
# Make sure a deleted folder goes away
self.assertTrue(hasattr(self.ob.fake_skin, 'test_directory'))
# It has a file, which we need to delete first.
self.assertTrue(hasattr(self.ob.fake_skin.test_directory,
'README.txt'))
self._deleteFile(join('test_directory', 'README.txt'),
self.use_dir_mtime)
self._deleteDirectory('test_directory', self.use_dir_mtime)
self.assertFalse(hasattr(self.ob.fake_skin, 'test_directory'))
def test_suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(DirectoryViewPathTests))
suite.addTest(unittest.makeSuite(DirectoryViewTests))
suite.addTest(unittest.makeSuite(DirectoryViewIgnoreTests))
suite.addTest(unittest.makeSuite(DirectoryViewFolderTests))
suite.addTest(unittest.makeSuite(DebugModeTests))
return suite
| [
"unittest.TestSuite",
"Products.CMFCore.DirectoryView._dirreg.getDirectoryInfo",
"Products.CMFCore.DirectoryView.registerDirectory",
"unittest.makeSuite",
"os.path.join",
"warnings.catch_warnings",
"Products.CMFCore.DirectoryView._generateKey",
"Products.CMFCore.DirectoryView.addDirectoryViews",
"tempfile.mktemp",
"App.config.getConfiguration",
"Products.CMFCore.DirectoryView._findProductForPath",
"Products.CMFCore.DirectoryView.registerMetaType",
"warnings.simplefilter"
] | [((11700, 11720), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (11718, 11720), False, 'import unittest\n'), ((1418, 1459), 'Products.CMFCore.DirectoryView.registerDirectory', 'registerDirectory', (['"""fake_skins"""', '_globals'], {}), "('fake_skins', _globals)\n", (1435, 1459), False, 'from Products.CMFCore.DirectoryView import registerDirectory\n'), ((1500, 1550), 'Products.CMFCore.DirectoryView.addDirectoryViews', 'addDirectoryViews', (['self.ob', '"""fake_skins"""', '_globals'], {}), "(self.ob, 'fake_skins', _globals)\n", (1517, 1550), False, 'from Products.CMFCore.DirectoryView import addDirectoryViews\n'), ((1664, 1705), 'Products.CMFCore.DirectoryView._generateKey', '_generateKey', (['"""Products.CMFCore"""', '"""tests"""'], {}), "('Products.CMFCore', 'tests')\n", (1676, 1705), False, 'from Products.CMFCore.DirectoryView import _generateKey\n'), ((1788, 1835), 'Products.CMFCore.DirectoryView._generateKey', '_generateKey', (['"""Products.CMFCore"""', '"""tests\x0coo"""'], {}), "('Products.CMFCore', 'tests\\x0coo')\n", (1800, 1835), False, 'from Products.CMFCore.DirectoryView import _generateKey\n'), ((2182, 2204), 'os.path.join', 'join', (['cmfpath', '"""tests"""'], {}), "(cmfpath, 'tests')\n", (2186, 2204), False, 'from os.path import join\n'), ((2695, 2703), 'tempfile.mktemp', 'mktemp', ([], {}), '()\n', (2701, 2703), False, 'from tempfile import mktemp\n'), ((8413, 8459), 'Products.CMFCore.DirectoryView.registerMetaType', 'registerMetaType', (['"""FOLDER"""', 'DummyDirectoryView'], {}), "('FOLDER', DummyDirectoryView)\n", (8429, 8459), False, 'from Products.CMFCore.DirectoryView import registerMetaType\n'), ((9438, 9490), 'Products.CMFCore.DirectoryView._dirreg.getDirectoryInfo', '_dirreg.getDirectoryInfo', (['self.ob.fake_skin._dirpath'], {}), '(self.ob.fake_skin._dirpath)\n', (9462, 9490), False, 'from Products.CMFCore.DirectoryView import _dirreg\n'), ((11739, 11781), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewPathTests'], {}), '(DirectoryViewPathTests)\n', (11757, 11781), False, 'import unittest\n'), ((11801, 11839), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewTests'], {}), '(DirectoryViewTests)\n', (11819, 11839), False, 'import unittest\n'), ((11859, 11903), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewIgnoreTests'], {}), '(DirectoryViewIgnoreTests)\n', (11877, 11903), False, 'import unittest\n'), ((11923, 11967), 'unittest.makeSuite', 'unittest.makeSuite', (['DirectoryViewFolderTests'], {}), '(DirectoryViewFolderTests)\n', (11941, 11967), False, 'import unittest\n'), ((11987, 12021), 'unittest.makeSuite', 'unittest.makeSuite', (['DebugModeTests'], {}), '(DebugModeTests)\n', (12005, 12021), False, 'import unittest\n'), ((2082, 2110), 'Products.CMFCore.DirectoryView._findProductForPath', '_findProductForPath', (['cmfpath'], {}), '(cmfpath)\n', (2101, 2110), False, 'from Products.CMFCore.DirectoryView import _findProductForPath\n'), ((2230, 2258), 'Products.CMFCore.DirectoryView._findProductForPath', '_findProductForPath', (['cmfpath'], {}), '(cmfpath)\n', (2249, 2258), False, 'from Products.CMFCore.DirectoryView import _findProductForPath\n'), ((2717, 2753), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(True)'}), '(record=True)\n', (2740, 2753), False, 'import warnings\n'), ((2772, 2803), 'warnings.simplefilter', 'warnings.simplefilter', (['"""always"""'], {}), "('always')\n", (2793, 2803), False, 'import warnings\n'), ((9010, 9028), 'App.config.getConfiguration', 
'getConfiguration', ([], {}), '()\n', (9026, 9028), False, 'from App.config import getConfiguration\n'), ((9048, 9066), 'App.config.getConfiguration', 'getConfiguration', ([], {}), '()\n', (9064, 9066), False, 'from App.config import getConfiguration\n'), ((9386, 9420), 'os.path.join', 'join', (['self.skin_path_name', '"""test3"""'], {}), "(self.skin_path_name, 'test3')\n", (9390, 9420), False, 'from os.path import join\n'), ((9594, 9612), 'App.config.getConfiguration', 'getConfiguration', ([], {}), '()\n', (9610, 9612), False, 'from App.config import getConfiguration\n'), ((10105, 10142), 'os.path.join', 'join', (['self.skin_path_name', '"""test2.py"""'], {}), "(self.skin_path_name, 'test2.py')\n", (10109, 10142), False, 'from os.path import join\n'), ((10434, 10471), 'os.path.join', 'join', (['self.skin_path_name', '"""test2.py"""'], {}), "(self.skin_path_name, 'test2.py')\n", (10438, 10471), False, 'from os.path import join\n'), ((11446, 11482), 'os.path.join', 'join', (['"""test_directory"""', '"""README.txt"""'], {}), "('test_directory', 'README.txt')\n", (11450, 11482), False, 'from os.path import join\n')] |
import openmdao.api as om
from pycycle.thermo.cea import species_data
from pycycle.constants import AIR_ELEMENTS
from pycycle.elements.ambient import Ambient
from pycycle.elements.flow_start import FlowStart
class FlightConditions(om.Group):
"""Determines total and static flow properties given an altitude and Mach number using the input atmosphere model"""
def initialize(self):
self.options.declare('thermo_method', default='CEA', values=('CEA',),
desc='Method for computing thermodynamic properties')
self.options.declare('thermo_data', default=species_data.janaf,
desc='thermodynamic data set', recordable=False)
self.options.declare('elements', default=AIR_ELEMENTS,
desc='set of elements present in the flow')
self.options.declare('use_WAR', default=False, values=[True, False],
desc='If True, includes WAR calculation')
def setup(self):
thermo_method = self.options['thermo_method']
thermo_data = self.options['thermo_data']
elements = self.options['elements']
use_WAR = self.options['use_WAR']
self.add_subsystem('ambient', Ambient(), promotes=('alt', 'dTs')) # inputs
conv = self.add_subsystem('conv', om.Group(), promotes=['*'])
if use_WAR == True:
proms = ['Fl_O:*', 'MN', 'W', 'WAR']
else:
proms = ['Fl_O:*', 'MN', 'W']
conv.add_subsystem('fs', FlowStart(thermo_method=thermo_method,
thermo_data=thermo_data,
elements=elements,
use_WAR=use_WAR),
promotes=proms)
balance = conv.add_subsystem('balance', om.BalanceComp())
balance.add_balance('Tt', val=500.0, lower=1e-4, units='degR', desc='Total temperature', eq_units='degR')
balance.add_balance('Pt', val=14.696, lower=1e-4, units='psi', desc='Total pressure', eq_units='psi')
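        # the Newton solver varies the total T and P until the computed static T and P match the ambient values (wired up via the connects below)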
# sub.set_order(['fs','balance'])
newton = conv.nonlinear_solver = om.NewtonSolver()
newton.options['atol'] = 1e-10
newton.options['rtol'] = 1e-10
newton.options['maxiter'] = 10
newton.options['iprint'] = -1
newton.options['solve_subsystems'] = True
newton.options['reraise_child_analysiserror'] = False
newton.linesearch = om.BoundsEnforceLS()
newton.linesearch.options['bound_enforcement'] = 'scalar'
newton.linesearch.options['iprint'] = -1
# newton.linesearch.options['solve_subsystems'] = True
conv.linear_solver = om.DirectSolver(assemble_jac=True)
self.connect('ambient.Ps', 'balance.rhs:Pt')
self.connect('ambient.Ts', 'balance.rhs:Tt')
self.connect('balance.Pt', 'fs.P')
self.connect('balance.Tt', 'fs.T')
self.connect('Fl_O:stat:P', 'balance.lhs:Pt')
self.connect('Fl_O:stat:T', 'balance.lhs:Tt')
# self.set_order(['ambient', 'subgroup'])
if __name__ == "__main__":
p1 = om.Problem()
p1.model = om.Group()
des_vars = p1.model.add_subsystem('des_vars', om.IndepVarComp())
des_vars.add_output('W', 0.0, units='lbm/s')
des_vars.add_output('alt', 1., units='ft')
des_vars.add_output('MN', 0.5)
des_vars.add_output('dTs', 0.0, units='degR')
fc = p1.model.add_subsystem("fc", FlightConditions())
p1.model.connect('des_vars.W', 'fc.W')
p1.model.connect('des_vars.alt', 'fc.alt')
p1.model.connect('des_vars.MN', 'fc.MN')
p1.model.connect('des_vars.dTs', 'fc.dTs')
p1.setup()
# p1.root.list_connections()
p1['des_vars.alt'] = 17868.79060515557
p1['des_vars.MN'] = 2.101070288213628
p1['des_vars.dTs'] = 0.0
p1['des_vars.W'] = 1.0
p1.run_model()
print('Ts_atm: ', p1['fc.ambient.Ts'])
print('Ts_set: ', p1['fc.Fl_O:stat:T'])
print('Ps_atm: ', p1['fc.ambient.Ps'])
print('Ps_set: ', p1['fc.Fl_O:stat:P'])
print('rhos_atm: ', p1['fc.ambient.rhos']*32.175)
print('rhos_set: ', p1['fc.Fl_O:stat:rho'])
print('W', p1['fc.Fl_O:stat:W'])
print('Pt: ', p1['fc.Fl_O:tot:P'])
| [
"openmdao.api.IndepVarComp",
"pycycle.elements.ambient.Ambient",
"openmdao.api.Group",
"openmdao.api.BalanceComp",
"openmdao.api.DirectSolver",
"openmdao.api.NewtonSolver",
"pycycle.elements.flow_start.FlowStart",
"openmdao.api.BoundsEnforceLS",
"openmdao.api.Problem"
] | [((3148, 3160), 'openmdao.api.Problem', 'om.Problem', ([], {}), '()\n', (3158, 3160), True, 'import openmdao.api as om\n'), ((3176, 3186), 'openmdao.api.Group', 'om.Group', ([], {}), '()\n', (3184, 3186), True, 'import openmdao.api as om\n'), ((2177, 2194), 'openmdao.api.NewtonSolver', 'om.NewtonSolver', ([], {}), '()\n', (2192, 2194), True, 'import openmdao.api as om\n'), ((2490, 2510), 'openmdao.api.BoundsEnforceLS', 'om.BoundsEnforceLS', ([], {}), '()\n', (2508, 2510), True, 'import openmdao.api as om\n'), ((2720, 2754), 'openmdao.api.DirectSolver', 'om.DirectSolver', ([], {'assemble_jac': '(True)'}), '(assemble_jac=True)\n', (2735, 2754), True, 'import openmdao.api as om\n'), ((3238, 3255), 'openmdao.api.IndepVarComp', 'om.IndepVarComp', ([], {}), '()\n', (3253, 3255), True, 'import openmdao.api as om\n'), ((1244, 1253), 'pycycle.elements.ambient.Ambient', 'Ambient', ([], {}), '()\n', (1251, 1253), False, 'from pycycle.elements.ambient import Ambient\n'), ((1333, 1343), 'openmdao.api.Group', 'om.Group', ([], {}), '()\n', (1341, 1343), True, 'import openmdao.api as om\n'), ((1527, 1631), 'pycycle.elements.flow_start.FlowStart', 'FlowStart', ([], {'thermo_method': 'thermo_method', 'thermo_data': 'thermo_data', 'elements': 'elements', 'use_WAR': 'use_WAR'}), '(thermo_method=thermo_method, thermo_data=thermo_data, elements=\n elements, use_WAR=use_WAR)\n', (1536, 1631), False, 'from pycycle.elements.flow_start import FlowStart\n'), ((1851, 1867), 'openmdao.api.BalanceComp', 'om.BalanceComp', ([], {}), '()\n', (1865, 1867), True, 'import openmdao.api as om\n')] |
# Lint as: python3
# coding=utf-8
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Split data into train, validation and test dataset according to person.
That is, use some people's data as the training set, some other people's data
as the validation set, and the remaining people's data as the test set. The
resulting splits are saved separately under "/person_split".
It will generate new files with the following structure:
├──person_split
│ ├── test
│ ├── train
│ └──valid
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import random
from data_split import read_data
from data_split import write_data
def person_split(whole_data, train_names, valid_names, test_names):
"""Split data by person."""
random.seed(30)
random.shuffle(whole_data)
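  # fix the seed so the shuffle (and therefore the split) is reproducible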
train_data = []
valid_data = []
test_data = []
for idx, data in enumerate(whole_data): # pylint: disable=unused-variable
if data["name"] in train_names:
train_data.append(data)
elif data["name"] in valid_names:
valid_data.append(data)
elif data["name"] in test_names:
test_data.append(data)
print("train_length:" + str(len(train_data)))
print("valid_length:" + str(len(valid_data)))
print("test_length:" + str(len(test_data)))
return train_data, valid_data, test_data
if __name__ == "__main__":
data = read_data("./data/complete_data")
train_names = [
"hyw", "shiyun", "tangsy", "dengyl", "jiangyh", "xunkai", "negative3",
"negative4", "negative5", "negative6"
]
valid_names = ["lsj", "pengxl", "negative2", "negative7"]
test_names = ["liucx", "zhangxy", "negative1", "negative8"]
train_data, valid_data, test_data = person_split(data, train_names,
valid_names, test_names)
if not os.path.exists("./person_split"):
os.makedirs("./person_split")
write_data(train_data, "./person_split/train")
write_data(valid_data, "./person_split/valid")
write_data(test_data, "./person_split/test")
| [
"os.path.exists",
"random.shuffle",
"os.makedirs",
"data_split.write_data",
"data_split.read_data",
"random.seed"
] | [((1405, 1420), 'random.seed', 'random.seed', (['(30)'], {}), '(30)\n', (1416, 1420), False, 'import random\n'), ((1425, 1451), 'random.shuffle', 'random.shuffle', (['whole_data'], {}), '(whole_data)\n', (1439, 1451), False, 'import random\n'), ((2055, 2088), 'data_split.read_data', 'read_data', (['"""./data/complete_data"""'], {}), "('./data/complete_data')\n", (2064, 2088), False, 'from data_split import read_data\n'), ((2659, 2705), 'data_split.write_data', 'write_data', (['train_data', '"""./person_split/train"""'], {}), "(train_data, './person_split/train')\n", (2669, 2705), False, 'from data_split import write_data\n'), ((2710, 2756), 'data_split.write_data', 'write_data', (['valid_data', '"""./person_split/valid"""'], {}), "(valid_data, './person_split/valid')\n", (2720, 2756), False, 'from data_split import write_data\n'), ((2761, 2805), 'data_split.write_data', 'write_data', (['test_data', '"""./person_split/test"""'], {}), "(test_data, './person_split/test')\n", (2771, 2805), False, 'from data_split import write_data\n'), ((2583, 2615), 'os.path.exists', 'os.path.exists', (['"""./person_split"""'], {}), "('./person_split')\n", (2597, 2615), False, 'import os\n'), ((2625, 2654), 'os.makedirs', 'os.makedirs', (['"""./person_split"""'], {}), "('./person_split')\n", (2636, 2654), False, 'import os\n')] |
from datetime import datetime
from kubernetes import client
from kubernetes.client.rest import ApiException
import os
import time
import yaml
from tests import config as conf
import tests.utils as ut
def remove_clusterrole_binding(shipper_name, crb_name):
    # remove the cluster role binding
k8s_client = client.RbacAuthorizationV1Api()
try:
k8s_client.delete_cluster_role_binding(crb_name)
print(f"\nsuccessfully deleted: {crb_name}")
except Exception as e:
print(f"\n{shipper_name} cluster role binding deletion has failed, please manually delete {crb_name}:")
print(f"kubectl delete clusterrolebinding {crb_name}")
def filebeat_teardown(namespace):
# remove clusterrolebind
# TODO: find a solution for sharing the name both here and in the kube object
crb_name = f"filebeat-cluster-role-binding-{namespace}"
remove_clusterrole_binding("filebeat", crb_name)
def fluent_bit_teardown(namespace):
# remove clusterrolebind
# TODO: find a solution for sharing the name both here and in the kube object
crb_name = f"fluent-bit-clusterrole-binding-{namespace}"
remove_clusterrole_binding("fluent-bit", crb_name)
def add_elastic_cluster(namespace):
print("\nDeploying ElasticSearch\n")
add_deployment_dir(namespace, conf.ELASTIC_CONF_DIR)
def add_filebeat_cluster(namespace):
print("\nDeploying FileBeat\n")
add_deployment_dir(namespace, conf.FILEBEAT_CONF_DIR)
def add_fluent_bit_cluster(namespace):
print("\nDeploying Fluent-bit\n")
add_deployment_dir(namespace, conf.FLUENT_BIT_CONF_DIR)
def add_kibana_cluster(namespace):
print("\nDeploying Kibana\n")
add_deployment_dir(namespace, conf.KIBANA_CONF_DIR)
def add_logstash_cluster(namespace):
print("\nDeploying LogStash\n")
add_deployment_dir(namespace, conf.LOGSTASH_CONF_DIR)
def add_deployment_dir(namespace, dir_path, delete=False):
with open(os.path.join(dir_path, 'dep_order.txt')) as f:
dep_order = f.readline()
dep_lst = [x.strip() for x in dep_order.split(',')]
print(dep_lst)
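    # placeholders in the deployment YAMLs that are substituted with the target namespace and local Elasticsearch credentials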
phrases_to_replace = ["(?<!_)NAMESPACE", "REP_ES_USER", "REP_ES_PASS"]
values_for_replacement = [namespace, conf.ES_USER_LOCAL, conf.ES_PASS_LOCAL]
for filename in dep_lst:
# replace all phrases with the actual values if exists
modified_file_path, is_change = ut.duplicate_file_and_replace_phrases(
dir_path, filename, f"{namespace}_{filename}", phrases_to_replace, values_for_replacement
)
print(f"applying file: {filename}")
with open(modified_file_path) as f:
dep = yaml.safe_load(f)
if modified_file_path != os.path.join(dir_path, filename) and is_change:
# remove modified file
ut.delete_file(modified_file_path)
name = dep["metadata"]["name"]
if dep['kind'] == 'StatefulSet':
k8s_client = client.AppsV1Api()
if not delete:
k8s_client.create_namespaced_stateful_set(body=dep, namespace=namespace)
else:
k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
elif dep['kind'] == 'DaemonSet':
k8s_client = client.AppsV1Api()
k8s_client.create_namespaced_daemon_set(body=dep, namespace=namespace)
elif dep['kind'] == 'Deployment':
k8s_client = client.AppsV1Api()
k8s_client.create_namespaced_deployment(body=dep, namespace=namespace)
elif dep['kind'] == 'Service':
try:
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_service(body=dep, namespace=namespace)
except ApiException as e:
if e.status == 409:
print(f"Service exists: {dep['metadata']['name']}")
continue
raise e
elif dep['kind'] == 'PodDisruptionBudget':
k8s_client = client.PolicyV1beta1Api()
k8s_client.create_namespaced_pod_disruption_budget(body=dep, namespace=namespace)
elif dep["kind"] == 'Role':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.create_namespaced_role(body=dep, namespace=namespace)
elif dep["kind"] == 'ClusterRole':
try:
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.create_cluster_role(body=dep)
except ApiException as e:
if e.status == 409:
print(f"cluster role already exists")
continue
raise e
elif dep["kind"] == 'RoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
dep["subjects"][0]["namespace"] = namespace
k8s_client.create_namespaced_role_binding(body=dep, namespace=namespace)
elif dep["kind"] == 'ClusterRoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
try:
k8s_client.create_cluster_role_binding(body=dep)
except ApiException as e:
if e.status == 409:
print(f"cluster role binding already exists")
continue
raise e
elif dep["kind"] == 'ConfigMap':
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_config_map(body=dep, namespace=namespace)
elif dep["kind"] == 'ServiceAccount':
k8s_client = client.CoreV1Api()
k8s_client.create_namespaced_service_account(body=dep, namespace=namespace)
print("\nDone\n")
def remove_deployment_dir(namespace, dir_path):
with open(os.path.join(dir_path, 'dep_order.txt')) as f:
dep_order = f.readline()
dep_lst = [x.strip() for x in dep_order.split(',')]
print(dep_lst)
for filename in dep_lst:
print(f"deleting {filename}")
with open(os.path.join(dir_path, filename)) as f:
dep = yaml.safe_load(f)
name = dep["metadata"]["name"]
if dep['kind'] == 'StatefulSet':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_stateful_set(name=name, namespace=namespace)
elif dep['kind'] == 'DaemonSet':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_daemon_set(name=name, namespace=namespace)
elif dep['kind'] == 'Deployment':
k8s_client = client.AppsV1Api()
k8s_client.delete_namespaced_deployment(name=name, namespace=namespace)
elif dep['kind'] == 'Service':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_service(name=name, namespace=namespace, grace_period_seconds=0)
delete_func = k8s_client.delete_namespaced_service
list_func = k8s_client.list_namespaced_service
wait_for_namespaced_deletion(name, namespace, delete_func, list_func)
elif dep['kind'] == 'PodDisruptionBudget':
k8s_client = client.PolicyV1beta1Api()
k8s_client.delete_namespaced_pod_disruption_budget(name=name, namespace=namespace)
elif dep["kind"] == 'Role':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_namespaced_role(name=name, namespace=namespace)
elif dep["kind"] == 'RoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_namespaced_role_binding(name=name, namespace=namespace)
elif dep["kind"] == 'ClusterRoleBinding':
k8s_client = client.RbacAuthorizationV1Api()
k8s_client.delete_cluster_role_binding(name=name)
elif dep["kind"] == 'ConfigMap':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_config_map(name=name, namespace=namespace)
elif dep["kind"] == 'ServiceAccount':
k8s_client = client.CoreV1Api()
k8s_client.delete_namespaced_service_account(name=name, namespace=namespace)
print("\nDone\n")
def wait_for_namespaced_deletion(name, namespace, deletion_func, list_func, timeout=15):
deleted = False
orig_timeout = timeout
while not deleted:
# find by name and delete requested item
for item in list_func(namespace).items:
if item.metadata.name == name:
if timeout < 0:
raise TimeoutError(f"{orig_timeout} was not enough for deleting item:\n{item}\n")
deletion_func(name=name, namespace=namespace)
print(f"service {name} was not deleted, retrying")
time.sleep(1)
timeout -= 1
# validate item was deleted
for item in list_func(namespace).items:
deleted = True
if item.metadata.name == name:
deleted = False
return deleted
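# Usage sketch (resource names are illustrative, not from this module): pair the
# delete/list functions of the same resource type, e.g.
#   core = client.CoreV1Api()
#   wait_for_namespaced_deletion("my-svc", "default",
#                                core.delete_namespaced_service,
#                                core.list_namespaced_service)
# The helper re-issues the delete once per second, raises TimeoutError if the resource
# is still listed after `timeout` retries, and returns True once it disappears.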
def wait_for_daemonset_to_be_ready(name, namespace, timeout=None):
wait_for_to_be_ready("daemonset", name, namespace, timeout=timeout)
def resolve_read_status_func(obj_name):
if obj_name == "daemonset":
return client.AppsV1Api().read_namespaced_daemon_set_status
else:
raise ValueError(f"resolve_read_status_func: {obj_name} is not a valid value")
def wait_for_to_be_ready(obj_name, name, namespace, timeout=None):
start = datetime.now()
while True:
read_func = resolve_read_status_func(obj_name)
resp = read_func(name=name, namespace=namespace)
total_sleep_time = (datetime.now()-start).total_seconds()
number_ready = resp.status.number_ready
updated_number_scheduled = resp.status.updated_number_scheduled
if number_ready and updated_number_scheduled and number_ready == updated_number_scheduled:
print("Total time waiting for {3} {0} [size: {1}]: {2} sec".format(name, number_ready, total_sleep_time,
obj_name))
break
print("{0}/{1} pods ready {2} sec ".format(number_ready, updated_number_scheduled, total_sleep_time), end="\r")
time.sleep(1)
if timeout and total_sleep_time > timeout:
raise Exception(f"Timeout waiting for {obj_name} to be ready")
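# Minimal usage sketch (daemonset/namespace names are made up for illustration):
#   wait_for_daemonset_to_be_ready("node-agent", "monitoring", timeout=120)
# blocks until status.number_ready equals status.updated_number_scheduled, printing
# progress once per second, and raises once `timeout` seconds have elapsed.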
| [
"kubernetes.client.CoreV1Api",
"tests.utils.duplicate_file_and_replace_phrases",
"os.path.join",
"time.sleep",
"datetime.datetime.now",
"yaml.safe_load",
"kubernetes.client.AppsV1Api",
"kubernetes.client.RbacAuthorizationV1Api",
"kubernetes.client.PolicyV1beta1Api",
"tests.utils.delete_file"
] | [((305, 336), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (334, 336), False, 'from kubernetes import client\n'), ((9695, 9709), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9707, 9709), False, 'from datetime import datetime\n'), ((2378, 2510), 'tests.utils.duplicate_file_and_replace_phrases', 'ut.duplicate_file_and_replace_phrases', (['dir_path', 'filename', 'f"""{namespace}_{filename}"""', 'phrases_to_replace', 'values_for_replacement'], {}), "(dir_path, filename,\n f'{namespace}_{filename}', phrases_to_replace, values_for_replacement)\n", (2415, 2510), True, 'import tests.utils as ut\n'), ((10490, 10503), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (10500, 10503), False, 'import time\n'), ((1926, 1965), 'os.path.join', 'os.path.join', (['dir_path', '"""dep_order.txt"""'], {}), "(dir_path, 'dep_order.txt')\n", (1938, 1965), False, 'import os\n'), ((2635, 2652), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (2649, 2652), False, 'import yaml\n'), ((5910, 5949), 'os.path.join', 'os.path.join', (['dir_path', '"""dep_order.txt"""'], {}), "(dir_path, 'dep_order.txt')\n", (5922, 5949), False, 'import os\n'), ((6217, 6234), 'yaml.safe_load', 'yaml.safe_load', (['f'], {}), '(f)\n', (6231, 6234), False, 'import yaml\n'), ((9464, 9482), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (9480, 9482), False, 'from kubernetes import client\n'), ((2793, 2827), 'tests.utils.delete_file', 'ut.delete_file', (['modified_file_path'], {}), '(modified_file_path)\n', (2807, 2827), True, 'import tests.utils as ut\n'), ((2946, 2964), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (2962, 2964), False, 'from kubernetes import client\n'), ((6159, 6191), 'os.path.join', 'os.path.join', (['dir_path', 'filename'], {}), '(dir_path, filename)\n', (6171, 6191), False, 'import os\n'), ((6352, 6370), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (6368, 6370), False, 'from kubernetes import client\n'), ((8986, 8999), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (8996, 8999), False, 'import time\n'), ((2690, 2722), 'os.path.join', 'os.path.join', (['dir_path', 'filename'], {}), '(dir_path, filename)\n', (2702, 2722), False, 'import os\n'), ((3279, 3297), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (3295, 3297), False, 'from kubernetes import client\n'), ((6535, 6553), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (6551, 6553), False, 'from kubernetes import client\n'), ((9866, 9880), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (9878, 9880), False, 'from datetime import datetime\n'), ((3460, 3478), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (3476, 3478), False, 'from kubernetes import client\n'), ((6717, 6735), 'kubernetes.client.AppsV1Api', 'client.AppsV1Api', ([], {}), '()\n', (6733, 6735), False, 'from kubernetes import client\n'), ((6896, 6914), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (6912, 6914), False, 'from kubernetes import client\n'), ((3663, 3681), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (3679, 3681), False, 'from kubernetes import client\n'), ((4073, 4098), 'kubernetes.client.PolicyV1beta1Api', 'client.PolicyV1beta1Api', ([], {}), '()\n', (4096, 4098), False, 'from kubernetes import client\n'), ((7324, 7349), 'kubernetes.client.PolicyV1beta1Api', 'client.PolicyV1beta1Api', ([], {}), '()\n', (7347, 7349), False, 
'from kubernetes import client\n'), ((4266, 4297), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (4295, 4297), False, 'from kubernetes import client\n'), ((7518, 7549), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (7547, 7549), False, 'from kubernetes import client\n'), ((7708, 7739), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (7737, 7739), False, 'from kubernetes import client\n'), ((4480, 4511), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (4509, 4511), False, 'from kubernetes import client\n'), ((4854, 4885), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (4883, 4885), False, 'from kubernetes import client\n'), ((7913, 7944), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (7942, 7944), False, 'from kubernetes import client\n'), ((5118, 5149), 'kubernetes.client.RbacAuthorizationV1Api', 'client.RbacAuthorizationV1Api', ([], {}), '()\n', (5147, 5149), False, 'from kubernetes import client\n'), ((8085, 8103), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (8101, 8103), False, 'from kubernetes import client\n'), ((5527, 5545), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (5543, 5545), False, 'from kubernetes import client\n'), ((8271, 8289), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (8287, 8289), False, 'from kubernetes import client\n'), ((5712, 5730), 'kubernetes.client.CoreV1Api', 'client.CoreV1Api', ([], {}), '()\n', (5728, 5730), False, 'from kubernetes import client\n')] |
import os
from dotenv import load_dotenv
class Config:
# Current Version
NATLAS_VERSION = "0.6.10"
BASEDIR = os.path.abspath(os.path.dirname(__file__))
load_dotenv(os.path.join(BASEDIR, '.env'))
def get_int(self, varname):
tmp = os.environ.get(varname)
if tmp:
return int(tmp)
return None
def get_bool(self, varname):
tmp = os.environ.get(varname)
if tmp and tmp.upper() == "TRUE":
return True
if tmp and tmp.upper() == "FALSE":
return False
return None
def __init__(self):
# url of server to get/submit work from/to
self.server = os.environ.get('NATLAS_SERVER_ADDRESS') or 'http://127.0.0.1:5000'
# ignore warnings about SSL connections
# you shouldn't ignore ssl warnings, but I'll give you the option
# Instead, you should put the trusted CA certificate bundle on the agent and use the REQUESTS_CA_BUNDLE env variable
self.ignore_ssl_warn = self.get_bool('NATLAS_IGNORE_SSL_WARN') or False
# maximum number of threads to utilize
self.max_threads = self.get_int('NATLAS_MAX_THREADS') or 3
# Are we allowed to scan local addresses?
# By default, agents protect themselves from scanning their local network
self.scan_local = self.get_bool('NATLAS_SCAN_LOCAL') or False
# default time to wait for the server to respond
self.request_timeout = self.get_int('NATLAS_REQUEST_TIMEOUT') or 15 # seconds
# Maximum value for exponential backoff of requests, 5 minutes default
self.backoff_max = self.get_int('NATLAS_BACKOFF_MAX') or 300 # seconds
# Base value to begin the exponential backoff
self.backoff_base = self.get_int('NATLAS_BACKOFF_BASE') or 1 # seconds
# Maximum number of times to retry submitting data before giving up
# This is useful if a thread is submitting data that the server doesn't understand for some reason
self.max_retries = self.get_int('NATLAS_MAX_RETRIES') or 10
# Identification string that identifies the agent that performed any given scan
# Used for database lookup and stored in scan output
self.agent_id = os.environ.get("NATLAS_AGENT_ID") or None
# Authentication token that agents can use to talk to the server API
# Only needed if the server is configured to require agent authentication
self.auth_token = os.environ.get("NATLAS_AGENT_TOKEN") or None
# Optionally save files that failed to upload
self.save_fails = self.get_bool("NATLAS_SAVE_FAILS") or False
# Allow version overrides for local development
# Necessary to test versioned host data templates before release
self.version_override = os.environ.get("NATLAS_VERSION_OVERRIDE") or None
self.sentry_dsn = os.environ.get("SENTRY_DSN") or None
if self.version_override:
self.NATLAS_VERSION = self.version_override
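# Usage sketch (environment values are illustrative):
#   os.environ["NATLAS_SERVER_ADDRESS"] = "https://natlas.example.org"
#   os.environ["NATLAS_MAX_THREADS"] = "8"
#   config = Config()   # config.server == "https://natlas.example.org", config.max_threads == 8,
#                       # everything not set in the environment falls back to its default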
| [
"os.path.dirname",
"os.path.join",
"os.environ.get"
] | [((132, 157), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (147, 157), False, 'import os\n'), ((172, 201), 'os.path.join', 'os.path.join', (['BASEDIR', '""".env"""'], {}), "(BASEDIR, '.env')\n", (184, 201), False, 'import os\n'), ((241, 264), 'os.environ.get', 'os.environ.get', (['varname'], {}), '(varname)\n', (255, 264), False, 'import os\n'), ((347, 370), 'os.environ.get', 'os.environ.get', (['varname'], {}), '(varname)\n', (361, 370), False, 'import os\n'), ((572, 611), 'os.environ.get', 'os.environ.get', (['"""NATLAS_SERVER_ADDRESS"""'], {}), "('NATLAS_SERVER_ADDRESS')\n", (586, 611), False, 'import os\n'), ((2022, 2055), 'os.environ.get', 'os.environ.get', (['"""NATLAS_AGENT_ID"""'], {}), "('NATLAS_AGENT_ID')\n", (2036, 2055), False, 'import os\n'), ((2232, 2268), 'os.environ.get', 'os.environ.get', (['"""NATLAS_AGENT_TOKEN"""'], {}), "('NATLAS_AGENT_TOKEN')\n", (2246, 2268), False, 'import os\n'), ((2534, 2575), 'os.environ.get', 'os.environ.get', (['"""NATLAS_VERSION_OVERRIDE"""'], {}), "('NATLAS_VERSION_OVERRIDE')\n", (2548, 2575), False, 'import os\n'), ((2605, 2633), 'os.environ.get', 'os.environ.get', (['"""SENTRY_DSN"""'], {}), "('SENTRY_DSN')\n", (2619, 2633), False, 'import os\n')] |
import os
import sys
import click
import pickle
import sncosmo
import numpy as np
from astropy.table import Table
DATA_PATH = '/home/samdixon/jla_light_curves/'
def modify_error(lc, error_floor=0.):
"""Add an error floor of `error_floor` times the maximum flux of the band
to each observation
"""
data = sncosmo.photdata.photometric_data(lc).normalized(zp=25., zpsys='ab')
new_lc = {'time': data.time,
'band': data.band,
'flux': data.flux,
'fluxerr': data.fluxerr,
'zp': data.zp,
'zpsys': data.zpsys}
for band in set(data.band):
band_cut = data.band==band
max_flux_in_band = np.max(data.flux[band_cut])
new_lc['fluxerr'][band_cut] = np.sqrt((error_floor*max_flux_in_band)**2+data.fluxerr[band_cut]**2)
new_lc = Table(new_lc, meta=lc.meta)
return new_lc
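# Worked example of the floor (numbers are illustrative): with error_floor=0.05 and a band
# whose brightest point has flux 100, a point with fluxerr=1 is inflated to
# sqrt((0.05*100)**2 + 1**2) ~= 5.1, so no observation in that band keeps an uncertainty
# much below 5% of the band's peak flux.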
def fit_lc_and_save(lc, model_name, save_dir, no_mc):
name = lc.meta['SN']
model = sncosmo.Model(source=model_name,
effects=[sncosmo.CCM89Dust()],
effect_names=['mw'],
effect_frames=['obs'])
if type(name) is float:
name = int(name)
z = lc.meta['Z_HELIO']
mwebv = lc.meta['MWEBV']
bounds = {}
try:
t0 = float(lc.meta['DayMax'].split()[0])
bounds['t0'] = (t0-5, t0+5)
except KeyError:
try:
t0 = np.mean(lc['Date'])
bounds['t0'] = (min(lc['Date'])-20, max(lc['Date']))
except KeyError:
t0 = np.mean(lc['time'])
bounds['t0'] = (min(lc['time'])-20, max(lc['time']))
bounds['z'] = ((1-1e-4)*z, (1+1e-4)*z)
for param_name in model.source.param_names[1:]:
bounds[param_name] = (-50, 50)
modelcov = model_name=='salt2'
model.set(z=z, t0=t0, mwebv=mwebv)
phase_range = (-15, 45) if model_name=='salt2' else (-10, 40)
wave_range = (3000, 7000) if model_name=='salt2' else None
save_path = os.path.join(save_dir, '{}.pkl'.format(name))
try:
minuit_result, minuit_fit_model = sncosmo.fit_lc(lc, model, model.param_names[:-2], bounds=bounds,
phase_range=phase_range, wave_range=wave_range,
warn=False, modelcov=modelcov)
if not no_mc:
emcee_result, emcee_fit_model = sncosmo.mcmc_lc(sncosmo.select_data(lc, minuit_result['data_mask']),
minuit_fit_model,
model.param_names[:-2],
guess_t0=False,
bounds=bounds,
warn=False,
nwalkers=40,
modelcov=modelcov)
pickle.dump(emcee_result, open(save_path, 'wb'))
else:
pickle.dump(minuit_result, open(save_path, 'wb'))
    except Exception:  # report and skip this SN if any stage of the fit fails
print('Fit to {} failed'.format(name))
sys.stdout.flush()
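# Usage sketch (output path is illustrative): fit_lc_and_save(lc, 'salt2', '/tmp/fits', no_mc=True)
# runs the minuit-only fit and pickles the result to /tmp/fits/<SN>.pkl; with no_mc=False the
# emcee sampler is started from the minuit solution and its result is pickled instead.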
def main():
model_name, start, finish, err_floor, no_mc = sys.argv[1:]
start = int(start)
finish = int(finish)
err_floor = float(err_floor)
no_mc = bool(int(no_mc))
if no_mc:
save_dir = '/home/samdixon/host_unity/fitting/results_mw_reddening/jla_{}_{:02d}'.format(model_name, int(err_floor*100))
else:
save_dir = '/home/samdixon/host_unity/fitting/results_mw_reddening_mcmc/jla_{}_{:02d}'.format(model_name, int(err_floor*100))
if not os.path.isdir(save_dir):
os.makedirs(save_dir)
lcs = []
for f in os.listdir(DATA_PATH)[int(start):int(finish)]:
if f[:2] == 'lc':
lc = sncosmo.read_lc(os.path.join(DATA_PATH, f), format='salt2', expand_bands=True, read_covmat=True)
lc = modify_error(lc, err_floor)
name = lc.meta['SN']
if type(name) is float:
name = int(name)
load_path = os.path.join(save_dir, '{}.pkl'.format(name))
try:
pickle.load(open(load_path, 'rb'))
print('{}: loaded'.format(name))
sys.stdout.flush()
except IOError:
print('Fitting {}'.format(name))
sys.stdout.flush()
fit_lc_and_save(lc, model_name, save_dir, no_mc)
else:
continue
if __name__=='__main__':
main()
| [
"numpy.mean",
"sncosmo.fit_lc",
"numpy.sqrt",
"os.listdir",
"astropy.table.Table",
"os.makedirs",
"os.path.join",
"numpy.max",
"os.path.isdir",
"sncosmo.select_data",
"sncosmo.CCM89Dust",
"sys.stdout.flush",
"sncosmo.photdata.photometric_data"
] | [((841, 868), 'astropy.table.Table', 'Table', (['new_lc'], {'meta': 'lc.meta'}), '(new_lc, meta=lc.meta)\n', (846, 868), False, 'from astropy.table import Table\n'), ((693, 720), 'numpy.max', 'np.max', (['data.flux[band_cut]'], {}), '(data.flux[band_cut])\n', (699, 720), True, 'import numpy as np\n'), ((759, 835), 'numpy.sqrt', 'np.sqrt', (['((error_floor * max_flux_in_band) ** 2 + data.fluxerr[band_cut] ** 2)'], {}), '((error_floor * max_flux_in_band) ** 2 + data.fluxerr[band_cut] ** 2)\n', (766, 835), True, 'import numpy as np\n'), ((2109, 2261), 'sncosmo.fit_lc', 'sncosmo.fit_lc', (['lc', 'model', 'model.param_names[:-2]'], {'bounds': 'bounds', 'phase_range': 'phase_range', 'wave_range': 'wave_range', 'warn': '(False)', 'modelcov': 'modelcov'}), '(lc, model, model.param_names[:-2], bounds=bounds,\n phase_range=phase_range, wave_range=wave_range, warn=False, modelcov=\n modelcov)\n', (2123, 2261), False, 'import sncosmo\n'), ((3760, 3783), 'os.path.isdir', 'os.path.isdir', (['save_dir'], {}), '(save_dir)\n', (3773, 3783), False, 'import os\n'), ((3793, 3814), 'os.makedirs', 'os.makedirs', (['save_dir'], {}), '(save_dir)\n', (3804, 3814), False, 'import os\n'), ((3842, 3863), 'os.listdir', 'os.listdir', (['DATA_PATH'], {}), '(DATA_PATH)\n', (3852, 3863), False, 'import os\n'), ((328, 365), 'sncosmo.photdata.photometric_data', 'sncosmo.photdata.photometric_data', (['lc'], {}), '(lc)\n', (361, 365), False, 'import sncosmo\n'), ((3243, 3261), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (3259, 3261), False, 'import sys\n'), ((1059, 1078), 'sncosmo.CCM89Dust', 'sncosmo.CCM89Dust', ([], {}), '()\n', (1076, 1078), False, 'import sncosmo\n'), ((1447, 1466), 'numpy.mean', 'np.mean', (["lc['Date']"], {}), "(lc['Date'])\n", (1454, 1466), True, 'import numpy as np\n'), ((2449, 2500), 'sncosmo.select_data', 'sncosmo.select_data', (['lc', "minuit_result['data_mask']"], {}), "(lc, minuit_result['data_mask'])\n", (2468, 2500), False, 'import sncosmo\n'), ((3948, 3974), 'os.path.join', 'os.path.join', (['DATA_PATH', 'f'], {}), '(DATA_PATH, f)\n', (3960, 3974), False, 'import os\n'), ((4379, 4397), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4395, 4397), False, 'import sys\n'), ((1574, 1593), 'numpy.mean', 'np.mean', (["lc['time']"], {}), "(lc['time'])\n", (1581, 1593), True, 'import numpy as np\n'), ((4491, 4509), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (4507, 4509), False, 'import sys\n')] |
""" Unit tests for the system interface."""
import unittest
from six import assertRaisesRegex
from six.moves import cStringIO
import numpy as np
from openmdao.api import Problem, Group, IndepVarComp, ExecComp
from openmdao.test_suite.components.options_feature_vector import VectorDoublingComp
from openmdao.utils.assert_utils import assert_rel_error, assert_warning
class TestSystem(unittest.TestCase):
def test_vector_context_managers(self):
g1 = Group()
g1.add_subsystem('Indep', IndepVarComp('a', 5.0), promotes=['a'])
g2 = g1.add_subsystem('G2', Group(), promotes=['*'])
g2.add_subsystem('C1', ExecComp('b=2*a'), promotes=['a', 'b'])
model = Group()
model.add_subsystem('G1', g1, promotes=['b'])
model.add_subsystem('Sink', ExecComp('c=2*b'), promotes=['b'])
p = Problem(model=model)
p.set_solver_print(level=0)
# Test pre-setup errors
with self.assertRaises(Exception) as cm:
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(str(cm.exception),
"Group: Cannot get vectors because setup has not yet been called.")
with self.assertRaises(Exception) as cm:
d_inputs, d_outputs, d_residuals = model.get_linear_vectors('vec')
self.assertEqual(str(cm.exception),
"Group: Cannot get vectors because setup has not yet been called.")
p.setup()
p.run_model()
# Test inputs with original values
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(inputs['G1.G2.C1.a'], 5.)
inputs, outputs, residuals = g1.get_nonlinear_vectors()
self.assertEqual(inputs['G2.C1.a'], 5.)
# Test inputs after setting a new value
inputs, outputs, residuals = g2.get_nonlinear_vectors()
inputs['C1.a'] = -1.
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(inputs['G1.G2.C1.a'], -1.)
inputs, outputs, residuals = g1.get_nonlinear_vectors()
self.assertEqual(inputs['G2.C1.a'], -1.)
# Test outputs with original values
inputs, outputs, residuals = model.get_nonlinear_vectors()
self.assertEqual(outputs['G1.G2.C1.b'], 10.)
inputs, outputs, residuals = g2.get_nonlinear_vectors()
# Test outputs after setting a new value
inputs, outputs, residuals = model.get_nonlinear_vectors()
outputs['G1.G2.C1.b'] = 123.
self.assertEqual(outputs['G1.G2.C1.b'], 123.)
inputs, outputs, residuals = g2.get_nonlinear_vectors()
outputs['C1.b'] = 789.
self.assertEqual(outputs['C1.b'], 789.)
# Test residuals
inputs, outputs, residuals = model.get_nonlinear_vectors()
residuals['G1.G2.C1.b'] = 99.0
self.assertEqual(residuals['G1.G2.C1.b'], 99.0)
# Test linear
d_inputs, d_outputs, d_residuals = model.get_linear_vectors('linear')
d_outputs['G1.G2.C1.b'] = 10.
self.assertEqual(d_outputs['G1.G2.C1.b'], 10.)
# Test linear with invalid vec_name
with self.assertRaises(Exception) as cm:
d_inputs, d_outputs, d_residuals = model.get_linear_vectors('bad_name')
self.assertEqual(str(cm.exception),
"Group (<model>): There is no linear vector named %s" % 'bad_name')
def test_set_checks_shape(self):
indep = IndepVarComp()
indep.add_output('a')
indep.add_output('x', shape=(5, 1))
g1 = Group()
g1.add_subsystem('Indep', indep, promotes=['a', 'x'])
g2 = g1.add_subsystem('G2', Group(), promotes=['*'])
g2.add_subsystem('C1', ExecComp('b=2*a'), promotes=['a', 'b'])
g2.add_subsystem('C2', ExecComp('y=2*x',
x=np.zeros((5, 1)),
y=np.zeros((5, 1))),
promotes=['x', 'y'])
model = Group()
model.add_subsystem('G1', g1, promotes=['b', 'y'])
model.add_subsystem('Sink', ExecComp(('c=2*b', 'z=2*y'),
y=np.zeros((5, 1)),
z=np.zeros((5, 1))),
promotes=['b', 'y'])
p = Problem(model=model)
p.setup()
p.set_solver_print(level=0)
p.run_model()
msg = "Incompatible shape for '.*': Expected (.*) but got (.*)"
num_val = -10
arr_val = -10*np.ones((5, 1))
bad_val = -10*np.ones((10))
inputs, outputs, residuals = g2.get_nonlinear_vectors()
#
# set input
#
# assign array to scalar
with assertRaisesRegex(self, ValueError, msg):
inputs['C1.a'] = arr_val
# assign scalar to array
inputs['C2.x'] = num_val
assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10)
# assign array to array
inputs['C2.x'] = arr_val
assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10)
# assign bad array shape to array
with assertRaisesRegex(self, ValueError, msg):
inputs['C2.x'] = bad_val
# assign list to array
inputs['C2.x'] = arr_val.tolist()
assert_rel_error(self, inputs['C2.x'], arr_val, 1e-10)
# assign bad list shape to array
with assertRaisesRegex(self, ValueError, msg):
inputs['C2.x'] = bad_val.tolist()
#
# set output
#
# assign array to scalar
with assertRaisesRegex(self, ValueError, msg):
outputs['C1.b'] = arr_val
# assign scalar to array
outputs['C2.y'] = num_val
assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10)
# assign array to array
outputs['C2.y'] = arr_val
assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10)
# assign bad array shape to array
with assertRaisesRegex(self, ValueError, msg):
outputs['C2.y'] = bad_val
# assign list to array
outputs['C2.y'] = arr_val.tolist()
assert_rel_error(self, outputs['C2.y'], arr_val, 1e-10)
# assign bad list shape to array
with assertRaisesRegex(self, ValueError, msg):
outputs['C2.y'] = bad_val.tolist()
#
# set residual
#
# assign array to scalar
with assertRaisesRegex(self, ValueError, msg):
residuals['C1.b'] = arr_val
# assign scalar to array
residuals['C2.y'] = num_val
assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10)
# assign array to array
residuals['C2.y'] = arr_val
assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10)
# assign bad array shape to array
with assertRaisesRegex(self, ValueError, msg):
residuals['C2.y'] = bad_val
# assign list to array
residuals['C2.y'] = arr_val.tolist()
assert_rel_error(self, residuals['C2.y'], arr_val, 1e-10)
# assign bad list shape to array
with assertRaisesRegex(self, ValueError, msg):
residuals['C2.y'] = bad_val.tolist()
def test_deprecated_solver_names(self):
class DummySolver():
pass
model = Group()
# check nl_solver setter & getter
msg = "The 'nl_solver' attribute provides backwards compatibility " \
"with OpenMDAO 1.x ; use 'nonlinear_solver' instead."
with assert_warning(DeprecationWarning, msg):
model.nl_solver = DummySolver()
with assert_warning(DeprecationWarning, msg):
solver = model.nl_solver
self.assertTrue(isinstance(solver, DummySolver))
# check ln_solver setter & getter
msg = "The 'ln_solver' attribute provides backwards compatibility " \
"with OpenMDAO 1.x ; use 'linear_solver' instead."
with assert_warning(DeprecationWarning, msg):
model.ln_solver = DummySolver()
with assert_warning(DeprecationWarning, msg):
solver = model.ln_solver
self.assertTrue(isinstance(solver, DummySolver))
def test_deprecated_metadata(self):
prob = Problem()
prob.model.add_subsystem('inputs', IndepVarComp('x', shape=3))
prob.model.add_subsystem('double', VectorDoublingComp())
msg = "The 'metadata' attribute provides backwards compatibility " \
"with earlier version of OpenMDAO; use 'options' instead."
with assert_warning(DeprecationWarning, msg):
prob.model.double.metadata['size'] = 3
prob.model.connect('inputs.x', 'double.x')
prob.setup()
prob['inputs.x'] = [1., 2., 3.]
prob.run_model()
assert_rel_error(self, prob['double.y'], [2., 4., 6.])
def test_list_inputs_output_with_includes_excludes(self):
from openmdao.test_suite.scripts.circuit_analysis import Resistor, Diode, Node, Circuit
p = Problem()
model = p.model
model.add_subsystem('ground', IndepVarComp('V', 0., units='V'))
model.add_subsystem('source', IndepVarComp('I', 0.1, units='A'))
model.add_subsystem('circuit', Circuit())
model.connect('source.I', 'circuit.I_in')
model.connect('ground.V', 'circuit.Vg')
p.setup()
p.run_model()
# Inputs with no includes or excludes
inputs = model.list_inputs(out_stream=None)
self.assertEqual( len(inputs), 11)
# Inputs with includes
inputs = model.list_inputs(includes=['*V_out*'], out_stream=None)
self.assertEqual( len(inputs), 3)
# Inputs with includes matching a promoted name
inputs = model.list_inputs(includes=['*Vg*'], out_stream=None)
self.assertEqual( len(inputs), 2)
# Inputs with excludes
inputs = model.list_inputs(excludes=['*V_out*'], out_stream=None)
self.assertEqual( len(inputs), 8)
# Inputs with excludes matching a promoted name
inputs = model.list_inputs(excludes=['*Vg*'], out_stream=None)
self.assertEqual( len(inputs), 9)
# Inputs with includes and excludes
inputs = model.list_inputs(includes=['*V_out*'], excludes=['*Vg*'], out_stream=None)
self.assertEqual( len(inputs), 1)
# Outputs with no includes or excludes. Explicit only
outputs = model.list_outputs(implicit=False, out_stream=None)
self.assertEqual( len(outputs), 5)
# Outputs with includes. Explicit only
outputs = model.list_outputs(includes=['*I'], implicit=False, out_stream=None)
self.assertEqual( len(outputs), 4)
# Outputs with excludes. Explicit only
outputs = model.list_outputs(excludes=['circuit*'], implicit=False, out_stream=None)
self.assertEqual( len(outputs), 2)
if __name__ == "__main__":
unittest.main()
| [
"openmdao.utils.assert_utils.assert_rel_error",
"openmdao.api.ExecComp",
"numpy.ones",
"openmdao.utils.assert_utils.assert_warning",
"openmdao.test_suite.scripts.circuit_analysis.Circuit",
"openmdao.api.IndepVarComp",
"openmdao.api.Group",
"numpy.zeros",
"six.assertRaisesRegex",
"unittest.main",
"openmdao.api.Problem",
"openmdao.test_suite.components.options_feature_vector.VectorDoublingComp"
] | [((10995, 11010), 'unittest.main', 'unittest.main', ([], {}), '()\n', (11008, 11010), False, 'import unittest\n'), ((467, 474), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (472, 474), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((698, 705), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (703, 705), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((844, 864), 'openmdao.api.Problem', 'Problem', ([], {'model': 'model'}), '(model=model)\n', (851, 864), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3482, 3496), 'openmdao.api.IndepVarComp', 'IndepVarComp', ([], {}), '()\n', (3494, 3496), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3585, 3592), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (3590, 3592), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4036, 4043), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (4041, 4043), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4378, 4398), 'openmdao.api.Problem', 'Problem', ([], {'model': 'model'}), '(model=model)\n', (4385, 4398), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4952, 5006), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "inputs['C2.x']", 'arr_val', '(1e-10)'], {}), "(self, inputs['C2.x'], arr_val, 1e-10)\n", (4968, 5006), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5081, 5135), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "inputs['C2.x']", 'arr_val', '(1e-10)'], {}), "(self, inputs['C2.x'], arr_val, 1e-10)\n", (5097, 5135), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5353, 5407), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "inputs['C2.x']", 'arr_val', '(1e-10)'], {}), "(self, inputs['C2.x'], arr_val, 1e-10)\n", (5369, 5407), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5796, 5851), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "outputs['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, outputs['C2.y'], arr_val, 1e-10)\n", (5812, 5851), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((5927, 5982), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "outputs['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, outputs['C2.y'], arr_val, 1e-10)\n", (5943, 5982), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((6202, 6257), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "outputs['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, outputs['C2.y'], arr_val, 1e-10)\n", (6218, 6257), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((6653, 6710), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "residuals['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, residuals['C2.y'], arr_val, 1e-10)\n", (6669, 6710), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((6788, 6845), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "residuals['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, residuals['C2.y'], arr_val, 1e-10)\n", (6804, 6845), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), 
((7069, 7126), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "residuals['C2.y']", 'arr_val', '(1e-10)'], {}), "(self, residuals['C2.y'], arr_val, 1e-10)\n", (7085, 7126), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((7381, 7388), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (7386, 7388), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((8318, 8327), 'openmdao.api.Problem', 'Problem', ([], {}), '()\n', (8325, 8327), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((8869, 8926), 'openmdao.utils.assert_utils.assert_rel_error', 'assert_rel_error', (['self', "prob['double.y']", '[2.0, 4.0, 6.0]'], {}), "(self, prob['double.y'], [2.0, 4.0, 6.0])\n", (8885, 8926), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((9096, 9105), 'openmdao.api.Problem', 'Problem', ([], {}), '()\n', (9103, 9105), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((509, 531), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""a"""', '(5.0)'], {}), "('a', 5.0)\n", (521, 531), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((585, 592), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (590, 592), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((641, 658), 'openmdao.api.ExecComp', 'ExecComp', (['"""b=2*a"""'], {}), "('b=2*a')\n", (649, 658), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((796, 813), 'openmdao.api.ExecComp', 'ExecComp', (['"""c=2*b"""'], {}), "('c=2*b')\n", (804, 813), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3692, 3699), 'openmdao.api.Group', 'Group', ([], {}), '()\n', (3697, 3699), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((3748, 3765), 'openmdao.api.ExecComp', 'ExecComp', (['"""b=2*a"""'], {}), "('b=2*a')\n", (3756, 3765), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((4594, 4609), 'numpy.ones', 'np.ones', (['(5, 1)'], {}), '((5, 1))\n', (4601, 4609), True, 'import numpy as np\n'), ((4632, 4643), 'numpy.ones', 'np.ones', (['(10)'], {}), '(10)\n', (4639, 4643), True, 'import numpy as np\n'), ((4798, 4838), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (4815, 4838), False, 'from six import assertRaisesRegex\n'), ((5192, 5232), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (5209, 5232), False, 'from six import assertRaisesRegex\n'), ((5463, 5503), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (5480, 5503), False, 'from six import assertRaisesRegex\n'), ((5640, 5680), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (5657, 5680), False, 'from six import assertRaisesRegex\n'), ((6039, 6079), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (6056, 6079), False, 'from six import assertRaisesRegex\n'), ((6313, 6353), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (6330, 6353), False, 'from six import assertRaisesRegex\n'), ((6493, 6533), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, 
msg)\n', (6510, 6533), False, 'from six import assertRaisesRegex\n'), ((6902, 6942), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (6919, 6942), False, 'from six import assertRaisesRegex\n'), ((7182, 7222), 'six.assertRaisesRegex', 'assertRaisesRegex', (['self', 'ValueError', 'msg'], {}), '(self, ValueError, msg)\n', (7199, 7222), False, 'from six import assertRaisesRegex\n'), ((7592, 7631), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (7606, 7631), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((7691, 7730), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (7705, 7730), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((8027, 8066), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (8041, 8066), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((8126, 8165), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (8140, 8165), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((8371, 8397), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""x"""'], {'shape': '(3)'}), "('x', shape=3)\n", (8383, 8397), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((8442, 8462), 'openmdao.test_suite.components.options_feature_vector.VectorDoublingComp', 'VectorDoublingComp', ([], {}), '()\n', (8460, 8462), False, 'from openmdao.test_suite.components.options_feature_vector import VectorDoublingComp\n'), ((8629, 8668), 'openmdao.utils.assert_utils.assert_warning', 'assert_warning', (['DeprecationWarning', 'msg'], {}), '(DeprecationWarning, msg)\n', (8643, 8668), False, 'from openmdao.utils.assert_utils import assert_rel_error, assert_warning\n'), ((9169, 9202), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""V"""', '(0.0)'], {'units': '"""V"""'}), "('V', 0.0, units='V')\n", (9181, 9202), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((9241, 9274), 'openmdao.api.IndepVarComp', 'IndepVarComp', (['"""I"""', '(0.1)'], {'units': '"""A"""'}), "('I', 0.1, units='A')\n", (9253, 9274), False, 'from openmdao.api import Problem, Group, IndepVarComp, ExecComp\n'), ((9315, 9324), 'openmdao.test_suite.scripts.circuit_analysis.Circuit', 'Circuit', ([], {}), '()\n', (9322, 9324), False, 'from openmdao.test_suite.scripts.circuit_analysis import Resistor, Diode, Node, Circuit\n'), ((3879, 3895), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (3887, 3895), True, 'import numpy as np\n'), ((3939, 3955), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (3947, 3955), True, 'import numpy as np\n'), ((4215, 4231), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (4223, 4231), True, 'import numpy as np\n'), ((4280, 4296), 'numpy.zeros', 'np.zeros', (['(5, 1)'], {}), '((5, 1))\n', (4288, 4296), True, 'import numpy as np\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import argparse
import os
import sqlite3
import sys
import pandas as pd
from src import config
def parse_args(argv):
parser = argparse.ArgumentParser()
parser.add_argument('sample')
parser.add_argument('replace')
return parser.parse_args()
def db_tables(connection):
"""List tables in database."""
res = pd.read_sql("select name from sqlite_master", connection)
return res.name.values
def create_database(sample):
"""Create database with tables for targets, outcomes, and predictions."""
db_name = f'{sample}.db'
db_path = os.path.join(config.DATADIR, db_name)
conn = sqlite3.connect(db_path)
usr_name = f'users_{sample}.csv'
usr_path = os.path.join(config.DATADIR, usr_name)
users = pd.read_csv(usr_path)
db_tbls = db_tables(conn)
for tbl in ['decisions', 'outcomes', 'predictions']:
if tbl not in db_tbls:
users.to_sql(tbl, conn, index=False)
conn.execute(f"create index idx_{tbl}_user_id on {tbl}(user_id)")
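# Example effect (sample name is illustrative): create_database('pilot') reads
# <DATADIR>/users_pilot.csv and creates <DATADIR>/pilot.db with identically seeded
# 'decisions', 'outcomes' and 'predictions' tables, each indexed on user_id.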
def main(argv=None):
if argv is None:
        argv = sys.argv[1:]
args = parse_args(argv)
create_database(args.sample)
if __name__ == '__main__':
sys.exit(main())
| [
"sqlite3.connect",
"argparse.ArgumentParser",
"pandas.read_csv",
"os.path.join",
"pandas.read_sql"
] | [((180, 205), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (203, 205), False, 'import argparse\n'), ((380, 437), 'pandas.read_sql', 'pd.read_sql', (['"""select name from sqlite_master"""', 'connection'], {}), "('select name from sqlite_master', connection)\n", (391, 437), True, 'import pandas as pd\n'), ((617, 654), 'os.path.join', 'os.path.join', (['config.DATADIR', 'db_name'], {}), '(config.DATADIR, db_name)\n', (629, 654), False, 'import os\n'), ((666, 690), 'sqlite3.connect', 'sqlite3.connect', (['db_path'], {}), '(db_path)\n', (681, 690), False, 'import sqlite3\n'), ((743, 781), 'os.path.join', 'os.path.join', (['config.DATADIR', 'usr_name'], {}), '(config.DATADIR, usr_name)\n', (755, 781), False, 'import os\n'), ((794, 815), 'pandas.read_csv', 'pd.read_csv', (['usr_path'], {}), '(usr_path)\n', (805, 815), True, 'import pandas as pd\n')] |
# coding: utf-8
import pytest
import app as service
import yaml
import responder
from starlette.responses import PlainTextResponse
@pytest.fixture
def api():
return service.api
def test_hello_world(api):
r = api.requests.get("/api/v1.0/index")
assert r.text == "Hello, World!"
def test_basic_route(api):
@api.route("/api/v1.0/index")
def index(req, resp):
resp.text = "Hello, World!"
def test_requests_session(api):
assert api.session()
assert api.requests
def test_json_media(api):
dump = {"life": 42}
@api.route("/")
def media(req, resp):
resp.media = dump
r = api.requests.get("http://;/")
assert "json" in r.headers["Content-Type"]
assert r.json() == dump
def test_yaml_media(api):
dump = {"life": 42}
@api.route("/")
def media(req, resp):
resp.media = dump
r = api.requests.get("http://;/", headers={"Accept": "yaml"})
assert "yaml" in r.headers["Content-Type"]
assert yaml.load(r.content) == dump
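# (The YAML body above is chosen only because of the "Accept: yaml" request header;
# the same resp.media assignment is serialised as JSON by default, as test_json_media shows.)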
def test_background(api):
@api.route("/")
def route(req, resp):
@api.background.task
def task():
import time
time.sleep(3)
task()
api.text = "ok"
r = api.requests.get(api.url_for(route))
assert r.ok
def test_500_error(api):
def catcher(req, exc):
return PlainTextResponse("Suppressed error", 500)
api.app.add_exception_handler(ValueError, catcher)
@api.route("/api/v1.0/index")
def view(req, resp):
raise ValueError
r = api.requests.get(api.url_for(view))
assert not r.ok
assert r.content == b'Suppressed error'
def test_404_error(api):
r = api.requests.get("/api/v1.0/foo")
assert r.status_code == responder.API.status_codes.HTTP_404
| [
"time.sleep",
"yaml.load",
"starlette.responses.PlainTextResponse"
] | [((993, 1013), 'yaml.load', 'yaml.load', (['r.content'], {}), '(r.content)\n', (1002, 1013), False, 'import yaml\n'), ((1366, 1408), 'starlette.responses.PlainTextResponse', 'PlainTextResponse', (['"""Suppressed error"""', '(500)'], {}), "('Suppressed error', 500)\n", (1383, 1408), False, 'from starlette.responses import PlainTextResponse\n'), ((1181, 1194), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (1191, 1194), False, 'import time\n')] |
# -*- coding: utf-8 -*-
"""
Script to execute example covarying MMGP regression forecasting model
with full Krhh.
Inputs: Data training and test sets (dictionary pickle)
Data for example:
- normalised solar data for 25 sites for 15 minute forecast
- N_train = 4200, N_test = 2276, P = 25, D = 51
- Xtr[:, :50] 2 recent lagged observations for each site in order
- Xtr[:, 50] time index
- link inputs is a 25x2 array (link inputs repeated for every group)
with normalised lat,long for each site in order
Model Options:
- Sparse or full x-function covariance prior Krhh (set bool SPARSE_PRIOR)
- Diagonal or Kronecker-structured variational posterior covariance Sr (set bool DIAG_POST)
- Sparse or full posterior covariance (when Kronecker posterior; set bool SPARSE_POST)
Current Settings (sparse covarying mmgp model with sparse Kronecker posterior):
DIAG_POST = False
SPARSE_PRIOR = False # set True for equivalent sparse scmmgp model
SPARSE_POST = True
Note on specifying group structure for F:
Grouping occurs via block_struct, a nested list of grouping order
Where functions [i] are independent i.e. in own block, set link_kernel[i] = link_inputs[i] = 1.0
See model class preamble and example below for further details.
"""
import os
import numpy as np
import pickle
import pandas as pd
import traceback
import time
import sklearn.cluster
import csv
import sys
import mmgp
from mmgp import likelihoods
from mmgp import kernels
import tensorflow as tf
from mmgp import datasets
from mmgp import losses
from mmgp import util
dpath = '/experiments/datasets/'
dfile = 'p25_inputsdict.pickle'
dlinkfile = 'p25_linkinputsarray.pickle'
outdir = '/experiments/results/p25_nonsparse_cmmgp/'
try:
os.makedirs(outdir)
except FileExistsError:
pass
def get_inputs():
"""
inputsdict contains {'Yte': Yte, 'Ytr': Ytr, 'Xtr': Xtr, 'Xte': Xte} where values are np.arrays
    np.arrays are truncated so that they split evenly into batches of size = batchsize
returns inputsdict, Xtr_link (ndarray, shape = [P, D_link_features])
"""
with open(os.path.join(dpath, dfile), 'rb') as f:
d_all = pickle.load(f)
with open(os.path.join(dpath, dlinkfile), 'rb') as f:
d_link = pickle.load(f)
return d_all, d_link
def init_z(train_inputs, num_inducing):
# Initialize inducing points using clustering.
mini_batch = sklearn.cluster.MiniBatchKMeans(num_inducing)
cluster_indices = mini_batch.fit_predict(train_inputs)
inducing_locations = mini_batch.cluster_centers_
return inducing_locations
FLAGS = util.util.get_flags()
BATCH_SIZE = FLAGS.batch_size
LEARNING_RATE = FLAGS.learning_rate
DISPLAY_STEP = FLAGS.display_step
EPOCHS = FLAGS.n_epochs
NUM_SAMPLES = FLAGS.mc_train
PRED_SAMPLES = FLAGS.mc_test
NUM_INDUCING = FLAGS.n_inducing
NUM_COMPONENTS = FLAGS.num_components
IS_ARD = FLAGS.is_ard
TOL = FLAGS.opt_tol
VAR_STEPS = FLAGS.var_steps
DIAG_POST = False
SPARSE_PRIOR = False
SPARSE_POST = True # option for non-diag post
MAXTIME = 1200
print("settings done")
# define GPRN P and Q
output_dim = 25 #P
node_dim = 25 #Q
lag_dim = 2
save_nlpds = False # If True saves samples of nlpds for n,p,s
# extract dataset
d, d_link = get_inputs()
Ytr, Yte, Xtr, Xte = d['Ytr'], d['Yte'], d['Xtr'], d['Xte']
data = datasets.DataSet(Xtr.astype(np.float32), Ytr.astype(np.float32), shuffle=False)
test = datasets.DataSet(Xte.astype(np.float32), Yte.astype(np.float32), shuffle=False)
print("dataset created")
# model config block rows (where P=Q): block all w.1, w.2 etc, leave f independent
# order of block_struct is rows, node functions
# lists required: block_struct, link_inputs, kern_link, kern
# block_struct: nested list defining the grouping order
weight_struct = [[] for _ in range(output_dim)]
for i in range(output_dim):
row = list(range(i, i+output_dim*(node_dim-1)+1, output_dim))
row_0 = row.pop(i) # bring diag to pivot position
weight_struct[i] = [row_0] + row
nodes = [[x] for x in list(range(output_dim * node_dim, output_dim * node_dim + output_dim))]
block_struct = weight_struct + nodes
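# For intuition (small case, not used in this run): with output_dim = node_dim = 2 the loop
# above gives weight_struct = [[0, 2], [3, 1]] and nodes = [[4], [5]], so
# block_struct = [[0, 2], [3, 1], [4], [5]] -- each weight row is one group with its
# diagonal entry pivoted to the front, and every node function stays in its own block.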
# create link inputs (link inputs used repeatedly but can have link input per group)
# permute to bring diagonal to first position
link_inputs = [[] for _ in range(output_dim)]
for i in range(output_dim):
idx = list(range(d_link.shape[0]))
link_inputs[i] = d_link[[idx.pop(i)] + idx, :]
link_inputs = link_inputs + [1.0 for i in range(output_dim)] # for full W row blocks, independent nodes
# create 'between' kernel list
klink_rows = [kernels.CompositeKernel('mul',[kernels.RadialBasis(2, std_dev=2.0, lengthscale=1.0, white=0.01, input_scaling = IS_ARD),
kernels.CompactSlice(2, active_dims=[0,1], lengthscale = 2.0, input_scaling = IS_ARD)] )
for i in range(output_dim) ]
klink_f = [1.0 for i in range(node_dim)]
kernlink = klink_rows + klink_f
# create 'within' kernel
# kern
lag_active_dims_s = [ [] for _ in range(output_dim)]
for i in range(output_dim):
lag_active_dims_s[i] = list(range(lag_dim*i, lag_dim*(i+1)))
k_rows = [kernels.CompositeKernel('mul',[kernels.RadialBasisSlice(lag_dim, active_dims=lag_active_dims_s[i],
std_dev = 1.0, white = 0.01, input_scaling = IS_ARD),
kernels.PeriodicSliceFixed(1, active_dims=[Xtr.shape[1]-1],
lengthscale=0.5, std_dev=1.0, period = 144) ])
for i in range(output_dim)]
k_f = [kernels.RadialBasisSlice(lag_dim, active_dims=lag_active_dims_s[i], std_dev = 1.0, white = 0.01, input_scaling = IS_ARD)
for i in range(output_dim)]
kern = k_rows + k_f
print('len link_inputs ',len(link_inputs))
print('len kernlink ',len(kernlink))
print('len kern ', len(kern))
print('no. groups = ', len(block_struct), 'no. latent functions =', len([i for b in block_struct for i in b]))
print('number latent functions', node_dim*(output_dim+1))
likelihood = likelihoods.CovaryingRegressionNetwork(output_dim, node_dim, std_dev = 0.2) # p, q, lik_noise
print("likelihood and kernels set")
Z = init_z(data.X, NUM_INDUCING)
print('inducing points set')
m = mmgp.ExplicitSCMMGP(output_dim, likelihood, kern, kernlink, block_struct, Z, link_inputs,
num_components=NUM_COMPONENTS, diag_post=DIAG_POST, sparse_prior=SPARSE_PRIOR,
sparse_post=SPARSE_POST, num_samples=NUM_SAMPLES, predict_samples=PRED_SAMPLES)
print("model set")
# initialise losses and logging
error_rate = losses.RootMeanSqError(data.Dout)
os.chdir(outdir)
with open("log_results.csv", 'w', newline='') as f:
csv.writer(f).writerow(['epoch', 'fit_runtime', 'nelbo', error_rate.get_name(),'generalised_nlpd'])
with open("log_params.csv", 'w', newline='') as f:
csv.writer(f).writerow(['epoch', 'raw_kernel_params', 'raw_kernlink_params', 'raw_likelihood_params', 'raw_weights'])
with open("log_comp_time.csv", 'w', newline='') as f:
csv.writer(f).writerow(['epoch', 'batch_time', 'nelbo_time', 'pred_time', 'gen_nlpd_time', error_rate.get_name()+'_time'])
# optimise
o = tf.train.AdamOptimizer(LEARNING_RATE, beta1=0.9,beta2=0.99)
print("start time = ", time.strftime('%X %x %Z'))
m.fit(data, o, var_steps = VAR_STEPS, epochs = EPOCHS, batch_size = BATCH_SIZE, display_step=DISPLAY_STEP,
test = test, loss = error_rate, tolerance = TOL, max_time=MAXTIME )
print("optimisation complete")
# export final predicted values and loss metrics
ypred = m.predict(test.X, batch_size = BATCH_SIZE) #same batchsize used for convenience
np.savetxt("predictions.csv", np.concatenate(ypred, axis=1), delimiter=",")
if save_nlpds == True:
nlpd_samples, nlpd_meanvar = m.nlpd_samples(test.X, test.Y, batch_size = BATCH_SIZE)
try:
np.savetxt("nlpd_meanvar.csv", nlpd_meanvar, delimiter=",") # N x 2P as for predictions
    except Exception:
print('nlpd_meanvar export fail')
try:
np.savetxt("nlpd_samples.csv", nlpd_samples, delimiter=",") # NP x S (NxS concat for P tasks)
    except Exception:
print('nlpd_samples export fail')
print("Final " + error_rate.get_name() + "=" + "%.4f" % error_rate.eval(test.Y, ypred[0]))
print("Final " + "generalised_nlpd" + "=" + "%.4f" % m.nlpd_general(test.X, test.Y, batch_size = BATCH_SIZE))
error_rate_end = [losses.MeanAbsError(data.Dout)] # any extra accuracy measures at end of routine
print("Final ", [e.get_name() for e in error_rate_end])
print([e.eval(test.Y, ypred[0]) for e in error_rate_end])
predvar = [np.mean(np.mean(ypred[1]))]
print("Final predvar ", predvar)
with open("final_losses.csv", 'w', newline='') as f:
csv.writer(f).writerows([[e.get_name() for e in error_rate_end] + ['pred_var'],
[e.eval(test.Y, ypred[0]) for e in error_rate_end] + predvar])
print("finish time = " + time.strftime('%X %x %Z'))
| [
"mmgp.ExplicitSCMMGP",
"mmgp.losses.MeanAbsError",
"numpy.mean",
"mmgp.kernels.PeriodicSliceFixed",
"mmgp.util.util.get_flags",
"numpy.concatenate",
"tensorflow.train.AdamOptimizer",
"mmgp.kernels.RadialBasisSlice",
"csv.writer",
"pickle.load",
"numpy.savetxt",
"mmgp.kernels.CompactSlice",
"mmgp.likelihoods.CovaryingRegressionNetwork",
"os.makedirs",
"time.strftime",
"os.path.join",
"os.chdir",
"mmgp.kernels.RadialBasis",
"mmgp.losses.RootMeanSqError"
] | [((2670, 2691), 'mmgp.util.util.get_flags', 'util.util.get_flags', ([], {}), '()\n', (2689, 2691), False, 'from mmgp import util\n'), ((6258, 6331), 'mmgp.likelihoods.CovaryingRegressionNetwork', 'likelihoods.CovaryingRegressionNetwork', (['output_dim', 'node_dim'], {'std_dev': '(0.2)'}), '(output_dim, node_dim, std_dev=0.2)\n', (6296, 6331), False, 'from mmgp import likelihoods\n'), ((6459, 6720), 'mmgp.ExplicitSCMMGP', 'mmgp.ExplicitSCMMGP', (['output_dim', 'likelihood', 'kern', 'kernlink', 'block_struct', 'Z', 'link_inputs'], {'num_components': 'NUM_COMPONENTS', 'diag_post': 'DIAG_POST', 'sparse_prior': 'SPARSE_PRIOR', 'sparse_post': 'SPARSE_POST', 'num_samples': 'NUM_SAMPLES', 'predict_samples': 'PRED_SAMPLES'}), '(output_dim, likelihood, kern, kernlink, block_struct, Z,\n link_inputs, num_components=NUM_COMPONENTS, diag_post=DIAG_POST,\n sparse_prior=SPARSE_PRIOR, sparse_post=SPARSE_POST, num_samples=\n NUM_SAMPLES, predict_samples=PRED_SAMPLES)\n', (6478, 6720), False, 'import mmgp\n'), ((6789, 6822), 'mmgp.losses.RootMeanSqError', 'losses.RootMeanSqError', (['data.Dout'], {}), '(data.Dout)\n', (6811, 6822), False, 'from mmgp import losses\n'), ((6826, 6842), 'os.chdir', 'os.chdir', (['outdir'], {}), '(outdir)\n', (6834, 6842), False, 'import os\n'), ((7378, 7438), 'tensorflow.train.AdamOptimizer', 'tf.train.AdamOptimizer', (['LEARNING_RATE'], {'beta1': '(0.9)', 'beta2': '(0.99)'}), '(LEARNING_RATE, beta1=0.9, beta2=0.99)\n', (7400, 7438), True, 'import tensorflow as tf\n'), ((1792, 1811), 'os.makedirs', 'os.makedirs', (['outdir'], {}), '(outdir)\n', (1803, 1811), False, 'import os\n'), ((5777, 5896), 'mmgp.kernels.RadialBasisSlice', 'kernels.RadialBasisSlice', (['lag_dim'], {'active_dims': 'lag_active_dims_s[i]', 'std_dev': '(1.0)', 'white': '(0.01)', 'input_scaling': 'IS_ARD'}), '(lag_dim, active_dims=lag_active_dims_s[i], std_dev\n =1.0, white=0.01, input_scaling=IS_ARD)\n', (5801, 5896), False, 'from mmgp import kernels\n'), ((7462, 7487), 'time.strftime', 'time.strftime', (['"""%X %x %Z"""'], {}), "('%X %x %Z')\n", (7475, 7487), False, 'import time\n'), ((7883, 7912), 'numpy.concatenate', 'np.concatenate', (['ypred'], {'axis': '(1)'}), '(ypred, axis=1)\n', (7897, 7912), True, 'import numpy as np\n'), ((8603, 8633), 'mmgp.losses.MeanAbsError', 'losses.MeanAbsError', (['data.Dout'], {}), '(data.Dout)\n', (8622, 8633), False, 'from mmgp import losses\n'), ((2214, 2228), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2225, 2228), False, 'import pickle\n'), ((2308, 2322), 'pickle.load', 'pickle.load', (['f'], {}), '(f)\n', (2319, 2322), False, 'import pickle\n'), ((8064, 8123), 'numpy.savetxt', 'np.savetxt', (['"""nlpd_meanvar.csv"""', 'nlpd_meanvar'], {'delimiter': '""","""'}), "('nlpd_meanvar.csv', nlpd_meanvar, delimiter=',')\n", (8074, 8123), True, 'import numpy as np\n'), ((8228, 8287), 'numpy.savetxt', 'np.savetxt', (['"""nlpd_samples.csv"""', 'nlpd_samples'], {'delimiter': '""","""'}), "('nlpd_samples.csv', nlpd_samples, delimiter=',')\n", (8238, 8287), True, 'import numpy as np\n'), ((8819, 8836), 'numpy.mean', 'np.mean', (['ypred[1]'], {}), '(ypred[1])\n', (8826, 8836), True, 'import numpy as np\n'), ((9134, 9159), 'time.strftime', 'time.strftime', (['"""%X %x %Z"""'], {}), "('%X %x %Z')\n", (9147, 9159), False, 'import time\n'), ((2157, 2183), 'os.path.join', 'os.path.join', (['dpath', 'dfile'], {}), '(dpath, dfile)\n', (2169, 2183), False, 'import os\n'), ((2246, 2276), 'os.path.join', 'os.path.join', (['dpath', 'dlinkfile'], {}), '(dpath, dlinkfile)\n', (2258, 
2276), False, 'import os\n'), ((4725, 4815), 'mmgp.kernels.RadialBasis', 'kernels.RadialBasis', (['(2)'], {'std_dev': '(2.0)', 'lengthscale': '(1.0)', 'white': '(0.01)', 'input_scaling': 'IS_ARD'}), '(2, std_dev=2.0, lengthscale=1.0, white=0.01,\n input_scaling=IS_ARD)\n', (4744, 4815), False, 'from mmgp import kernels\n'), ((4860, 4947), 'mmgp.kernels.CompactSlice', 'kernels.CompactSlice', (['(2)'], {'active_dims': '[0, 1]', 'lengthscale': '(2.0)', 'input_scaling': 'IS_ARD'}), '(2, active_dims=[0, 1], lengthscale=2.0, input_scaling=\n IS_ARD)\n', (4880, 4947), False, 'from mmgp import kernels\n'), ((5332, 5451), 'mmgp.kernels.RadialBasisSlice', 'kernels.RadialBasisSlice', (['lag_dim'], {'active_dims': 'lag_active_dims_s[i]', 'std_dev': '(1.0)', 'white': '(0.01)', 'input_scaling': 'IS_ARD'}), '(lag_dim, active_dims=lag_active_dims_s[i], std_dev\n =1.0, white=0.01, input_scaling=IS_ARD)\n', (5356, 5451), False, 'from mmgp import kernels\n'), ((5544, 5652), 'mmgp.kernels.PeriodicSliceFixed', 'kernels.PeriodicSliceFixed', (['(1)'], {'active_dims': '[Xtr.shape[1] - 1]', 'lengthscale': '(0.5)', 'std_dev': '(1.0)', 'period': '(144)'}), '(1, active_dims=[Xtr.shape[1] - 1], lengthscale=\n 0.5, std_dev=1.0, period=144)\n', (5570, 5652), False, 'from mmgp import kernels\n'), ((6901, 6914), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (6911, 6914), False, 'import csv\n'), ((7058, 7071), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (7068, 7071), False, 'import csv\n'), ((7236, 7249), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (7246, 7249), False, 'import csv\n'), ((8934, 8947), 'csv.writer', 'csv.writer', (['f'], {}), '(f)\n', (8944, 8947), False, 'import csv\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: <NAME>
# Date: 10-02-2018
"""ExpandRank keyphrase extraction model.
Graph-based ranking approach to keyphrase extraction described in:
* <NAME> and <NAME>.
Single Document Keyphrase Extraction Using Neighborhood Knowledge.
*In proceedings of AAAI*, pages 855-860, 2008.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
from __future__ import absolute_import
from onmt.keyphrase.pke.unsupervised import SingleRank
from onmt.keyphrase.pke.base import LoadFile
import networkx as nx
import logging
class ExpandRank(SingleRank):
"""ExpandRank keyphrase extraction model.
Parameterized example::
import pke
import string
from nltk.corpus import stopwords
# 1. create an ExpandRank extractor.
extractor = pke.unsupervised.ExpandRank()
# 2. load the content of the document.
extractor.load_document(input='path/to/input.xml')
# 3. select the the longest sequences of nouns and adjectives, that do
# not contain punctuation marks or stopwords as candidates.
pos = {'NOUN', 'PROPN', 'ADJ'}
stoplist = list(string.punctuation)
stoplist += ['-lrb-', '-rrb-', '-lcb-', '-rcb-', '-lsb-', '-rsb-']
stoplist += stopwords.words('english')
extractor.candidate_selection(pos=pos, stoplist=stoplist)
# 4. weight the candidates using the sum of their word's scores that are
# computed using random walk. In the graph, nodes are words (nouns
# and adjectives only) that are connected if they occur in a window
# of 10 words. A set of extra documents should be provided to expand
# the graph.
expanded_documents = [('path/to/input1.xml', similarity1),
('path/to/input2.xml', similarity2)]
extractor.candidate_weighting(window=10,
pos=pos,
expanded_documents=expanded_documents,
format='corenlp')
# 5. get the 10-highest scored candidates as keyphrases
keyphrases = extractor.get_n_best(n=10)
"""
def __init__(self):
""" Redefining initializer for ExpandRank. """
super(ExpandRank, self).__init__()
def expand_word_graph(self,
input_file,
similarity,
window=10,
pos=None):
"""Expands the word graph using the given document.
Args:
input_file (str): path to the input file.
similarity (float): similarity for weighting edges.
window (int): the window within the sentence for connecting two
words in the graph, defaults to 10.
pos (set): the set of valid pos for words to be considered as nodes
in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
"""
# define default pos tags set
if pos is None:
pos = {'NOUN', 'PROPN', 'ADJ'}
# initialize document loader
doc = LoadFile()
# read document
doc.load_document(input=input_file,
language=self.language,
normalization=self.normalization)
# flatten document and initialize nodes
sequence = []
for sentence in doc.sentences:
for j, node in enumerate(sentence.stems):
if node not in self.graph and sentence.pos[j] in pos:
self.graph.add_node(node)
sequence.append((node, sentence.pos[j]))
# loop through sequence to build the edges in the graph
for j, node_1 in enumerate(sequence):
for k in range(j + 1, min(j + window, len(sequence))):
node_2 = sequence[k]
if node_1[1] in pos and node_2[1] in pos \
and node_1[0] != node_2[0]:
if not self.graph.has_edge(node_1[0], node_2[0]):
self.graph.add_edge(node_1[0], node_2[0], weight=0)
self.graph[node_1[0]][node_2[0]]['weight'] += similarity
def candidate_weighting(self,
window=10,
pos=None,
expanded_documents=None,
normalized=False):
"""Candidate ranking using random walk.
Args:
window (int): the window within the sentence for connecting two
words in the graph, defaults to 10.
pos (set): the set of valid pos for words to be considered as nodes
in the graph, defaults to ('NOUN', 'PROPN', 'ADJ').
expanded_documents (list): the set of documents to expand the graph,
should be a list of tuples (input_path, similarity). Defaults to
empty list, i.e. no expansion.
normalized (False): normalize keyphrase score by their length,
defaults to False.
"""
# define default pos tags set
if pos is None:
pos = {'NOUN', 'PROPN', 'ADJ'}
if expanded_documents is None:
expanded_documents = []
logging.warning('No neighbor documents provided for ExpandRank.')
# build the word graph
self.build_word_graph(window=window, pos=pos)
# expand the word graph
for input_file, similarity in expanded_documents:
self.expand_word_graph(input_file=input_file,
similarity=similarity,
window=window,
pos=pos)
# compute the word scores using random walk
w = nx.pagerank_scipy(self.graph, alpha=0.85, weight='weight')
# loop through the candidates
for k in self.candidates.keys():
tokens = self.candidates[k].lexical_form
self.weights[k] = sum([w[t] for t in tokens])
if normalized:
self.weights[k] /= len(tokens)
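# --- Illustrative sketch (not part of the original module) -------------------
# A minimal, self-contained illustration of the expansion idea used above:
# words are nodes, co-occurrence edges are weighted by each document's
# similarity, and a random walk scores the nodes.  The word pairs and
# similarities below are invented for illustration only, and plain
# nx.pagerank is used instead of nx.pagerank_scipy just to keep it short.
def _expand_rank_sketch():
    graph = nx.Graph()
    # pretend these word pairs co-occur within the 10-word window
    cooccurrences = [('keyphrase', 'extraction'),
                     ('graph', 'ranking'),
                     ('keyphrase', 'ranking')]
    for similarity in (1.0, 0.6):  # 1.0 = the document itself, 0.6 = a neighbour
        for u, v in cooccurrences:
            if not graph.has_edge(u, v):
                graph.add_edge(u, v, weight=0.0)
            graph[u][v]['weight'] += similarity
    # random-walk scores, analogous to candidate_weighting() above
    return nx.pagerank(graph, alpha=0.85, weight='weight')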
| [
"onmt.keyphrase.pke.base.LoadFile",
"logging.warning",
"networkx.pagerank_scipy"
] | [((3217, 3227), 'onmt.keyphrase.pke.base.LoadFile', 'LoadFile', ([], {}), '()\n', (3225, 3227), False, 'from onmt.keyphrase.pke.base import LoadFile\n'), ((5889, 5947), 'networkx.pagerank_scipy', 'nx.pagerank_scipy', (['self.graph'], {'alpha': '(0.85)', 'weight': '"""weight"""'}), "(self.graph, alpha=0.85, weight='weight')\n", (5906, 5947), True, 'import networkx as nx\n'), ((5371, 5436), 'logging.warning', 'logging.warning', (['"""No neighbor documents provided for ExpandRank."""'], {}), "('No neighbor documents provided for ExpandRank.')\n", (5386, 5436), False, 'import logging\n')] |
import json
import inspect
import hashlib
from _plotly_utils.utils import PlotlyJSONEncoder
from dash.long_callback.managers import BaseLongCallbackManager
class CeleryLongCallbackManager(BaseLongCallbackManager):
def __init__(self, celery_app, cache_by=None, expire=None):
"""
Long callback manager that runs callback logic on a celery task queue,
and stores results using a celery result backend.
:param celery_app:
A celery.Celery application instance that must be configured with a
result backend. See the celery documentation for information on
configuration options.
:param cache_by:
A list of zero-argument functions. When provided, caching is enabled and
the return values of these functions are combined with the callback
function's input arguments and source code to generate cache keys.
:param expire:
If provided, a cache entry will be removed when it has not been accessed
for ``expire`` seconds. If not provided, the lifetime of cache entries
is determined by the default behavior of the celery result backend.
"""
try:
import celery # pylint: disable=import-outside-toplevel,import-error
from celery.backends.base import ( # pylint: disable=import-outside-toplevel,import-error
DisabledBackend,
)
except ImportError as missing_imports:
raise ImportError(
"""\
CeleryLongCallbackManager requires extra dependencies which can be installed by running
$ pip install "dash[celery]"\n"""
) from missing_imports
if not isinstance(celery_app, celery.Celery):
raise ValueError("First argument must be a celery.Celery object")
if isinstance(celery_app.backend, DisabledBackend):
raise ValueError("Celery instance must be configured with a result backend")
super().__init__(cache_by)
self.handle = celery_app
self.expire = expire
def terminate_job(self, job):
if job is None:
return
self.handle.control.terminate(job)
def terminate_unhealthy_job(self, job):
task = self.get_task(job)
if task and task.status in ("FAILURE", "REVOKED"):
return self.terminate_job(job)
return False
def job_running(self, job):
future = self.get_task(job)
return future and future.status in (
"PENDING",
"RECEIVED",
"STARTED",
"RETRY",
"PROGRESS",
)
def make_job_fn(self, fn, progress, args_deps):
return _make_job_fn(fn, self.handle, progress, args_deps)
def get_task(self, job):
if job:
return self.handle.AsyncResult(job)
return None
def clear_cache_entry(self, key):
self.handle.backend.delete(key)
def call_job_fn(self, key, job_fn, args):
task = job_fn.delay(key, self._make_progress_key(key), args)
return task.task_id
def get_progress(self, key):
progress_key = self._make_progress_key(key)
progress_data = self.handle.backend.get(progress_key)
if progress_data:
return json.loads(progress_data)
return None
def result_ready(self, key):
return self.handle.backend.get(key) is not None
def get_result(self, key, job):
# Get result value
result = self.handle.backend.get(key)
if result is None:
return None
result = json.loads(result)
# Clear result if not caching
if self.cache_by is None:
self.clear_cache_entry(key)
else:
if self.expire:
# Set/update expiration time
self.handle.backend.expire(key, self.expire)
self.clear_cache_entry(self._make_progress_key(key))
self.terminate_job(job)
return result
def _make_job_fn(fn, celery_app, progress, args_deps):
cache = celery_app.backend
# Hash function source and module to create a unique (but stable) celery task name
fn_source = inspect.getsource(fn)
fn_str = fn_source
fn_hash = hashlib.sha1(fn_str.encode("utf-8")).hexdigest()
@celery_app.task(name=f"long_callback_{fn_hash}")
def job_fn(result_key, progress_key, user_callback_args, fn=fn):
def _set_progress(progress_value):
cache.set(progress_key, json.dumps(progress_value, cls=PlotlyJSONEncoder))
maybe_progress = [_set_progress] if progress else []
if isinstance(args_deps, dict):
user_callback_output = fn(*maybe_progress, **user_callback_args)
elif isinstance(args_deps, (list, tuple)):
user_callback_output = fn(*maybe_progress, *user_callback_args)
else:
user_callback_output = fn(*maybe_progress, user_callback_args)
cache.set(result_key, json.dumps(user_callback_output, cls=PlotlyJSONEncoder))
return job_fn
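# --- Illustrative sketch (not part of the original module) -------------------
# How this manager is typically wired into a Dash app.  The Redis URLs are
# placeholders (any Celery broker/result backend works) and the function is
# never called here; it only sketches the intended usage.
def _example_usage():
    import dash  # assumes the dash[celery] extras are installed
    from celery import Celery

    celery_app = Celery(
        __name__,
        broker="redis://localhost:6379/0",   # placeholder broker URL
        backend="redis://localhost:6379/1",  # placeholder result backend
    )
    manager = CeleryLongCallbackManager(celery_app, expire=60)
    # Dash 2.x accepts the manager through the long_callback_manager argument.
    return dash.Dash(__name__, long_callback_manager=manager)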
| [
"json.loads",
"json.dumps",
"inspect.getsource"
] | [((4202, 4223), 'inspect.getsource', 'inspect.getsource', (['fn'], {}), '(fn)\n', (4219, 4223), False, 'import inspect\n'), ((3614, 3632), 'json.loads', 'json.loads', (['result'], {}), '(result)\n', (3624, 3632), False, 'import json\n'), ((3298, 3323), 'json.loads', 'json.loads', (['progress_data'], {}), '(progress_data)\n', (3308, 3323), False, 'import json\n'), ((4990, 5045), 'json.dumps', 'json.dumps', (['user_callback_output'], {'cls': 'PlotlyJSONEncoder'}), '(user_callback_output, cls=PlotlyJSONEncoder)\n', (5000, 5045), False, 'import json\n'), ((4513, 4562), 'json.dumps', 'json.dumps', (['progress_value'], {'cls': 'PlotlyJSONEncoder'}), '(progress_value, cls=PlotlyJSONEncoder)\n', (4523, 4562), False, 'import json\n')] |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Serializer, Deserializer
from ._configuration import TokenApiClientConfiguration
from .operations import BotSignInOperations
from .operations import UserTokenOperations
from . import models
class TokenApiClient(SDKClient):
"""TokenApiClient
:ivar config: Configuration for client.
:vartype config: TokenApiClientConfiguration
:ivar bot_sign_in: BotSignIn operations
:vartype bot_sign_in: botframework.tokenapi.operations.BotSignInOperations
:ivar user_token: UserToken operations
:vartype user_token: botframework.tokenapi.operations.UserTokenOperations
:param credentials: Subscription credentials which uniquely identify
client subscription.
:type credentials: None
:param str base_url: Service URL
"""
def __init__(self, credentials, base_url=None):
self.config = TokenApiClientConfiguration(credentials, base_url)
super(TokenApiClient, self).__init__(self.config.credentials, self.config)
client_models = {
k: v for k, v in models.__dict__.items() if isinstance(v, type)
}
self.api_version = "token"
self._serialize = Serializer(client_models)
self._deserialize = Deserializer(client_models)
self.bot_sign_in = BotSignInOperations(
self._client, self.config, self._serialize, self._deserialize
)
self.user_token = UserTokenOperations(
self._client, self.config, self._serialize, self._deserialize
)
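# --- Illustrative sketch (not part of the generated client) ------------------
# A hedged example of constructing the client.  Any msrest credentials object
# can be passed; BasicTokenAuthentication and the token/URL values below are
# stand-ins used only for illustration.
def _example_usage():
    from msrest.authentication import BasicTokenAuthentication

    credentials = BasicTokenAuthentication({"access_token": "<access-token>"})
    client = TokenApiClient(credentials, base_url="https://token.botframework.com")
    return client.user_token, client.bot_sign_in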
| [
"msrest.Serializer",
"msrest.Deserializer"
] | [((1681, 1706), 'msrest.Serializer', 'Serializer', (['client_models'], {}), '(client_models)\n', (1691, 1706), False, 'from msrest import Serializer, Deserializer\n'), ((1735, 1762), 'msrest.Deserializer', 'Deserializer', (['client_models'], {}), '(client_models)\n', (1747, 1762), False, 'from msrest import Serializer, Deserializer\n')] |
"""
Copyright 2019 Samsung SDS
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from brightics.common.utils import check_required_parameters
from brightics.common.exception import BrighticsFunctionException
from .data import regex_format_dict
import re
def regex(table, **params):
check_required_parameters(_regex, params, ['table'])
return _regex(table, **params)
def _regex(table, input_cols, transformation_mode='extract', find_mode='all', pattern='',
user_dict_pattern='', custom_pattern='', replacement_string='', user_dict=None):
out_table = table.copy()
pattern_dict = regex_format_dict.pattern_dict
user_pattern_dict = {}
if user_dict is not None:
user_patterns = user_dict.values
for user_pattern in user_patterns:
user_pattern_name = user_pattern[0]
user_pattern_content = user_pattern[1]
user_pattern_dict[user_pattern_name] = user_pattern_dict.get(user_pattern_name, []) + [user_pattern_content]
user_pattern_dict = {key: r'|'.join(value) for key, value in user_pattern_dict.items()}
if pattern == '':
raise BrighticsFunctionException.from_errors([{'0100': "Please choose a pattern."}])
if pattern == 'custom':
raw_pattern = custom_pattern
elif pattern == 'user_dictionary':
raw_pattern = user_pattern_dict.get(user_dict_pattern)
if raw_pattern is None:
raise BrighticsFunctionException.from_errors(
[{'0100': user_dict_pattern + " is not a valid pattern name in the user dictionary."}])
else:
raw_pattern = pattern_dict.get(pattern)
regex_pattern = re.compile(raw_pattern)
def transformation(text):
if transformation_mode == 'extract':
if find_mode == 'first':
result = regex_pattern.search(text)
if result is None:
return ""
else:
return result.group()
else: # find_mode == 'all'
return regex_pattern.findall(text)
elif transformation_mode == 'replace':
if find_mode == 'first':
return regex_pattern.sub(replacement_string, text, 1)
else: # find_mode == 'all'
return regex_pattern.sub(replacement_string, text)
elif transformation_mode == 'remove':
if find_mode == 'first':
return regex_pattern.sub("", text, 1)
else: # find_mode == 'all'
return regex_pattern.sub("", text)
else: # transformation_mode == 'split'
if find_mode == 'first':
return regex_pattern.split(text, 1)
else: # find_mode == 'all'
return regex_pattern.split(text)
for col in input_cols:
result_col = table[col].apply(transformation)
out_table['regex_' + col] = result_col
return {'out_table': out_table}
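# --- Illustrative sketch (not part of the original module) -------------------
# A small, self-contained call of _regex().  A custom pattern is used so the
# example does not have to guess at the names defined in regex_format_dict;
# the sample table content is invented.
def _example_usage():
    import pandas as pd

    table = pd.DataFrame({'text': ['order 12 shipped on day 3', 'no digits here']})
    result = _regex(table,
                    input_cols=['text'],
                    transformation_mode='extract',
                    find_mode='all',
                    pattern='custom',
                    custom_pattern=r'\d+')
    # out_table gains a 'regex_text' column holding the extracted matches
    return result['out_table']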
| [
"brightics.common.exception.BrighticsFunctionException.from_errors",
"brightics.common.utils.check_required_parameters",
"re.compile"
] | [((820, 872), 'brightics.common.utils.check_required_parameters', 'check_required_parameters', (['_regex', 'params', "['table']"], {}), "(_regex, params, ['table'])\n", (845, 872), False, 'from brightics.common.utils import check_required_parameters\n'), ((2179, 2202), 're.compile', 're.compile', (['raw_pattern'], {}), '(raw_pattern)\n', (2189, 2202), False, 'import re\n'), ((1661, 1739), 'brightics.common.exception.BrighticsFunctionException.from_errors', 'BrighticsFunctionException.from_errors', (["[{'0100': 'Please choose a pattern.'}]"], {}), "([{'0100': 'Please choose a pattern.'}])\n", (1699, 1739), False, 'from brightics.common.exception import BrighticsFunctionException\n'), ((1957, 2087), 'brightics.common.exception.BrighticsFunctionException.from_errors', 'BrighticsFunctionException.from_errors', (["[{'0100': user_dict_pattern +\n ' is not a valid pattern name in the user dictionary.'}]"], {}), "([{'0100': user_dict_pattern +\n ' is not a valid pattern name in the user dictionary.'}])\n", (1995, 2087), False, 'from brightics.common.exception import BrighticsFunctionException\n')] |
#!/usr/bin/env python3
import sys
import os
import logging
import numpy as np
import pandas as pd
import dateutil
def tempF2C(x): return (x-32.0)*5.0/9.0
def tempC2F(x): return (x*9.0/5.0)+32.0
def load_temperature_hdf5(temps_fn, local_time_offset, basedir=None, start_year=None, truncate_to_full_day=False):
## Load temperature
# temps_fn = "{}_AT_cleaned.h5".format(station_callsign)
logging.info("Using saved temperatures file '{}'".format(temps_fn))
if basedir is not None:
temps_fn = os.path.join(basedir, temps_fn)
tempdf = pd.read_hdf(temps_fn, 'table')
tmp = local_time_offset.split(':')
tmp = int(tmp[0])*3600+int(tmp[1])*60
sitetz = dateutil.tz.tzoffset(local_time_offset, tmp)
tempdf.index = tempdf.index.tz_convert(sitetz)
if truncate_to_full_day:
x = tempdf.index[-1]
if x.hour != 23:
x = x-pd.Timedelta(days=1)
tmp = '{:04d}-{:02d}-{:02d}'.format(x.year, x.month, x.day)
tempdf = tempdf.loc[:tmp]
if start_year is not None:
tempdf = tempdf.loc['{}-01-01'.format(start_year):]
logging.info("Temperature data date range used: {} through {}".format(tempdf.index[0], tempdf.index[-1]))
return tempdf
def load_temperature_csv(fn, local_time_offset=None):
t = pd.read_csv(fn, index_col=0)
if local_time_offset is not None:
tmp = local_time_offset.split(':')
tmp = int(tmp[0])*3600+int(tmp[1])*60
sitetz = dateutil.tz.tzoffset(local_time_offset, tmp)
#t.index = pd.to_datetime(t.index).tz_localize('UTC').tz_convert(sitetz) # @TCC this fails if csv contains datetimes with TZ
t.index = pd.to_datetime(t.index)
try:
t.index = t.index.tz_localize('UTC')
except TypeError:
pass
t.index = t.index.tz_convert(sitetz)
return t
# Function which computes BM (single sine method) degree day generation from temperature data
def compute_BMDD_Fs(tmin, tmax, base_temp, dd_gen):
# Used internally
def _compute_daily_BM_DD(mint, maxt, avet, base_temp):
"""Use standard Baskerville-Ermin (single sine) degree-day method
        to compute the degree-day values for a single day.
"""
if avet is None:
avet = (mint+maxt)/2.0 # simple midpoint (like in the refs)
dd = np.nan # value which we're computing
# Step 1: Adjust for observation time; not relevant
# Step 2: GDD = 0 if max < base (curve all below base)
if maxt < base_temp:
dd = 0
# Step 3: Calc mean temp for day; already done previously
# Step 4: min > base; then whole curve counts
elif mint >= base_temp:
dd = avet - base_temp
# Step 5: else use curve minus part below base
else:
W = (maxt-mint)/2.0
tmp = (base_temp-avet) / W
if tmp < -1:
print('WARNING: (base_temp-avet)/W = {} : should be [-1:1]'.format(tmp))
tmp = -1
if tmp > 1:
print('WARNING: (base_temp-avet)/W = {} : should be [-1:1]'.format(tmp))
tmp = 1
A = np.arcsin(tmp)
dd = ((W*np.cos(A))-((base_temp-avet)*((np.pi/2.0)-A)))/np.pi
return dd
# compute the degree-days for each day in the temperature input (from tmin and tmax vectors)
dd = pd.concat([tmin,tmax], axis=1)
dd.columns = ['tmin', 'tmax']
dd['DD'] = dd.apply(lambda x: _compute_daily_BM_DD(x[0], x[1], (x[0]+x[1])/2.0, base_temp), axis=1)
# compute the degree-days for each day in the temperature input (from a daily groupby)
# grp = t.groupby(pd.TimeGrouper('D'))
# dd = grp.agg(lambda x: _compute_daily_BM_DD(np.min(x), np.max(x), None, base_temp))
# dd.columns = ['DD']
# Find the point where cumulative sums of degree days cross the threshold
cDD = dd['DD'].cumsum(skipna=True)
for cumdd_threshold,label in [[1*dd_gen,'F1'], [2*dd_gen,'F2'], [3*dd_gen,'F3']]:
dtmp = np.zeros(len(dd['DD']))*np.nan
tmp = np.searchsorted(cDD, cDD+(cumdd_threshold)-dd['DD'], side='left').astype(float)
tmp[tmp>=len(tmp)] = np.nan
#dd[label+'_idx'] = tmp
# convert those indexes into end times
e = pd.Series(index=dd.index, dtype='float64')#, dtype='datetime64[ns]')
#e[~np.isnan(tmp)] = dd.index[tmp[~np.isnan(tmp)].astype(int)] # @TCC previous code
e.loc[~np.isnan(tmp)] = dd.index[tmp[~np.isnan(tmp)].astype(int)]
e.loc[np.isnan(tmp)] = np.nan
dd[label+'_end'] = e
# and duration...
#dd[label] = (e-dd.index+pd.Timedelta(days=1)).apply(lambda x: np.nan if pd.isnull(x) else x.days) # @TCC previous code
dd[label] = (pd.to_datetime(e)-dd.index+pd.Timedelta(days=1)).apply(lambda x: np.nan if pd.isnull(x) else x.days)
#dd.loc[np.isnan(tmp), label] = np.nan
print("DD dataframe min values\n", dd.min())
return dd
def compute_year_over_year_norm(in_dataframe,
start, end,
norm_start=None, norm_end=None,
freq='daily',
interp_method='linear',
norm_method='mean'):
"""
Parameters
----------
start: convertable to Datetime
start range of dates to output
end: convertable to Datetime
end range of dates to output
norm_start : convertable to Datetime or None
`None` will use in_dataframe.index[0]
norm_end : convertable to Datetime or None
if given (not None), output range does not include `norm_end` (it is half-open)
`None` will use in_dataframe.index[-1]
freq : {'daily', 'hourly'}
interp_method : str or None
`None` will skip resample and interpolation, so
`in_dataframe` must already be daily or hourly (depending on `freq`)!
norm_method : {'mean', 'median'}
"""
if freq == 'hourly':
hrs = 24
hrs_freq = '1h'
elif freq == 'daily':
hrs = 1
hrs_freq = '24h'
else:
raise ValueError("Invalid `freq` argument value: {}".format(freq))
if norm_start is None:
norm_start = in_dataframe.index[0]
if norm_end is None:
norm_end = in_dataframe.index[-1]
else:
norm_end = pd.to_datetime([norm_end])[0] - pd.Timedelta('1 second')
print('Computing using range:', norm_start, 'to', norm_end)
if interp_method is None: # skip resample+interpolation (assumes in_dataframe is daily!)
t = in_dataframe.loc[norm_start:norm_end]
else: # resample and interpolate to get hourly
t = in_dataframe.resample(hrs_freq).interpolate(method=interp_method).loc[norm_start:norm_end]
if norm_method == 'mean':
norm = t.groupby([t.index.month, t.index.day, t.index.hour]).mean().sort_index()
elif norm_method == 'median':
norm = t.groupby([t.index.month, t.index.day, t.index.hour]).median().sort_index()
else:
assert False, "Error: Unknown norm_method '{}'".format(norm_method)
# now replicate and trim to the desired output range
start = pd.to_datetime(start)
end = pd.to_datetime(end)
# need a non-leapyear and leapyear version
norm_ly = norm.copy()
if norm.shape[0] == 366*hrs:
norm = norm.drop((2,29,))
else: # norm doesn't include any leapyear data
assert norm.shape[0] == 365*hrs
# make Feb 29 the mean of Feb 28 and Mar 1
foo = (norm.loc[(2,28,)] + norm.loc[(3,1,)]) / 2.0
foo.index = pd.MultiIndex.from_product( ([2],[29],list(range(hrs))) )
norm_ly = pd.concat((norm_ly,foo)).sort_index()
norm_ly.sort_index(inplace=True) # probably not needed
# build up a 'long normal' (lnorm) dataframe year by year by appending the norm or norm_ly
lnorm = None
for yr in np.arange(start.year, end.year+1):
#print(yr)
idx = pd.date_range(start='{}-{:02d}-{:02d} {:02d}:00:00'.format(yr,*norm.index[0]),
end= '{}-{:02d}-{:02d} {:02d}:00:00'.format(yr,*norm.index[-1]),
freq=hrs_freq)
if idx.shape[0] == 366*hrs:
foo = norm_ly.copy()
else:
assert norm.shape[0] == 365*hrs
foo = norm.copy()
foo.index = idx
if lnorm is None:
lnorm = foo
else:
lnorm = lnorm.append(foo)
return lnorm.loc[start:end]
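# --- Illustrative sketch (not part of the original module) -------------------
# A hedged example of compute_BMDD_Fs() on synthetic data.  The temperatures,
# base temperature and degree-days-per-generation below are invented purely to
# show the expected inputs and outputs.
def _example_usage():
    idx = pd.date_range('2115-01-01', periods=30, freq='D')
    tmin = pd.Series(10.0, index=idx)  # constant daily minimum, degrees C
    tmax = pd.Series(25.0, index=idx)  # constant daily maximum, degrees C
    # degree-days above a 12 C base, assuming 100 DD are needed per generation
    dd = compute_BMDD_Fs(tmin, tmax, base_temp=12.0, dd_gen=100.0)
    return dd[['DD', 'F1', 'F2', 'F3']]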
| [
"pandas.Series",
"pandas.isnull",
"pandas.read_csv",
"numpy.arange",
"numpy.searchsorted",
"pandas.Timedelta",
"os.path.join",
"numpy.arcsin",
"dateutil.tz.tzoffset",
"pandas.read_hdf",
"numpy.isnan",
"numpy.cos",
"pandas.concat",
"pandas.to_datetime"
] | [((562, 592), 'pandas.read_hdf', 'pd.read_hdf', (['temps_fn', '"""table"""'], {}), "(temps_fn, 'table')\n", (573, 592), True, 'import pandas as pd\n'), ((688, 732), 'dateutil.tz.tzoffset', 'dateutil.tz.tzoffset', (['local_time_offset', 'tmp'], {}), '(local_time_offset, tmp)\n', (708, 732), False, 'import dateutil\n'), ((1300, 1328), 'pandas.read_csv', 'pd.read_csv', (['fn'], {'index_col': '(0)'}), '(fn, index_col=0)\n', (1311, 1328), True, 'import pandas as pd\n'), ((3385, 3416), 'pandas.concat', 'pd.concat', (['[tmin, tmax]'], {'axis': '(1)'}), '([tmin, tmax], axis=1)\n', (3394, 3416), True, 'import pandas as pd\n'), ((7182, 7203), 'pandas.to_datetime', 'pd.to_datetime', (['start'], {}), '(start)\n', (7196, 7203), True, 'import pandas as pd\n'), ((7214, 7233), 'pandas.to_datetime', 'pd.to_datetime', (['end'], {}), '(end)\n', (7228, 7233), True, 'import pandas as pd\n'), ((7900, 7935), 'numpy.arange', 'np.arange', (['start.year', '(end.year + 1)'], {}), '(start.year, end.year + 1)\n', (7909, 7935), True, 'import numpy as np\n'), ((517, 548), 'os.path.join', 'os.path.join', (['basedir', 'temps_fn'], {}), '(basedir, temps_fn)\n', (529, 548), False, 'import os\n'), ((1473, 1517), 'dateutil.tz.tzoffset', 'dateutil.tz.tzoffset', (['local_time_offset', 'tmp'], {}), '(local_time_offset, tmp)\n', (1493, 1517), False, 'import dateutil\n'), ((1669, 1692), 'pandas.to_datetime', 'pd.to_datetime', (['t.index'], {}), '(t.index)\n', (1683, 1692), True, 'import pandas as pd\n'), ((4275, 4317), 'pandas.Series', 'pd.Series', ([], {'index': 'dd.index', 'dtype': '"""float64"""'}), "(index=dd.index, dtype='float64')\n", (4284, 4317), True, 'import pandas as pd\n'), ((4525, 4538), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (4533, 4538), True, 'import numpy as np\n'), ((6393, 6417), 'pandas.Timedelta', 'pd.Timedelta', (['"""1 second"""'], {}), "('1 second')\n", (6405, 6417), True, 'import pandas as pd\n'), ((886, 906), 'pandas.Timedelta', 'pd.Timedelta', ([], {'days': '(1)'}), '(days=1)\n', (898, 906), True, 'import pandas as pd\n'), ((3171, 3185), 'numpy.arcsin', 'np.arcsin', (['tmp'], {}), '(tmp)\n', (3180, 3185), True, 'import numpy as np\n'), ((4068, 4135), 'numpy.searchsorted', 'np.searchsorted', (['cDD', "(cDD + cumdd_threshold - dd['DD'])"], {'side': '"""left"""'}), "(cDD, cDD + cumdd_threshold - dd['DD'], side='left')\n", (4083, 4135), True, 'import numpy as np\n'), ((4452, 4465), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (4460, 4465), True, 'import numpy as np\n'), ((6361, 6387), 'pandas.to_datetime', 'pd.to_datetime', (['[norm_end]'], {}), '([norm_end])\n', (6375, 6387), True, 'import pandas as pd\n'), ((7672, 7697), 'pandas.concat', 'pd.concat', (['(norm_ly, foo)'], {}), '((norm_ly, foo))\n', (7681, 7697), True, 'import pandas as pd\n'), ((4780, 4800), 'pandas.Timedelta', 'pd.Timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4792, 4800), True, 'import pandas as pd\n'), ((4828, 4840), 'pandas.isnull', 'pd.isnull', (['x'], {}), '(x)\n', (4837, 4840), True, 'import pandas as pd\n'), ((4753, 4770), 'pandas.to_datetime', 'pd.to_datetime', (['e'], {}), '(e)\n', (4767, 4770), True, 'import pandas as pd\n'), ((3207, 3216), 'numpy.cos', 'np.cos', (['A'], {}), '(A)\n', (3213, 3216), True, 'import numpy as np\n'), ((4483, 4496), 'numpy.isnan', 'np.isnan', (['tmp'], {}), '(tmp)\n', (4491, 4496), True, 'import numpy as np\n')] |
# import Kratos
import KratosMultiphysics
import KratosMultiphysics.StructuralMechanicsApplication as StructuralMechanicsApplication
import KratosMultiphysics.CSharpWrapperApplication as CSharpWrapperApplication
import run_cpp_unit_tests
# Import Kratos "wrapper" for unittests
import KratosMultiphysics.KratosUnittest as KratosUnittest
# Import subprocess
import subprocess
# Using kratos_utilities
import KratosMultiphysics.kratos_utilities as kratos_utilities
if kratos_utilities.CheckIfApplicationsAvailable("ExternalSolversApplication"):
has_external_solvers_application = True
else:
has_external_solvers_application = False
# Import the tests or test_classes to create the suites
## SMALL TESTS
## NIGHTLY TESTS
## VALIDATION TESTS
def AssembleTestSuites():
''' Populates the test suites to run.
    Populates the test suites to run. At least, it should populate the suites:
    "small", "nightly" and "all"
Return
------
suites: A dictionary of suites
            The set of suites with their test_cases added.
'''
suites = KratosUnittest.KratosSuites
    # Create a test suite with the selected tests (Small tests):
smallSuite = suites['small']
# Create a test suit with the selected tests plus all small tests
nightlySuite = suites['nightly']
### BEGIN SMALL SUITE ###
### END SMALL SUITE ###
### BEGIN NIGHTLY SUITE ###
    ### END NIGHTLY SUITE ###
### BEGIN VALIDATION SUITE ###
    # For very long tests that should not be in nightly and that you can use to validate
validationSuite = suites['validation']
validationSuite.addTests(nightlySuite)
### END VALIDATION ###
    # Create a test suite that contains all the tests:
allSuite = suites['all']
allSuite.addTests(nightlySuite) # Already contains the smallSuite
validationSuite.addTests(allSuite) # Validation contains all
# Manual list for debugging
#allSuite.addTests(
#KratosUnittest.TestLoader().loadTestsFromTestCases([
#### STANDALONE
#### SMALL
            #### NIGHTLY
#### VALIDATION
#])
#)
return suites
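# --- Illustrative sketch (not part of the original test runner) --------------
# How a test case would be registered in one of the suites returned above.
# TestMyFeature is a placeholder; real applications import their test classes
# instead of defining them inline.
def _example_registration(suites):
    class TestMyFeature(KratosUnittest.TestCase):
        def test_something(self):
            self.assertEqual(1 + 1, 2)

    suites['small'].addTests(
        KratosUnittest.TestLoader().loadTestsFromTestCases([TestMyFeature]))
    return suites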
if __name__ == '__main__':
KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning cpp unit tests ...")
run_cpp_unit_tests.run()
KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished running cpp unit tests!")
KratosMultiphysics.Logger.PrintInfo("Unittests", "\nRunning python tests ...")
KratosUnittest.runTests(AssembleTestSuites())
KratosMultiphysics.Logger.PrintInfo("Unittests", "Finished python tests!")
| [
"KratosMultiphysics.kratos_utilities.CheckIfApplicationsAvailable",
"KratosMultiphysics.Logger.PrintInfo",
"run_cpp_unit_tests.run"
] | [((469, 544), 'KratosMultiphysics.kratos_utilities.CheckIfApplicationsAvailable', 'kratos_utilities.CheckIfApplicationsAvailable', (['"""ExternalSolversApplication"""'], {}), "('ExternalSolversApplication')\n", (514, 544), True, 'import KratosMultiphysics.kratos_utilities as kratos_utilities\n'), ((2172, 2259), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""\nRunning cpp unit tests ..."""'], {}), '(\'Unittests\',\n """\nRunning cpp unit tests ...""")\n', (2207, 2259), False, 'import KratosMultiphysics\n'), ((2257, 2281), 'run_cpp_unit_tests.run', 'run_cpp_unit_tests.run', ([], {}), '()\n', (2279, 2281), False, 'import run_cpp_unit_tests\n'), ((2286, 2374), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""Finished running cpp unit tests!"""'], {}), "('Unittests',\n 'Finished running cpp unit tests!')\n", (2321, 2374), False, 'import KratosMultiphysics\n'), ((2376, 2461), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""\nRunning python tests ..."""'], {}), '(\'Unittests\',\n """\nRunning python tests ...""")\n', (2411, 2461), False, 'import KratosMultiphysics\n'), ((2509, 2583), 'KratosMultiphysics.Logger.PrintInfo', 'KratosMultiphysics.Logger.PrintInfo', (['"""Unittests"""', '"""Finished python tests!"""'], {}), "('Unittests', 'Finished python tests!')\n", (2544, 2583), False, 'import KratosMultiphysics\n')] |
# -*- coding: utf-8 -*-
import pytest
import datetime
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from rest_framework.test import APIClient, APIRequestFactory
from resources.enums import UnitAuthorizationLevel
from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period
from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup
from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility
from munigeo.models import Municipality
@pytest.fixture
def api_client():
return APIClient()
@pytest.fixture
def staff_api_client(staff_user):
api_client = APIClient()
api_client.force_authenticate(user=staff_user)
return api_client
@pytest.fixture
def user_api_client(user):
api_client = APIClient()
api_client.force_authenticate(user=user)
return api_client
@pytest.fixture(params=[None, 'user', 'staff_user'])
def all_user_types_api_client(request):
api_client = APIClient()
if request.param:
api_client.force_authenticate(request.getfixturevalue(request.param))
return api_client
@pytest.fixture
def api_rf():
return APIRequestFactory()
@pytest.mark.django_db
@pytest.fixture
def space_resource_type():
return ResourceType.objects.get_or_create(id="test_space", name="test_space", main_type="space")[0]
@pytest.mark.django_db
@pytest.fixture
def space_resource(space_resource_type):
return Resource.objects.create(type=space_resource_type, authentication="none", name="resource")
@pytest.mark.django_db
@pytest.fixture
def test_unit():
return Unit.objects.create(name="unit", time_zone='Europe/Helsinki')
@pytest.fixture
def test_unit2():
return Unit.objects.create(name="unit 2", time_zone='Europe/Helsinki')
@pytest.fixture
def test_unit3():
return Unit.objects.create(name="unit 3", time_zone='Europe/Helsinki')
@pytest.fixture
def terms_of_use():
return TermsOfUse.objects.create(
name_fi='testikäyttöehdot',
name_en='test terms of use',
text_fi='kaikki on kielletty',
text_en='everything is forbidden',
)
@pytest.mark.django_db
@pytest.fixture
def resource_in_unit(space_resource_type, test_unit, terms_of_use):
return Resource.objects.create(
type=space_resource_type,
authentication="none",
name="resource in unit",
unit=test_unit,
max_reservations_per_user=1,
max_period=datetime.timedelta(hours=2),
reservable=True,
generic_terms=terms_of_use,
specific_terms_fi='spesifiset käyttöehdot',
specific_terms_en='specific terms of use',
reservation_confirmed_notification_extra_en='this resource rocks'
)
@pytest.mark.django_db
@pytest.fixture
def resource_in_unit2(space_resource_type, test_unit2):
return Resource.objects.create(
type=space_resource_type,
authentication="none",
name="resource in unit 2",
unit=test_unit2,
max_reservations_per_user=2,
max_period=datetime.timedelta(hours=4),
reservable=True,
)
@pytest.mark.django_db
@pytest.fixture
def resource_in_unit3(space_resource_type, test_unit3):
return Resource.objects.create(
type=space_resource_type,
authentication="none",
name="resource in unit 3",
unit=test_unit3,
max_reservations_per_user=2,
max_period=datetime.timedelta(hours=4),
reservable=True,
)
@pytest.mark.django_db
@pytest.fixture
def resource_with_opening_hours(resource_in_unit):
p1 = Period.objects.create(start=datetime.date(2115, 1, 1),
end=datetime.date(2115, 12, 31),
resource=resource_in_unit, name='regular hours')
for weekday in range(0, 7):
Day.objects.create(period=p1, weekday=weekday,
opens=datetime.time(8, 0),
closes=datetime.time(18, 0))
resource_in_unit.update_opening_hours()
return resource_in_unit
@pytest.mark.django_db
@pytest.fixture
def exceptional_period(resource_with_opening_hours):
parent = resource_with_opening_hours.periods.first()
period = Period.objects.create(start='2115-01-10', end='2115-01-12',
resource=resource_with_opening_hours,
name='exceptional hours',
exceptional=True, parent=parent)
date = period.start
Day.objects.create(period=period, weekday=date.weekday(),
closed=True)
date = date + datetime.timedelta(days=1)
Day.objects.create(period=period, weekday=date.weekday(),
opens='12:00', closes='13:00')
date = date + datetime.timedelta(days=1)
Day.objects.create(period=period, weekday=date.weekday(),
closed=True)
return period
@pytest.mark.django_db
@pytest.fixture
def equipment_category():
return EquipmentCategory.objects.create(
name='test equipment category'
)
@pytest.mark.django_db
@pytest.fixture
def equipment(equipment_category):
equipment = Equipment.objects.create(name='test equipment', category=equipment_category)
return equipment
@pytest.mark.django_db
@pytest.fixture
def equipment_alias(equipment):
equipment_alias = EquipmentAlias.objects.create(name='test equipment alias', language='fi', equipment=equipment)
return equipment_alias
@pytest.mark.django_db
@pytest.fixture
def resource_equipment(resource_in_unit, equipment):
data = {'test_key': 'test_value'}
resource_equipment = ResourceEquipment.objects.create(
equipment=equipment,
resource=resource_in_unit,
data=data,
description='test resource equipment',
)
return resource_equipment
@pytest.mark.django_db
@pytest.fixture
def user():
return get_user_model().objects.create(
username='test_user',
first_name='Cem',
last_name='Kaner',
email='<EMAIL>',
preferred_language='en'
)
@pytest.mark.django_db
@pytest.fixture
def user2():
return get_user_model().objects.create(
username='test_user2',
first_name='Brendan',
last_name='Neutra',
email='<EMAIL>'
)
@pytest.mark.django_db
@pytest.fixture
def staff_user():
return get_user_model().objects.create(
username='test_staff_user',
first_name='John',
last_name='Staff',
email='<EMAIL>',
is_staff=True,
preferred_language='en'
)
@pytest.mark.django_db
@pytest.fixture
def unit_manager_user(resource_in_unit):
user = get_user_model().objects.create(
username='test_manager_user',
first_name='Inspector',
last_name='Lestrade',
email='<EMAIL>',
is_staff=True,
preferred_language='en'
)
user.unit_authorizations.create(subject=resource_in_unit.unit, level=UnitAuthorizationLevel.manager)
return user
@pytest.mark.django_db
@pytest.fixture
def general_admin():
return get_user_model().objects.create(
username='test_general_admin',
first_name='Genie',
last_name='Manager',
email='<EMAIL>',
is_staff=True,
is_general_admin=True,
preferred_language='en'
)
@pytest.mark.django_db
@pytest.fixture
def group():
return Group.objects.create(name='test group')
@pytest.mark.django_db
@pytest.fixture
def purpose():
return Purpose.objects.create(name='test purpose', id='test-purpose')
@pytest.fixture
def resource_group(resource_in_unit):
group = ResourceGroup.objects.create(
identifier='test_group',
name='Test resource group'
)
group.resources.set([resource_in_unit])
return group
@pytest.fixture
def resource_group2(resource_in_unit2):
group = ResourceGroup.objects.create(
identifier='test_group_2',
name='Test resource group 2'
)
group.resources.set([resource_in_unit2])
return group
@pytest.fixture
def test_municipality():
municipality = Municipality.objects.create(
id='foo',
name='Foo'
)
return municipality
@pytest.fixture
def accessibility_viewpoint_wheelchair():
vp = {"id": "10", "name_en": "I am a wheelchair user", "order_text": 10}
return AccessibilityViewpoint.objects.create(**vp)
@pytest.fixture
def accessibility_viewpoint_hearing():
vp = {"id": "20", "name_en": "I am hearing impaired", "order_text": 20}
return AccessibilityViewpoint.objects.create(**vp)
@pytest.fixture
def accessibility_value_green():
return AccessibilityValue.objects.create(value='green', order=10)
@pytest.fixture
def accessibility_value_red():
return AccessibilityValue.objects.create(value='red', order=-10)
@pytest.fixture
def resource_with_accessibility_data(resource_in_unit, accessibility_viewpoint_wheelchair,
accessibility_viewpoint_hearing, accessibility_value_green,
accessibility_value_red):
""" Resource is wheelchair accessible, not hearing accessible, unit is accessible to both """
ResourceAccessibility.objects.create(
resource=resource_in_unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
ResourceAccessibility.objects.create(
resource=resource_in_unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_red
)
UnitAccessibility.objects.create(
unit=resource_in_unit.unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit.unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
return resource_in_unit
@pytest.fixture
def resource_with_accessibility_data2(resource_in_unit2, accessibility_viewpoint_wheelchair,
accessibility_viewpoint_hearing, accessibility_value_green,
accessibility_value_red):
""" Resource is hearing accessible, not wheelchair accessible, unit is accessible to both """
ResourceAccessibility.objects.create(
resource=resource_in_unit2,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_red
)
ResourceAccessibility.objects.create(
resource=resource_in_unit2,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit2.unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit2.unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
return resource_in_unit2
@pytest.fixture
def resource_with_accessibility_data3(resource_in_unit3, accessibility_viewpoint_wheelchair,
accessibility_viewpoint_hearing, accessibility_value_green,
accessibility_value_red):
""" Resource is accessible, unit is not """
ResourceAccessibility.objects.create(
resource=resource_in_unit3,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_green
)
ResourceAccessibility.objects.create(
resource=resource_in_unit3,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_green
)
UnitAccessibility.objects.create(
unit=resource_in_unit3.unit,
viewpoint=accessibility_viewpoint_wheelchair,
value=accessibility_value_red
)
UnitAccessibility.objects.create(
unit=resource_in_unit3.unit,
viewpoint=accessibility_viewpoint_hearing,
value=accessibility_value_red
)
return resource_in_unit3
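# --- Illustrative sketch (not part of the original conftest) -----------------
# How a test module would consume the fixtures above.  The endpoint path is a
# guess at a typical resource listing URL, and the leading underscore keeps
# pytest from collecting the sketch as a real test.
@pytest.mark.django_db
def _example_resource_listing(user_api_client, resource_in_unit):
    # resource_in_unit guarantees at least one resource exists in the test DB
    response = user_api_client.get('/v1/resource/')
    assert response.status_code == 200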
| [
"resources.models.ResourceEquipment.objects.create",
"resources.models.Equipment.objects.create",
"rest_framework.test.APIClient",
"pytest.fixture",
"datetime.timedelta",
"resources.models.Period.objects.create",
"resources.models.AccessibilityViewpoint.objects.create",
"resources.models.UnitAccessibility.objects.create",
"django.contrib.auth.get_user_model",
"resources.models.Purpose.objects.create",
"resources.models.EquipmentAlias.objects.create",
"resources.models.Resource.objects.create",
"datetime.time",
"resources.models.ResourceGroup.objects.create",
"resources.models.ResourceType.objects.get_or_create",
"datetime.date",
"resources.models.AccessibilityValue.objects.create",
"munigeo.models.Municipality.objects.create",
"resources.models.TermsOfUse.objects.create",
"resources.models.Unit.objects.create",
"resources.models.EquipmentCategory.objects.create",
"rest_framework.test.APIRequestFactory",
"django.contrib.auth.models.Group.objects.create",
"resources.models.ResourceAccessibility.objects.create"
] | [((970, 1021), 'pytest.fixture', 'pytest.fixture', ([], {'params': "[None, 'user', 'staff_user']"}), "(params=[None, 'user', 'staff_user'])\n", (984, 1021), False, 'import pytest\n'), ((660, 671), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (669, 671), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((741, 752), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (750, 752), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((888, 899), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (897, 899), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((1079, 1090), 'rest_framework.test.APIClient', 'APIClient', ([], {}), '()\n', (1088, 1090), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((1256, 1275), 'rest_framework.test.APIRequestFactory', 'APIRequestFactory', ([], {}), '()\n', (1273, 1275), False, 'from rest_framework.test import APIClient, APIRequestFactory\n'), ((1541, 1634), 'resources.models.Resource.objects.create', 'Resource.objects.create', ([], {'type': 'space_resource_type', 'authentication': '"""none"""', 'name': '"""resource"""'}), "(type=space_resource_type, authentication='none',\n name='resource')\n", (1564, 1634), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((1700, 1761), 'resources.models.Unit.objects.create', 'Unit.objects.create', ([], {'name': '"""unit"""', 'time_zone': '"""Europe/Helsinki"""'}), "(name='unit', time_zone='Europe/Helsinki')\n", (1719, 1761), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((1809, 1872), 'resources.models.Unit.objects.create', 'Unit.objects.create', ([], {'name': '"""unit 2"""', 'time_zone': '"""Europe/Helsinki"""'}), "(name='unit 2', time_zone='Europe/Helsinki')\n", (1828, 1872), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((1920, 1983), 'resources.models.Unit.objects.create', 'Unit.objects.create', ([], {'name': '"""unit 3"""', 'time_zone': '"""Europe/Helsinki"""'}), "(name='unit 3', time_zone='Europe/Helsinki')\n", (1939, 1983), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((2033, 2191), 'resources.models.TermsOfUse.objects.create', 'TermsOfUse.objects.create', ([], {'name_fi': '"""testikäyttöehdot"""', 'name_en': '"""test terms of use"""', 'text_fi': '"""kaikki on kielletty"""', 'text_en': '"""everything is forbidden"""'}), "(name_fi='testikäyttöehdot', name_en=\n 'test terms of use', text_fi='kaikki on kielletty', text_en=\n 'everything is forbidden')\n", (2058, 2191), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((4298, 4463), 'resources.models.Period.objects.create', 'Period.objects.create', ([], {'start': '"""2115-01-10"""', 'end': '"""2115-01-12"""', 'resource': 'resource_with_opening_hours', 'name': '"""exceptional hours"""', 'exceptional': '(True)', 'parent': 'parent'}), "(start='2115-01-10', end='2115-01-12', resource=\n resource_with_opening_hours, name='exceptional hours', exceptional=True,\n parent=parent)\n", (4319, 4463), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((5084, 5148), 'resources.models.EquipmentCategory.objects.create', 'EquipmentCategory.objects.create', ([], {'name': '"""test equipment category"""'}), "(name='test equipment category')\n", 
(5116, 5148), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((5255, 5331), 'resources.models.Equipment.objects.create', 'Equipment.objects.create', ([], {'name': '"""test equipment"""', 'category': 'equipment_category'}), "(name='test equipment', category=equipment_category)\n", (5279, 5331), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((5448, 5546), 'resources.models.EquipmentAlias.objects.create', 'EquipmentAlias.objects.create', ([], {'name': '"""test equipment alias"""', 'language': '"""fi"""', 'equipment': 'equipment'}), "(name='test equipment alias', language='fi',\n equipment=equipment)\n", (5477, 5546), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((5727, 5862), 'resources.models.ResourceEquipment.objects.create', 'ResourceEquipment.objects.create', ([], {'equipment': 'equipment', 'resource': 'resource_in_unit', 'data': 'data', 'description': '"""test resource equipment"""'}), "(equipment=equipment, resource=\n resource_in_unit, data=data, description='test resource equipment')\n", (5759, 5862), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((7483, 7522), 'django.contrib.auth.models.Group.objects.create', 'Group.objects.create', ([], {'name': '"""test group"""'}), "(name='test group')\n", (7503, 7522), False, 'from django.contrib.auth.models import Group\n'), ((7590, 7652), 'resources.models.Purpose.objects.create', 'Purpose.objects.create', ([], {'name': '"""test purpose"""', 'id': '"""test-purpose"""'}), "(name='test purpose', id='test-purpose')\n", (7612, 7652), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((7721, 7807), 'resources.models.ResourceGroup.objects.create', 'ResourceGroup.objects.create', ([], {'identifier': '"""test_group"""', 'name': '"""Test resource group"""'}), "(identifier='test_group', name=\n 'Test resource group')\n", (7749, 7807), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((7956, 8046), 'resources.models.ResourceGroup.objects.create', 'ResourceGroup.objects.create', ([], {'identifier': '"""test_group_2"""', 'name': '"""Test resource group 2"""'}), "(identifier='test_group_2', name=\n 'Test resource group 2')\n", (7984, 8046), False, 'from resources.models import Equipment, EquipmentAlias, ResourceEquipment, EquipmentCategory, TermsOfUse, ResourceGroup\n'), ((8187, 8236), 'munigeo.models.Municipality.objects.create', 'Municipality.objects.create', ([], {'id': '"""foo"""', 'name': '"""Foo"""'}), "(id='foo', name='Foo')\n", (8214, 8236), False, 'from munigeo.models import Municipality\n'), ((8431, 8474), 'resources.models.AccessibilityViewpoint.objects.create', 'AccessibilityViewpoint.objects.create', ([], {}), '(**vp)\n', (8468, 8474), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((8619, 8662), 'resources.models.AccessibilityViewpoint.objects.create', 'AccessibilityViewpoint.objects.create', ([], {}), '(**vp)\n', (8656, 8662), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((8725, 8783), 
'resources.models.AccessibilityValue.objects.create', 'AccessibilityValue.objects.create', ([], {'value': '"""green"""', 'order': '(10)'}), "(value='green', order=10)\n", (8758, 8783), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((8844, 8901), 'resources.models.AccessibilityValue.objects.create', 'AccessibilityValue.objects.create', ([], {'value': '"""red"""', 'order': '(-10)'}), "(value='red', order=-10)\n", (8877, 8901), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9273, 9420), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (9309, 9420), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9450, 9592), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_red'}), '(resource=resource_in_unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_red)\n', (9486, 9592), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9622, 9766), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit.unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit.unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (9654, 9766), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((9796, 9937), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit.unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit.unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (9828, 9937), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((10366, 10512), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit2', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_red'}), '(resource=resource_in_unit2, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_red)\n', (10402, 10512), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((10542, 10687), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit2', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit2, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (10578, 10687), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, 
ResourceAccessibility, UnitAccessibility\n'), ((10717, 10862), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit2.unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit2.unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (10749, 10862), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((10892, 11034), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit2.unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(unit=resource_in_unit2.unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (10924, 11034), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((11414, 11562), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit3', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit3, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_green)\n', (11450, 11562), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((11592, 11737), 'resources.models.ResourceAccessibility.objects.create', 'ResourceAccessibility.objects.create', ([], {'resource': 'resource_in_unit3', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_green'}), '(resource=resource_in_unit3, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_green)\n', (11628, 11737), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((11767, 11910), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit3.unit', 'viewpoint': 'accessibility_viewpoint_wheelchair', 'value': 'accessibility_value_red'}), '(unit=resource_in_unit3.unit, viewpoint=\n accessibility_viewpoint_wheelchair, value=accessibility_value_red)\n', (11799, 11910), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((11940, 12080), 'resources.models.UnitAccessibility.objects.create', 'UnitAccessibility.objects.create', ([], {'unit': 'resource_in_unit3.unit', 'viewpoint': 'accessibility_viewpoint_hearing', 'value': 'accessibility_value_red'}), '(unit=resource_in_unit3.unit, viewpoint=\n accessibility_viewpoint_hearing, value=accessibility_value_red)\n', (11972, 12080), False, 'from resources.models import AccessibilityValue, AccessibilityViewpoint, ResourceAccessibility, UnitAccessibility\n'), ((1355, 1448), 'resources.models.ResourceType.objects.get_or_create', 'ResourceType.objects.get_or_create', ([], {'id': '"""test_space"""', 'name': '"""test_space"""', 'main_type': '"""space"""'}), "(id='test_space', name='test_space',\n main_type='space')\n", (1389, 1448), False, 'from resources.models import Resource, ResourceType, Unit, Purpose, Day, Period\n'), ((4701, 4727), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4719, 4727), False, 'import datetime\n'), ((4862, 4888), 
'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (4880, 4888), False, 'import datetime\n'), ((2544, 2571), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(2)'}), '(hours=2)\n', (2562, 2571), False, 'import datetime\n'), ((3131, 3158), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (3149, 3158), False, 'import datetime\n'), ((3505, 3532), 'datetime.timedelta', 'datetime.timedelta', ([], {'hours': '(4)'}), '(hours=4)\n', (3523, 3532), False, 'import datetime\n'), ((3694, 3719), 'datetime.date', 'datetime.date', (['(2115)', '(1)', '(1)'], {}), '(2115, 1, 1)\n', (3707, 3719), False, 'import datetime\n'), ((3756, 3783), 'datetime.date', 'datetime.date', (['(2115)', '(12)', '(31)'], {}), '(2115, 12, 31)\n', (3769, 3783), False, 'import datetime\n'), ((3985, 4004), 'datetime.time', 'datetime.time', (['(8)', '(0)'], {}), '(8, 0)\n', (3998, 4004), False, 'import datetime\n'), ((4040, 4060), 'datetime.time', 'datetime.time', (['(18)', '(0)'], {}), '(18, 0)\n', (4053, 4060), False, 'import datetime\n'), ((5991, 6007), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6005, 6007), False, 'from django.contrib.auth import get_user_model\n'), ((6235, 6251), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6249, 6251), False, 'from django.contrib.auth import get_user_model\n'), ((6457, 6473), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6471, 6473), False, 'from django.contrib.auth import get_user_model\n'), ((6759, 6775), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (6773, 6775), False, 'from django.contrib.auth import get_user_model\n'), ((7172, 7188), 'django.contrib.auth.get_user_model', 'get_user_model', ([], {}), '()\n', (7186, 7188), False, 'from django.contrib.auth import get_user_model\n')] |
from enum import Enum
from window import Window
D = Enum ('Directions','N NE E SE S SW W NW')
selector_map = {
D.NW: [0.5,0.5], D.N: [1.5,0], D.NE: [2.5,0.5],
D.W: [0,1.5], D.E: [3,1.5],
D.SW: [0.5,2.5], D.S: [1.5,3], D.SE: [2.5,2.5],
}
selector_size = 100
window_size = selector_size*4
window = Window (window_size,window_size,selector_map,selector_size,selector_size)
# set actions here
from functools import partial
def say (something):
print (''.join (('Me: "',something,'"')))
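# For example, say('hello') prints: Me: "hello"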
window.actions[D.NW] = partial (say,'northwast')
window.actions[D.N] = partial (say,'north')
window.actions[D.NE] = partial (say,'neorthest')
window.actions[D.W] = partial (say,'western')
window.actions[D.E] = partial (say,'easy')
window.actions[D.SW] = partial (say,'suess whest')
window.actions[D.S] = partial (say,'sissy')
window.actions[D.SE] = partial (say,'seoul')
window.go ()
| [
"window.Window",
"functools.partial",
"enum.Enum"
] | [((53, 94), 'enum.Enum', 'Enum', (['"""Directions"""', '"""N NE E SE S SW W NW"""'], {}), "('Directions', 'N NE E SE S SW W NW')\n", (57, 94), False, 'from enum import Enum\n'), ((351, 427), 'window.Window', 'Window', (['window_size', 'window_size', 'selector_map', 'selector_size', 'selector_size'], {}), '(window_size, window_size, selector_map, selector_size, selector_size)\n', (357, 427), False, 'from window import Window\n'), ((566, 591), 'functools.partial', 'partial', (['say', '"""northwast"""'], {}), "(say, 'northwast')\n", (573, 591), False, 'from functools import partial\n'), ((614, 635), 'functools.partial', 'partial', (['say', '"""north"""'], {}), "(say, 'north')\n", (621, 635), False, 'from functools import partial\n'), ((659, 684), 'functools.partial', 'partial', (['say', '"""neorthest"""'], {}), "(say, 'neorthest')\n", (666, 684), False, 'from functools import partial\n'), ((707, 730), 'functools.partial', 'partial', (['say', '"""western"""'], {}), "(say, 'western')\n", (714, 730), False, 'from functools import partial\n'), ((753, 773), 'functools.partial', 'partial', (['say', '"""easy"""'], {}), "(say, 'easy')\n", (760, 773), False, 'from functools import partial\n'), ((797, 824), 'functools.partial', 'partial', (['say', '"""suess whest"""'], {}), "(say, 'suess whest')\n", (804, 824), False, 'from functools import partial\n'), ((847, 868), 'functools.partial', 'partial', (['say', '"""sissy"""'], {}), "(say, 'sissy')\n", (854, 868), False, 'from functools import partial\n'), ((892, 913), 'functools.partial', 'partial', (['say', '"""seoul"""'], {}), "(say, 'seoul')\n", (899, 913), False, 'from functools import partial\n')] |
#!/usr/bin/env python3
# coding=utf-8
from libs.check_modules import check_modules
from sys import exit
from os import _exit
check_modules()
from os import path
from libs.logo import print_logo
from libs.utils import print_success
from libs.utils import print_error
from libs.utils import ask_question
from libs.utils import print_status
from libs.utils import parse_proxy_file
from libs.proxy_harvester import find_proxies
from libs.attack import report_profile_attack
from libs.attack import report_video_attack
from multiprocessing import Process
from colorama import Fore, Back, Style
def chunks(lst, n):
"""Yield successive n-sized chunks from lst."""
for i in range(0, len(lst), n):
yield lst[i:i + n]
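# For example, list(chunks([1, 2, 3, 4, 5], 2)) yields [[1, 2], [3, 4], [5]].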
def profile_attack_process(username, proxy_list):
if (len(proxy_list) == 0):
for _ in range(10):
report_profile_attack(username, None)
return
for proxy in proxy_list:
report_profile_attack(username, proxy)
def video_attack_process(video_url, proxy_list):
if (len(proxy_list) == 0):
for _ in range(10):
report_video_attack(video_url, None)
return
for proxy in proxy_list:
report_video_attack(video_url, proxy)
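# Both worker functions follow the same pattern: with no proxies they fire a
# fixed batch of 10 reports directly, otherwise one report per proxy given.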
def video_attack(proxies):
video_url = ask_question("Enter the link of the video you want to report")
print(Style.RESET_ALL)
if (len(proxies) == 0):
for k in range(5):
p = Process(target=video_attack_process, args=(video_url, [],))
p.start()
print_status(str(k + 1) + ". Transaction Opened!")
if (k == 5): print()
return
chunk = list(chunks(proxies, 10))
print("")
print_status("Video complaint attack is on!\n")
i = 1
for proxy_list in chunk:
p = Process(target=video_attack_process, args=(video_url, proxy_list,))
p.start()
print_status(str(i) + ". Transaction Opened!")
        if (i == 5): print()
i = i + 1
def profile_attack(proxies):
username = ask_question("Enter the username of the person you want to report")
print(Style.RESET_ALL)
if (len(proxies) == 0):
for k in range(5):
p = Process(target=profile_attack_process, args=(username, [],))
p.start()
print_status(str(k + 1) + ". Transaction Opened!")
return
chunk = list(chunks(proxies, 10))
print("")
print_status("Profile complaint attack is starting!\n")
i = 1
for proxy_list in chunk:
p = Process(target=profile_attack_process, args=(username, proxy_list,))
p.start()
print_status(str(i) + ". Transaction Opened!")
        if (i == 5): print()
i = i + 1
def main():
print_success("Modules loaded!\n")
ret = ask_question("Would you like to use a proxy? [Y / N]")
proxies = []
if (ret == "Y" or ret == "y"):
ret = ask_question("Would you like to collect your proxies from the internet? [Y / N]")
if (ret == "Y" or ret == "y"):
print_status("Gathering proxy from the Internet! This may take a while.\n")
proxies = find_proxies()
elif (ret == "N" or ret == "n"):
print_status("Please have a maximum of 50 proxies in a file!")
file_path = ask_question("Enter the path to your proxy list")
proxies = parse_proxy_file(file_path)
else:
print_error("Answer not understood, exiting!")
exit()
print_success(str(len(proxies)) + " Number of proxy found!\n")
elif (ret == "N" or ret == "n"):
pass
else:
print_error("Answer not understood, exiting!")
exit()
print("")
print_status("1 - Report Profile.")
print_status("2 - Report a video.")
report_choice = ask_question("Please select the complaint method")
print("")
if (report_choice.isdigit() == False):
print_error("The answer is not understood.")
exit(0)
if (int(report_choice) > 2 or int(report_choice) == 0):
print_error("The answer is not understood.")
exit(0)
if (int(report_choice) == 1):
profile_attack(proxies)
elif (int(report_choice) == 2):
video_attack(proxies)
if __name__ == "__main__":
print_logo()
try:
main()
print(Style.RESET_ALL)
except KeyboardInterrupt:
print("\n\n" + Fore.RED + "[*] Program is closing!")
print(Style.RESET_ALL)
_exit(0) | [
"libs.attack.report_profile_attack",
"libs.utils.print_success",
"libs.attack.report_video_attack",
"libs.utils.print_error",
"libs.utils.print_status",
"multiprocessing.Process",
"libs.utils.ask_question",
"libs.check_modules.check_modules",
"libs.logo.print_logo",
"os._exit",
"libs.utils.parse_proxy_file",
"sys.exit",
"libs.proxy_harvester.find_proxies"
] | [((134, 149), 'libs.check_modules.check_modules', 'check_modules', ([], {}), '()\n', (147, 149), False, 'from libs.check_modules import check_modules\n'), ((1323, 1385), 'libs.utils.ask_question', 'ask_question', (['"""Enter the link of the video you want to report"""'], {}), "('Enter the link of the video you want to report')\n", (1335, 1385), False, 'from libs.utils import ask_question\n'), ((1748, 1795), 'libs.utils.print_status', 'print_status', (['"""Video complaint attack is on!\n"""'], {}), "('Video complaint attack is on!\\n')\n", (1760, 1795), False, 'from libs.utils import print_status\n'), ((2092, 2159), 'libs.utils.ask_question', 'ask_question', (['"""Enter the username of the person you want to report"""'], {}), "('Enter the username of the person you want to report')\n", (2104, 2159), False, 'from libs.utils import ask_question\n'), ((2489, 2544), 'libs.utils.print_status', 'print_status', (['"""Profile complaint attack is starting!\n"""'], {}), "('Profile complaint attack is starting!\\n')\n", (2501, 2544), False, 'from libs.utils import print_status\n'), ((2814, 2848), 'libs.utils.print_success', 'print_success', (['"""Modules loaded!\n"""'], {}), "('Modules loaded!\\n')\n", (2827, 2848), False, 'from libs.utils import print_success\n'), ((2862, 2916), 'libs.utils.ask_question', 'ask_question', (['"""Would you like to use a proxy? [Y / N]"""'], {}), "('Would you like to use a proxy? [Y / N]')\n", (2874, 2916), False, 'from libs.utils import ask_question\n'), ((3819, 3854), 'libs.utils.print_status', 'print_status', (['"""1 - Report Profile."""'], {}), "('1 - Report Profile.')\n", (3831, 3854), False, 'from libs.utils import print_status\n'), ((3860, 3895), 'libs.utils.print_status', 'print_status', (['"""2 - Report a video."""'], {}), "('2 - Report a video.')\n", (3872, 3895), False, 'from libs.utils import print_status\n'), ((3917, 3967), 'libs.utils.ask_question', 'ask_question', (['"""Please select the complaint method"""'], {}), "('Please select the complaint method')\n", (3929, 3967), False, 'from libs.utils import ask_question\n'), ((4411, 4423), 'libs.logo.print_logo', 'print_logo', ([], {}), '()\n', (4421, 4423), False, 'from libs.logo import print_logo\n'), ((979, 1017), 'libs.attack.report_profile_attack', 'report_profile_attack', (['username', 'proxy'], {}), '(username, proxy)\n', (1000, 1017), False, 'from libs.attack import report_profile_attack\n'), ((1238, 1275), 'libs.attack.report_video_attack', 'report_video_attack', (['video_url', 'proxy'], {}), '(video_url, proxy)\n', (1257, 1275), False, 'from libs.attack import report_video_attack\n'), ((1852, 1918), 'multiprocessing.Process', 'Process', ([], {'target': 'video_attack_process', 'args': '(video_url, proxy_list)'}), '(target=video_attack_process, args=(video_url, proxy_list))\n', (1859, 1918), False, 'from multiprocessing import Process\n'), ((2601, 2668), 'multiprocessing.Process', 'Process', ([], {'target': 'profile_attack_process', 'args': '(username, proxy_list)'}), '(target=profile_attack_process, args=(username, proxy_list))\n', (2608, 2668), False, 'from multiprocessing import Process\n'), ((2990, 3076), 'libs.utils.ask_question', 'ask_question', (['"""Would you like to collect your proxies from the internet? [Y / N]"""'], {}), "(\n 'Would you like to collect your proxies from the internet? 
[Y / N]')\n", (3002, 3076), False, 'from libs.utils import ask_question\n'), ((4038, 4082), 'libs.utils.print_error', 'print_error', (['"""The answer is not understood."""'], {}), "('The answer is not understood.')\n", (4049, 4082), False, 'from libs.utils import print_error\n'), ((4092, 4099), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (4096, 4099), False, 'from sys import exit\n'), ((4176, 4220), 'libs.utils.print_error', 'print_error', (['"""The answer is not understood."""'], {}), "('The answer is not understood.')\n", (4187, 4220), False, 'from libs.utils import print_error\n'), ((4230, 4237), 'sys.exit', 'exit', (['(0)'], {}), '(0)\n', (4234, 4237), False, 'from sys import exit\n'), ((884, 921), 'libs.attack.report_profile_attack', 'report_profile_attack', (['username', 'None'], {}), '(username, None)\n', (905, 921), False, 'from libs.attack import report_profile_attack\n'), ((1144, 1180), 'libs.attack.report_video_attack', 'report_video_attack', (['video_url', 'None'], {}), '(video_url, None)\n', (1163, 1180), False, 'from libs.attack import report_video_attack\n'), ((1488, 1546), 'multiprocessing.Process', 'Process', ([], {'target': 'video_attack_process', 'args': '(video_url, [])'}), '(target=video_attack_process, args=(video_url, []))\n', (1495, 1546), False, 'from multiprocessing import Process\n'), ((2262, 2321), 'multiprocessing.Process', 'Process', ([], {'target': 'profile_attack_process', 'args': '(username, [])'}), '(target=profile_attack_process, args=(username, []))\n', (2269, 2321), False, 'from multiprocessing import Process\n'), ((3127, 3202), 'libs.utils.print_status', 'print_status', (['"""Gathering proxy from the Internet! This may take a while.\n"""'], {}), "('Gathering proxy from the Internet! This may take a while.\\n')\n", (3139, 3202), False, 'from libs.utils import print_status\n'), ((3226, 3240), 'libs.proxy_harvester.find_proxies', 'find_proxies', ([], {}), '()\n', (3238, 3240), False, 'from libs.proxy_harvester import find_proxies\n'), ((3726, 3772), 'libs.utils.print_error', 'print_error', (['"""Answer not understood, exiting!"""'], {}), "('Answer not understood, exiting!')\n", (3737, 3772), False, 'from libs.utils import print_error\n'), ((3782, 3788), 'sys.exit', 'exit', ([], {}), '()\n', (3786, 3788), False, 'from sys import exit\n'), ((4616, 4624), 'os._exit', '_exit', (['(0)'], {}), '(0)\n', (4621, 4624), False, 'from os import _exit\n'), ((3296, 3358), 'libs.utils.print_status', 'print_status', (['"""Please have a maximum of 50 proxies in a file!"""'], {}), "('Please have a maximum of 50 proxies in a file!')\n", (3308, 3358), False, 'from libs.utils import print_status\n'), ((3384, 3433), 'libs.utils.ask_question', 'ask_question', (['"""Enter the path to your proxy list"""'], {}), "('Enter the path to your proxy list')\n", (3396, 3433), False, 'from libs.utils import ask_question\n'), ((3457, 3484), 'libs.utils.parse_proxy_file', 'parse_proxy_file', (['file_path'], {}), '(file_path)\n', (3473, 3484), False, 'from libs.utils import parse_proxy_file\n'), ((3513, 3559), 'libs.utils.print_error', 'print_error', (['"""Answer not understood, exiting!"""'], {}), "('Answer not understood, exiting!')\n", (3524, 3559), False, 'from libs.utils import print_error\n'), ((3573, 3579), 'sys.exit', 'exit', ([], {}), '()\n', (3577, 3579), False, 'from sys import exit\n')] |
# -*- coding: utf-8 -*-
#
# Copyright © Spyder Project Contributors
# Licensed under the terms of the MIT License
# (see spyder/__init__.py for details)
"""
NumPy Array Editor Dialog based on Qt
"""
# pylint: disable=C0103
# pylint: disable=R0903
# pylint: disable=R0911
# pylint: disable=R0201
# Standard library imports
from __future__ import print_function
# Third party imports
from qtpy.compat import from_qvariant, to_qvariant
from qtpy.QtCore import (QAbstractTableModel, QItemSelection, QLocale,
QItemSelectionRange, QModelIndex, Qt, Slot)
from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence
from qtpy.QtWidgets import (QAbstractItemDelegate, QApplication, QCheckBox,
QComboBox, QDialog, QDialogButtonBox, QGridLayout,
QHBoxLayout, QInputDialog, QItemDelegate, QLabel,
QLineEdit, QMenu, QMessageBox, QPushButton,
QSpinBox, QStackedWidget, QTableView, QVBoxLayout,
QWidget)
import numpy as np
# Local imports
from spyder.config.base import _
from spyder.config.fonts import DEFAULT_SMALL_DELTA
from spyder.config.gui import get_font, config_shortcut
from spyder.py3compat import (io, is_binary_string, is_string,
is_text_string, PY3, to_binary_string,
to_text_string)
from spyder.utils import icon_manager as ima
from spyder.utils.qthelpers import add_actions, create_action, keybinding
# Note: string and unicode data types will be formatted with '%s' (see below)
SUPPORTED_FORMATS = {
'single': '%.6g',
'double': '%.6g',
'float_': '%.6g',
'longfloat': '%.6g',
'float16': '%.6g',
'float32': '%.6g',
'float64': '%.6g',
'float96': '%.6g',
'float128': '%.6g',
'csingle': '%r',
'complex_': '%r',
'clongfloat': '%r',
'complex64': '%r',
'complex128': '%r',
'complex192': '%r',
'complex256': '%r',
'byte': '%d',
'bytes8': '%s',
'short': '%d',
'intc': '%d',
'int_': '%d',
'longlong': '%d',
'intp': '%d',
'int8': '%d',
'int16': '%d',
'int32': '%d',
'int64': '%d',
'ubyte': '%d',
'ushort': '%d',
'uintc': '%d',
'uint': '%d',
'ulonglong': '%d',
'uintp': '%d',
'uint8': '%d',
'uint16': '%d',
'uint32': '%d',
'uint64': '%d',
'bool_': '%r',
'bool8': '%r',
'bool': '%r',
}
LARGE_SIZE = 5e5
LARGE_NROWS = 1e5
LARGE_COLS = 60
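# Arrays whose total size exceeds LARGE_SIZE, or whose row/column counts exceed
# LARGE_NROWS/LARGE_COLS, are paged in by ArrayModel in chunks of
# ROWS_TO_LOAD/COLS_TO_LOAD (see fetch_more below).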
#==============================================================================
# Utility functions
#==============================================================================
def is_float(dtype):
"""Return True if datatype dtype is a float kind"""
return ('float' in dtype.name) or dtype.name in ['single', 'double']
def is_number(dtype):
"""Return True is datatype dtype is a number kind"""
return is_float(dtype) or ('int' in dtype.name) or ('long' in dtype.name) \
or ('short' in dtype.name)
def get_idx_rect(index_list):
"""Extract the boundaries from a list of indexes"""
rows, cols = list(zip(*[(i.row(), i.column()) for i in index_list]))
return ( min(rows), max(rows), min(cols), max(cols) )
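# For example, indexes covering rows 2-4 and columns 1-3 yield (2, 4, 1, 3).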
#==============================================================================
# Main classes
#==============================================================================
class ArrayModel(QAbstractTableModel):
"""Array Editor Table Model"""
ROWS_TO_LOAD = 500
COLS_TO_LOAD = 40
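    # Initial page size and per-fetch increment used when paging large arrays
    # (see fetch_more below).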
def __init__(self, data, format="%.6g", xlabels=None, ylabels=None,
readonly=False, parent=None):
QAbstractTableModel.__init__(self)
self.dialog = parent
self.changes = {}
self.xlabels = xlabels
self.ylabels = ylabels
self.readonly = readonly
self.test_array = np.array([0], dtype=data.dtype)
# for complex numbers, shading will be based on absolute value
# but for all other types it will be the real part
if data.dtype in (np.complex64, np.complex128):
self.color_func = np.abs
else:
self.color_func = np.real
# Backgroundcolor settings
huerange = [.66, .99] # Hue
self.sat = .7 # Saturation
self.val = 1. # Value
self.alp = .6 # Alpha-channel
self._data = data
self._format = format
self.total_rows = self._data.shape[0]
self.total_cols = self._data.shape[1]
size = self.total_rows * self.total_cols
try:
self.vmin = np.nanmin(self.color_func(data))
self.vmax = np.nanmax(self.color_func(data))
if self.vmax == self.vmin:
self.vmin -= 1
self.hue0 = huerange[0]
self.dhue = huerange[1]-huerange[0]
self.bgcolor_enabled = True
except (TypeError, ValueError):
self.vmin = None
self.vmax = None
self.hue0 = None
self.dhue = None
self.bgcolor_enabled = False
# Use paging when the total size, number of rows or number of
# columns is too large
if size > LARGE_SIZE:
self.rows_loaded = self.ROWS_TO_LOAD
self.cols_loaded = self.COLS_TO_LOAD
else:
if self.total_rows > LARGE_NROWS:
self.rows_loaded = self.ROWS_TO_LOAD
else:
self.rows_loaded = self.total_rows
if self.total_cols > LARGE_COLS:
self.cols_loaded = self.COLS_TO_LOAD
else:
self.cols_loaded = self.total_cols
def get_format(self):
"""Return current format"""
# Avoid accessing the private attribute _format from outside
return self._format
def get_data(self):
"""Return data"""
return self._data
def set_format(self, format):
"""Change display format"""
self._format = format
self.reset()
def columnCount(self, qindex=QModelIndex()):
"""Array column number"""
if self.total_cols <= self.cols_loaded:
return self.total_cols
else:
return self.cols_loaded
def rowCount(self, qindex=QModelIndex()):
"""Array row number"""
if self.total_rows <= self.rows_loaded:
return self.total_rows
else:
return self.rows_loaded
def can_fetch_more(self, rows=False, columns=False):
if rows:
if self.total_rows > self.rows_loaded:
return True
else:
return False
if columns:
if self.total_cols > self.cols_loaded:
return True
else:
return False
def fetch_more(self, rows=False, columns=False):
if self.can_fetch_more(rows=rows):
reminder = self.total_rows - self.rows_loaded
items_to_fetch = min(reminder, self.ROWS_TO_LOAD)
self.beginInsertRows(QModelIndex(), self.rows_loaded,
self.rows_loaded + items_to_fetch - 1)
self.rows_loaded += items_to_fetch
self.endInsertRows()
if self.can_fetch_more(columns=columns):
reminder = self.total_cols - self.cols_loaded
items_to_fetch = min(reminder, self.COLS_TO_LOAD)
self.beginInsertColumns(QModelIndex(), self.cols_loaded,
self.cols_loaded + items_to_fetch - 1)
self.cols_loaded += items_to_fetch
self.endInsertColumns()
def bgcolor(self, state):
"""Toggle backgroundcolor"""
self.bgcolor_enabled = state > 0
self.reset()
def get_value(self, index):
i = index.row()
j = index.column()
if len(self._data.shape) == 1:
value = self._data[j]
else:
value = self._data[i, j]
return self.changes.get((i, j), value)
def data(self, index, role=Qt.DisplayRole):
"""Cell content"""
if not index.isValid():
return to_qvariant()
value = self.get_value(index)
if is_binary_string(value):
try:
value = to_text_string(value, 'utf8')
except:
pass
if role == Qt.DisplayRole:
if value is np.ma.masked:
return ''
else:
try:
return to_qvariant(self._format % value)
except TypeError:
self.readonly = True
return repr(value)
elif role == Qt.TextAlignmentRole:
return to_qvariant(int(Qt.AlignCenter|Qt.AlignVCenter))
elif role == Qt.BackgroundColorRole and self.bgcolor_enabled \
and value is not np.ma.masked:
try:
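                # Map the cell value linearly onto the hue range:
                # vmax maps to hue0 and vmin maps to hue0 + dhue.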
hue = (self.hue0 +
self.dhue * (float(self.vmax) - self.color_func(value))
/ (float(self.vmax) - self.vmin))
hue = float(np.abs(hue))
color = QColor.fromHsvF(hue, self.sat, self.val, self.alp)
return to_qvariant(color)
except TypeError:
return to_qvariant()
elif role == Qt.FontRole:
return to_qvariant(get_font(font_size_delta=DEFAULT_SMALL_DELTA))
return to_qvariant()
def setData(self, index, value, role=Qt.EditRole):
"""Cell content change"""
if not index.isValid() or self.readonly:
return False
i = index.row()
j = index.column()
value = from_qvariant(value, str)
dtype = self._data.dtype.name
if dtype == "bool":
try:
val = bool(float(value))
except ValueError:
val = value.lower() == "true"
elif dtype.startswith("string") or dtype.startswith("bytes"):
val = to_binary_string(value, 'utf8')
elif dtype.startswith("unicode") or dtype.startswith("str"):
val = to_text_string(value)
else:
if value.lower().startswith('e') or value.lower().endswith('e'):
return False
try:
val = complex(value)
if not val.imag:
val = val.real
except ValueError as e:
QMessageBox.critical(self.dialog, "Error",
"Value error: %s" % str(e))
return False
try:
self.test_array[0] = val # will raise an Exception eventually
except OverflowError as e:
print("OverflowError: " + str(e)) # spyder: test-skip
QMessageBox.critical(self.dialog, "Error",
"Overflow error: %s" % str(e))
return False
# Add change to self.changes
self.changes[(i, j)] = val
self.dataChanged.emit(index, index)
if not is_string(val):
if val > self.vmax:
self.vmax = val
if val < self.vmin:
self.vmin = val
return True
def flags(self, index):
"""Set editable flag"""
if not index.isValid():
return Qt.ItemIsEnabled
return Qt.ItemFlags(QAbstractTableModel.flags(self, index)|
Qt.ItemIsEditable)
def headerData(self, section, orientation, role=Qt.DisplayRole):
"""Set header data"""
if role != Qt.DisplayRole:
return to_qvariant()
labels = self.xlabels if orientation == Qt.Horizontal else self.ylabels
if labels is None:
return to_qvariant(int(section))
else:
return to_qvariant(labels[section])
def reset(self):
self.beginResetModel()
self.endResetModel()
class ArrayDelegate(QItemDelegate):
"""Array Editor Item Delegate"""
def __init__(self, dtype, parent=None):
QItemDelegate.__init__(self, parent)
self.dtype = dtype
def createEditor(self, parent, option, index):
"""Create editor widget"""
model = index.model()
value = model.get_value(index)
if model._data.dtype.name == "bool":
value = not value
model.setData(index, to_qvariant(value))
return
elif value is not np.ma.masked:
editor = QLineEdit(parent)
editor.setFont(get_font(font_size_delta=DEFAULT_SMALL_DELTA))
editor.setAlignment(Qt.AlignCenter)
if is_number(self.dtype):
validator = QDoubleValidator(editor)
validator.setLocale(QLocale('C'))
editor.setValidator(validator)
editor.returnPressed.connect(self.commitAndCloseEditor)
return editor
def commitAndCloseEditor(self):
"""Commit and close editor"""
editor = self.sender()
# Avoid a segfault with PyQt5. Variable value won't be changed
        # but at least Spyder won't crash. It seems to be caused by a bug in sip.
try:
self.commitData.emit(editor)
except AttributeError:
pass
self.closeEditor.emit(editor, QAbstractItemDelegate.NoHint)
def setEditorData(self, editor, index):
"""Set editor widget's data"""
text = from_qvariant(index.model().data(index, Qt.DisplayRole), str)
editor.setText(text)
#TODO: Implement "Paste" (from clipboard) feature
class ArrayView(QTableView):
"""Array view class"""
def __init__(self, parent, model, dtype, shape):
QTableView.__init__(self, parent)
self.setModel(model)
self.setItemDelegate(ArrayDelegate(dtype, self))
total_width = 0
for k in range(shape[1]):
total_width += self.columnWidth(k)
self.viewport().resize(min(total_width, 1024), self.height())
self.shape = shape
self.menu = self.setup_menu()
config_shortcut(self.copy, context='variable_explorer', name='copy',
parent=self)
self.horizontalScrollBar().valueChanged.connect(
lambda val: self.load_more_data(val, columns=True))
self.verticalScrollBar().valueChanged.connect(
lambda val: self.load_more_data(val, rows=True))
def load_more_data(self, value, rows=False, columns=False):
try:
old_selection = self.selectionModel().selection()
old_rows_loaded = old_cols_loaded = None
if rows and value == self.verticalScrollBar().maximum():
old_rows_loaded = self.model().rows_loaded
self.model().fetch_more(rows=rows)
if columns and value == self.horizontalScrollBar().maximum():
old_cols_loaded = self.model().cols_loaded
self.model().fetch_more(columns=columns)
if old_rows_loaded is not None or old_cols_loaded is not None:
# if we've changed anything, update selection
new_selection = QItemSelection()
for part in old_selection:
top = part.top()
bottom = part.bottom()
if (old_rows_loaded is not None and
top == 0 and bottom == (old_rows_loaded-1)):
# complete column selected (so expand it to match
# updated range)
bottom = self.model().rows_loaded-1
left = part.left()
right = part.right()
if (old_cols_loaded is not None
and left == 0 and right == (old_cols_loaded-1)):
# compete row selected (so expand it to match updated
# range)
right = self.model().cols_loaded-1
top_left = self.model().index(top, left)
bottom_right = self.model().index(bottom, right)
part = QItemSelectionRange(top_left, bottom_right)
new_selection.append(part)
                self.selectionModel().select(
                    new_selection, self.selectionModel().ClearAndSelect)
except NameError:
# Needed to handle a NameError while fetching data when closing
            # See issue 7880
pass
def resize_to_contents(self):
"""Resize cells to contents"""
QApplication.setOverrideCursor(QCursor(Qt.WaitCursor))
self.resizeColumnsToContents()
self.model().fetch_more(columns=True)
self.resizeColumnsToContents()
QApplication.restoreOverrideCursor()
def setup_menu(self):
"""Setup context menu"""
self.copy_action = create_action(self, _('Copy'),
shortcut=keybinding('Copy'),
icon=ima.icon('editcopy'),
triggered=self.copy,
context=Qt.WidgetShortcut)
menu = QMenu(self)
add_actions(menu, [self.copy_action, ])
return menu
def contextMenuEvent(self, event):
"""Reimplement Qt method"""
self.menu.popup(event.globalPos())
event.accept()
def keyPressEvent(self, event):
"""Reimplement Qt method"""
if event == QKeySequence.Copy:
self.copy()
else:
QTableView.keyPressEvent(self, event)
def _sel_to_text(self, cell_range):
"""Copy an array portion to a unicode string"""
if not cell_range:
return
row_min, row_max, col_min, col_max = get_idx_rect(cell_range)
if col_min == 0 and col_max == (self.model().cols_loaded-1):
# we've selected a whole column. It isn't possible to
# select only the first part of a column without loading more,
# so we can treat it as intentional and copy the whole thing
col_max = self.model().total_cols-1
if row_min == 0 and row_max == (self.model().rows_loaded-1):
row_max = self.model().total_rows-1
_data = self.model().get_data()
if PY3:
output = io.BytesIO()
else:
output = io.StringIO()
try:
np.savetxt(output, _data[row_min:row_max+1, col_min:col_max+1],
delimiter='\t', fmt=self.model().get_format())
except:
QMessageBox.warning(self, _("Warning"),
_("It was not possible to copy values for "
"this array"))
return
contents = output.getvalue().decode('utf-8')
output.close()
return contents
@Slot()
def copy(self):
"""Copy text to clipboard"""
cliptxt = self._sel_to_text( self.selectedIndexes() )
clipboard = QApplication.clipboard()
clipboard.setText(cliptxt)
class ArrayEditorWidget(QWidget):
def __init__(self, parent, data, readonly=False,
xlabels=None, ylabels=None):
QWidget.__init__(self, parent)
self.data = data
self.old_data_shape = None
if len(self.data.shape) == 1:
self.old_data_shape = self.data.shape
self.data.shape = (self.data.shape[0], 1)
elif len(self.data.shape) == 0:
self.old_data_shape = self.data.shape
self.data.shape = (1, 1)
format = SUPPORTED_FORMATS.get(data.dtype.name, '%s')
self.model = ArrayModel(self.data, format=format, xlabels=xlabels,
ylabels=ylabels, readonly=readonly, parent=self)
self.view = ArrayView(self, self.model, data.dtype, data.shape)
btn_layout = QHBoxLayout()
btn_layout.setAlignment(Qt.AlignLeft)
btn = QPushButton(_( "Format"))
# disable format button for int type
btn.setEnabled(is_float(data.dtype))
btn_layout.addWidget(btn)
btn.clicked.connect(self.change_format)
btn = QPushButton(_( "Resize"))
btn_layout.addWidget(btn)
btn.clicked.connect(self.view.resize_to_contents)
bgcolor = QCheckBox(_( 'Background color'))
bgcolor.setChecked(self.model.bgcolor_enabled)
bgcolor.setEnabled(self.model.bgcolor_enabled)
bgcolor.stateChanged.connect(self.model.bgcolor)
btn_layout.addWidget(bgcolor)
layout = QVBoxLayout()
layout.addWidget(self.view)
layout.addLayout(btn_layout)
self.setLayout(layout)
def accept_changes(self):
"""Accept changes"""
for (i, j), value in list(self.model.changes.items()):
self.data[i, j] = value
if self.old_data_shape is not None:
self.data.shape = self.old_data_shape
def reject_changes(self):
"""Reject changes"""
if self.old_data_shape is not None:
self.data.shape = self.old_data_shape
def change_format(self):
"""Change display format"""
format, valid = QInputDialog.getText(self, _( 'Format'),
_( "Float formatting"),
QLineEdit.Normal, self.model.get_format())
if valid:
format = str(format)
try:
format % 1.1
except:
QMessageBox.critical(self, _("Error"),
_("Format (%s) is incorrect") % format)
return
self.model.set_format(format)
class ArrayEditor(QDialog):
"""Array Editor Dialog"""
def __init__(self, parent=None):
QDialog.__init__(self, parent)
# Destroying the C++ object right after closing the dialog box,
# otherwise it may be garbage-collected in another QThread
# (e.g. the editor's analysis thread in Spyder), thus leading to
# a segmentation fault on UNIX or an application crash on Windows
self.setAttribute(Qt.WA_DeleteOnClose)
self.data = None
self.arraywidget = None
self.stack = None
self.layout = None
self.btn_save_and_close = None
self.btn_close = None
# Values for 3d array editor
self.dim_indexes = [{}, {}, {}]
self.last_dim = 0 # Adjust this for changing the startup dimension
def setup_and_check(self, data, title='', readonly=False,
xlabels=None, ylabels=None):
"""
Setup ArrayEditor:
return False if data is not supported, True otherwise
"""
self.data = data
readonly = readonly or not self.data.flags.writeable
is_record_array = data.dtype.names is not None
is_masked_array = isinstance(data, np.ma.MaskedArray)
if data.ndim > 3:
self.error(_("Arrays with more than 3 dimensions are not "
"supported"))
return False
if xlabels is not None and len(xlabels) != self.data.shape[1]:
self.error(_("The 'xlabels' argument length do no match array "
"column number"))
return False
if ylabels is not None and len(ylabels) != self.data.shape[0]:
self.error(_("The 'ylabels' argument length do no match array row "
"number"))
return False
if not is_record_array:
dtn = data.dtype.name
if dtn not in SUPPORTED_FORMATS and not dtn.startswith('str') \
and not dtn.startswith('unicode'):
arr = _("%s arrays") % data.dtype.name
self.error(_("%s are currently not supported") % arr)
return False
self.layout = QGridLayout()
self.setLayout(self.layout)
self.setWindowIcon(ima.icon('arredit'))
if title:
title = to_text_string(title) + " - " + _("NumPy array")
else:
title = _("Array editor")
if readonly:
title += ' (' + _('read only') + ')'
self.setWindowTitle(title)
self.resize(600, 500)
# Stack widget
self.stack = QStackedWidget(self)
if is_record_array:
for name in data.dtype.names:
self.stack.addWidget(ArrayEditorWidget(self, data[name],
readonly, xlabels, ylabels))
elif is_masked_array:
self.stack.addWidget(ArrayEditorWidget(self, data, readonly,
xlabels, ylabels))
self.stack.addWidget(ArrayEditorWidget(self, data.data, readonly,
xlabels, ylabels))
self.stack.addWidget(ArrayEditorWidget(self, data.mask, readonly,
xlabels, ylabels))
elif data.ndim == 3:
pass
else:
self.stack.addWidget(ArrayEditorWidget(self, data, readonly,
xlabels, ylabels))
self.arraywidget = self.stack.currentWidget()
if self.arraywidget:
self.arraywidget.model.dataChanged.connect(
self.save_and_close_enable)
self.stack.currentChanged.connect(self.current_widget_changed)
self.layout.addWidget(self.stack, 1, 0)
# Buttons configuration
btn_layout = QHBoxLayout()
if is_record_array or is_masked_array or data.ndim == 3:
if is_record_array:
btn_layout.addWidget(QLabel(_("Record array fields:")))
names = []
for name in data.dtype.names:
field = data.dtype.fields[name]
text = name
if len(field) >= 3:
title = field[2]
if not is_text_string(title):
title = repr(title)
text += ' - '+title
names.append(text)
else:
names = [_('Masked data'), _('Data'), _('Mask')]
if data.ndim == 3:
# QSpinBox
self.index_spin = QSpinBox(self, keyboardTracking=False)
self.index_spin.valueChanged.connect(self.change_active_widget)
# QComboBox
names = [str(i) for i in range(3)]
ra_combo = QComboBox(self)
ra_combo.addItems(names)
ra_combo.currentIndexChanged.connect(self.current_dim_changed)
# Adding the widgets to layout
label = QLabel(_("Axis:"))
btn_layout.addWidget(label)
btn_layout.addWidget(ra_combo)
self.shape_label = QLabel()
btn_layout.addWidget(self.shape_label)
label = QLabel(_("Index:"))
btn_layout.addWidget(label)
btn_layout.addWidget(self.index_spin)
self.slicing_label = QLabel()
btn_layout.addWidget(self.slicing_label)
# set the widget to display when launched
self.current_dim_changed(self.last_dim)
else:
ra_combo = QComboBox(self)
ra_combo.currentIndexChanged.connect(self.stack.setCurrentIndex)
ra_combo.addItems(names)
btn_layout.addWidget(ra_combo)
if is_masked_array:
label = QLabel(_("<u>Warning</u>: changes are applied separately"))
label.setToolTip(_("For performance reasons, changes applied "\
"to masked array won't be reflected in "\
"array's data (and vice-versa)."))
btn_layout.addWidget(label)
btn_layout.addStretch()
if not readonly:
self.btn_save_and_close = QPushButton(_('Save and Close'))
self.btn_save_and_close.setDisabled(True)
self.btn_save_and_close.clicked.connect(self.accept)
btn_layout.addWidget(self.btn_save_and_close)
self.btn_close = QPushButton(_('Close'))
self.btn_close.setAutoDefault(True)
self.btn_close.setDefault(True)
self.btn_close.clicked.connect(self.reject)
btn_layout.addWidget(self.btn_close)
self.layout.addLayout(btn_layout, 2, 0)
self.setMinimumSize(400, 300)
# Make the dialog act as a window
self.setWindowFlags(Qt.Window)
return True
@Slot(QModelIndex, QModelIndex)
def save_and_close_enable(self, left_top, bottom_right):
"""Handle the data change event to enable the save and close button."""
if self.btn_save_and_close:
self.btn_save_and_close.setEnabled(True)
self.btn_save_and_close.setAutoDefault(True)
self.btn_save_and_close.setDefault(True)
def current_widget_changed(self, index):
self.arraywidget = self.stack.widget(index)
self.arraywidget.model.dataChanged.connect(self.save_and_close_enable)
def change_active_widget(self, index):
"""
This is implemented for handling negative values in index for
3d arrays, to give the same behavior as slicing
"""
string_index = [':']*3
string_index[self.last_dim] = '<font color=red>%i</font>'
self.slicing_label.setText((r"Slicing: [" + ", ".join(string_index) +
"]") % index)
if index < 0:
data_index = self.data.shape[self.last_dim] + index
else:
data_index = index
slice_index = [slice(None)]*3
slice_index[self.last_dim] = data_index
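        # e.g. with last_dim == 0 and data_index == 2 this selects data[2, :, :]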
stack_index = self.dim_indexes[self.last_dim].get(data_index)
if stack_index is None:
stack_index = self.stack.count()
try:
self.stack.addWidget(ArrayEditorWidget(
self, self.data[tuple(slice_index)]))
except IndexError: # Handle arrays of size 0 in one axis
self.stack.addWidget(ArrayEditorWidget(self, self.data))
self.dim_indexes[self.last_dim][data_index] = stack_index
self.stack.update()
self.stack.setCurrentIndex(stack_index)
def current_dim_changed(self, index):
"""
This change the active axis the array editor is plotting over
in 3D
"""
self.last_dim = index
string_size = ['%i']*3
string_size[index] = '<font color=red>%i</font>'
self.shape_label.setText(('Shape: (' + ', '.join(string_size) +
') ') % self.data.shape)
if self.index_spin.value() != 0:
self.index_spin.setValue(0)
else:
# this is done since if the value is currently 0 it does not emit
# currentIndexChanged(int)
self.change_active_widget(0)
self.index_spin.setRange(-self.data.shape[index],
self.data.shape[index]-1)
@Slot()
def accept(self):
"""Reimplement Qt method"""
for index in range(self.stack.count()):
self.stack.widget(index).accept_changes()
QDialog.accept(self)
def get_value(self):
"""Return modified array -- this is *not* a copy"""
        # It is important to avoid accessing the Qt C++ object as it has probably
# already been destroyed, due to the Qt.WA_DeleteOnClose attribute
return self.data
def error(self, message):
"""An error occured, closing the dialog box"""
QMessageBox.critical(self, _("Array editor"), message)
self.setAttribute(Qt.WA_DeleteOnClose)
self.reject()
@Slot()
def reject(self):
"""Reimplement Qt method"""
if self.arraywidget is not None:
for index in range(self.stack.count()):
self.stack.widget(index).reject_changes()
QDialog.reject(self)
| [
"qtpy.QtWidgets.QComboBox",
"spyder.utils.qthelpers.add_actions",
"qtpy.QtWidgets.QGridLayout",
"spyder.py3compat.to_text_string",
"spyder.utils.qthelpers.keybinding",
"numpy.array",
"spyder.py3compat.to_binary_string",
"qtpy.QtWidgets.QDialog.accept",
"spyder.py3compat.is_binary_string",
"spyder.py3compat.is_string",
"spyder.config.base._",
"qtpy.QtWidgets.QTableView.keyPressEvent",
"qtpy.QtWidgets.QSpinBox",
"qtpy.compat.to_qvariant",
"qtpy.QtWidgets.QVBoxLayout",
"qtpy.QtCore.QAbstractTableModel.__init__",
"qtpy.QtCore.Slot",
"spyder.py3compat.io.BytesIO",
"spyder.config.gui.get_font",
"qtpy.QtWidgets.QWidget.__init__",
"numpy.abs",
"qtpy.QtWidgets.QMenu",
"spyder.config.gui.config_shortcut",
"qtpy.QtWidgets.QTableView.__init__",
"qtpy.QtCore.QAbstractTableModel.flags",
"qtpy.QtCore.QItemSelection",
"qtpy.compat.from_qvariant",
"qtpy.QtWidgets.QApplication.clipboard",
"qtpy.QtCore.QLocale",
"qtpy.QtWidgets.QStackedWidget",
"spyder.utils.icon_manager.icon",
"qtpy.QtWidgets.QHBoxLayout",
"qtpy.QtCore.QModelIndex",
"spyder.py3compat.io.StringIO",
"spyder.py3compat.is_text_string",
"qtpy.QtGui.QColor.fromHsvF",
"qtpy.QtGui.QDoubleValidator",
"qtpy.QtWidgets.QItemDelegate.__init__",
"qtpy.QtWidgets.QLabel",
"qtpy.QtGui.QCursor",
"qtpy.QtWidgets.QDialog.reject",
"qtpy.QtWidgets.QApplication.restoreOverrideCursor",
"qtpy.QtWidgets.QLineEdit",
"qtpy.QtWidgets.QDialog.__init__",
"qtpy.QtCore.QItemSelectionRange"
] | [((20131, 20137), 'qtpy.QtCore.Slot', 'Slot', ([], {}), '()\n', (20135, 20137), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((30224, 30254), 'qtpy.QtCore.Slot', 'Slot', (['QModelIndex', 'QModelIndex'], {}), '(QModelIndex, QModelIndex)\n', (30228, 30254), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((32813, 32819), 'qtpy.QtCore.Slot', 'Slot', ([], {}), '()\n', (32817, 32819), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((33513, 33519), 'qtpy.QtCore.Slot', 'Slot', ([], {}), '()\n', (33517, 33519), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((4530, 4564), 'qtpy.QtCore.QAbstractTableModel.__init__', 'QAbstractTableModel.__init__', (['self'], {}), '(self)\n', (4558, 4564), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((4749, 4780), 'numpy.array', 'np.array', (['[0]'], {'dtype': 'data.dtype'}), '([0], dtype=data.dtype)\n', (4757, 4780), True, 'import numpy as np\n'), ((6988, 7001), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (6999, 7001), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((7209, 7222), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (7220, 7222), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((9196, 9219), 'spyder.py3compat.is_binary_string', 'is_binary_string', (['value'], {}), '(value)\n', (9212, 9219), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((10438, 10451), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (10449, 10451), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((10691, 10716), 'qtpy.compat.from_qvariant', 'from_qvariant', (['value', 'str'], {}), '(value, str)\n', (10704, 10716), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((13108, 13144), 'qtpy.QtWidgets.QItemDelegate.__init__', 'QItemDelegate.__init__', (['self', 'parent'], {}), '(self, parent)\n', (13130, 13144), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((14786, 14819), 'qtpy.QtWidgets.QTableView.__init__', 'QTableView.__init__', (['self', 'parent'], {}), '(self, parent)\n', (14805, 14819), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((15165, 15251), 'spyder.config.gui.config_shortcut', 'config_shortcut', (['self.copy'], {'context': '"""variable_explorer"""', 'name': '"""copy"""', 'parent': 'self'}), "(self.copy, context='variable_explorer', name='copy', parent\n =self)\n", (15180, 15251), False, 'from spyder.config.gui import get_font, config_shortcut\n'), ((17938, 17974), 
'qtpy.QtWidgets.QApplication.restoreOverrideCursor', 'QApplication.restoreOverrideCursor', ([], {}), '()\n', (17972, 17974), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((18385, 18396), 'qtpy.QtWidgets.QMenu', 'QMenu', (['self'], {}), '(self)\n', (18390, 18396), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((18406, 18443), 'spyder.utils.qthelpers.add_actions', 'add_actions', (['menu', '[self.copy_action]'], {}), '(menu, [self.copy_action])\n', (18417, 18443), False, 'from spyder.utils.qthelpers import add_actions, create_action, keybinding\n'), ((20281, 20305), 'qtpy.QtWidgets.QApplication.clipboard', 'QApplication.clipboard', ([], {}), '()\n', (20303, 20305), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((20493, 20523), 'qtpy.QtWidgets.QWidget.__init__', 'QWidget.__init__', (['self', 'parent'], {}), '(self, parent)\n', (20509, 20523), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((21181, 21194), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (21192, 21194), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((21876, 21889), 'qtpy.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (21887, 21889), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((23120, 23150), 'qtpy.QtWidgets.QDialog.__init__', 'QDialog.__init__', (['self', 'parent'], {}), '(self, parent)\n', (23136, 23150), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((25257, 25270), 'qtpy.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (25268, 25270), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((25687, 25707), 'qtpy.QtWidgets.QStackedWidget', 'QStackedWidget', (['self'], 
{}), '(self)\n', (25701, 25707), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((27020, 27033), 'qtpy.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (27031, 27033), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((32993, 33013), 'qtpy.QtWidgets.QDialog.accept', 'QDialog.accept', (['self'], {}), '(self)\n', (33007, 33013), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((33743, 33763), 'qtpy.QtWidgets.QDialog.reject', 'QDialog.reject', (['self'], {}), '(self)\n', (33757, 33763), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((9131, 9144), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (9142, 9144), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((12077, 12091), 'spyder.py3compat.is_string', 'is_string', (['val'], {}), '(val)\n', (12086, 12091), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((12656, 12669), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (12667, 12669), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((12860, 12888), 'qtpy.compat.to_qvariant', 'to_qvariant', (['labels[section]'], {}), '(labels[section])\n', (12871, 12888), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((17778, 17800), 'qtpy.QtGui.QCursor', 'QCursor', (['Qt.WaitCursor'], {}), '(Qt.WaitCursor)\n', (17785, 17800), False, 'from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence\n'), ((18086, 18095), 'spyder.config.base._', '_', (['"""Copy"""'], {}), "('Copy')\n", (18087, 18095), False, 'from spyder.config.base import _\n'), ((18783, 18820), 'qtpy.QtWidgets.QTableView.keyPressEvent', 'QTableView.keyPressEvent', (['self', 'event'], {}), '(self, event)\n', (18807, 18820), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((19577, 19589), 'spyder.py3compat.io.BytesIO', 'io.BytesIO', ([], {}), '()\n', (19587, 19589), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((19627, 19640), 'spyder.py3compat.io.StringIO', 'io.StringIO', ([], {}), '()\n', (19638, 19640), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((21269, 21280), 'spyder.config.base._', '_', (['"""Format"""'], {}), 
"('Format')\n", (21270, 21280), False, 'from spyder.config.base import _\n'), ((21486, 21497), 'spyder.config.base._', '_', (['"""Resize"""'], {}), "('Resize')\n", (21487, 21497), False, 'from spyder.config.base import _\n'), ((21623, 21644), 'spyder.config.base._', '_', (['"""Background color"""'], {}), "('Background color')\n", (21624, 21644), False, 'from spyder.config.base import _\n'), ((22537, 22548), 'spyder.config.base._', '_', (['"""Format"""'], {}), "('Format')\n", (22538, 22548), False, 'from spyder.config.base import _\n'), ((22585, 22606), 'spyder.config.base._', '_', (['"""Float formatting"""'], {}), "('Float formatting')\n", (22586, 22606), False, 'from spyder.config.base import _\n'), ((25336, 25355), 'spyder.utils.icon_manager.icon', 'ima.icon', (['"""arredit"""'], {}), "('arredit')\n", (25344, 25355), True, 'from spyder.utils import icon_manager as ima\n'), ((25482, 25499), 'spyder.config.base._', '_', (['"""Array editor"""'], {}), "('Array editor')\n", (25483, 25499), False, 'from spyder.config.base import _\n'), ((29821, 29831), 'spyder.config.base._', '_', (['"""Close"""'], {}), "('Close')\n", (29822, 29831), False, 'from spyder.config.base import _\n'), ((33406, 33423), 'spyder.config.base._', '_', (['"""Array editor"""'], {}), "('Array editor')\n", (33407, 33423), False, 'from spyder.config.base import _\n'), ((8009, 8022), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (8020, 8022), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((8406, 8419), 'qtpy.QtCore.QModelIndex', 'QModelIndex', ([], {}), '()\n', (8417, 8419), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((9264, 9293), 'spyder.py3compat.to_text_string', 'to_text_string', (['value', '"""utf8"""'], {}), "(value, 'utf8')\n", (9278, 9293), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((11014, 11045), 'spyder.py3compat.to_binary_string', 'to_binary_string', (['value', '"""utf8"""'], {}), "(value, 'utf8')\n", (11030, 11045), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((12409, 12447), 'qtpy.QtCore.QAbstractTableModel.flags', 'QAbstractTableModel.flags', (['self', 'index'], {}), '(self, index)\n', (12434, 12447), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((13445, 13463), 'qtpy.compat.to_qvariant', 'to_qvariant', (['value'], {}), '(value)\n', (13456, 13463), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((13548, 13565), 'qtpy.QtWidgets.QLineEdit', 'QLineEdit', (['parent'], {}), '(parent)\n', (13557, 13565), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((16301, 16317), 'qtpy.QtCore.QItemSelection', 'QItemSelection', ([], {}), '()\n', (16315, 16317), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((18148, 18166), 'spyder.utils.qthelpers.keybinding', 'keybinding', (['"""Copy"""'], {}), "('Copy')\n", (18158, 18166), False, 'from spyder.utils.qthelpers import 
add_actions, create_action, keybinding\n'), ((18215, 18235), 'spyder.utils.icon_manager.icon', 'ima.icon', (['"""editcopy"""'], {}), "('editcopy')\n", (18223, 18235), True, 'from spyder.utils import icon_manager as ima\n'), ((24330, 24387), 'spyder.config.base._', '_', (['"""Arrays with more than 3 dimensions are not supported"""'], {}), "('Arrays with more than 3 dimensions are not supported')\n", (24331, 24387), False, 'from spyder.config.base import _\n'), ((24540, 24606), 'spyder.config.base._', '_', (['"""The \'xlabels\' argument length do no match array column number"""'], {}), '("The \'xlabels\' argument length do no match array column number")\n', (24541, 24606), False, 'from spyder.config.base import _\n'), ((24759, 24822), 'spyder.config.base._', '_', (['"""The \'ylabels\' argument length do no match array row number"""'], {}), '("The \'ylabels\' argument length do no match array row number")\n', (24760, 24822), False, 'from spyder.config.base import _\n'), ((25429, 25445), 'spyder.config.base._', '_', (['"""NumPy array"""'], {}), "('NumPy array')\n", (25430, 25445), False, 'from spyder.config.base import _\n'), ((27819, 27857), 'qtpy.QtWidgets.QSpinBox', 'QSpinBox', (['self'], {'keyboardTracking': '(False)'}), '(self, keyboardTracking=False)\n', (27827, 27857), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28048, 28063), 'qtpy.QtWidgets.QComboBox', 'QComboBox', (['self'], {}), '(self)\n', (28057, 28063), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28407, 28415), 'qtpy.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (28413, 28415), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28655, 28663), 'qtpy.QtWidgets.QLabel', 'QLabel', ([], {}), '()\n', (28661, 28663), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((28885, 28900), 'qtpy.QtWidgets.QComboBox', 'QComboBox', (['self'], {}), '(self)\n', (28894, 28900), False, 'from qtpy.QtWidgets import QAbstractItemDelegate, QApplication, QCheckBox, QComboBox, QDialog, QDialogButtonBox, QGridLayout, QHBoxLayout, QInputDialog, QItemDelegate, QLabel, QLineEdit, QMenu, QMessageBox, QPushButton, QSpinBox, QStackedWidget, QTableView, QVBoxLayout, QWidget\n'), ((29580, 29599), 'spyder.config.base._', '_', (['"""Save and Close"""'], {}), "('Save and Close')\n", (29581, 29599), False, 'from spyder.config.base import _\n'), ((9508, 9541), 'qtpy.compat.to_qvariant', 'to_qvariant', (['(self._format % value)'], {}), '(self._format % value)\n', (9519, 9541), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((11135, 11156), 'spyder.py3compat.to_text_string', 
'to_text_string', (['value'], {}), '(value)\n', (11149, 11156), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((13594, 13639), 'spyder.config.gui.get_font', 'get_font', ([], {'font_size_delta': 'DEFAULT_SMALL_DELTA'}), '(font_size_delta=DEFAULT_SMALL_DELTA)\n', (13602, 13639), False, 'from spyder.config.gui import get_font, config_shortcut\n'), ((13758, 13782), 'qtpy.QtGui.QDoubleValidator', 'QDoubleValidator', (['editor'], {}), '(editor)\n', (13774, 13782), False, 'from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence\n'), ((17299, 17342), 'qtpy.QtCore.QItemSelectionRange', 'QItemSelectionRange', (['top_left', 'bottom_right'], {}), '(top_left, bottom_right)\n', (17318, 17342), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((19859, 19871), 'spyder.config.base._', '_', (['"""Warning"""'], {}), "('Warning')\n", (19860, 19871), False, 'from spyder.config.base import _\n'), ((19906, 19960), 'spyder.config.base._', '_', (['"""It was not possible to copy values for this array"""'], {}), "('It was not possible to copy values for this array')\n", (19907, 19960), False, 'from spyder.config.base import _\n'), ((25098, 25112), 'spyder.config.base._', '_', (['"""%s arrays"""'], {}), "('%s arrays')\n", (25099, 25112), False, 'from spyder.config.base import _\n'), ((25397, 25418), 'spyder.py3compat.to_text_string', 'to_text_string', (['title'], {}), '(title)\n', (25411, 25418), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n'), ((25551, 25565), 'spyder.config.base._', '_', (['"""read only"""'], {}), "('read only')\n", (25552, 25565), False, 'from spyder.config.base import _\n'), ((27684, 27700), 'spyder.config.base._', '_', (['"""Masked data"""'], {}), "('Masked data')\n", (27685, 27700), False, 'from spyder.config.base import _\n'), ((27702, 27711), 'spyder.config.base._', '_', (['"""Data"""'], {}), "('Data')\n", (27703, 27711), False, 'from spyder.config.base import _\n'), ((27713, 27722), 'spyder.config.base._', '_', (['"""Mask"""'], {}), "('Mask')\n", (27714, 27722), False, 'from spyder.config.base import _\n'), ((28266, 28276), 'spyder.config.base._', '_', (['"""Axis:"""'], {}), "('Axis:')\n", (28267, 28276), False, 'from spyder.config.base import _\n'), ((28504, 28515), 'spyder.config.base._', '_', (['"""Index:"""'], {}), "('Index:')\n", (28505, 28515), False, 'from spyder.config.base import _\n'), ((29138, 29189), 'spyder.config.base._', '_', (['"""<u>Warning</u>: changes are applied separately"""'], {}), "('<u>Warning</u>: changes are applied separately')\n", (29139, 29189), False, 'from spyder.config.base import _\n'), ((29225, 29344), 'spyder.config.base._', '_', (['"""For performance reasons, changes applied to masked array won\'t be reflected in array\'s data (and vice-versa)."""'], {}), '("For performance reasons, changes applied to masked array won\'t be reflected in array\'s data (and vice-versa)."\n )\n', (29226, 29344), False, 'from spyder.config.base import _\n'), ((10145, 10195), 'qtpy.QtGui.QColor.fromHsvF', 'QColor.fromHsvF', (['hue', 'self.sat', 'self.val', 'self.alp'], {}), '(hue, self.sat, self.val, self.alp)\n', (10160, 10195), False, 'from qtpy.QtGui import QColor, QCursor, QDoubleValidator, QKeySequence\n'), ((10220, 10238), 'qtpy.compat.to_qvariant', 'to_qvariant', (['color'], {}), '(color)\n', (10231, 10238), False, 'from 
qtpy.compat import from_qvariant, to_qvariant\n'), ((13820, 13832), 'qtpy.QtCore.QLocale', 'QLocale', (['"""C"""'], {}), "('C')\n", (13827, 13832), False, 'from qtpy.QtCore import QAbstractTableModel, QItemSelection, QLocale, QItemSelectionRange, QModelIndex, Qt, Slot\n'), ((22852, 22862), 'spyder.config.base._', '_', (['"""Error"""'], {}), "('Error')\n", (22853, 22862), False, 'from spyder.config.base import _\n'), ((25159, 25194), 'spyder.config.base._', '_', (['"""%s are currently not supported"""'], {}), "('%s are currently not supported')\n", (25160, 25194), False, 'from spyder.config.base import _\n'), ((27178, 27203), 'spyder.config.base._', '_', (['"""Record array fields:"""'], {}), "('Record array fields:')\n", (27179, 27203), False, 'from spyder.config.base import _\n'), ((10107, 10118), 'numpy.abs', 'np.abs', (['hue'], {}), '(hue)\n', (10113, 10118), True, 'import numpy as np\n'), ((10294, 10307), 'qtpy.compat.to_qvariant', 'to_qvariant', ([], {}), '()\n', (10305, 10307), False, 'from qtpy.compat import from_qvariant, to_qvariant\n'), ((10375, 10420), 'spyder.config.gui.get_font', 'get_font', ([], {'font_size_delta': 'DEFAULT_SMALL_DELTA'}), '(font_size_delta=DEFAULT_SMALL_DELTA)\n', (10383, 10420), False, 'from spyder.config.gui import get_font, config_shortcut\n'), ((22902, 22931), 'spyder.config.base._', '_', (['"""Format (%s) is incorrect"""'], {}), "('Format (%s) is incorrect')\n", (22903, 22931), False, 'from spyder.config.base import _\n'), ((27482, 27503), 'spyder.py3compat.is_text_string', 'is_text_string', (['title'], {}), '(title)\n', (27496, 27503), False, 'from spyder.py3compat import io, is_binary_string, is_string, is_text_string, PY3, to_binary_string, to_text_string\n')] |
""" Defines the PolygonPlot class.
"""
from __future__ import with_statement
# Major library imports
import numpy as np
# Enthought library imports.
from enable.api import LineStyle, black_color_trait, \
transparent_color_trait
from kiva.agg import points_in_polygon
from traits.api import Enum, Float, Tuple, Property, cached_property, \
on_trait_change
# Local imports.
from base_xy_plot import BaseXYPlot
class PolygonPlot(BaseXYPlot):
""" Plots a polygon in dataspace.
Assuming that the index and value mappers are linear mappers, and that
"index" corresponds to X-coordinates and "value" corresponds to
Y-coordinates, the points are arranged in a counter-clockwise fashion.
The polygon is closed automatically, so there is no need to reproduce
the first point as the last point.
Nonlinear mappers are possible, but the results may be unexpected. Only the
data-space points are mapped in a nonlinear fashion. Straight lines
connecting them in a linear screen-space become curved in a nonlinear
screen-space; however, the drawing still contains straight lines in
screen-space.
If you don't want the edge of the polygon to be drawn, set **edge_color**
to transparent; don't try to do this by setting **edge_width** to 0. In
some drawing systems, such as PostScript, a line width of 0 means to make
the line as small as possible while still putting ink on the page.
"""
# The color of the line on the edge of the polygon.
edge_color = black_color_trait
# The thickness of the edge of the polygon.
edge_width = Float(1.0)
# The line dash style for the edge of the polygon.
edge_style = LineStyle
# The color of the face of the polygon.
face_color = transparent_color_trait
# Override the hittest_type trait inherited from BaseXYPlot
hittest_type = Enum("poly", "point", "line")
# The RGBA tuple for rendering edges. It is always a tuple of length 4.
# It has the same RGB values as edge_color_, and its alpha value is the
# alpha value of self.edge_color multiplied by self.alpha.
effective_edge_color = Property(Tuple, depends_on=['edge_color', 'alpha'])
# The RGBA tuple for rendering the face. It is always a tuple of length 4.
# It has the same RGB values as face_color_, and its alpha value is the
# alpha value of self.face_color multiplied by self.alpha.
effective_face_color = Property(Tuple, depends_on=['face_color', 'alpha'])
#----------------------------------------------------------------------
# Private 'BaseXYPlot' interface
#----------------------------------------------------------------------
def _gather_points(self):
""" Collects the data points that are within the bounds of the plot and
caches them.
"""
if self._cache_valid:
return
index = self.index.get_data()
value = self.value.get_data()
if not self.index or not self.value:
return
if len(index) == 0 or len(value) == 0 or len(index) != len(value):
self._cached_data_pts = []
self._cache_valid = True
return
points = np.transpose(np.array((index,value)))
self._cached_data_pts = points
self._cache_valid = True
def _render(self, gc, points):
""" Renders an Nx2 array of screen-space points as a polygon.
"""
with gc:
gc.clip_to_rect(self.x, self.y, self.width, self.height)
gc.set_stroke_color(self.effective_edge_color)
gc.set_line_width(self.edge_width)
gc.set_line_dash(self.edge_style_)
gc.set_fill_color(self.effective_face_color)
gc.lines(points)
gc.close_path()
gc.draw_path()
def _render_icon(self, gc, x, y, width, height):
""" Renders a representation of this plot as an icon into the box
defined by the parameters.
Used by the legend.
"""
with gc:
gc.set_stroke_color(self.effective_edge_color)
gc.set_line_width(self.edge_width)
gc.set_fill_color(self.effective_face_color)
if hasattr(self, 'line_style_'):
gc.set_line_dash(self.line_style_)
gc.draw_rect((x,y,width,height))
return
def hittest(self, screen_pt, threshold=7.0, return_distance=False):
""" Performs point-in-polygon testing or point/line proximity testing.
If self.hittest_type is "line" or "point", then behaves like the
parent class BaseXYPlot.hittest().
If self.hittest_type is "poly", then returns True if the given
point is inside the polygon, and False otherwise.
"""
if self.hittest_type in ("line", "point"):
return BaseXYPlot.hittest(self, screen_pt, threshold, return_distance)
data_pt = self.map_data(screen_pt, all_values=True)
index = self.index.get_data()
value = self.value.get_data()
poly = np.vstack((index,value)).T
if points_in_polygon([data_pt], poly)[0] == 1:
return True
else:
return False
#------------------------------------------------------------------------
# Event handlers
#------------------------------------------------------------------------
@on_trait_change('edge_color, edge_width, edge_style, face_color, alpha')
def _attributes_changed(self):
self.invalidate_draw()
self.request_redraw()
#------------------------------------------------------------------------
# Property getters
#------------------------------------------------------------------------
@cached_property
def _get_effective_edge_color(self):
if len(self.edge_color_) == 4:
edge_alpha = self.edge_color_[-1]
else:
edge_alpha = 1.0
c = self.edge_color_[:3] + (edge_alpha * self.alpha,)
return c
@cached_property
def _get_effective_face_color(self):
if len(self.face_color_) == 4:
face_alpha = self.face_color_[-1]
else:
face_alpha = 1.0
c = self.face_color_[:3] + (face_alpha * self.alpha,)
return c
| [
"traits.api.Enum",
"traits.api.on_trait_change",
"traits.api.Property",
"kiva.agg.points_in_polygon",
"base_xy_plot.BaseXYPlot.hittest",
"numpy.array",
"numpy.vstack",
"traits.api.Float"
] | [((1659, 1669), 'traits.api.Float', 'Float', (['(1.0)'], {}), '(1.0)\n', (1664, 1669), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((1923, 1952), 'traits.api.Enum', 'Enum', (['"""poly"""', '"""point"""', '"""line"""'], {}), "('poly', 'point', 'line')\n", (1927, 1952), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((2202, 2253), 'traits.api.Property', 'Property', (['Tuple'], {'depends_on': "['edge_color', 'alpha']"}), "(Tuple, depends_on=['edge_color', 'alpha'])\n", (2210, 2253), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((2508, 2559), 'traits.api.Property', 'Property', (['Tuple'], {'depends_on': "['face_color', 'alpha']"}), "(Tuple, depends_on=['face_color', 'alpha'])\n", (2516, 2559), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((5451, 5523), 'traits.api.on_trait_change', 'on_trait_change', (['"""edge_color, edge_width, edge_style, face_color, alpha"""'], {}), "('edge_color, edge_width, edge_style, face_color, alpha')\n", (5466, 5523), False, 'from traits.api import Enum, Float, Tuple, Property, cached_property, on_trait_change\n'), ((3287, 3311), 'numpy.array', 'np.array', (['(index, value)'], {}), '((index, value))\n', (3295, 3311), True, 'import numpy as np\n'), ((4906, 4969), 'base_xy_plot.BaseXYPlot.hittest', 'BaseXYPlot.hittest', (['self', 'screen_pt', 'threshold', 'return_distance'], {}), '(self, screen_pt, threshold, return_distance)\n', (4924, 4969), False, 'from base_xy_plot import BaseXYPlot\n'), ((5122, 5147), 'numpy.vstack', 'np.vstack', (['(index, value)'], {}), '((index, value))\n', (5131, 5147), True, 'import numpy as np\n'), ((5160, 5194), 'kiva.agg.points_in_polygon', 'points_in_polygon', (['[data_pt]', 'poly'], {}), '([data_pt], poly)\n', (5177, 5194), False, 'from kiva.agg import points_in_polygon\n')] |
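# A minimal usage sketch for the PolygonPlot renderer defined above. The
# data-source/mapper plumbing (ArrayDataSource, DataRange1D, LinearMapper) and
# the chaco.api import path are assumptions based on standard Chaco usage, not
# part of the original module.
import numpy as np
from chaco.api import ArrayDataSource, DataRange1D, LinearMapper, PolygonPlot

# Counter-clockwise unit square in data space; the plot closes it automatically.
xs = ArrayDataSource(np.array([0.0, 1.0, 1.0, 0.0]))
ys = ArrayDataSource(np.array([0.0, 0.0, 1.0, 1.0]))
polygon = PolygonPlot(
    index=xs,
    value=ys,
    index_mapper=LinearMapper(range=DataRange1D(xs)),
    value_mapper=LinearMapper(range=DataRange1D(ys)),
    face_color="lightblue",
    edge_color="black",
    edge_width=2.0,
)
# With hittest_type == "poly" (the default), polygon.hittest(screen_pt) reports
# whether a screen point lies inside the polygon.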
import os
from datetime import timedelta
basedir = os.path.abspath(os.path.dirname(__file__))
API_DATA_URL = "https://invest-public-api.tinkoff.ru/rest/tinkoff.public.invest.api.contract.v1.InstrumentsService/"
API_LASTPRICES_URL = "https://invest-public-api.tinkoff.ru/rest/\
tinkoff.public.invest.api.contract.v1.MarketDataService/GetLastPrices"
SQLALCHEMY_DATABASE_URI = 'sqlite:///' + os.path.join(basedir, '..', 'webapp.db')
REMEMBER_COOKIE_DURATION = timedelta(days=1)
SQLALCHEMY_TRACK_MODIFICATIONS = False
SECRET_KEY = ""
API_TOKEN = ""
| [
"os.path.dirname",
"datetime.timedelta",
"os.path.join"
] | [((461, 478), 'datetime.timedelta', 'timedelta', ([], {'days': '(1)'}), '(days=1)\n', (470, 478), False, 'from datetime import timedelta\n'), ((68, 93), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (83, 93), False, 'import os\n'), ((392, 432), 'os.path.join', 'os.path.join', (['basedir', '""".."""', '"""webapp.db"""'], {}), "(basedir, '..', 'webapp.db')\n", (404, 432), False, 'import os\n')] |
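# The module above reads like a Flask-style settings file (SQLAlchemy URI,
# SECRET_KEY, Flask-Login REMEMBER_COOKIE_DURATION). A minimal sketch of
# loading it into an application; the "webapp.config" import path is a
# hypothetical placement, not taken from the original project.
from flask import Flask

app = Flask(__name__)
app.config.from_object("webapp.config")  # copies the upper-case settings above
# The Tinkoff endpoints and token are then available as, e.g.,
# app.config["API_LASTPRICES_URL"] and app.config["API_TOKEN"].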
import threading
from multiprocessing import Queue
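# Producer/consumer demo: take_numbers reads three pairs of integers from stdin
# and puts them on a shared queue; add_num pops each pair and appends the sum
# and the difference to the module-level result lists.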
results = []
results2 = []
def take_numbers(q):
print('Enter the numbers:')
for i in range(0,3):
num1 = int(input('Enter first number: '))
num2 = int(input('Enter second number: '))
q.put(num1)
q.put(num2)
def add_num(q):
for i in range(0,3):
num1 = q.get()
num2 = q.get()
results.append(num1+num2)
results2.append(num1-num2)
q = Queue()
t2 = threading.Thread(target=add_num, args=(q, ))
t1 = threading.Thread(target=take_numbers, args=(q, ))
t2.start()
t1.start()
t2.join()
t1.join()
q.close()
for result in results:
    print("sum =", result)
for result in results2:
    print("difference =", result)
| [
"threading.Thread",
"multiprocessing.Queue"
] | [((464, 471), 'multiprocessing.Queue', 'Queue', ([], {}), '()\n', (469, 471), False, 'from multiprocessing import Queue\n'), ((477, 520), 'threading.Thread', 'threading.Thread', ([], {'target': 'add_num', 'args': '(q,)'}), '(target=add_num, args=(q,))\n', (493, 520), False, 'import threading\n'), ((527, 575), 'threading.Thread', 'threading.Thread', ([], {'target': 'take_numbers', 'args': '(q,)'}), '(target=take_numbers, args=(q,))\n', (543, 575), False, 'import threading\n')] |
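# For threads inside a single process, the standard-library queue.Queue is the
# more idiomatic channel than multiprocessing.Queue, and results can travel
# through a second queue instead of module-level lists. A self-contained
# variant of the same producer/consumer pattern (inputs are hard-coded so the
# sketch runs without stdin):
import queue
import threading

def produce(pairs, q):
    # Push each (a, b) pair onto the work queue.
    for a, b in pairs:
        q.put((a, b))

def consume(q, out, n):
    # Pop n pairs and record their sum and difference.
    for _ in range(n):
        a, b = q.get()
        out.put((a + b, a - b))

pairs = [(5, 2), (10, 4), (7, 7)]
work, done = queue.Queue(), queue.Queue()
producer = threading.Thread(target=produce, args=(pairs, work))
consumer = threading.Thread(target=consume, args=(work, done, len(pairs)))
consumer.start()
producer.start()
producer.join()
consumer.join()
while not done.empty():
    s, d = done.get()
    print("sum =", s, "difference =", d)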
import os
import os.path as osp
import numpy as np
from joblib import Parallel, delayed
from tensorflow.keras.utils import get_file
from tqdm import tqdm
from spektral.data import Dataset, Graph
from spektral.utils import label_to_one_hot, sparse
from spektral.utils.io import load_csv, load_sdf
ATOM_TYPES = [1, 6, 7, 8, 9]
BOND_TYPES = [1, 2, 3, 4]
class QM9(Dataset):
"""
The QM9 chemical data set of small molecules.
In this dataset, nodes represent atoms and edges represent chemical bonds.
There are 5 possible atom types (H, C, N, O, F) and 4 bond types (single,
double, triple, aromatic).
Node features represent the chemical properties of each atom and include:
- The atomic number, one-hot encoded;
- The atom's position in the X, Y, and Z dimensions;
- The atomic charge;
- The mass difference from the monoisotope;
The edge features represent the type of chemical bond between two atoms,
one-hot encoded.
    Each graph has a 19-dimensional label for regression.
**Arguments**
- `amount`: int, load this many molecules instead of the full dataset
(useful for debugging).
    - `n_jobs`: number of CPU cores to use for reading the data (-1 to use all
      available cores).
"""
url = "https://deepchemdata.s3-us-west-1.amazonaws.com/datasets/gdb9.tar.gz"
def __init__(self, amount=None, n_jobs=1, **kwargs):
self.amount = amount
self.n_jobs = n_jobs
super().__init__(**kwargs)
def download(self):
get_file(
"qm9.tar.gz",
self.url,
extract=True,
cache_dir=self.path,
cache_subdir=self.path,
)
os.remove(osp.join(self.path, "qm9.tar.gz"))
def read(self):
print("Loading QM9 dataset.")
sdf_file = osp.join(self.path, "gdb9.sdf")
data = load_sdf(sdf_file, amount=self.amount) # Internal SDF format
def read_mol(mol):
x = np.array([atom_to_feature(atom) for atom in mol["atoms"]])
a, e = mol_to_adj(mol)
return x, a, e
data = Parallel(n_jobs=self.n_jobs)(
delayed(read_mol)(mol) for mol in tqdm(data, ncols=80)
)
x_list, a_list, e_list = list(zip(*data))
# Load labels
labels_file = osp.join(self.path, "gdb9.sdf.csv")
labels = load_csv(labels_file)
labels = labels.set_index("mol_id").values
if self.amount is not None:
labels = labels[: self.amount]
return [
Graph(x=x, a=a, e=e, y=y)
for x, a, e, y in zip(x_list, a_list, e_list, labels)
]
def atom_to_feature(atom):
atomic_num = label_to_one_hot(atom["atomic_num"], ATOM_TYPES)
coords = atom["coords"]
charge = atom["charge"]
iso = atom["iso"]
return np.concatenate((atomic_num, coords, [charge, iso]), -1)
def mol_to_adj(mol):
row, col, edge_features = [], [], []
for bond in mol["bonds"]:
start, end = bond["start_atom"], bond["end_atom"]
row += [start, end]
col += [end, start]
edge_features += [bond["type"]] * 2
a, e = sparse.edge_index_to_matrix(
edge_index=np.array((row, col)).T,
edge_weight=np.ones_like(row),
edge_features=label_to_one_hot(edge_features, BOND_TYPES),
)
return a, e
| [
"spektral.utils.io.load_sdf",
"spektral.utils.label_to_one_hot",
"numpy.ones_like",
"spektral.utils.io.load_csv",
"tqdm.tqdm",
"os.path.join",
"joblib.Parallel",
"numpy.array",
"tensorflow.keras.utils.get_file",
"numpy.concatenate",
"joblib.delayed",
"spektral.data.Graph"
] | [((2701, 2749), 'spektral.utils.label_to_one_hot', 'label_to_one_hot', (["atom['atomic_num']", 'ATOM_TYPES'], {}), "(atom['atomic_num'], ATOM_TYPES)\n", (2717, 2749), False, 'from spektral.utils import label_to_one_hot, sparse\n'), ((2840, 2895), 'numpy.concatenate', 'np.concatenate', (['(atomic_num, coords, [charge, iso])', '(-1)'], {}), '((atomic_num, coords, [charge, iso]), -1)\n', (2854, 2895), True, 'import numpy as np\n'), ((1532, 1627), 'tensorflow.keras.utils.get_file', 'get_file', (['"""qm9.tar.gz"""', 'self.url'], {'extract': '(True)', 'cache_dir': 'self.path', 'cache_subdir': 'self.path'}), "('qm9.tar.gz', self.url, extract=True, cache_dir=self.path,\n cache_subdir=self.path)\n", (1540, 1627), False, 'from tensorflow.keras.utils import get_file\n'), ((1826, 1857), 'os.path.join', 'osp.join', (['self.path', '"""gdb9.sdf"""'], {}), "(self.path, 'gdb9.sdf')\n", (1834, 1857), True, 'import os.path as osp\n'), ((1873, 1911), 'spektral.utils.io.load_sdf', 'load_sdf', (['sdf_file'], {'amount': 'self.amount'}), '(sdf_file, amount=self.amount)\n', (1881, 1911), False, 'from spektral.utils.io import load_csv, load_sdf\n'), ((2318, 2353), 'os.path.join', 'osp.join', (['self.path', '"""gdb9.sdf.csv"""'], {}), "(self.path, 'gdb9.sdf.csv')\n", (2326, 2353), True, 'import os.path as osp\n'), ((2371, 2392), 'spektral.utils.io.load_csv', 'load_csv', (['labels_file'], {}), '(labels_file)\n', (2379, 2392), False, 'from spektral.utils.io import load_csv, load_sdf\n'), ((1713, 1746), 'os.path.join', 'osp.join', (['self.path', '"""qm9.tar.gz"""'], {}), "(self.path, 'qm9.tar.gz')\n", (1721, 1746), True, 'import os.path as osp\n'), ((2116, 2144), 'joblib.Parallel', 'Parallel', ([], {'n_jobs': 'self.n_jobs'}), '(n_jobs=self.n_jobs)\n', (2124, 2144), False, 'from joblib import Parallel, delayed\n'), ((2553, 2578), 'spektral.data.Graph', 'Graph', ([], {'x': 'x', 'a': 'a', 'e': 'e', 'y': 'y'}), '(x=x, a=a, e=e, y=y)\n', (2558, 2578), False, 'from spektral.data import Dataset, Graph\n'), ((3252, 3269), 'numpy.ones_like', 'np.ones_like', (['row'], {}), '(row)\n', (3264, 3269), True, 'import numpy as np\n'), ((3293, 3336), 'spektral.utils.label_to_one_hot', 'label_to_one_hot', (['edge_features', 'BOND_TYPES'], {}), '(edge_features, BOND_TYPES)\n', (3309, 3336), False, 'from spektral.utils import label_to_one_hot, sparse\n'), ((3208, 3228), 'numpy.array', 'np.array', (['(row, col)'], {}), '((row, col))\n', (3216, 3228), True, 'import numpy as np\n'), ((2158, 2175), 'joblib.delayed', 'delayed', (['read_mol'], {}), '(read_mol)\n', (2165, 2175), False, 'from joblib import Parallel, delayed\n'), ((2192, 2212), 'tqdm.tqdm', 'tqdm', (['data'], {'ncols': '(80)'}), '(data, ncols=80)\n', (2196, 2212), False, 'from tqdm import tqdm\n')] |
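# A minimal sketch of using the QM9 dataset class defined above through
# Spektral; the spektral.datasets import path and the Graph attributes are
# assumed from the library's public layout.
from spektral.datasets import QM9

dataset = QM9(amount=100, n_jobs=1)  # the first call downloads and parses gdb9.tar.gz
graph = dataset[0]
print(len(dataset), "molecules loaded")
print("node features x:", graph.x.shape)  # one-hot atom type + xyz + charge + iso
print("edge features e:", graph.e.shape)  # one-hot bond type per edge
print("targets y:", graph.y.shape)         # 19 regression targets per molecule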