code stringlengths 22–1.05M | apis sequencelengths 1–3.31k | extract_api stringlengths 75–3.25M
---|---|---|
from __future__ import print_function
from keras.datasets import mnist
from keras.datasets import cifar10
from keras.utils.np_utils import to_categorical
import numpy as np
from keras import backend as K
from evolution import Evolution
from genome_handler import GenomeHandler
import tensorflow as tf
#import mlflow.keras
#import mlflow
#import mlflow.tensorflow
#mlflow.tensorflow.autolog()
#mlflow.keras.autolog()
print("Num GPUs Available: ", len(tf.config.list_physical_devices('GPU')))
K.set_image_data_format("channels_last")
#(x_train, y_train), (x_test, y_test) = mnist.load_data()
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
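# Cast images to float32 and scale pixel values to [0, 1]; the reshape keeps the existing (N, H, W, C) layout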
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], x_train.shape[2],x_train.shape[3]).astype('float32') / 255
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], x_test.shape[3]).astype('float32') / 255
# One-hot encode the class labels (n_classes)
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
#y_train.shape
dataset = ((x_train, y_train), (x_test, y_test))
genome_handler = GenomeHandler(max_conv_layers=4,
max_dense_layers=2, # includes final dense layer
max_filters=512,
max_dense_nodes=1024,
input_shape=x_train.shape[1:],
n_classes=10)
evo = Evolution(genome_handler, data_path="log/evo_cifar10_gen40_pop10_e20.csv")
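# Run the evolutionary search: 40 generations with a population of 10 genomes, each trained for 20 epochs and scored by accuracy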
model = evo.run(dataset=dataset,
num_generations=40,
pop_size=10,
epochs=20,metric='acc')
#epochs=10,metric='loss')
print(model.summary()) | [
"keras.backend.set_image_data_format",
"keras.datasets.cifar10.load_data",
"keras.utils.np_utils.to_categorical",
"tensorflow.config.list_physical_devices",
"evolution.Evolution",
"genome_handler.GenomeHandler"
] | [((494, 534), 'keras.backend.set_image_data_format', 'K.set_image_data_format', (['"""channels_last"""'], {}), "('channels_last')\n", (517, 534), True, 'from keras import backend as K\n'), ((633, 652), 'keras.datasets.cifar10.load_data', 'cifar10.load_data', ([], {}), '()\n', (650, 652), False, 'from keras.datasets import cifar10\n'), ((912, 935), 'keras.utils.np_utils.to_categorical', 'to_categorical', (['y_train'], {}), '(y_train)\n', (926, 935), False, 'from keras.utils.np_utils import to_categorical\n'), ((945, 967), 'keras.utils.np_utils.to_categorical', 'to_categorical', (['y_test'], {}), '(y_test)\n', (959, 967), False, 'from keras.utils.np_utils import to_categorical\n'), ((1050, 1190), 'genome_handler.GenomeHandler', 'GenomeHandler', ([], {'max_conv_layers': '(4)', 'max_dense_layers': '(2)', 'max_filters': '(512)', 'max_dense_nodes': '(1024)', 'input_shape': 'x_train.shape[1:]', 'n_classes': '(10)'}), '(max_conv_layers=4, max_dense_layers=2, max_filters=512,\n max_dense_nodes=1024, input_shape=x_train.shape[1:], n_classes=10)\n', (1063, 1190), False, 'from genome_handler import GenomeHandler\n'), ((1379, 1453), 'evolution.Evolution', 'Evolution', (['genome_handler'], {'data_path': '"""log/evo_cifar10_gen40_pop10_e20.csv"""'}), "(genome_handler, data_path='log/evo_cifar10_gen40_pop10_e20.csv')\n", (1388, 1453), False, 'from evolution import Evolution\n'), ((452, 490), 'tensorflow.config.list_physical_devices', 'tf.config.list_physical_devices', (['"""GPU"""'], {}), "('GPU')\n", (483, 490), True, 'import tensorflow as tf\n')] |
'''
Autores:<NAME> A01749381
<NAME> A01751192
<NAME> A01379868
<NAME> A01749375
'''
from random import random
from mesa.visualization.modules import CanvasGrid
from mesa.visualization.ModularVisualization import ModularServer
from mesa.batchrunner import BatchRunner
from mesa.datacollection import DataCollector
from mesa.space import MultiGrid
from mesa import Agent, Model
from mesa.time import RandomActivation
# Class for the car agents
class CarAgent(Agent):
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.next_cell = None
self.direction = None
self.agent_type = 0
# Validates whether the position is valid; returns True if it is valid,
# otherwise returns False.
def is_valid(self, position):
if position[0] < self.model.width and position[1] < self.model.height and position[0] >= 0 and position[1] >= 0:
if not self.model.grid.is_cell_empty(position):
return True
return False
# Gets the possible cells to move to; returns the street's position.
def get_poss_cell(self):
neighborhood = self.model.grid.get_neighborhood(self.pos, moore=False, include_center=False)
for cell in neighborhood:
for agent in self.model.grid.get_cell_list_contents(cell):
if agent.agent_type == 2:
next_dir = (self.pos[0] - agent.pos[0], self.pos[1] - agent.pos[1])
if next_dir[0] * -1 != self.direction[0] and next_dir[1] * -1 != self.direction[1]:
return agent.pos
# Moves forward; returns the value of the move variable, which is a coordinate pair.
def get_nextcell(self):
move = (self.pos[0] + self.direction[0], self.pos[1] + self.direction[1])
return move
# Gets the direction the car should move in; returns the direction
# of the street.
def get_nextdirect(self, position):
for agent in self.model.grid.get_cell_list_contents(position):
if agent.agent_type == 2:
return agent.direction
# Turns the car; returns the street's direction.
def turn(self):
for cell in self.model.grid.get_neighborhood(self.pos, moore=False, include_center=False):
for agent in self.model.grid.get_cell_list_contents(cell):
if agent.agent_type == 2:
if agent.direction != self.direction:
return agent.direction
return None
# Checks the traffic lights; returns the traffic light's state if the car
# has one as a neighbor. Otherwise returns True.
def check_light(self):
for agent in self.model.grid.get_cell_list_contents(self.next_cell):
if agent.agent_type == 1:
return agent.light
return True
# Checks whether there is another car ahead; returns a boolean.
def check_car(self):
for agent in self.model.grid.get_cell_list_contents(self.next_cell):
if agent.agent_type == 0:
return False
return True
def step(self):
# Variable to store the result of get_nextcell().
next_cell = self.get_nextcell()
# If the next cell is valid, store it in the car and update its direction.
if self.is_valid(next_cell):
self.next_cell = next_cell
self.direction = self.get_nextdirect(self.next_cell)
# Otherwise a variable stores the result of turn().
else:
direction = self.turn()
# If direction is truthy, change the car's direction.
if direction:
self.direction = direction
# Otherwise a variable stores the result of get_poss_cell().
# The car's next cell is set to that value.
else:
poss = self.get_poss_cell()
self.next_cell = poss
if self.check_car():
if self.check_light():
self.model.grid.move_agent(self, self.next_cell)
# Class for the traffic light agents.
class TrafficLightAgent(Agent):
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.agent_type = 1
self.light = False
# Toggles the traffic light.
def change(self):
self.light = not self.light
# Counts the number of cars at a traffic light;
# returns the counter with the number of cars.
def count_cars(self):
counter = 0
neighborhood = self.model.grid.get_neighborhood(self.pos, moore=False, include_center=True)
for cell in neighborhood:
for agent in self.model.grid.get_cell_list_contents(cell):
if agent.agent_type == 0:
counter += 1
return counter
# Class for the street agents.
class StreetAgent(Agent):
def __init__(self, unique_id, model):
super().__init__(unique_id, model)
self.direction = None
self.agent_type = 2
# Class for the model.
class CarModel(Model):
def __init__(self, N: int, width: int, height: int):
self.num_agents = N
self.running = True
self.grid = MultiGrid(width, height, False)
self.schedule = RandomActivation(self)
self.uids = 0
self.lights_ids = 0
self.width = width
self.height = height
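# Collect every street cell position so cars can later be spawned on valid road cells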
street_pos = []
self.lights = 4
# Loop that builds the inner part of the streets, where the intersection is.
for row in range(height):
for col in range(width):
agent = StreetAgent(self.uids, self)
self.uids += 1
flag = True
if col > width // 2 - 2 and col < width // 2 + 1 and col > 1 and col < height - 1:
if row >= height // 2:
agent.direction = (0, 1)
else:
agent.direction = (0, -1)
elif row > height // 2 - 2 and row < height // 2 + 1 and row > 1 and row < width - 1:
if col > width // 2:
agent.direction = (-1, 0)
else:
agent.direction = (1, 0)
else:
flag = False
if flag:
self.grid.place_agent(agent, (col, row))
street_pos.append((col, row))
# Loop that builds the outer part of the streets, away from the intersection.
for row in range(height):
for col in range(width):
agent = StreetAgent(self.uids, self)
self.uids += 1
flag = True
if row < 2:
if col < width - 2:
agent.direction = (1, 0)
else:
agent.direction = (0, 1)
elif row >= 2 and row < height - 2:
if col < 2:
agent.direction = (0, -1)
elif col >= width - 2 and col < width:
agent.direction = (0, 1)
else:
flag = False
elif row >= height -2 and row < height:
if col < width - 2:
agent.direction = (-1, 0)
else:
agent.direction = (0, 1)
else:
flag = False
if flag:
self.grid.place_agent(agent, (col, row))
street_pos.append((col, row))
# Loop that spawns the cars at random positions that contain a street.
for i in range(self.num_agents):
a = CarAgent(self.uids, self)
self.uids += 1
pos_index = self.random.randint(0, len(street_pos) - 1)
pos = street_pos.pop(pos_index)
a.direction = self.grid.get_cell_list_contents(pos)[0].direction
self.grid.place_agent(a, pos)
self.schedule.add(a)
# Create the traffic lights
for i in range(self.lights):
alight = TrafficLightAgent(self.lights_ids, self)
self.lights_ids += 1
self.schedule.add(alight)
x = 8
y = 9
if i == 0:
alight.light = True
self.grid.place_agent(alight, (x, y))
elif i == 1:
x = 8
y = 10
alight.light = True
self.grid.place_agent(alight, (x, y))
elif i == 2:
x = 11
y = 9
alight.light = False
self.grid.place_agent(alight, (x, y))
else:
x = 11
y = 10
alight.light = False
self.grid.place_agent(alight, (x, y))
def step(self):
# Counters to determine which traffic lights have more cars.
count_left = 0
count_right = 0
# Loop that adds the number of cars on each side to the counters.
for agent in self.schedule.agents:
if agent.agent_type == 1:
if agent.unique_id == 0:
count_left += agent.count_cars()
elif agent.unique_id == 1:
count_left += agent.count_cars()
elif agent.unique_id == 2:
count_right += agent.count_cars()
elif agent.unique_id == 3:
count_right += agent.count_cars()
# If the left side has more cars, the traffic lights on the left side
# turn green and the traffic lights on the right side turn red.
if count_left >= count_right:
for agent in self.schedule.agents:
if agent.agent_type == 1:
if agent.unique_id == 0:
agent.light = True
elif agent.unique_id == 1:
agent.light = True
elif agent.unique_id == 2:
agent.light = False
else:
agent.light = False
# Otherwise the traffic lights on the right side turn green and the traffic lights
# on the left side turn red.
else:
for agent in self.schedule.agents:
if agent.agent_type == 1:
if agent.unique_id == 0:
agent.light = False
elif agent.unique_id == 1:
agent.light = False
elif agent.unique_id == 2:
agent.light = True
else:
agent.light = True
self.schedule.step() | [
"mesa.space.MultiGrid",
"mesa.time.RandomActivation"
] | [((5740, 5771), 'mesa.space.MultiGrid', 'MultiGrid', (['width', 'height', '(False)'], {}), '(width, height, False)\n', (5749, 5771), False, 'from mesa.space import MultiGrid\n'), ((5796, 5818), 'mesa.time.RandomActivation', 'RandomActivation', (['self'], {}), '(self)\n', (5812, 5818), False, 'from mesa.time import RandomActivation\n')] |
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from django.contrib.auth.views import LoginView
from .models import Task
# Create your views here.
class CustomLoginView(LoginView):
template_name='base/login.html'
fields = '__all__'
redirect_authenticated_user = True
def get_success_url(self):
return reverse_lazy('tasks')
class TaskList(ListView):
model = Task
context_object_name = 'tasks'
class TaskDetail(DetailView):
model = Task
context_object_name = 'task'
class TaskCreate(CreateView):
model = Task
fields = '__all__'
success_url = reverse_lazy('tasks')
class TaskUpdate(UpdateView):
model = Task
fields = '__all__'
success_url = reverse_lazy('tasks')
class TaskDelete(DeleteView):
model = Task
context_object_name='Task'
success_url = reverse_lazy('tasks')
| [
"django.urls.reverse_lazy"
] | [((823, 844), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""tasks"""'], {}), "('tasks')\n", (835, 844), False, 'from django.urls import reverse_lazy\n'), ((939, 960), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""tasks"""'], {}), "('tasks')\n", (951, 960), False, 'from django.urls import reverse_lazy\n'), ((1063, 1084), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""tasks"""'], {}), "('tasks')\n", (1075, 1084), False, 'from django.urls import reverse_lazy\n'), ((540, 561), 'django.urls.reverse_lazy', 'reverse_lazy', (['"""tasks"""'], {}), "('tasks')\n", (552, 561), False, 'from django.urls import reverse_lazy\n')] |
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
Run an agent in its own (independent) process.
What Agent code does is out of our direct control, so we want to avoid any interactions with global state that might be present in the SMARTS process.
To protect and isolate Agents from any pollution of global state in the main SMARTS process, we spawn each Agent in its own fresh and independent Python process.
This script is called from within SMARTS to instantiate a remote agent.
The protocol is as follows:
1. SMARTS calls: worker.py --port 5467 # sets a unique port per agent
2. worker.py will begin listening on port 5467.
3. SMARTS connects to (ip, 5467) as a client.
4. SMARTS calls `build()` rpc with `AgentSpec` as input.
5. worker.py receives the `AgentSpec` instances and builds the Agent.
6. SMARTS calls `act()` rpc with observation as input and receives the actions as response from worker.py.
"""
import argparse
import importlib
import logging
import os
import signal
import sys
from concurrent import futures
import grpc
from smarts.zoo import worker_pb2_grpc, worker_servicer
# Front-load some expensive imports as to not block the simulation
modules = [
"smarts.core.utils.pybullet",
"smarts.core.utils.sumo",
"smarts.core.sumo_road_network",
"numpy",
"sklearn",
"shapely",
"scipy",
"trimesh",
"panda3d",
"gym",
"ray",
]
for mod in modules:
try:
importlib.import_module(mod)
except ImportError:
if mod == "ray":
print(
"You need to install the ray dependency using pip install -e .[train] first"
)
if mod == "panda3d":
print(
"You need to install the panda3d dependency using pip install -e .[camera-obs] first"
)
pass
# End front-loaded imports
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(f"worker.py - pid({os.getpid()})")
def serve(port):
ip = "[::]"
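# A single-threaded gRPC server is sufficient here: each worker process serves exactly one agent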
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
worker_pb2_grpc.add_WorkerServicer_to_server(
worker_servicer.WorkerServicer(), server
)
server.add_insecure_port(f"{ip}:{port}")
server.start()
log.debug(f"Worker - ip({ip}), port({port}), pid({os.getpid()}): Started serving.")
def stop_server(unused_signum, unused_frame):
server.stop(0)
log.debug(
f"Worker - ip({ip}), port({port}), pid({os.getpid()}): Received interrupt signal."
)
# Catch keyboard interrupt and terminate signal
signal.signal(signal.SIGINT, stop_server)
signal.signal(signal.SIGTERM, stop_server)
# Wait to receive server termination signal
server.wait_for_termination()
log.debug(f"Worker - ip({ip}), port({port}), pid({os.getpid()}): Server exited")
if __name__ == "__main__":
parser = argparse.ArgumentParser("Run an agent in an independent process.")
parser.add_argument(
"--port",
type=int,
required=True,
help="Port to listen for remote client connections.",
)
args = parser.parse_args()
serve(args.port)
| [
"logging.basicConfig",
"signal.signal",
"importlib.import_module",
"argparse.ArgumentParser",
"concurrent.futures.ThreadPoolExecutor",
"os.getpid",
"smarts.zoo.worker_servicer.WorkerServicer"
] | [((2917, 2956), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (2936, 2956), False, 'import logging\n'), ((3631, 3672), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'stop_server'], {}), '(signal.SIGINT, stop_server)\n', (3644, 3672), False, 'import signal\n'), ((3677, 3719), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'stop_server'], {}), '(signal.SIGTERM, stop_server)\n', (3690, 3719), False, 'import signal\n'), ((3930, 3996), 'argparse.ArgumentParser', 'argparse.ArgumentParser', (['"""Run an agent in an independent process."""'], {}), "('Run an agent in an independent process.')\n", (3953, 3996), False, 'import argparse\n'), ((2506, 2534), 'importlib.import_module', 'importlib.import_module', (['mod'], {}), '(mod)\n', (2529, 2534), False, 'import importlib\n'), ((3076, 3117), 'concurrent.futures.ThreadPoolExecutor', 'futures.ThreadPoolExecutor', ([], {'max_workers': '(1)'}), '(max_workers=1)\n', (3102, 3117), False, 'from concurrent import futures\n'), ((3177, 3209), 'smarts.zoo.worker_servicer.WorkerServicer', 'worker_servicer.WorkerServicer', ([], {}), '()\n', (3207, 3209), False, 'from smarts.zoo import worker_pb2_grpc, worker_servicer\n'), ((3000, 3011), 'os.getpid', 'os.getpid', ([], {}), '()\n', (3009, 3011), False, 'import os\n'), ((3342, 3353), 'os.getpid', 'os.getpid', ([], {}), '()\n', (3351, 3353), False, 'import os\n'), ((3857, 3868), 'os.getpid', 'os.getpid', ([], {}), '()\n', (3866, 3868), False, 'import os\n'), ((3521, 3532), 'os.getpid', 'os.getpid', ([], {}), '()\n', (3530, 3532), False, 'import os\n')] |
# V0
import collections
class Solution(object):
def frequencySort(self, s):
count = collections.Counter(s)
count_dict = dict(count)
count_tuple_sorted = sorted(count_dict.items(), key=lambda kv : -kv[1])
res = ''
for item in count_tuple_sorted:
res += item[0] * item[1]
return res
# V0'
# IDEA : collections.Counter(s).most_common
class Solution(object):
def frequencySort(self, s):
return ''.join(c * t for c, t in collections.Counter(s).most_common())
# V1
# IDEA : SORT
# https://blog.csdn.net/fuxuemingzhu/article/details/79437548
import collections
class Solution(object):
def frequencySort(self, s):
"""
:type s: str
:rtype: str
"""
count = collections.Counter(s).most_common()
res = ''
for c, v in count:
res += c * v
return res
### Test case:
s=Solution()
assert s.frequencySort(['a','b','c','c']) == 'ccab'
assert s.frequencySort(['a']) == 'a'
assert s.frequencySort(['a','A','c','c']) == 'ccaA'
assert s.frequencySort(['c','c','c']) == 'ccc'
assert s.frequencySort([]) == ''
assert s.frequencySort(['','','']) == ''
# V1'
# http://bookshadow.com/weblog/2016/11/02/leetcode-sort-characters-by-frequency/
class Solution(object):
def frequencySort(self, s):
"""
:type s: str
:rtype: str
"""
return ''.join(c * t for c, t in collections.Counter(s).most_common())
# V2
import collections
class Solution(object):
def frequencySort(self, s):
# sort Counter by value
# https://stackoverflow.com/questions/20950650/how-to-sort-counter-by-value-python
s_freq_dict = collections.Counter(s).most_common()
output = ''
for i in range(len(s_freq_dict)):
output = output + (s_freq_dict[i][0]*s_freq_dict[i][1])
return output
# V2'
# Time: O(n)
# Space: O(n)
import collections
class Solution(object):
def frequencySort(self, s):
"""
:type s: str
:rtype: str
"""
freq = collections.defaultdict(int)
for c in s:
freq[c] += 1
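# Bucket by frequency: counts[f] concatenates every character that occurs exactly f times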
counts = [""] * (len(s)+1)
for c in freq:
counts[freq[c]] += c
result = ""
for count in reversed(range(len(counts))):  # iterate every bucket, highest frequency first
for c in counts[count]:
result += c * count
return result
| [
"collections.Counter",
"collections.defaultdict"
] | [((96, 118), 'collections.Counter', 'collections.Counter', (['s'], {}), '(s)\n', (115, 118), False, 'import collections\n'), ((2076, 2104), 'collections.defaultdict', 'collections.defaultdict', (['int'], {}), '(int)\n', (2099, 2104), False, 'import collections\n'), ((771, 793), 'collections.Counter', 'collections.Counter', (['s'], {}), '(s)\n', (790, 793), False, 'import collections\n'), ((1696, 1718), 'collections.Counter', 'collections.Counter', (['s'], {}), '(s)\n', (1715, 1718), False, 'import collections\n'), ((493, 515), 'collections.Counter', 'collections.Counter', (['s'], {}), '(s)\n', (512, 515), False, 'import collections\n'), ((1437, 1459), 'collections.Counter', 'collections.Counter', (['s'], {}), '(s)\n', (1456, 1459), False, 'import collections\n')] |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
from __future__ import absolute_import
import argparse
import logging
import signal
import sys
import typing
import grpc
from past.builtins import unicode
import apache_beam as beam
import apache_beam.transforms.combiners as combine
from apache_beam.coders import StrUtf8Coder
from apache_beam.pipeline import PipelineOptions
from apache_beam.portability.api import beam_expansion_api_pb2_grpc
from apache_beam.portability.api.external_transforms_pb2 import ExternalConfigurationPayload
from apache_beam.runners.portability import expansion_service
from apache_beam.transforms import ptransform
from apache_beam.transforms.external import ImplicitSchemaPayloadBuilder
from apache_beam.utils.thread_pool_executor import UnboundedThreadPoolExecutor
# This script provides an expansion service and example ptransforms for running
# external transform test cases. See external_test.py for details.
_LOGGER = logging.getLogger(__name__)
TEST_PREFIX_URN = "beam:transforms:xlang:test:prefix"
TEST_MULTI_URN = "beam:transforms:xlang:test:multi"
TEST_GBK_URN = "beam:transforms:xlang:test:gbk"
TEST_CGBK_URN = "beam:transforms:xlang:test:cgbk"
TEST_COMGL_URN = "beam:transforms:xlang:test:comgl"
TEST_COMPK_URN = "beam:transforms:xlang:test:compk"
TEST_FLATTEN_URN = "beam:transforms:xlang:test:flatten"
TEST_PARTITION_URN = "beam:transforms:xlang:test:partition"
@ptransform.PTransform.register_urn('beam:transforms:xlang:count', None)
class CountPerElementTransform(ptransform.PTransform):
def expand(self, pcoll):
return pcoll | combine.Count.PerElement()
def to_runner_api_parameter(self, unused_context):
return 'beam:transforms:xlang:count', None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return CountPerElementTransform()
@ptransform.PTransform.register_urn(
'beam:transforms:xlang:filter_less_than_eq', bytes)
class FilterLessThanTransform(ptransform.PTransform):
def __init__(self, payload):
self._payload = payload
def expand(self, pcoll):
return (
pcoll | beam.Filter(
lambda elem, target: elem <= target, int(ord(self._payload[0]))))
def to_runner_api_parameter(self, unused_context):
return (
'beam:transforms:xlang:filter_less_than', self._payload.encode('utf8'))
@staticmethod
def from_runner_api_parameter(unused_ptransform, payload, unused_context):
return FilterLessThanTransform(payload.decode('utf8'))
@ptransform.PTransform.register_urn(TEST_PREFIX_URN, None)
@beam.typehints.with_output_types(unicode)
class PrefixTransform(ptransform.PTransform):
def __init__(self, payload):
self._payload = payload
def expand(self, pcoll):
return pcoll | 'TestLabel' >> beam.Map(
lambda x: '{}{}'.format(self._payload, x))
def to_runner_api_parameter(self, unused_context):
return TEST_PREFIX_URN, ImplicitSchemaPayloadBuilder(
{'data': self._payload}).payload()
@staticmethod
def from_runner_api_parameter(unused_ptransform, payload, unused_context):
return PrefixTransform(parse_string_payload(payload)['data'])
@ptransform.PTransform.register_urn(TEST_MULTI_URN, None)
class MutltiTransform(ptransform.PTransform):
def expand(self, pcolls):
return {
'main': (pcolls['main1'], pcolls['main2'])
| beam.Flatten()
| beam.Map(lambda x, s: x + s, beam.pvalue.AsSingleton(
pcolls['side'])).with_output_types(unicode),
'side': pcolls['side']
| beam.Map(lambda x: x + x).with_output_types(unicode),
}
def to_runner_api_parameter(self, unused_context):
return TEST_MULTI_URN, None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return MutltiTransform()
@ptransform.PTransform.register_urn(TEST_GBK_URN, None)
class GBKTransform(ptransform.PTransform):
def expand(self, pcoll):
return pcoll | 'TestLabel' >> beam.GroupByKey()
def to_runner_api_parameter(self, unused_context):
return TEST_GBK_URN, None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return GBKTransform()
@ptransform.PTransform.register_urn(TEST_CGBK_URN, None)
class CoGBKTransform(ptransform.PTransform):
class ConcatFn(beam.DoFn):
def process(self, element):
(k, v) = element
return [(k, v['col1'] + v['col2'])]
def expand(self, pcoll):
return pcoll \
| beam.CoGroupByKey() \
| beam.ParDo(self.ConcatFn()).with_output_types(
typing.Tuple[int, typing.Iterable[unicode]])
def to_runner_api_parameter(self, unused_context):
return TEST_CGBK_URN, None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return CoGBKTransform()
@ptransform.PTransform.register_urn(TEST_COMGL_URN, None)
class CombineGloballyTransform(ptransform.PTransform):
def expand(self, pcoll):
return pcoll \
| beam.CombineGlobally(sum).with_output_types(int)
def to_runner_api_parameter(self, unused_context):
return TEST_COMGL_URN, None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return CombineGloballyTransform()
@ptransform.PTransform.register_urn(TEST_COMPK_URN, None)
class CombinePerKeyTransform(ptransform.PTransform):
def expand(self, pcoll):
return pcoll \
| beam.CombinePerKey(sum).with_output_types(
typing.Tuple[unicode, int])
def to_runner_api_parameter(self, unused_context):
return TEST_COMPK_URN, None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return CombinePerKeyTransform()
@ptransform.PTransform.register_urn(TEST_FLATTEN_URN, None)
class FlattenTransform(ptransform.PTransform):
def expand(self, pcoll):
return pcoll.values() | beam.Flatten().with_output_types(int)
def to_runner_api_parameter(self, unused_context):
return TEST_FLATTEN_URN, None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return FlattenTransform()
@ptransform.PTransform.register_urn(TEST_PARTITION_URN, None)
class PartitionTransform(ptransform.PTransform):
def expand(self, pcoll):
col1, col2 = pcoll | beam.Partition(
lambda elem, n: 0 if elem % 2 == 0 else 1, 2)
typed_col1 = col1 | beam.Map(lambda x: x).with_output_types(int)
typed_col2 = col2 | beam.Map(lambda x: x).with_output_types(int)
return {'0': typed_col1, '1': typed_col2}
def to_runner_api_parameter(self, unused_context):
return TEST_PARTITION_URN, None
@staticmethod
def from_runner_api_parameter(
unused_ptransform, unused_parameter, unused_context):
return PartitionTransform()
@ptransform.PTransform.register_urn('payload', bytes)
class PayloadTransform(ptransform.PTransform):
def __init__(self, payload):
self._payload = payload
def expand(self, pcoll):
return pcoll | beam.Map(lambda x, s: x + s, self._payload)
def to_runner_api_parameter(self, unused_context):
return b'payload', self._payload.encode('ascii')
@staticmethod
def from_runner_api_parameter(unused_ptransform, payload, unused_context):
return PayloadTransform(payload.decode('ascii'))
@ptransform.PTransform.register_urn('fib', bytes)
class FibTransform(ptransform.PTransform):
def __init__(self, level):
self._level = level
def expand(self, p):
if self._level <= 2:
return p | beam.Create([1])
else:
a = p | 'A' >> beam.ExternalTransform(
'fib',
str(self._level - 1).encode('ascii'),
expansion_service.ExpansionServiceServicer())
b = p | 'B' >> beam.ExternalTransform(
'fib',
str(self._level - 2).encode('ascii'),
expansion_service.ExpansionServiceServicer())
return ((a, b)
| beam.Flatten()
| beam.CombineGlobally(sum).without_defaults())
def to_runner_api_parameter(self, unused_context):
return 'fib', str(self._level).encode('ascii')
@staticmethod
def from_runner_api_parameter(unused_ptransform, level, unused_context):
return FibTransform(int(level.decode('ascii')))
def parse_string_payload(input_byte):
payload = ExternalConfigurationPayload()
payload.ParseFromString(input_byte)
coder = StrUtf8Coder()
return {
k: coder.decode_nested(v.payload)
for k,
v in payload.configuration.items()
}
server = None
def cleanup(unused_signum, unused_frame):
_LOGGER.info('Shutting down expansion service.')
server.stop(None)
def main(unused_argv):
parser = argparse.ArgumentParser()
parser.add_argument(
'-p', '--port', type=int, help='port on which to serve the job api')
options = parser.parse_args()
global server
server = grpc.server(UnboundedThreadPoolExecutor())
beam_expansion_api_pb2_grpc.add_ExpansionServiceServicer_to_server(
expansion_service.ExpansionServiceServicer(
PipelineOptions(
["--experiments", "beam_fn_api", "--sdk_location", "container"])),
server)
server.add_insecure_port('localhost:{}'.format(options.port))
server.start()
_LOGGER.info('Listening for expansion requests at %d', options.port)
signal.signal(signal.SIGTERM, cleanup)
signal.signal(signal.SIGINT, cleanup)
# blocking main thread forever.
signal.pause()
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
main(sys.argv)
| [
"logging.getLogger",
"apache_beam.transforms.external.ImplicitSchemaPayloadBuilder",
"apache_beam.CombineGlobally",
"apache_beam.Flatten",
"apache_beam.portability.api.external_transforms_pb2.ExternalConfigurationPayload",
"apache_beam.CombinePerKey",
"argparse.ArgumentParser",
"apache_beam.Partition",
"apache_beam.pipeline.PipelineOptions",
"apache_beam.coders.StrUtf8Coder",
"apache_beam.transforms.ptransform.PTransform.register_urn",
"apache_beam.utils.thread_pool_executor.UnboundedThreadPoolExecutor",
"apache_beam.typehints.with_output_types",
"signal.pause",
"apache_beam.transforms.combiners.Count.PerElement",
"apache_beam.Map",
"apache_beam.runners.portability.expansion_service.ExpansionServiceServicer",
"signal.signal",
"apache_beam.CoGroupByKey",
"apache_beam.pvalue.AsSingleton",
"apache_beam.GroupByKey",
"apache_beam.Create"
] | [((1714, 1741), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1731, 1741), False, 'import logging\n'), ((2170, 2241), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['"""beam:transforms:xlang:count"""', 'None'], {}), "('beam:transforms:xlang:count', None)\n", (2204, 2241), False, 'from apache_beam.transforms import ptransform\n'), ((2622, 2712), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['"""beam:transforms:xlang:filter_less_than_eq"""', 'bytes'], {}), "('beam:transforms:xlang:filter_less_than_eq',\n bytes)\n", (2656, 2712), False, 'from apache_beam.transforms import ptransform\n'), ((3278, 3335), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_PREFIX_URN', 'None'], {}), '(TEST_PREFIX_URN, None)\n', (3312, 3335), False, 'from apache_beam.transforms import ptransform\n'), ((3337, 3378), 'apache_beam.typehints.with_output_types', 'beam.typehints.with_output_types', (['unicode'], {}), '(unicode)\n', (3369, 3378), True, 'import apache_beam as beam\n'), ((3925, 3981), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_MULTI_URN', 'None'], {}), '(TEST_MULTI_URN, None)\n', (3959, 3981), False, 'from apache_beam.transforms import ptransform\n'), ((4595, 4649), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_GBK_URN', 'None'], {}), '(TEST_GBK_URN, None)\n', (4629, 4649), False, 'from apache_beam.transforms import ptransform\n'), ((4995, 5050), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_CGBK_URN', 'None'], {}), '(TEST_CGBK_URN, None)\n', (5029, 5050), False, 'from apache_beam.transforms import ptransform\n'), ((5650, 5706), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_COMGL_URN', 'None'], {}), '(TEST_COMGL_URN, None)\n', (5684, 5706), False, 'from apache_beam.transforms import ptransform\n'), ((6107, 6163), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_COMPK_URN', 'None'], {}), '(TEST_COMPK_URN, None)\n', (6141, 6163), False, 'from apache_beam.transforms import ptransform\n'), ((6597, 6655), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_FLATTEN_URN', 'None'], {}), '(TEST_FLATTEN_URN, None)\n', (6631, 6655), False, 'from apache_beam.transforms import ptransform\n'), ((7027, 7087), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['TEST_PARTITION_URN', 'None'], {}), '(TEST_PARTITION_URN, None)\n', (7061, 7087), False, 'from apache_beam.transforms import ptransform\n'), ((7678, 7730), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['"""payload"""', 'bytes'], {}), "('payload', bytes)\n", (7712, 7730), False, 'from apache_beam.transforms import ptransform\n'), ((8185, 8233), 'apache_beam.transforms.ptransform.PTransform.register_urn', 'ptransform.PTransform.register_urn', (['"""fib"""', 'bytes'], {}), "('fib', bytes)\n", (8219, 8233), False, 'from apache_beam.transforms import ptransform\n'), ((9170, 9200), 'apache_beam.portability.api.external_transforms_pb2.ExternalConfigurationPayload', 'ExternalConfigurationPayload', ([], {}), '()\n', 
(9198, 9200), False, 'from apache_beam.portability.api.external_transforms_pb2 import ExternalConfigurationPayload\n'), ((9249, 9263), 'apache_beam.coders.StrUtf8Coder', 'StrUtf8Coder', ([], {}), '()\n', (9261, 9263), False, 'from apache_beam.coders import StrUtf8Coder\n'), ((9540, 9565), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (9563, 9565), False, 'import argparse\n'), ((10163, 10201), 'signal.signal', 'signal.signal', (['signal.SIGTERM', 'cleanup'], {}), '(signal.SIGTERM, cleanup)\n', (10176, 10201), False, 'import signal\n'), ((10204, 10241), 'signal.signal', 'signal.signal', (['signal.SIGINT', 'cleanup'], {}), '(signal.SIGINT, cleanup)\n', (10217, 10241), False, 'import signal\n'), ((10278, 10292), 'signal.pause', 'signal.pause', ([], {}), '()\n', (10290, 10292), False, 'import signal\n'), ((9735, 9764), 'apache_beam.utils.thread_pool_executor.UnboundedThreadPoolExecutor', 'UnboundedThreadPoolExecutor', ([], {}), '()\n', (9762, 9764), False, 'from apache_beam.utils.thread_pool_executor import UnboundedThreadPoolExecutor\n'), ((2343, 2369), 'apache_beam.transforms.combiners.Count.PerElement', 'combine.Count.PerElement', ([], {}), '()\n', (2367, 2369), True, 'import apache_beam.transforms.combiners as combine\n'), ((7189, 7249), 'apache_beam.Partition', 'beam.Partition', (['(lambda elem, n: 0 if elem % 2 == 0 else 1)', '(2)'], {}), '(lambda elem, n: 0 if elem % 2 == 0 else 1, 2)\n', (7203, 7249), True, 'import apache_beam as beam\n'), ((7884, 7927), 'apache_beam.Map', 'beam.Map', (['(lambda x, s: x + s)', 'self._payload'], {}), '(lambda x, s: x + s, self._payload)\n', (7892, 7927), True, 'import apache_beam as beam\n'), ((9896, 9981), 'apache_beam.pipeline.PipelineOptions', 'PipelineOptions', (["['--experiments', 'beam_fn_api', '--sdk_location', 'container']"], {}), "(['--experiments', 'beam_fn_api', '--sdk_location', 'container']\n )\n", (9911, 9981), False, 'from apache_beam.pipeline import PipelineOptions\n'), ((10324, 10343), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (10341, 10343), False, 'import logging\n'), ((4754, 4771), 'apache_beam.GroupByKey', 'beam.GroupByKey', ([], {}), '()\n', (4769, 4771), True, 'import apache_beam as beam\n'), ((5282, 5301), 'apache_beam.CoGroupByKey', 'beam.CoGroupByKey', ([], {}), '()\n', (5299, 5301), True, 'import apache_beam as beam\n'), ((8396, 8412), 'apache_beam.Create', 'beam.Create', (['[1]'], {}), '([1])\n', (8407, 8412), True, 'import apache_beam as beam\n'), ((3689, 3742), 'apache_beam.transforms.external.ImplicitSchemaPayloadBuilder', 'ImplicitSchemaPayloadBuilder', (["{'data': self._payload}"], {}), "({'data': self._payload})\n", (3717, 3742), False, 'from apache_beam.transforms.external import ImplicitSchemaPayloadBuilder\n'), ((4130, 4144), 'apache_beam.Flatten', 'beam.Flatten', ([], {}), '()\n', (4142, 4144), True, 'import apache_beam as beam\n'), ((5821, 5846), 'apache_beam.CombineGlobally', 'beam.CombineGlobally', (['sum'], {}), '(sum)\n', (5841, 5846), True, 'import apache_beam as beam\n'), ((6276, 6299), 'apache_beam.CombinePerKey', 'beam.CombinePerKey', (['sum'], {}), '(sum)\n', (6294, 6299), True, 'import apache_beam as beam\n'), ((6758, 6772), 'apache_beam.Flatten', 'beam.Flatten', ([], {}), '()\n', (6770, 6772), True, 'import apache_beam as beam\n'), ((7283, 7304), 'apache_beam.Map', 'beam.Map', (['(lambda x: x)'], {}), '(lambda x: x)\n', (7291, 7304), True, 'import apache_beam as beam\n'), ((7352, 7373), 'apache_beam.Map', 'beam.Map', (['(lambda x: x)'], {}), '(lambda x: x)\n', 
(7360, 7373), True, 'import apache_beam as beam\n'), ((8792, 8806), 'apache_beam.Flatten', 'beam.Flatten', ([], {}), '()\n', (8804, 8806), True, 'import apache_beam as beam\n'), ((4307, 4332), 'apache_beam.Map', 'beam.Map', (['(lambda x: x + x)'], {}), '(lambda x: x + x)\n', (4315, 4332), True, 'import apache_beam as beam\n'), ((8543, 8587), 'apache_beam.runners.portability.expansion_service.ExpansionServiceServicer', 'expansion_service.ExpansionServiceServicer', ([], {}), '()\n', (8585, 8587), False, 'from apache_beam.runners.portability import expansion_service\n'), ((8709, 8753), 'apache_beam.runners.portability.expansion_service.ExpansionServiceServicer', 'expansion_service.ExpansionServiceServicer', ([], {}), '()\n', (8751, 8753), False, 'from apache_beam.runners.portability import expansion_service\n'), ((8823, 8848), 'apache_beam.CombineGlobally', 'beam.CombineGlobally', (['sum'], {}), '(sum)\n', (8843, 8848), True, 'import apache_beam as beam\n'), ((4184, 4223), 'apache_beam.pvalue.AsSingleton', 'beam.pvalue.AsSingleton', (["pcolls['side']"], {}), "(pcolls['side'])\n", (4207, 4223), True, 'import apache_beam as beam\n')] |
"""Calculation history Class"""
from calc.calculations.addition import Addition
from calc.calculations.subtraction import Subtraction
from calc.calculations.multiplication import Multiplication
from calc.calculations.division import Division
class Calculations:
"""Calculations class manages the history of calculations"""
history = []
# pylint: disable=too-few-public-methods
@staticmethod
def clear_history():
"""clear the history of calculations"""
Calculations.history.clear()
return True
@staticmethod
def count_history():
"""get number of items in history"""
return len(Calculations.history)
@staticmethod
def get_last_calculation_object():
"""get last calculation"""
return Calculations.history[-1]
@staticmethod
def get_last_calculation_result_value():
"""get last calculation"""
calculation = Calculations.get_last_calculation_object()
return calculation.get_result()
@staticmethod
def get_first_calculation():
"""get first calculation"""
return Calculations.history[0]
@staticmethod
def get_calculation(num):
""" get a specific calculation from history"""
return Calculations.history[num]
@staticmethod
def add_calculation(calculation):
""" get a generic calculation from history"""
return Calculations.history.append(calculation)
@staticmethod
def add_addition_calculation_to_history(values):
"""create an addition and add object to history using factory method create"""
Calculations.add_calculation(Addition.create(values))
#Get the result of the calculation
return True
@staticmethod
def add_subtraction_calculation_to_history(values):
"""create a subtraction object to history using factory method create"""
Calculations.add_calculation(Subtraction.create(values))
return True
@staticmethod
def add_multiplication_calculation_to_history(values):
"""Add a multiplication object to history using factory method create"""
Calculations.add_calculation(Multiplication.create(values))
return True
@staticmethod
def add_division_calculation_to_history(values):
"Add a division object to history using factory method create"
Calculations.add_calculation(Division.create(values))
return True | [
"calc.calculations.multiplication.Multiplication.create",
"calc.calculations.division.Division.create",
"calc.calculations.subtraction.Subtraction.create",
"calc.calculations.addition.Addition.create"
] | [((1632, 1655), 'calc.calculations.addition.Addition.create', 'Addition.create', (['values'], {}), '(values)\n', (1647, 1655), False, 'from calc.calculations.addition import Addition\n'), ((1912, 1938), 'calc.calculations.subtraction.Subtraction.create', 'Subtraction.create', (['values'], {}), '(values)\n', (1930, 1938), False, 'from calc.calculations.subtraction import Subtraction\n'), ((2155, 2184), 'calc.calculations.multiplication.Multiplication.create', 'Multiplication.create', (['values'], {}), '(values)\n', (2176, 2184), False, 'from calc.calculations.multiplication import Multiplication\n'), ((2385, 2408), 'calc.calculations.division.Division.create', 'Division.create', (['values'], {}), '(values)\n', (2400, 2408), False, 'from calc.calculations.division import Division\n')] |
# coding: utf-8
"""
Merlin
API Guide for accessing Merlin's model management, deployment, and serving functionalities # noqa: E501
OpenAPI spec version: 0.7.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class LogApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def logs_get(self, name, pod_name, namespace, cluster, **kwargs): # noqa: E501
"""Retrieve log from a container # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.logs_get(name, pod_name, namespace, cluster, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str pod_name: (required)
:param str namespace: (required)
:param str cluster: (required)
:param str follow:
:param str limit_bytes:
:param str pretty:
:param str previous:
:param str since_seconds:
:param str since_time:
:param str tail_lines:
:param str timestamps:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs) # noqa: E501
else:
(data) = self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs) # noqa: E501
return data
def logs_get_with_http_info(self, name, pod_name, namespace, cluster, **kwargs): # noqa: E501
"""Retrieve log from a container # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.logs_get_with_http_info(name, pod_name, namespace, cluster, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str pod_name: (required)
:param str namespace: (required)
:param str cluster: (required)
:param str follow:
:param str limit_bytes:
:param str pretty:
:param str previous:
:param str since_seconds:
:param str since_time:
:param str tail_lines:
:param str timestamps:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pod_name', 'namespace', 'cluster', 'follow', 'limit_bytes', 'pretty', 'previous', 'since_seconds', 'since_time', 'tail_lines', 'timestamps'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method logs_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `logs_get`") # noqa: E501
# verify the required parameter 'pod_name' is set
if ('pod_name' not in params or
params['pod_name'] is None):
raise ValueError("Missing the required parameter `pod_name` when calling `logs_get`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `logs_get`") # noqa: E501
# verify the required parameter 'cluster' is set
if ('cluster' not in params or
params['cluster'] is None):
raise ValueError("Missing the required parameter `cluster` when calling `logs_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'name' in params:
query_params.append(('name', params['name'])) # noqa: E501
if 'pod_name' in params:
query_params.append(('pod_name', params['pod_name'])) # noqa: E501
if 'namespace' in params:
query_params.append(('namespace', params['namespace'])) # noqa: E501
if 'cluster' in params:
query_params.append(('cluster', params['cluster'])) # noqa: E501
if 'follow' in params:
query_params.append(('follow', params['follow'])) # noqa: E501
if 'limit_bytes' in params:
query_params.append(('limit_bytes', params['limit_bytes'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'previous' in params:
query_params.append(('previous', params['previous'])) # noqa: E501
if 'since_seconds' in params:
query_params.append(('since_seconds', params['since_seconds'])) # noqa: E501
if 'since_time' in params:
query_params.append(('since_time', params['since_time'])) # noqa: E501
if 'tail_lines' in params:
query_params.append(('tail_lines', params['tail_lines'])) # noqa: E501
if 'timestamps' in params:
query_params.append(('timestamps', params['timestamps'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/logs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| [
"six.iteritems",
"client.api_client.ApiClient"
] | [((3490, 3521), 'six.iteritems', 'six.iteritems', (["params['kwargs']"], {}), "(params['kwargs'])\n", (3503, 3521), False, 'import six\n'), ((722, 733), 'client.api_client.ApiClient', 'ApiClient', ([], {}), '()\n', (731, 733), False, 'from client.api_client import ApiClient\n')] |
"""Define the NonlinearBlockJac class."""
from openmdao.recorders.recording_iteration_stack import Recording
from openmdao.solvers.solver import NonlinearSolver
from openmdao.utils.mpi import multi_proc_fail_check
class NonlinearBlockJac(NonlinearSolver):
"""
Nonlinear block Jacobi solver.
"""
SOLVER = 'NL: NLBJ'
def _single_iteration(self):
"""
Perform the operations in the iteration loop.
"""
system = self._system
self._solver_info.append_subsolver()
system._transfer('nonlinear', 'fwd')
with Recording('NonlinearBlockJac', 0, self) as rec:
# If this is a parallel group, check for analysis errors and reraise.
if len(system._subsystems_myproc) != len(system._subsystems_allprocs):
with multi_proc_fail_check(system.comm):
for subsys in system._subsystems_myproc:
subsys._solve_nonlinear()
else:
for subsys in system._subsystems_myproc:
subsys._solve_nonlinear()
system._check_child_reconf()
rec.abs = 0.0
rec.rel = 0.0
self._solver_info.pop()
def _mpi_print_header(self):
"""
Print header text before solving.
"""
if (self.options['iprint'] > 0):
pathname = self._system.pathname
if pathname:
nchar = len(pathname)
prefix = self._solver_info.prefix
header = prefix + "\n"
header += prefix + nchar * "=" + "\n"
header += prefix + pathname + "\n"
header += prefix + nchar * "="
print(header)
def _run_apply(self):
"""
Run the apply_nonlinear method on the system.
"""
system = self._system
# If this is a parallel group, check for analysis errors and reraise.
if len(system._subsystems_myproc) != len(system._subsystems_allprocs):
with multi_proc_fail_check(system.comm):
super(NonlinearBlockJac, self)._run_apply()
else:
super(NonlinearBlockJac, self)._run_apply()
| [
"openmdao.recorders.recording_iteration_stack.Recording",
"openmdao.utils.mpi.multi_proc_fail_check"
] | [((580, 619), 'openmdao.recorders.recording_iteration_stack.Recording', 'Recording', (['"""NonlinearBlockJac"""', '(0)', 'self'], {}), "('NonlinearBlockJac', 0, self)\n", (589, 619), False, 'from openmdao.recorders.recording_iteration_stack import Recording\n'), ((2041, 2075), 'openmdao.utils.mpi.multi_proc_fail_check', 'multi_proc_fail_check', (['system.comm'], {}), '(system.comm)\n', (2062, 2075), False, 'from openmdao.utils.mpi import multi_proc_fail_check\n'), ((815, 849), 'openmdao.utils.mpi.multi_proc_fail_check', 'multi_proc_fail_check', (['system.comm'], {}), '(system.comm)\n', (836, 849), False, 'from openmdao.utils.mpi import multi_proc_fail_check\n')] |
# Copyright (c) 2021 GradsFlow. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
from gradsflow.models import Model
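# Minimal Model used in tests: backward() is a no-op and the step methods return a constant loss of 1 and accuracy of 1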
class DummyModel(Model):
def __init__(self):
learner = torch.nn.Linear(1, 4)
super().__init__(learner)
def backward(self, loss: torch.Tensor):
return None
def train_step(self, batch):
return {"loss": torch.as_tensor(1), "metrics": {"accuracy": 1}}
def val_step(self, batch):
return {"loss": torch.as_tensor(1), "metrics": {"accuracy": 1}}
| [
"torch.as_tensor",
"torch.nn.Linear"
] | [((727, 748), 'torch.nn.Linear', 'torch.nn.Linear', (['(1)', '(4)'], {}), '(1, 4)\n', (742, 748), False, 'import torch\n'), ((906, 924), 'torch.as_tensor', 'torch.as_tensor', (['(1)'], {}), '(1)\n', (921, 924), False, 'import torch\n'), ((1010, 1028), 'torch.as_tensor', 'torch.as_tensor', (['(1)'], {}), '(1)\n', (1025, 1028), False, 'import torch\n')] |
# MemPool.py
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import logging
from lib.serialize import uint256_to_shortstr
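# Simple in-memory transaction pool keyed by transaction hash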
class MemPool(object):
def __init__(self):
self.pool = {}
# setup logging
logging.basicConfig(level=logging.DEBUG)
self.logger = logging.getLogger(__name__)
def add(self, tx):
tx.calc_sha256()
hash = tx.sha256
hashstr = uint256_to_shortstr(hash)
if hash in self.pool:
self.logger.info("MemPool.add(%s): already known" % (hashstr,))
return False
if not tx.is_valid():
self.logger.info("MemPool.add(%s): invalid TX" % (hashstr,))
return False
self.pool[hash] = tx
self.logger.info("MemPool.add(%s), poolsz %d" % (hashstr, len(self.pool)))
return True
def remove(self, hash):
if hash not in self.pool:
return False
del self.pool[hash]
return True
def size(self):
return len(self.pool)
| [
"logging.basicConfig",
"logging.getLogger",
"lib.serialize.uint256_to_shortstr"
] | [((302, 342), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (321, 342), False, 'import logging\n'), ((359, 386), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (376, 386), False, 'import logging\n'), ((458, 483), 'lib.serialize.uint256_to_shortstr', 'uint256_to_shortstr', (['hash'], {}), '(hash)\n', (477, 483), False, 'from lib.serialize import uint256_to_shortstr\n')] |
from django import forms
from django.core.validators import MinValueValidator, MinLengthValidator
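# Single-field forms; each widget is a Bootstrap-styled TextInput with a placeholder, guarded by a minimum-value or minimum-length validator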
class OriginForm(forms.Form):
origin_address = forms.CharField(validators=[MinLengthValidator(1)], widget=forms.TextInput(attrs={'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '123 Tech St, Silicon Valley, CA 00000'}))
class DestinationForm(forms.Form):
destination_address = forms.CharField(validators=[MinLengthValidator(1)], widget=forms.TextInput(attrs={'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '123 Tech St, Silicon Valley, CA 00000'}))
class GasPriceForm(forms.Form):
gas_price = forms.FloatField(validators=[MinValueValidator(0.01)], widget=forms.TextInput(attrs={'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '1.23'}))
class MpgForm(forms.Form):
mpg = forms.FloatField(validators=[MinValueValidator(0.01)], widget=forms.TextInput(attrs={'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '12'}))
class NumPeopleForm(forms.Form):
num_people = forms.IntegerField(validators=[MinValueValidator(1)], widget=forms.TextInput(attrs={'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '1 (default is 1 if left blank)'}))
class DistanceForm(forms.Form):
distance = forms.FloatField(validators=[MinValueValidator(0.01)], widget=forms.TextInput(attrs={'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '15.2'}))
| [
"django.core.validators.MinValueValidator",
"django.core.validators.MinLengthValidator",
"django.forms.TextInput"
] | [((209, 351), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder':\n '123 Tech St, Silicon Valley, CA 00000'}"}), "(attrs={'class': 'form-control', 'id':\n 'inlineFormInputGroup', 'placeholder':\n '123 Tech St, Silicon Valley, CA 00000'})\n", (224, 351), False, 'from django import forms\n'), ((466, 608), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder':\n '123 Tech St, Silicon Valley, CA 00000'}"}), "(attrs={'class': 'form-control', 'id':\n 'inlineFormInputGroup', 'placeholder':\n '123 Tech St, Silicon Valley, CA 00000'})\n", (481, 608), False, 'from django import forms\n'), ((713, 818), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '1.23'}"}), "(attrs={'class': 'form-control', 'id':\n 'inlineFormInputGroup', 'placeholder': '1.23'})\n", (728, 818), False, 'from django import forms\n'), ((916, 1019), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '12'}"}), "(attrs={'class': 'form-control', 'id':\n 'inlineFormInputGroup', 'placeholder': '12'})\n", (931, 1019), False, 'from django import forms\n'), ((1129, 1260), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder':\n '1 (default is 1 if left blank)'}"}), "(attrs={'class': 'form-control', 'id':\n 'inlineFormInputGroup', 'placeholder': '1 (default is 1 if left blank)'})\n", (1144, 1260), False, 'from django import forms\n'), ((1368, 1473), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'class': 'form-control', 'id': 'inlineFormInputGroup', 'placeholder': '15.2'}"}), "(attrs={'class': 'form-control', 'id':\n 'inlineFormInputGroup', 'placeholder': '15.2'})\n", (1383, 1473), False, 'from django import forms\n'), ((178, 199), 'django.core.validators.MinLengthValidator', 'MinLengthValidator', (['(1)'], {}), '(1)\n', (196, 199), False, 'from django.core.validators import MinValueValidator, MinLengthValidator\n'), ((435, 456), 'django.core.validators.MinLengthValidator', 'MinLengthValidator', (['(1)'], {}), '(1)\n', (453, 456), False, 'from django.core.validators import MinValueValidator, MinLengthValidator\n'), ((680, 703), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0.01)'], {}), '(0.01)\n', (697, 703), False, 'from django.core.validators import MinValueValidator, MinLengthValidator\n'), ((883, 906), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0.01)'], {}), '(0.01)\n', (900, 906), False, 'from django.core.validators import MinValueValidator, MinLengthValidator\n'), ((1099, 1119), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(1)'], {}), '(1)\n', (1116, 1119), False, 'from django.core.validators import MinValueValidator, MinLengthValidator\n'), ((1335, 1358), 'django.core.validators.MinValueValidator', 'MinValueValidator', (['(0.01)'], {}), '(0.01)\n', (1352, 1358), False, 'from django.core.validators import MinValueValidator, MinLengthValidator\n')] |
from django.contrib import admin
from . import models
class SupplierAdmin(admin.ModelAdmin):
list_display = ('supplier_name', 'contact', )
search_fields = ['supplier_name', 'contact', ]
admin.site.register(models.Suppliers, SupplierAdmin)
class InventoryUserAdmin(admin.ModelAdmin):
list_display = ('employee_name', 'user_type')
search_fields = ['employee_name', 'user_type']
list_filter = ("user_type",)
admin.site.register(models.InventoryUser, InventoryUserAdmin)
class ProductsAdmin(admin.ModelAdmin):
list_display = ('name', 'quantity', 'cost_price', 'selling_price',)
search_fields = ['name', 'quantity', 'cost_price', 'selling_price',]
list_filter = ("branch", "supplier",)
admin.site.register(models.Product, ProductsAdmin)
| [
"django.contrib.admin.site.register"
] | [((197, 249), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Suppliers', 'SupplierAdmin'], {}), '(models.Suppliers, SupplierAdmin)\n', (216, 249), False, 'from django.contrib import admin\n'), ((430, 491), 'django.contrib.admin.site.register', 'admin.site.register', (['models.InventoryUser', 'InventoryUserAdmin'], {}), '(models.InventoryUser, InventoryUserAdmin)\n', (449, 491), False, 'from django.contrib import admin\n'), ((720, 770), 'django.contrib.admin.site.register', 'admin.site.register', (['models.Product', 'ProductsAdmin'], {}), '(models.Product, ProductsAdmin)\n', (739, 770), False, 'from django.contrib import admin\n')] |
# -*- coding: utf-8 -*-
"""
Description of example
"""
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui, mkQApp
import numpy as np
app = mkQApp()
# win.setWindowTitle('pyqtgraph example: ____')
if __name__ == '__main__':
pg.exec()
| [
"pyqtgraph.Qt.mkQApp",
"pyqtgraph.exec"
] | [((152, 160), 'pyqtgraph.Qt.mkQApp', 'mkQApp', ([], {}), '()\n', (158, 160), False, 'from pyqtgraph.Qt import QtCore, QtGui, mkQApp\n'), ((242, 251), 'pyqtgraph.exec', 'pg.exec', ([], {}), '()\n', (249, 251), True, 'import pyqtgraph as pg\n')] |
# -*- encoding: utf-8 -*-
import multiprocessing as mp
import time
from pudb.remote import set_trace
def worker(worker_id):
""" Simple worker process"""
i = 0
while i < 10:
if worker_id == 1: # debug process with id 1
set_trace(term_size=(80, 24))
time.sleep(1) # represents some work
print('In Process {}, i:{}'.format(worker_id, i))
i = i + 1
if __name__ == '__main__':
processes = []
for p_id in range(2): # 2 worker processes
p = mp.Process(target=worker, args=(p_id,))
p.start()
processes.append(p)
for p in processes:
p.join()
| [
"multiprocessing.Process",
"pudb.remote.set_trace",
"time.sleep"
] | [((292, 305), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (302, 305), False, 'import time\n'), ((514, 553), 'multiprocessing.Process', 'mp.Process', ([], {'target': 'worker', 'args': '(p_id,)'}), '(target=worker, args=(p_id,))\n', (524, 553), True, 'import multiprocessing as mp\n'), ((254, 283), 'pudb.remote.set_trace', 'set_trace', ([], {'term_size': '(80, 24)'}), '(term_size=(80, 24))\n', (263, 283), False, 'from pudb.remote import set_trace\n')] |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Implements a frozen dictionary-like object"""
import collections
import copy
import common.memo as memo
class frozendict(collections.Mapping):
"""A frozen dictionary class"""
def __init__(self, *args, **kwargs):
self._data = dict(*args, **kwargs)
def __iter__(self):
return iter(self._data)
def __len__(self):
return len(self._data)
def __getitem__(self, key):
return self._data[key]
@memo.memo_i()
def __hash__(self):
return hash(self.itemtuple())
def __str__(self):
return str(self._data)
def __repr__(self):
return '%s(%s)' % (type(self).__name__, str(self))
def __eq__(self, other):
return self._data == other
def __ne__(self, other):
return not self == other
def __deepcopy__(self, _memo):
return copy.deepcopy(self._data)
@memo.memo_i()
def itemtuple(self):
return tuple(sorted(self.iteritems()))
def mutableDict(self):
"""
Returns a mutable dictionary copy, replacing 'frozendict' with 'dict's.
This function uses the 'copy.deepcopy' method to create a mutable deep copy
of the dictionary.
Note that due to the one-size-fits-all behavior of 'deepcopy', the result
can be anything from heavyhanded to incorrect depending on the contents of
the dictionary. The caller should make sure they understand the operation
and its behavior on all of the dictionary's subtypes before using it.
Returns: (dict) A mutable clone of the dictionary and its members.
"""
return copy.deepcopy(self)
def extend(self, **kwargs):
"""Returns a copy of this object with the 'kwargs' fields updated."""
ndata = self.mutableDict()
ndata.update(kwargs)
return type(self)(**ndata)
| [
"common.memo.memo_i",
"copy.deepcopy"
] | [((587, 600), 'common.memo.memo_i', 'memo.memo_i', ([], {}), '()\n', (598, 600), True, 'import common.memo as memo\n'), ((975, 988), 'common.memo.memo_i', 'memo.memo_i', ([], {}), '()\n', (986, 988), True, 'import common.memo as memo\n'), ((945, 970), 'copy.deepcopy', 'copy.deepcopy', (['self._data'], {}), '(self._data)\n', (958, 970), False, 'import copy\n'), ((1670, 1689), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (1683, 1689), False, 'import copy\n')] |
# -*- coding: utf-8 -*-
"""Utilities common to CIFAR10 and CIFAR100 datasets.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import sys
from six.moves import cPickle
def load_batch(fpath, label_key='labels'):
"""Internal utility for parsing CIFAR data.
# Arguments
fpath: path the file to parse.
label_key: key for label data in the retrieve
dictionary.
# Returns
A tuple `(data, labels)`.
"""
with open(fpath, 'rb') as f:
if sys.version_info < (3,):
d = cPickle.load(f)
else:
d = cPickle.load(f, encoding='bytes')
# decode utf8
d_decoded = {}
for k, v in d.items():
d_decoded[k.decode('utf8')] = v
d = d_decoded
data = d['data']
labels = d[label_key]
data = data.reshape(data.shape[0], 3, 32, 32)
return data, labels | [
"six.moves.cPickle.load"
] | [((602, 617), 'six.moves.cPickle.load', 'cPickle.load', (['f'], {}), '(f)\n', (614, 617), False, 'from six.moves import cPickle\n'), ((648, 681), 'six.moves.cPickle.load', 'cPickle.load', (['f'], {'encoding': '"""bytes"""'}), "(f, encoding='bytes')\n", (660, 681), False, 'from six.moves import cPickle\n')] |
from core.advbase import *
from slot.d import *
def module():
return Luther
class Luther(Adv):
a1 = ('cc',0.10,'hit15')
conf = {}
conf ['slots.d'] = Leviathan()
conf['acl'] = """
`dragon
`s1
`s2, seq=5 and cancel
`s3, seq=5 and cancel or fsc
`fs, seq=5
"""
coab = ['Blade', 'Xander', 'Tiki']
if __name__ == '__main__':
from core.simulate import test_with_argv
test_with_argv(None, *sys.argv) | [
"core.simulate.test_with_argv"
] | [((440, 471), 'core.simulate.test_with_argv', 'test_with_argv', (['None', '*sys.argv'], {}), '(None, *sys.argv)\n', (454, 471), False, 'from core.simulate import test_with_argv\n')] |
import hashlib
from io import BytesIO
import logging
import os
from typing import Any, cast, Dict, List, Optional, Sequence, Type, TYPE_CHECKING, Union
from pkg_resources import parse_version
import wandb
from wandb import util
from ._private import MEDIA_TMP
from .base_types.media import BatchableMedia, Media
from .helper_types.bounding_boxes_2d import BoundingBoxes2D
from .helper_types.classes import Classes
from .helper_types.image_mask import ImageMask
if TYPE_CHECKING: # pragma: no cover
import matplotlib # type: ignore
import numpy as np # type: ignore
import PIL # type: ignore
import torch # type: ignore
from wandb.apis.public import Artifact as PublicArtifact
from ..wandb_artifacts import Artifact as LocalArtifact
from ..wandb_run import Run as LocalRun
ImageDataType = Union[
"matplotlib.artist.Artist", "PIL.Image", "TorchTensorType", "np.ndarray"
]
ImageDataOrPathType = Union[str, "Image", ImageDataType]
TorchTensorType = Union["torch.Tensor", "torch.Variable"]
def _server_accepts_image_filenames() -> bool:
# Newer versions of wandb accept large image filenames arrays
# but older versions would have issues with this.
max_cli_version = util._get_max_cli_version()
if max_cli_version is None:
return False
return parse_version("0.12.10") <= parse_version(max_cli_version)
class Image(BatchableMedia):
"""Format images for logging to W&B.
Arguments:
data_or_path: (numpy array, string, io) Accepts numpy array of
image data, or a PIL image. The class attempts to infer
the data format and converts it.
mode: (string) The PIL mode for an image. Most common are "L", "RGB",
"RGBA". Full explanation at https://pillow.readthedocs.io/en/4.2.x/handbook/concepts.html#concept-modes.
caption: (string) Label for display of image.
Examples:
### Create a wandb.Image from a numpy array
<!--yeadoc-test:log-image-numpy->
```python
import numpy as np
import wandb
wandb.init()
examples = []
for i in range(3):
pixels = np.random.randint(low=0, high=256, size=(100, 100, 3))
image = wandb.Image(pixels, caption=f"random field {i}")
examples.append(image)
wandb.log({"examples": examples})
```
### Create a wandb.Image from a PILImage
<!--yeadoc-test:log-image-pil->
```python
import numpy as np
from PIL import Image as PILImage
import wandb
wandb.init()
examples = []
for i in range(3):
pixels = np.random.randint(low=0, high=256, size=(100, 100, 3), dtype=np.uint8)
pil_image = PILImage.fromarray(pixels, mode="RGB")
image = wandb.Image(pil_image, caption=f"random field {i}")
examples.append(image)
wandb.log({"examples": examples})
```
"""
MAX_ITEMS = 108
# PIL limit
MAX_DIMENSION = 65500
_log_type = "image-file"
format: Optional[str]
_grouping: Optional[int]
_caption: Optional[str]
_width: Optional[int]
_height: Optional[int]
_image: Optional["PIL.Image"]
_classes: Optional["Classes"]
_boxes: Optional[Dict[str, "BoundingBoxes2D"]]
_masks: Optional[Dict[str, "ImageMask"]]
def __init__(
self,
data_or_path: "ImageDataOrPathType",
mode: Optional[str] = None,
caption: Optional[str] = None,
grouping: Optional[int] = None,
classes: Optional[Union["Classes", Sequence[dict]]] = None,
boxes: Optional[Union[Dict[str, "BoundingBoxes2D"], Dict[str, dict]]] = None,
masks: Optional[Union[Dict[str, "ImageMask"], Dict[str, dict]]] = None,
) -> None:
super(Image, self).__init__()
# TODO: We should remove grouping, it's a terrible name and I don't
# think anyone uses it.
self._grouping = None
self._caption = None
self._width = None
self._height = None
self._image = None
self._classes = None
self._boxes = None
self._masks = None
# Allows the user to pass an Image object as the first parameter and have a perfect copy,
# only overriding additional metdata passed in. If this pattern is compelling, we can generalize.
if isinstance(data_or_path, Image):
self._initialize_from_wbimage(data_or_path)
elif isinstance(data_or_path, str):
self._initialize_from_path(data_or_path)
else:
self._initialize_from_data(data_or_path, mode)
self._set_initialization_meta(grouping, caption, classes, boxes, masks)
def _set_initialization_meta(
self,
grouping: Optional[int] = None,
caption: Optional[str] = None,
classes: Optional[Union["Classes", Sequence[dict]]] = None,
boxes: Optional[Union[Dict[str, "BoundingBoxes2D"], Dict[str, dict]]] = None,
masks: Optional[Union[Dict[str, "ImageMask"], Dict[str, dict]]] = None,
) -> None:
if grouping is not None:
self._grouping = grouping
if caption is not None:
self._caption = caption
total_classes = {}
if boxes:
if not isinstance(boxes, dict):
raise ValueError('Images "boxes" argument must be a dictionary')
boxes_final: Dict[str, BoundingBoxes2D] = {}
for key in boxes:
box_item = boxes[key]
if isinstance(box_item, BoundingBoxes2D):
boxes_final[key] = box_item
elif isinstance(box_item, dict):
# TODO: Consider injecting top-level classes if user-provided is empty
boxes_final[key] = BoundingBoxes2D(box_item, key)
total_classes.update(boxes_final[key]._class_labels)
self._boxes = boxes_final
if masks:
if not isinstance(masks, dict):
raise ValueError('Images "masks" argument must be a dictionary')
masks_final: Dict[str, ImageMask] = {}
for key in masks:
mask_item = masks[key]
if isinstance(mask_item, ImageMask):
masks_final[key] = mask_item
elif isinstance(mask_item, dict):
# TODO: Consider injecting top-level classes if user-provided is empty
masks_final[key] = ImageMask(mask_item, key)
if hasattr(masks_final[key], "_val"):
total_classes.update(masks_final[key]._val["class_labels"])
self._masks = masks_final
if classes is not None:
if isinstance(classes, Classes):
total_classes.update(
{val["id"]: val["name"] for val in classes._class_set}
)
else:
total_classes.update({val["id"]: val["name"] for val in classes})
if len(total_classes.keys()) > 0:
self._classes = Classes(
[
{"id": key, "name": total_classes[key]}
for key in total_classes.keys()
]
)
self._width, self._height = self.image.size # type: ignore
self._free_ram()
def _initialize_from_wbimage(self, wbimage: "Image") -> None:
self._grouping = wbimage._grouping
self._caption = wbimage._caption
self._width = wbimage._width
self._height = wbimage._height
self._image = wbimage._image
self._classes = wbimage._classes
self._path = wbimage._path
self._is_tmp = wbimage._is_tmp
self._extension = wbimage._extension
self._sha256 = wbimage._sha256
self._size = wbimage._size
self.format = wbimage.format
self._artifact_source = wbimage._artifact_source
self._artifact_target = wbimage._artifact_target
# We do not want to implicitly copy boxes or masks, just the image-related data.
# self._boxes = wbimage._boxes
# self._masks = wbimage._masks
def _initialize_from_path(self, path: str) -> None:
pil_image = util.get_module(
"PIL.Image",
required='wandb.Image needs the PIL package. To get it, run "pip install pillow".',
)
self._set_file(path, is_tmp=False)
self._image = pil_image.open(path)
self._image.load()
ext = os.path.splitext(path)[1][1:]
self.format = ext
def _initialize_from_data(self, data: "ImageDataType", mode: str = None,) -> None:
pil_image = util.get_module(
"PIL.Image",
required='wandb.Image needs the PIL package. To get it, run "pip install pillow".',
)
if util.is_matplotlib_typename(util.get_full_typename(data)):
buf = BytesIO()
util.ensure_matplotlib_figure(data).savefig(buf)
self._image = pil_image.open(buf)
elif isinstance(data, pil_image.Image):
self._image = data
elif util.is_pytorch_tensor_typename(util.get_full_typename(data)):
vis_util = util.get_module(
"torchvision.utils", "torchvision is required to render images"
)
if hasattr(data, "requires_grad") and data.requires_grad:
data = data.detach()
data = vis_util.make_grid(data, normalize=True)
self._image = pil_image.fromarray(
data.mul(255).clamp(0, 255).byte().permute(1, 2, 0).cpu().numpy()
)
else:
if hasattr(data, "numpy"): # TF data eager tensors
data = data.numpy()
if data.ndim > 2:
data = data.squeeze() # get rid of trivial dimensions as a convenience
self._image = pil_image.fromarray(
self.to_uint8(data), mode=mode or self.guess_mode(data)
)
tmp_path = os.path.join(MEDIA_TMP.name, str(util.generate_id()) + ".png")
self.format = "png"
self._image.save(tmp_path, transparency=None)
self._set_file(tmp_path, is_tmp=True)
@classmethod
def from_json(
cls: Type["Image"], json_obj: dict, source_artifact: "PublicArtifact"
) -> "Image":
classes = None
if json_obj.get("classes") is not None:
classes = source_artifact.get(json_obj["classes"]["path"])
masks = json_obj.get("masks")
_masks: Optional[Dict[str, ImageMask]] = None
if masks:
_masks = {}
for key in masks:
_masks[key] = ImageMask.from_json(masks[key], source_artifact)
_masks[key]._set_artifact_source(source_artifact)
_masks[key]._key = key
boxes = json_obj.get("boxes")
_boxes: Optional[Dict[str, BoundingBoxes2D]] = None
if boxes:
_boxes = {}
for key in boxes:
_boxes[key] = BoundingBoxes2D.from_json(boxes[key], source_artifact)
_boxes[key]._key = key
return cls(
source_artifact.get_path(json_obj["path"]).download(),
caption=json_obj.get("caption"),
grouping=json_obj.get("grouping"),
classes=classes,
boxes=_boxes,
masks=_masks,
)
@classmethod
def get_media_subdir(cls: Type["Image"]) -> str:
return os.path.join("media", "images")
def bind_to_run(
self,
run: "LocalRun",
key: Union[int, str],
step: Union[int, str],
id_: Optional[Union[int, str]] = None,
ignore_copy_err: Optional[bool] = None,
) -> None:
super().bind_to_run(run, key, step, id_, ignore_copy_err=ignore_copy_err)
if self._boxes is not None:
for i, k in enumerate(self._boxes):
id_ = "{}{}".format(id_, i) if id_ is not None else None
self._boxes[k].bind_to_run(
run, key, step, id_, ignore_copy_err=ignore_copy_err
)
if self._masks is not None:
for i, k in enumerate(self._masks):
id_ = "{}{}".format(id_, i) if id_ is not None else None
self._masks[k].bind_to_run(
run, key, step, id_, ignore_copy_err=ignore_copy_err
)
def to_json(self, run_or_artifact: Union["LocalRun", "LocalArtifact"]) -> dict:
json_dict = super(Image, self).to_json(run_or_artifact)
json_dict["_type"] = Image._log_type
json_dict["format"] = self.format
if self._width is not None:
json_dict["width"] = self._width
if self._height is not None:
json_dict["height"] = self._height
if self._grouping:
json_dict["grouping"] = self._grouping
if self._caption:
json_dict["caption"] = self._caption
if isinstance(run_or_artifact, wandb.wandb_sdk.wandb_artifacts.Artifact):
artifact = run_or_artifact
if (
self._masks is not None or self._boxes is not None
) and self._classes is None:
raise ValueError(
"classes must be passed to wandb.Image which have masks or bounding boxes when adding to artifacts"
)
if self._classes is not None:
class_id = hashlib.md5(
str(self._classes._class_set).encode("utf-8")
).hexdigest()
class_name = os.path.join("media", "classes", class_id + "_cls",)
classes_entry = artifact.add(self._classes, class_name)
json_dict["classes"] = {
"type": "classes-file",
"path": classes_entry.path,
"digest": classes_entry.digest,
}
elif not isinstance(run_or_artifact, wandb.wandb_sdk.wandb_run.Run):
raise ValueError("to_json accepts wandb_run.Run or wandb_artifact.Artifact")
if self._boxes:
json_dict["boxes"] = {
k: box.to_json(run_or_artifact) for (k, box) in self._boxes.items()
}
if self._masks:
json_dict["masks"] = {
k: mask.to_json(run_or_artifact) for (k, mask) in self._masks.items()
}
return json_dict
def guess_mode(self, data: "np.ndarray") -> str:
"""
Guess what type of image the np.array is representing
"""
# TODO: do we want to support dimensions being at the beginning of the array?
if data.ndim == 2:
return "L"
elif data.shape[-1] == 3:
return "RGB"
elif data.shape[-1] == 4:
return "RGBA"
else:
raise ValueError(
"Un-supported shape for image conversion %s" % list(data.shape)
)
@classmethod
def to_uint8(cls, data: "np.ndarray") -> "np.ndarray":
"""
Converts floating point image on the range [0,1] and integer images
on the range [0,255] to uint8, clipping if necessary.
"""
np = util.get_module(
"numpy",
required="wandb.Image requires numpy if not supplying PIL Images: pip install numpy",
)
# I think it's better to check the image range vs the data type, since many
# image libraries will return floats between 0 and 255
# some images have range -1...1 or 0-1
dmin = np.min(data)
if dmin < 0:
data = (data - np.min(data)) / np.ptp(data)
if np.max(data) <= 1.0:
data = (data * 255).astype(np.int32)
# assert issubclass(data.dtype.type, np.integer), 'Illegal image format.'
return data.clip(0, 255).astype(np.uint8)
@classmethod
def seq_to_json(
cls: Type["Image"],
seq: Sequence["BatchableMedia"],
run: "LocalRun",
key: str,
step: Union[int, str],
) -> dict:
"""
Combines a list of images into a meta dictionary object describing the child images.
"""
if TYPE_CHECKING:
seq = cast(Sequence["Image"], seq)
jsons = [obj.to_json(run) for obj in seq]
media_dir = cls.get_media_subdir()
for obj in jsons:
expected = util.to_forward_slash_path(media_dir)
if not obj["path"].startswith(expected):
raise ValueError(
"Files in an array of Image's must be in the {} directory, not {}".format(
cls.get_media_subdir(), obj["path"]
)
)
num_images_to_log = len(seq)
width, height = seq[0].image.size # type: ignore
format = jsons[0]["format"]
def size_equals_image(image: "Image") -> bool:
img_width, img_height = image.image.size # type: ignore
return img_width == width and img_height == height # type: ignore
sizes_match = all(size_equals_image(img) for img in seq)
if not sizes_match:
logging.warning(
"Images sizes do not match. This will causes images to be display incorrectly in the UI."
)
meta = {
"_type": "images/separated",
"width": width,
"height": height,
"format": format,
"count": num_images_to_log,
}
if _server_accepts_image_filenames():
meta["filenames"] = [obj["path"] for obj in jsons]
else:
wandb.termwarn(
"Unable to log image array filenames. In some cases, this can prevent images from being"
"viewed in the UI. Please upgrade your wandb server",
repeat=False,
)
captions = Image.all_captions(seq)
if captions:
meta["captions"] = captions
all_masks = Image.all_masks(seq, run, key, step)
if all_masks:
meta["all_masks"] = all_masks
all_boxes = Image.all_boxes(seq, run, key, step)
if all_boxes:
meta["all_boxes"] = all_boxes
return meta
@classmethod
def all_masks(
cls: Type["Image"],
images: Sequence["Image"],
run: "LocalRun",
run_key: str,
step: Union[int, str],
) -> Union[List[Optional[dict]], bool]:
all_mask_groups: List[Optional[dict]] = []
for image in images:
if image._masks:
mask_group = {}
for k in image._masks:
mask = image._masks[k]
mask_group[k] = mask.to_json(run)
all_mask_groups.append(mask_group)
else:
all_mask_groups.append(None)
if all_mask_groups and not all(x is None for x in all_mask_groups):
return all_mask_groups
else:
return False
@classmethod
def all_boxes(
cls: Type["Image"],
images: Sequence["Image"],
run: "LocalRun",
run_key: str,
step: Union[int, str],
) -> Union[List[Optional[dict]], bool]:
all_box_groups: List[Optional[dict]] = []
for image in images:
if image._boxes:
box_group = {}
for k in image._boxes:
box = image._boxes[k]
box_group[k] = box.to_json(run)
all_box_groups.append(box_group)
else:
all_box_groups.append(None)
if all_box_groups and not all(x is None for x in all_box_groups):
return all_box_groups
else:
return False
@classmethod
def all_captions(
cls: Type["Image"], images: Sequence["Media"]
) -> Union[bool, Sequence[Optional[str]]]:
return cls.captions(images)
def __ne__(self, other: object) -> bool:
return not self.__eq__(other)
def __eq__(self, other: object) -> bool:
if not isinstance(other, Image):
return False
else:
self_image = self.image
other_image = other.image
if self_image is not None:
self_image = list(self_image.getdata())
if other_image is not None:
other_image = list(other_image.getdata())
return (
self._grouping == other._grouping
and self._caption == other._caption
and self._width == other._width
and self._height == other._height
and self_image == other_image
and self._classes == other._classes
)
def to_data_array(self) -> List[Any]:
res = []
if self.image is not None:
data = list(self.image.getdata())
for i in range(self.image.height):
res.append(data[i * self.image.width : (i + 1) * self.image.width])
self._free_ram()
return res
def _free_ram(self) -> None:
if self._path is not None:
self._image = None
@property
def image(self) -> Optional["PIL.Image"]:
if self._image is None:
if self._path is not None:
pil_image = util.get_module(
"PIL.Image",
required='wandb.Image needs the PIL package. To get it, run "pip install pillow".',
)
self._image = pil_image.open(self._path)
self._image.load()
return self._image
| [
"wandb.util.get_full_typename",
"numpy.ptp",
"wandb.util.generate_id",
"os.path.join",
"io.BytesIO",
"logging.warning",
"wandb.util.get_module",
"numpy.max",
"os.path.splitext",
"pkg_resources.parse_version",
"wandb.util._get_max_cli_version",
"wandb.termwarn",
"wandb.util.ensure_matplotlib_figure",
"numpy.min",
"wandb.util.to_forward_slash_path",
"typing.cast"
] | [((1238, 1265), 'wandb.util._get_max_cli_version', 'util._get_max_cli_version', ([], {}), '()\n', (1263, 1265), False, 'from wandb import util\n'), ((1330, 1354), 'pkg_resources.parse_version', 'parse_version', (['"""0.12.10"""'], {}), "('0.12.10')\n", (1343, 1354), False, 'from pkg_resources import parse_version\n'), ((1358, 1388), 'pkg_resources.parse_version', 'parse_version', (['max_cli_version'], {}), '(max_cli_version)\n', (1371, 1388), False, 'from pkg_resources import parse_version\n'), ((8266, 8383), 'wandb.util.get_module', 'util.get_module', (['"""PIL.Image"""'], {'required': '"""wandb.Image needs the PIL package. To get it, run "pip install pillow"."""'}), '(\'PIL.Image\', required=\n \'wandb.Image needs the PIL package. To get it, run "pip install pillow".\')\n', (8281, 8383), False, 'from wandb import util\n'), ((8705, 8822), 'wandb.util.get_module', 'util.get_module', (['"""PIL.Image"""'], {'required': '"""wandb.Image needs the PIL package. To get it, run "pip install pillow"."""'}), '(\'PIL.Image\', required=\n \'wandb.Image needs the PIL package. To get it, run "pip install pillow".\')\n', (8720, 8822), False, 'from wandb import util\n'), ((11509, 11540), 'os.path.join', 'os.path.join', (['"""media"""', '"""images"""'], {}), "('media', 'images')\n", (11521, 11540), False, 'import os\n'), ((15244, 15364), 'wandb.util.get_module', 'util.get_module', (['"""numpy"""'], {'required': '"""wandb.Image requires numpy if not supplying PIL Images: pip install numpy"""'}), "('numpy', required=\n 'wandb.Image requires numpy if not supplying PIL Images: pip install numpy'\n )\n", (15259, 15364), False, 'from wandb import util\n'), ((15601, 15613), 'numpy.min', 'np.min', (['data'], {}), '(data)\n', (15607, 15613), True, 'import numpy as np\n'), ((8892, 8920), 'wandb.util.get_full_typename', 'util.get_full_typename', (['data'], {}), '(data)\n', (8914, 8920), False, 'from wandb import util\n'), ((8941, 8950), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (8948, 8950), False, 'from io import BytesIO\n'), ((15702, 15714), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (15708, 15714), True, 'import numpy as np\n'), ((16263, 16291), 'typing.cast', 'cast', (["Sequence['Image']", 'seq'], {}), "(Sequence['Image'], seq)\n", (16267, 16291), False, 'from typing import Any, cast, Dict, List, Optional, Sequence, Type, TYPE_CHECKING, Union\n'), ((16437, 16474), 'wandb.util.to_forward_slash_path', 'util.to_forward_slash_path', (['media_dir'], {}), '(media_dir)\n', (16463, 16474), False, 'from wandb import util\n'), ((17199, 17315), 'logging.warning', 'logging.warning', (['"""Images sizes do not match. This will causes images to be display incorrectly in the UI."""'], {}), "(\n 'Images sizes do not match. This will causes images to be display incorrectly in the UI.'\n )\n", (17214, 17315), False, 'import logging\n'), ((17668, 17846), 'wandb.termwarn', 'wandb.termwarn', (['"""Unable to log image array filenames. In some cases, this can prevent images from beingviewed in the UI. Please upgrade your wandb server"""'], {'repeat': '(False)'}), "(\n 'Unable to log image array filenames. In some cases, this can prevent images from beingviewed in the UI. 
Please upgrade your wandb server'\n , repeat=False)\n", (17682, 17846), False, 'import wandb\n'), ((8541, 8563), 'os.path.splitext', 'os.path.splitext', (['path'], {}), '(path)\n', (8557, 8563), False, 'import os\n'), ((13622, 13673), 'os.path.join', 'os.path.join', (['"""media"""', '"""classes"""', "(class_id + '_cls')"], {}), "('media', 'classes', class_id + '_cls')\n", (13634, 13673), False, 'import os\n'), ((15678, 15690), 'numpy.ptp', 'np.ptp', (['data'], {}), '(data)\n', (15684, 15690), True, 'import numpy as np\n'), ((21355, 21472), 'wandb.util.get_module', 'util.get_module', (['"""PIL.Image"""'], {'required': '"""wandb.Image needs the PIL package. To get it, run "pip install pillow"."""'}), '(\'PIL.Image\', required=\n \'wandb.Image needs the PIL package. To get it, run "pip install pillow".\')\n', (21370, 21472), False, 'from wandb import util\n'), ((8963, 8998), 'wandb.util.ensure_matplotlib_figure', 'util.ensure_matplotlib_figure', (['data'], {}), '(data)\n', (8992, 8998), False, 'from wandb import util\n'), ((9182, 9210), 'wandb.util.get_full_typename', 'util.get_full_typename', (['data'], {}), '(data)\n', (9204, 9210), False, 'from wandb import util\n'), ((9236, 9321), 'wandb.util.get_module', 'util.get_module', (['"""torchvision.utils"""', '"""torchvision is required to render images"""'], {}), "('torchvision.utils', 'torchvision is required to render images'\n )\n", (9251, 9321), False, 'from wandb import util\n'), ((10075, 10093), 'wandb.util.generate_id', 'util.generate_id', ([], {}), '()\n', (10091, 10093), False, 'from wandb import util\n'), ((15662, 15674), 'numpy.min', 'np.min', (['data'], {}), '(data)\n', (15668, 15674), True, 'import numpy as np\n')] |
#!/usr/bin/env python
"""Pi digits example
Example shows arbitrary precision using mpmath with the
computation of the digits of pi.
"""
from mpmath import libmp, pi
from mpmath import functions as mpf_funs
import math
from time import clock
import sys
def display_fraction(digits, skip=0, colwidth=10, columns=5):
"""Pretty printer for first n digits of a fraction"""
perline = colwidth * columns
printed = 0
for linecount in range((len(digits) - skip) // (colwidth * columns)):
line = digits[skip + linecount*perline:skip + (linecount + 1)*perline]
for i in range(columns):
print(line[i*colwidth: (i + 1)*colwidth],)
print(":", (linecount + 1)*perline)
if (linecount + 1) % 10 == 0:
print
printed += colwidth*columns
rem = (len(digits) - skip) % (colwidth * columns)
if rem:
buf = digits[-rem:]
s = ""
for i in range(columns):
s += buf[:colwidth].ljust(colwidth + 1, " ")
buf = buf[colwidth:]
print(s + ":", printed + colwidth*columns)
def calculateit(func, base, n, tofile):
"""Writes first n base-digits of a mpmath function to file"""
prec = 100
intpart = libmp.numeral(3, base)
if intpart == 0:
skip = 0
else:
skip = len(intpart)
print("Step 1 of 2: calculating binary value...")
prec = int(n*math.log(base, 2)) + 10
t = clock()
a = func(prec)
step1_time = clock() - t
print("Step 2 of 2: converting to specified base...")
t = clock()
d = libmp.bin_to_radix(a.man, -a.exp, base, n)
d = libmp.numeral(d, base, n)
step2_time = clock() - t
print("\nWriting output...\n")
if tofile:
out_ = sys.stdout
sys.stdout = tofile
print("%i base-%i digits of pi:\n" % (n, base))
print(intpart, ".\n")
display_fraction(d, skip, colwidth=10, columns=5)
if tofile:
sys.stdout = out_
print("\nFinished in %f seconds (%f calc, %f convert)" % \
((step1_time + step2_time), step1_time, step2_time))
def interactive():
"""Simple function to interact with user"""
print("Compute digits of pi with SymPy\n")
base = input("Which base? (2-36, 10 for decimal) \n> ")
digits = input("How many digits? (enter a big number, say, 10000)\n> ")
tofile = raw_input("Output to file? (enter a filename, or just press enter\nto print directly to the screen) \n> ")
if tofile:
tofile = open(tofile, "w")
calculateit(pi, base, digits, tofile)
def main():
"""A non-interactive runner"""
base = 16
digits = 500
tofile = None
calculateit(pi, base, digits, tofile)
if __name__ == "__main__":
interactive()
| [
"mpmath.libmp.numeral",
"mpmath.libmp.bin_to_radix",
"time.clock",
"math.log"
] | [((1224, 1246), 'mpmath.libmp.numeral', 'libmp.numeral', (['(3)', 'base'], {}), '(3, base)\n', (1237, 1246), False, 'from mpmath import libmp, pi\n'), ((1426, 1433), 'time.clock', 'clock', ([], {}), '()\n', (1431, 1433), False, 'from time import clock\n'), ((1548, 1555), 'time.clock', 'clock', ([], {}), '()\n', (1553, 1555), False, 'from time import clock\n'), ((1564, 1606), 'mpmath.libmp.bin_to_radix', 'libmp.bin_to_radix', (['a.man', '(-a.exp)', 'base', 'n'], {}), '(a.man, -a.exp, base, n)\n', (1582, 1606), False, 'from mpmath import libmp, pi\n'), ((1615, 1640), 'mpmath.libmp.numeral', 'libmp.numeral', (['d', 'base', 'n'], {}), '(d, base, n)\n', (1628, 1640), False, 'from mpmath import libmp, pi\n'), ((1470, 1477), 'time.clock', 'clock', ([], {}), '()\n', (1475, 1477), False, 'from time import clock\n'), ((1658, 1665), 'time.clock', 'clock', ([], {}), '()\n', (1663, 1665), False, 'from time import clock\n'), ((1394, 1411), 'math.log', 'math.log', (['base', '(2)'], {}), '(base, 2)\n', (1402, 1411), False, 'import math\n')] |
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import re
from hacking import core
import pycodestyle
PYTHON_CLIENTS = ['cinder', 'glance', 'keystone', 'nova', 'swift', 'neutron',
'ironic', 'heat', 'sahara']
PYTHON_CLIENT_RE = re.compile('import (%s)client' % '|'.join(PYTHON_CLIENTS))
TEST_DEFINITION = re.compile(r'^\s*def test.*')
SETUP_TEARDOWN_CLASS_DEFINITION = re.compile(r'^\s+def (setUp|tearDown)Class')
SCENARIO_DECORATOR = re.compile(r'\s*@.*services\((.*)\)')
RAND_NAME_HYPHEN_RE = re.compile(r".*rand_name\(.+[\-\_][\"\']\)")
mutable_default_args = re.compile(r"^\s*def .+\((.+=\{\}|.+=\[\])")
TESTTOOLS_SKIP_DECORATOR = re.compile(r'\s*@testtools\.skip\((.*)\)')
METHOD = re.compile(r"^ def .+")
METHOD_GET_RESOURCE = re.compile(r"^\s*def (list|show)\_.+")
METHOD_DELETE_RESOURCE = re.compile(r"^\s*def delete_.+")
CLASS = re.compile(r"^class .+")
EX_ATTRIBUTE = re.compile(r'(\s+|\()(e|ex|exc|exception).message(\s+|\))')
NEGATIVE_TEST_DECORATOR = re.compile(
r'\s*@decorators\.attr\(type=.*negative.*\)')
_HAVE_NEGATIVE_DECORATOR = False
@core.flake8ext
def import_no_clients_in_api_and_scenario_tests(physical_line, filename):
"""Check for client imports from tempest/api & tempest/scenario tests
T102: Cannot import OpenStack python clients
"""
if "tempest/api" in filename or "tempest/scenario" in filename:
res = PYTHON_CLIENT_RE.match(physical_line)
if res:
return (physical_line.find(res.group(1)),
("T102: python clients import not allowed"
" in tempest/api/* or tempest/scenario/* tests"))
@core.flake8ext
def scenario_tests_need_service_tags(physical_line, filename,
previous_logical):
"""Check that scenario tests have service tags
T104: Scenario tests require a services decorator
"""
if 'tempest/scenario/' in filename and '/test_' in filename:
if TEST_DEFINITION.match(physical_line):
if not SCENARIO_DECORATOR.match(previous_logical):
return (physical_line.find('def'),
"T104: Scenario tests require a service decorator")
@core.flake8ext
def no_setup_teardown_class_for_tests(physical_line, filename):
if pycodestyle.noqa(physical_line):
return
if 'tempest/test.py' in filename or 'tempest/lib/' in filename:
return
if SETUP_TEARDOWN_CLASS_DEFINITION.match(physical_line):
return (physical_line.find('def'),
"T105: (setUp|tearDown)Class can not be used in tests")
@core.flake8ext
def service_tags_not_in_module_path(physical_line, filename):
"""Check that a service tag isn't in the module path
A service tag should only be added if the service name isn't already in
the module path.
T107
"""
# NOTE(mtreinish) Scenario tests always need service tags, but subdirs are
# created for services like heat which would cause false negatives for
# those tests, so just exclude the scenario tests.
if 'tempest/scenario' not in filename:
matches = SCENARIO_DECORATOR.match(physical_line)
if matches:
services = matches.group(1).split(',')
for service in services:
service_name = service.strip().strip("'")
modulepath = os.path.split(filename)[0]
if service_name in modulepath:
return (physical_line.find(service_name),
"T107: service tag should not be in path")
@core.flake8ext
def no_hyphen_at_end_of_rand_name(logical_line, filename):
"""Check no hyphen at the end of rand_name() argument
T108
"""
msg = "T108: hyphen should not be specified at the end of rand_name()"
if RAND_NAME_HYPHEN_RE.match(logical_line):
return 0, msg
@core.flake8ext
def no_mutable_default_args(logical_line):
"""Check that mutable object isn't used as default argument
N322: Method's default argument shouldn't be mutable
"""
msg = "N322: Method's default argument shouldn't be mutable!"
if mutable_default_args.match(logical_line):
yield (0, msg)
@core.flake8ext
def no_testtools_skip_decorator(logical_line):
"""Check that methods do not have the testtools.skip decorator
T109
"""
if TESTTOOLS_SKIP_DECORATOR.match(logical_line):
yield (0, "T109: Cannot use testtools.skip decorator; instead use "
"decorators.skip_because from tempest.lib")
def _common_service_clients_check(logical_line, physical_line, filename,
ignored_list_file=None):
if not re.match('tempest/(lib/)?services/.*', filename):
return False
if ignored_list_file is not None:
ignored_list = []
with open('tempest/hacking/' + ignored_list_file) as f:
for line in f:
ignored_list.append(line.strip())
if filename in ignored_list:
return False
if not METHOD.match(physical_line):
return False
if pycodestyle.noqa(physical_line):
return False
return True
@core.flake8ext
def get_resources_on_service_clients(physical_line, logical_line, filename,
line_number, lines):
"""Check that service client names of GET should be consistent
T110
"""
if not _common_service_clients_check(logical_line, physical_line,
filename, 'ignored_list_T110.txt'):
return
for line in lines[line_number:]:
if METHOD.match(line) or CLASS.match(line):
# the end of a method
return
if 'self.get(' not in line and ('self.show_resource(' not in line and
'self.list_resources(' not in line):
continue
if METHOD_GET_RESOURCE.match(logical_line):
return
msg = ("T110: [GET /resources] methods should be list_<resource name>s"
" or show_<resource name>")
yield (0, msg)
@core.flake8ext
def delete_resources_on_service_clients(physical_line, logical_line, filename,
line_number, lines):
"""Check that service client names of DELETE should be consistent
T111
"""
if not _common_service_clients_check(logical_line, physical_line,
filename, 'ignored_list_T111.txt'):
return
for line in lines[line_number:]:
if METHOD.match(line) or CLASS.match(line):
# the end of a method
return
if 'self.delete(' not in line and 'self.delete_resource(' not in line:
continue
if METHOD_DELETE_RESOURCE.match(logical_line):
return
msg = ("T111: [DELETE /resources/<id>] methods should be "
"delete_<resource name>")
yield (0, msg)
@core.flake8ext
def dont_import_local_tempest_into_lib(logical_line, filename):
"""Check that tempest.lib should not import local tempest code
T112
"""
if 'tempest/lib/' not in filename:
return
if not ('from tempest' in logical_line or
'import tempest' in logical_line):
return
if ('from tempest.lib' in logical_line or
'import tempest.lib' in logical_line):
return
msg = ("T112: tempest.lib should not import local tempest code to avoid "
"circular dependency")
yield (0, msg)
@core.flake8ext
def use_rand_uuid_instead_of_uuid4(logical_line, filename):
"""Check that tests use data_utils.rand_uuid() instead of uuid.uuid4()
T113
"""
if 'tempest/lib/' in filename:
return
if 'uuid.uuid4()' not in logical_line:
return
msg = ("T113: Tests should use data_utils.rand_uuid()/rand_uuid_hex() "
"instead of uuid.uuid4()/uuid.uuid4().hex")
yield (0, msg)
@core.flake8ext
def dont_use_config_in_tempest_lib(logical_line, filename):
"""Check that tempest.lib doesn't use tempest config
T114
"""
if 'tempest/lib/' not in filename:
return
if ('tempest.config' in logical_line or
'from tempest import config' in logical_line or
'oslo_config' in logical_line):
msg = ('T114: tempest.lib can not have any dependency on tempest '
'config.')
yield(0, msg)
@core.flake8ext
def dont_put_admin_tests_on_nonadmin_path(logical_line,
filename):
"""Check admin tests should exist under admin path
T115
"""
if 'tempest/api/' not in filename:
return
if not re.match(r'class .*Test.*\(.*Admin.*\):', logical_line):
return
if not re.match(r'.\/tempest\/api\/.*\/admin\/.*', filename):
msg = 'T115: All admin tests should exist under admin path.'
yield(0, msg)
@core.flake8ext
def unsupported_exception_attribute_PY3(logical_line):
"""Check Unsupported 'message' exception attribute in PY3
T116
"""
result = EX_ATTRIBUTE.search(logical_line)
msg = ("[T116] Unsupported 'message' Exception attribute in PY3")
if result:
yield(0, msg)
@core.flake8ext
def negative_test_attribute_always_applied_to_negative_tests(physical_line,
filename):
"""Check ``@decorators.attr(type=['negative'])`` applied to negative tests.
T117
"""
global _HAVE_NEGATIVE_DECORATOR
if re.match(r'.\/tempest\/api\/.*_negative.*', filename):
if NEGATIVE_TEST_DECORATOR.match(physical_line):
_HAVE_NEGATIVE_DECORATOR = True
return
if TEST_DEFINITION.match(physical_line):
if not _HAVE_NEGATIVE_DECORATOR:
return (
0, "T117: Must apply `@decorators.attr(type=['negative'])`"
" to all negative API tests"
)
_HAVE_NEGATIVE_DECORATOR = False
| [
"os.path.split",
"re.match",
"pycodestyle.noqa",
"re.compile"
] | [((878, 907), 're.compile', 're.compile', (['"""^\\\\s*def test.*"""'], {}), "('^\\\\s*def test.*')\n", (888, 907), False, 'import re\n'), ((942, 986), 're.compile', 're.compile', (['"""^\\\\s+def (setUp|tearDown)Class"""'], {}), "('^\\\\s+def (setUp|tearDown)Class')\n", (952, 986), False, 'import re\n'), ((1008, 1047), 're.compile', 're.compile', (['"""\\\\s*@.*services\\\\((.*)\\\\)"""'], {}), "('\\\\s*@.*services\\\\((.*)\\\\)')\n", (1018, 1047), False, 'import re\n'), ((1068, 1118), 're.compile', 're.compile', (['""".*rand_name\\\\(.+[\\\\-\\\\_][\\\\"\\\\\']\\\\)"""'], {}), '(\'.*rand_name\\\\(.+[\\\\-\\\\_][\\\\"\\\\\\\']\\\\)\')\n', (1078, 1118), False, 'import re\n'), ((1136, 1185), 're.compile', 're.compile', (['"""^\\\\s*def .+\\\\((.+=\\\\{\\\\}|.+=\\\\[\\\\])"""'], {}), "('^\\\\s*def .+\\\\((.+=\\\\{\\\\}|.+=\\\\[\\\\])')\n", (1146, 1185), False, 'import re\n'), ((1208, 1253), 're.compile', 're.compile', (['"""\\\\s*@testtools\\\\.skip\\\\((.*)\\\\)"""'], {}), "('\\\\s*@testtools\\\\.skip\\\\((.*)\\\\)')\n", (1218, 1253), False, 'import re\n'), ((1260, 1285), 're.compile', 're.compile', (['"""^ def .+"""'], {}), "('^ def .+')\n", (1270, 1285), False, 'import re\n'), ((1309, 1348), 're.compile', 're.compile', (['"""^\\\\s*def (list|show)\\\\_.+"""'], {}), "('^\\\\s*def (list|show)\\\\_.+')\n", (1319, 1348), False, 'import re\n'), ((1373, 1405), 're.compile', 're.compile', (['"""^\\\\s*def delete_.+"""'], {}), "('^\\\\s*def delete_.+')\n", (1383, 1405), False, 'import re\n'), ((1414, 1437), 're.compile', 're.compile', (['"""^class .+"""'], {}), "('^class .+')\n", (1424, 1437), False, 'import re\n'), ((1454, 1516), 're.compile', 're.compile', (['"""(\\\\s+|\\\\()(e|ex|exc|exception).message(\\\\s+|\\\\))"""'], {}), "('(\\\\s+|\\\\()(e|ex|exc|exception).message(\\\\s+|\\\\))')\n", (1464, 1516), False, 'import re\n'), ((1540, 1599), 're.compile', 're.compile', (['"""\\\\s*@decorators\\\\.attr\\\\(type=.*negative.*\\\\)"""'], {}), "('\\\\s*@decorators\\\\.attr\\\\(type=.*negative.*\\\\)')\n", (1550, 1599), False, 'import re\n'), ((2829, 2860), 'pycodestyle.noqa', 'pycodestyle.noqa', (['physical_line'], {}), '(physical_line)\n', (2845, 2860), False, 'import pycodestyle\n'), ((5623, 5654), 'pycodestyle.noqa', 'pycodestyle.noqa', (['physical_line'], {}), '(physical_line)\n', (5639, 5654), False, 'import pycodestyle\n'), ((10089, 10144), 're.match', 're.match', (['""".\\\\/tempest\\\\/api\\\\/.*_negative.*"""', 'filename'], {}), "('.\\\\/tempest\\\\/api\\\\/.*_negative.*', filename)\n", (10097, 10144), False, 'import re\n'), ((5213, 5261), 're.match', 're.match', (['"""tempest/(lib/)?services/.*"""', 'filename'], {}), "('tempest/(lib/)?services/.*', filename)\n", (5221, 5261), False, 'import re\n'), ((9244, 9300), 're.match', 're.match', (['"""class .*Test.*\\\\(.*Admin.*\\\\):"""', 'logical_line'], {}), "('class .*Test.*\\\\(.*Admin.*\\\\):', logical_line)\n", (9252, 9300), False, 'import re\n'), ((9328, 9385), 're.match', 're.match', (['""".\\\\/tempest\\\\/api\\\\/.*\\\\/admin\\\\/.*"""', 'filename'], {}), "('.\\\\/tempest\\\\/api\\\\/.*\\\\/admin\\\\/.*', filename)\n", (9336, 9385), False, 'import re\n'), ((3896, 3919), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (3909, 3919), False, 'import os\n')] |
import SimpleXMLRPCServer
import sys
import logging
from K8055Controller import K8055Controller
logging.basicConfig()
controller_log = logging.getLogger("Controller")
class Controller:
def __init__(self):
self.k8055 = K8055Controller()
controller_log.debug("initialized")
def reset(self):
self.k8055.reset()
controller_log.debug("reset")
return 0
def turn_on(self, i):
self.k8055.turn_on(i)
controller_log.debug('turned on %i' % (i))
return 0
def turn_off(self, i):
self.k8055.turn_off(i)
controller_log.debug('turned off %i' % (i))
return 0
def set_analog(self, i, level):
if (i == 1):
self.k8055.set_analog1(level)
else:
self.k8055.set_analog2(level)
return 0
controller = Controller()
server = SimpleXMLRPCServer.SimpleXMLRPCServer(("d6349.mysql.zone.ee", 7000))
server.register_instance(controller)
server.serve_forever() | [
"logging.basicConfig",
"SimpleXMLRPCServer.SimpleXMLRPCServer",
"logging.getLogger",
"K8055Controller.K8055Controller"
] | [((98, 119), 'logging.basicConfig', 'logging.basicConfig', ([], {}), '()\n', (117, 119), False, 'import logging\n'), ((138, 169), 'logging.getLogger', 'logging.getLogger', (['"""Controller"""'], {}), "('Controller')\n", (155, 169), False, 'import logging\n'), ((890, 958), 'SimpleXMLRPCServer.SimpleXMLRPCServer', 'SimpleXMLRPCServer.SimpleXMLRPCServer', (["('d6349.mysql.zone.ee', 7000)"], {}), "(('d6349.mysql.zone.ee', 7000))\n", (927, 958), False, 'import SimpleXMLRPCServer\n'), ((234, 251), 'K8055Controller.K8055Controller', 'K8055Controller', ([], {}), '()\n', (249, 251), False, 'from K8055Controller import K8055Controller\n')] |
# vim: set filetype=python ts=4 sw=4
# -*- coding: utf-8 -*-
"""This module retrieves AWS credentials after authenticating with Okta."""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
from future import standard_library
from tokendito import aws_helpers
from tokendito import helpers
from tokendito import okta_helpers
from tokendito import settings
standard_library.install_aliases()
def cli(args):
"""Tokendito retrieves AWS credentials after authenticating with Okta."""
# Set some required initial values
args = helpers.setup(args)
logging.debug("tokendito retrieves AWS credentials after authenticating with Okta.")
# Collect and organize user specific information
helpers.process_options(args)
# Authenticate okta and AWS also use assumerole to assign the role
logging.debug("Authenticate user with Okta and AWS.")
secret_session_token = okta_helpers.authenticate_user(
settings.okta_org, settings.okta_username, settings.okta_password
)
saml_response_string, saml_xml = aws_helpers.authenticate_to_roles(
secret_session_token, settings.okta_aws_app_url
)
assume_role_response, role_name = aws_helpers.select_assumeable_role(
saml_response_string, saml_xml
)
aws_helpers.ensure_keys_work(assume_role_response)
helpers.set_local_credentials(
assume_role_response, role_name, settings.aws_region, settings.aws_output
)
| [
"tokendito.helpers.set_local_credentials",
"tokendito.aws_helpers.authenticate_to_roles",
"tokendito.aws_helpers.ensure_keys_work",
"logging.debug",
"tokendito.aws_helpers.select_assumeable_role",
"tokendito.helpers.setup",
"future.standard_library.install_aliases",
"tokendito.okta_helpers.authenticate_user",
"tokendito.helpers.process_options"
] | [((404, 438), 'future.standard_library.install_aliases', 'standard_library.install_aliases', ([], {}), '()\n', (436, 438), False, 'from future import standard_library\n'), ((584, 603), 'tokendito.helpers.setup', 'helpers.setup', (['args'], {}), '(args)\n', (597, 603), False, 'from tokendito import helpers\n'), ((609, 698), 'logging.debug', 'logging.debug', (['"""tokendito retrieves AWS credentials after authenticating with Okta."""'], {}), "(\n 'tokendito retrieves AWS credentials after authenticating with Okta.')\n", (622, 698), False, 'import logging\n'), ((752, 781), 'tokendito.helpers.process_options', 'helpers.process_options', (['args'], {}), '(args)\n', (775, 781), False, 'from tokendito import helpers\n'), ((858, 911), 'logging.debug', 'logging.debug', (['"""Authenticate user with Okta and AWS."""'], {}), "('Authenticate user with Okta and AWS.')\n", (871, 911), False, 'import logging\n'), ((940, 1041), 'tokendito.okta_helpers.authenticate_user', 'okta_helpers.authenticate_user', (['settings.okta_org', 'settings.okta_username', 'settings.okta_password'], {}), '(settings.okta_org, settings.okta_username,\n settings.okta_password)\n', (970, 1041), False, 'from tokendito import okta_helpers\n'), ((1090, 1177), 'tokendito.aws_helpers.authenticate_to_roles', 'aws_helpers.authenticate_to_roles', (['secret_session_token', 'settings.okta_aws_app_url'], {}), '(secret_session_token, settings.\n okta_aws_app_url)\n', (1123, 1177), False, 'from tokendito import aws_helpers\n'), ((1226, 1292), 'tokendito.aws_helpers.select_assumeable_role', 'aws_helpers.select_assumeable_role', (['saml_response_string', 'saml_xml'], {}), '(saml_response_string, saml_xml)\n', (1260, 1292), False, 'from tokendito import aws_helpers\n'), ((1312, 1362), 'tokendito.aws_helpers.ensure_keys_work', 'aws_helpers.ensure_keys_work', (['assume_role_response'], {}), '(assume_role_response)\n', (1340, 1362), False, 'from tokendito import aws_helpers\n'), ((1368, 1477), 'tokendito.helpers.set_local_credentials', 'helpers.set_local_credentials', (['assume_role_response', 'role_name', 'settings.aws_region', 'settings.aws_output'], {}), '(assume_role_response, role_name, settings.\n aws_region, settings.aws_output)\n', (1397, 1477), False, 'from tokendito import helpers\n')] |
#!/usr/bin/python
# (c) 2020, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
'''
na_ontap_autosupport_invoke
'''
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'
}
DOCUMENTATION = '''
module: na_ontap_autosupport_invoke
author: NetApp Ansible Team (@carchi8py) <<EMAIL>>
short_description: NetApp ONTAP send AutoSupport message
extends_documentation_fragment:
- netapp.ontap.netapp.na_ontap
version_added: '20.4.0'
description:
- Send an AutoSupport message from a node
options:
name:
description:
- The name of the node to send the message to.
- Not specifying this option invokes AutoSupport on all nodes in the cluster.
type: str
autosupport_message:
description:
- Text sent in the subject line of the AutoSupport message.
type: str
aliases:
- message
version_added: 20.8.0
type:
description:
- Type of AutoSupport Collection to Issue.
choices: ['test', 'performance', 'all']
default: 'all'
type: str
uri:
description:
- send the AutoSupport message to the destination you specify instead of the configured destination.
type: str
'''
EXAMPLES = '''
- name: Send message
na_ontap_autosupport_invoke:
name: node1
message: invoked test autosupport rest
uri: http://1.2.3.4/delivery_uri
type: test
hostname: "{{ hostname }}"
username: "{{ username }}"
password: "{{ password }}"
'''
RETURN = '''
'''
import traceback
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible_collections.netapp.ontap.plugins.module_utils.netapp_module import NetAppModule
from ansible_collections.netapp.ontap.plugins.module_utils.netapp import OntapRestAPI
import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils
HAS_NETAPP_LIB = netapp_utils.has_netapp_lib()
class NetAppONTAPasupInvoke(object):
''' send ASUP message '''
def __init__(self):
self.use_rest = False
self.argument_spec = netapp_utils.na_ontap_host_argument_spec()
self.argument_spec.update(dict(
name=dict(required=False, type='str'),
autosupport_message=dict(required=False, type='str', aliases=["message"]),
type=dict(required=False, choices=[
'test', 'performance', 'all'], default='all'),
uri=dict(required=False, type='str')
))
self.module = AnsibleModule(
argument_spec=self.argument_spec,
supports_check_mode=True
)
self.na_helper = NetAppModule()
self.parameters = self.na_helper.set_parameters(self.module.params)
# REST API should be used for ONTAP 9.6 or higher.
self.rest_api = OntapRestAPI(self.module)
if self.rest_api.is_rest():
self.use_rest = True
else:
if not HAS_NETAPP_LIB:
self.module.fail_json(msg="the python NetApp-Lib module is required")
else:
self.server = netapp_utils.setup_na_ontap_zapi(module=self.module)
def get_nodes(self):
nodes = list()
node_obj = netapp_utils.zapi.NaElement('system-node-get-iter')
desired_attributes = netapp_utils.zapi.NaElement('desired-attributes')
node_details_info = netapp_utils.zapi.NaElement('node-details-info')
node_details_info.add_new_child('node', '')
desired_attributes.add_child_elem(node_details_info)
node_obj.add_child_elem(desired_attributes)
try:
result = self.server.invoke_successfully(node_obj, True)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg=to_native(error), exception=traceback.format_exc())
if result.get_child_by_name('num-records') and \
int(result.get_child_content('num-records')) > 0:
node_info = result.get_child_by_name('attributes-list')
if node_info is not None:
nodes = [node_details.get_child_content('node') for node_details in node_info.get_children()]
return nodes
def send_zapi_message(self, params, node_name):
params['node-name'] = node_name
send_message = netapp_utils.zapi.NaElement.create_node_with_children('autosupport-invoke', **params)
try:
self.server.invoke_successfully(send_message, enable_tunneling=False)
except netapp_utils.zapi.NaApiError as error:
self.module.fail_json(msg="Error on sending autosupport message to node %s: %s."
% (node_name, to_native(error)),
exception=traceback.format_exc())
def send_message(self):
params = dict()
if self.parameters.get('autosupport_message'):
params['message'] = self.parameters['autosupport_message']
if self.parameters.get('type'):
params['type'] = self.parameters['type']
if self.parameters.get('uri'):
params['uri'] = self.parameters['uri']
if self.use_rest:
if self.parameters.get('name'):
params['node.name'] = self.parameters['name']
node_name = params['node.name']
else:
node_name = '*'
api = 'support/autosupport/messages'
dummy, error = self.rest_api.post(api, params)
if error is not None:
self.module.fail_json(msg="Error on sending autosupport message to node %s: %s."
% (node_name, error))
else:
if self.parameters.get('name'):
node_names = [self.parameters['name']]
else:
# simulate REST behavior by sending to all nodes in the cluster
node_names = self.get_nodes()
for name in node_names:
self.send_zapi_message(params, name)
def ems_log_event(self):
results = netapp_utils.get_cserver(self.server)
cserver = netapp_utils.setup_na_ontap_zapi(module=self.module, vserver=results)
return netapp_utils.ems_log_event("na_ontap_autosupport_invoke", cserver)
def apply(self):
if not self.use_rest:
self.ems_log_event()
if self.module.check_mode:
pass
else:
self.send_message()
self.module.exit_json(changed=True)
def main():
message = NetAppONTAPasupInvoke()
message.apply()
if __name__ == '__main__':
main()
| [
"traceback.format_exc",
"ansible.module_utils.basic.AnsibleModule",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.zapi.NaElement.create_node_with_children",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp_module.NetAppModule",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.setup_na_ontap_zapi",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.ems_log_event",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.get_cserver",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.na_ontap_host_argument_spec",
"ansible_collections.netapp.ontap.plugins.module_utils.netapp.zapi.NaElement",
"ansible.module_utils._text.to_native"
] | [((2077, 2106), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.has_netapp_lib', 'netapp_utils.has_netapp_lib', ([], {}), '()\n', (2104, 2106), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((2259, 2301), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.na_ontap_host_argument_spec', 'netapp_utils.na_ontap_host_argument_spec', ([], {}), '()\n', (2299, 2301), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((2674, 2747), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'self.argument_spec', 'supports_check_mode': '(True)'}), '(argument_spec=self.argument_spec, supports_check_mode=True)\n', (2687, 2747), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((2807, 2821), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp_module.NetAppModule', 'NetAppModule', ([], {}), '()\n', (2819, 2821), False, 'from ansible_collections.netapp.ontap.plugins.module_utils.netapp_module import NetAppModule\n'), ((2982, 3007), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.OntapRestAPI', 'OntapRestAPI', (['self.module'], {}), '(self.module)\n', (2994, 3007), False, 'from ansible_collections.netapp.ontap.plugins.module_utils.netapp import OntapRestAPI\n'), ((3381, 3432), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.zapi.NaElement', 'netapp_utils.zapi.NaElement', (['"""system-node-get-iter"""'], {}), "('system-node-get-iter')\n", (3408, 3432), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((3462, 3511), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.zapi.NaElement', 'netapp_utils.zapi.NaElement', (['"""desired-attributes"""'], {}), "('desired-attributes')\n", (3489, 3511), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((3540, 3588), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.zapi.NaElement', 'netapp_utils.zapi.NaElement', (['"""node-details-info"""'], {}), "('node-details-info')\n", (3567, 3588), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((4456, 4545), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.zapi.NaElement.create_node_with_children', 'netapp_utils.zapi.NaElement.create_node_with_children', (['"""autosupport-invoke"""'], {}), "('autosupport-invoke',\n **params)\n", (4509, 4545), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((6205, 6242), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.get_cserver', 'netapp_utils.get_cserver', (['self.server'], {}), '(self.server)\n', (6229, 6242), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((6261, 6330), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.setup_na_ontap_zapi', 'netapp_utils.setup_na_ontap_zapi', ([], {'module': 'self.module', 'vserver': 'results'}), '(module=self.module, vserver=results)\n', (6293, 6330), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((6346, 6412), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.ems_log_event', 'netapp_utils.ems_log_event', (['"""na_ontap_autosupport_invoke"""', 'cserver'], {}), "('na_ontap_autosupport_invoke', cserver)\n", (6372, 6412), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((3260, 3312), 'ansible_collections.netapp.ontap.plugins.module_utils.netapp.setup_na_ontap_zapi', 'netapp_utils.setup_na_ontap_zapi', ([], {'module': 'self.module'}), '(module=self.module)\n', (3292, 3312), True, 'import ansible_collections.netapp.ontap.plugins.module_utils.netapp as netapp_utils\n'), ((3928, 3944), 'ansible.module_utils._text.to_native', 'to_native', (['error'], {}), '(error)\n', (3937, 3944), False, 'from ansible.module_utils._text import to_native\n'), ((3956, 3978), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3976, 3978), False, 'import traceback\n'), ((4895, 4917), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (4915, 4917), False, 'import traceback\n'), ((4832, 4848), 'ansible.module_utils._text.to_native', 'to_native', (['error'], {}), '(error)\n', (4841, 4848), False, 'from ansible.module_utils._text import to_native\n')]
from freight.api.serializer import serialize
from freight.testutils import TestCase
class UserSerializerTest(TestCase):
def test_simple(self):
user = self.create_user()
result = serialize(user)
assert result["id"] == str(user.id)
assert result["name"] == user.name
| [
"freight.api.serializer.serialize"
] | [((201, 216), 'freight.api.serializer.serialize', 'serialize', (['user'], {}), '(user)\n', (210, 216), False, 'from freight.api.serializer import serialize\n')] |
import sys
import numpy as np
from matplotlib import pyplot as pl
from rw import WriteGTiff
fn = '../pozo-steep-vegetated-pcl.npy'
pts = np.load(fn)
x, y, z, c = pts[:, 0], pts[:, 1], pts[:, 2], pts[:, 5]
ix = (0.2 * (x - x.min())).astype('int')
iy = (0.2 * (y - y.min())).astype('int')
shape = (100, 100)
xb = np.arange(shape[1]+1)
yb = np.arange(shape[0]+1)
fg, ax = pl.subplots(ncols = 2, nrows = 2,
figsize = (10.24, 10.24),
sharex = True, sharey = True)
uc = (2, 5)
for j in range(len(uc)):
print('Class %i' % uc[j])
b = c == uc[j]
cx, cy, cz = ix[b], iy[b], z[b]
mean = np.zeros(shape)
stdr = np.zeros(shape)
for i in range(shape[0]):
print('% 3d%%' % i)
for k in range(shape[1]):
b = (cy == i) * (cx == k)
mean[i, k] = cz[b].mean()
stdr[i, k] = cz[b].std()
fname = 'pozo_5m_dem_mean_cl%i.tif' % uc[j]
WriteGTiff(fname, mean, x.min(), y.min()+500, step = 5)
np.save('pozo_5m_dem_mean_cl%i.npy' % uc[j], mean)
np.save('pozo_5m_dem_stdr_cl%i.npy' % uc[j], stdr)
ax[0, j].set_title('Class %i' % uc[j])
im = ax[0, j].pcolormesh(xb, yb,
np.ma.masked_invalid(mean),
cmap = pl.cm.viridis_r)
cb = fg.colorbar(im, ax = ax[0, j])
cb.set_label('Mean elevation [m]')
im = ax[1, j].pcolormesh(xb, yb,
np.ma.masked_invalid(stdr),
cmap = pl.cm.magma_r)
cb = fg.colorbar(im, ax = ax[1, j])
cb.set_label('Elevation STD')
ax[0, j].set_aspect('equal')
ax[1, j].set_aspect('equal')
pl.savefig('%s.png' % sys.argv[0][:-3])
| [
"matplotlib.pyplot.savefig",
"numpy.zeros",
"numpy.save",
"numpy.ma.masked_invalid",
"numpy.load",
"matplotlib.pyplot.subplots",
"numpy.arange"
] | [((138, 149), 'numpy.load', 'np.load', (['fn'], {}), '(fn)\n', (145, 149), True, 'import numpy as np\n'), ((313, 336), 'numpy.arange', 'np.arange', (['(shape[1] + 1)'], {}), '(shape[1] + 1)\n', (322, 336), True, 'import numpy as np\n'), ((340, 363), 'numpy.arange', 'np.arange', (['(shape[0] + 1)'], {}), '(shape[0] + 1)\n', (349, 363), True, 'import numpy as np\n'), ((371, 450), 'matplotlib.pyplot.subplots', 'pl.subplots', ([], {'ncols': '(2)', 'nrows': '(2)', 'figsize': '(10.24, 10.24)', 'sharex': '(True)', 'sharey': '(True)'}), '(ncols=2, nrows=2, figsize=(10.24, 10.24), sharex=True, sharey=True)\n', (382, 450), True, 'from matplotlib import pyplot as pl\n'), ((1599, 1638), 'matplotlib.pyplot.savefig', 'pl.savefig', (["('%s.png' % sys.argv[0][:-3])"], {}), "('%s.png' % sys.argv[0][:-3])\n", (1609, 1638), True, 'from matplotlib import pyplot as pl\n'), ((620, 635), 'numpy.zeros', 'np.zeros', (['shape'], {}), '(shape)\n', (628, 635), True, 'import numpy as np\n'), ((647, 662), 'numpy.zeros', 'np.zeros', (['shape'], {}), '(shape)\n', (655, 662), True, 'import numpy as np\n'), ((981, 1031), 'numpy.save', 'np.save', (["('pozo_5m_dem_mean_cl%i.npy' % uc[j])", 'mean'], {}), "('pozo_5m_dem_mean_cl%i.npy' % uc[j], mean)\n", (988, 1031), True, 'import numpy as np\n'), ((1036, 1086), 'numpy.save', 'np.save', (["('pozo_5m_dem_stdr_cl%i.npy' % uc[j])", 'stdr'], {}), "('pozo_5m_dem_stdr_cl%i.npy' % uc[j], stdr)\n", (1043, 1086), True, 'import numpy as np\n'), ((1185, 1211), 'numpy.ma.masked_invalid', 'np.ma.masked_invalid', (['mean'], {}), '(mean)\n', (1205, 1211), True, 'import numpy as np\n'), ((1390, 1416), 'numpy.ma.masked_invalid', 'np.ma.masked_invalid', (['stdr'], {}), '(stdr)\n', (1410, 1416), True, 'import numpy as np\n')] |
import os
import pytest
import torch
from hivemind import RemoteExpert
from hivemind.moe.server import background_server
CUSTOM_EXPERTS_PATH = os.path.join(os.path.dirname(__file__), "test_utils", "custom_networks.py")
@pytest.mark.forked
def test_custom_expert(hid_dim=16):
with background_server(
expert_cls="perceptron",
num_experts=2,
device="cpu",
hidden_dim=hid_dim,
num_handlers=2,
no_dht=True,
custom_module_path=CUSTOM_EXPERTS_PATH,
) as (server_endpoint, _):
expert0 = RemoteExpert("expert.0", server_endpoint)
expert1 = RemoteExpert("expert.1", server_endpoint)
for batch_size in (1, 4):
batch = torch.randn(batch_size, hid_dim)
output0 = expert0(batch)
output1 = expert1(batch)
loss = output0.sum()
loss.backward()
loss = output1.sum()
loss.backward()
@pytest.mark.forked
def test_multihead_expert(hid_dim=16):
with background_server(
expert_cls="multihead",
num_experts=2,
device="cpu",
hidden_dim=hid_dim,
num_handlers=2,
no_dht=True,
custom_module_path=CUSTOM_EXPERTS_PATH,
) as (server_endpoint, _):
expert0 = RemoteExpert("expert.0", server_endpoint)
expert1 = RemoteExpert("expert.1", server_endpoint)
for batch_size in (1, 4):
batch = (
torch.randn(batch_size, hid_dim),
torch.randn(batch_size, 2 * hid_dim),
torch.randn(batch_size, 3 * hid_dim),
)
output0 = expert0(*batch)
output1 = expert1(*batch)
loss = output0.sum()
loss.backward()
loss = output1.sum()
loss.backward()
| [
"os.path.dirname",
"hivemind.moe.server.background_server",
"hivemind.RemoteExpert",
"torch.randn"
] | [((159, 184), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (174, 184), False, 'import os\n'), ((289, 458), 'hivemind.moe.server.background_server', 'background_server', ([], {'expert_cls': '"""perceptron"""', 'num_experts': '(2)', 'device': '"""cpu"""', 'hidden_dim': 'hid_dim', 'num_handlers': '(2)', 'no_dht': '(True)', 'custom_module_path': 'CUSTOM_EXPERTS_PATH'}), "(expert_cls='perceptron', num_experts=2, device='cpu',\n hidden_dim=hid_dim, num_handlers=2, no_dht=True, custom_module_path=\n CUSTOM_EXPERTS_PATH)\n", (306, 458), False, 'from hivemind.moe.server import background_server\n'), ((556, 597), 'hivemind.RemoteExpert', 'RemoteExpert', (['"""expert.0"""', 'server_endpoint'], {}), "('expert.0', server_endpoint)\n", (568, 597), False, 'from hivemind import RemoteExpert\n'), ((616, 657), 'hivemind.RemoteExpert', 'RemoteExpert', (['"""expert.1"""', 'server_endpoint'], {}), "('expert.1', server_endpoint)\n", (628, 657), False, 'from hivemind import RemoteExpert\n'), ((1014, 1182), 'hivemind.moe.server.background_server', 'background_server', ([], {'expert_cls': '"""multihead"""', 'num_experts': '(2)', 'device': '"""cpu"""', 'hidden_dim': 'hid_dim', 'num_handlers': '(2)', 'no_dht': '(True)', 'custom_module_path': 'CUSTOM_EXPERTS_PATH'}), "(expert_cls='multihead', num_experts=2, device='cpu',\n hidden_dim=hid_dim, num_handlers=2, no_dht=True, custom_module_path=\n CUSTOM_EXPERTS_PATH)\n", (1031, 1182), False, 'from hivemind.moe.server import background_server\n'), ((1280, 1321), 'hivemind.RemoteExpert', 'RemoteExpert', (['"""expert.0"""', 'server_endpoint'], {}), "('expert.0', server_endpoint)\n", (1292, 1321), False, 'from hivemind import RemoteExpert\n'), ((1340, 1381), 'hivemind.RemoteExpert', 'RemoteExpert', (['"""expert.1"""', 'server_endpoint'], {}), "('expert.1', server_endpoint)\n", (1352, 1381), False, 'from hivemind import RemoteExpert\n'), ((713, 745), 'torch.randn', 'torch.randn', (['batch_size', 'hid_dim'], {}), '(batch_size, hid_dim)\n', (724, 745), False, 'import torch\n'), ((1455, 1487), 'torch.randn', 'torch.randn', (['batch_size', 'hid_dim'], {}), '(batch_size, hid_dim)\n', (1466, 1487), False, 'import torch\n'), ((1505, 1541), 'torch.randn', 'torch.randn', (['batch_size', '(2 * hid_dim)'], {}), '(batch_size, 2 * hid_dim)\n', (1516, 1541), False, 'import torch\n'), ((1559, 1595), 'torch.randn', 'torch.randn', (['batch_size', '(3 * hid_dim)'], {}), '(batch_size, 3 * hid_dim)\n', (1570, 1595), False, 'import torch\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = "Ringo"
'''
Price momentum strategy (difficulty: beginner)
Reference: https://www.shinnytech.com/blog/momentum-strategy/
Note: this example strategy is for demonstration only; adapt it to your own strategy and experience before trading live
'''
from tqsdk import TqAccount, TqApi, TargetPosTask
# Set the target contract and fetch N K-lines to compute the price momentum
SYMBOL = "SHFE.au1912"
N = 15
api = TqApi()
klines = api.get_kline_serial(SYMBOL, 60*60*24, N)
quote = api.get_quote(SYMBOL)
target_pos = TargetPosTask(api, SYMBOL)
position = api.get_position(SYMBOL)
# Price momentum function AR: compute the momentum value ar from the previous N-1 daily K-lines
def AR(kline1):
spread_ho = sum(kline1.high[:-1] - kline1.open[:-1])
spread_oc = sum(kline1.open[:-1] - kline1.low[:-1])
    # if spread_oc is 0, fall back to the minimum price tick
if spread_oc == 0:
spread_oc = quote.price_tick
ar = (spread_ho/spread_oc)*100
return ar
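# Worked example (hypothetical numbers, not taken from real market data): if, over the previous
# N-1 daily bars, sum(high - open) = 60.0 and sum(open - low) = 50.0, then
# ar = (60.0 / 50.0) * 100 = 120, which falls in the 110-150 band used for the long entry below.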
ar = AR(klines)
print("策略开始启动")
while True:
api.wait_update()
    # when a new K-line is generated, recompute the momentum value ar
    if api.is_changing(klines.iloc[-1], "datetime"):
        ar = AR(klines)
        print("Price momentum:", ar)
    # re-evaluate whenever the last price changes
    if api.is_changing(quote, "last_price"):
        # entry strategy
if position.pos_long == 0 and position.pos_short == 0:
            # if ar is above 110 and below 150, open a long position
            if 110 < ar < 150:
                print("Momentum above 110 and below 150, going long")
                target_pos.set_target_volume(100)
            # if ar is above 50 and below 90, open a short position
            elif 50 < ar < 90:
                print("Momentum above 50 and below 90, going short")
                target_pos.set_target_volume(-100)
        # stop-loss: close a long position if ar falls below 90, close a short position if ar rises above 110
        elif (position.pos_long > 0 and ar < 90) or (position.pos_short > 0 and ar > 110):
            print("Stop loss triggered, closing position")
            target_pos.set_target_volume(0)
| [
"tqsdk.TargetPosTask",
"tqsdk.TqApi"
] | [((295, 302), 'tqsdk.TqApi', 'TqApi', ([], {}), '()\n', (300, 302), False, 'from tqsdk import TqAccount, TqApi, TargetPosTask\n'), ((397, 423), 'tqsdk.TargetPosTask', 'TargetPosTask', (['api', 'SYMBOL'], {}), '(api, SYMBOL)\n', (410, 423), False, 'from tqsdk import TqAccount, TqApi, TargetPosTask\n')] |
from Library.CreateATree import CreateATree
tree = CreateATree.BinarySearchTree()
nodesList = list((4, 5, 1, 3, 2))
for i in range(0, len(nodesList)):
tree.insert(nodesList[i])
#tree.printInorder()
tree.printPreorder()
#tree.printPostorder()
| [
"Library.CreateATree.CreateATree.BinarySearchTree"
] | [((52, 82), 'Library.CreateATree.CreateATree.BinarySearchTree', 'CreateATree.BinarySearchTree', ([], {}), '()\n', (80, 82), False, 'from Library.CreateATree import CreateATree\n')] |
'''
@author <NAME>
@since 10.8.2019
'''
import sys
from jnius import autoclass
from Conf.Conf import *
class ServiceBase():
def __init__(self):
PythonServiceClass = autoclass('org.kivy.android.PythonService')
self.Context = autoclass('android.content.Context')
self.Service = PythonServiceClass.mService
        #set autorestart to be immune to task swiping on Android 9
self.Service.setAutoRestartService(True)
self.confDict = {k: v for k,v in globals().items() if k.isupper() and k.startswith("SMS")}
        for k, v in self.confDict.items():
setattr(self, k, v)
def killGeneric(self, error):
print(repr(error))
        self.Service.setAutoRestartService(False)
print("Autorestart of the service disabled.")
print("Attempting to kill service permanently.")
        self.Service.stop()
        #the service takes time to stop, so execution continues into the next block of code
        #sys.exit() prevents the subsequent code from executing
        #both calls are necessary to avoid "Scheduling restart of crashed service process"
        #in case we called only sys.exit()
        #this applies even if we have setAutoRestartService(False)
print("Exiting python script")
sys.exit() | [
"jnius.autoclass",
"sys.exit"
] | [((182, 225), 'jnius.autoclass', 'autoclass', (['"""org.kivy.android.PythonService"""'], {}), "('org.kivy.android.PythonService')\n", (191, 225), False, 'from jnius import autoclass\n'), ((249, 285), 'jnius.autoclass', 'autoclass', (['"""android.content.Context"""'], {}), "('android.content.Context')\n", (258, 285), False, 'from jnius import autoclass\n'), ((1271, 1281), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1279, 1281), False, 'import sys\n')] |
# api/queue/__init__.py
import os
from flask import Flask
from flask_bootstrap import Bootstrap
# instantiate the extensions
bootstrap = Bootstrap()
def create_app(script_info=None):
# instantiate the app
app = Flask(
__name__,
template_folder="../client/templates",
static_folder="../client/static",
)
# set config
app_settings = os.getenv("APP_SETTINGS")
app.config.from_object(app_settings)
# set up extensions
bootstrap.init_app(app)
# register blueprints
from api.queue.push.views import main_blueprint
app.register_blueprint(main_blueprint)
# shell context for flask cli
app.shell_context_processor({"app": app})
return app
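# Example usage (illustrative; the module path is assumed from the header comment above):
#   from api.queue import create_app
#   app = create_app()
#   app.run()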
| [
"flask.Flask",
"os.getenv",
"flask_bootstrap.Bootstrap"
] | [((141, 152), 'flask_bootstrap.Bootstrap', 'Bootstrap', ([], {}), '()\n', (150, 152), False, 'from flask_bootstrap import Bootstrap\n'), ((226, 319), 'flask.Flask', 'Flask', (['__name__'], {'template_folder': '"""../client/templates"""', 'static_folder': '"""../client/static"""'}), "(__name__, template_folder='../client/templates', static_folder=\n '../client/static')\n", (231, 319), False, 'from flask import Flask\n'), ((383, 408), 'os.getenv', 'os.getenv', (['"""APP_SETTINGS"""'], {}), "('APP_SETTINGS')\n", (392, 408), False, 'import os\n')] |
import sys
import os
import psycopg2
import base64
from cryptography.hazmat.primitives import serialization, hashes
from cryptography.hazmat.primitives.asymmetric import padding, rsa
from cryptography.hazmat.backends import default_backend
import time
if len(sys.argv) < 2:
print("Please enter either create or remove as a argv[1]")
sys.exit(0)
with psycopg2.connect("dbname='auth_db' user='auth_db' host='authdb' [redacted-2]") as conn:
with conn.cursor() as cursor:
if sys.argv[1] == "generate":
#Load the key or generate a new one:
cursor.execute("CREATE TABLE IF NOT EXISTS key (key varchar(4096),time bigint UNIQUE PRIMARY KEY)")
privkey = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
pem = privkey.private_bytes(encoding=serialization.Encoding.PEM,format=serialization.PrivateFormat.TraditionalOpenSSL,encryption_algorithm=serialization.NoEncryption())
cursor.execute("INSERT INTO key (key,time) VALUES('"+str(pem.decode("utf-8"))+"',"+str(int(time.time()))+")")
conn.commit()
print("New key generated!")
elif sys.argv[1] == "generate_if_needed":
#Load the key or generate a new one:
cursor.execute("CREATE TABLE IF NOT EXISTS key (key varchar(4096),time bigint UNIQUE PRIMARY KEY)")
cursor.execute("SELECT * FROM key")
res = cursor.fetchall()
if len(res) == 0:
privkey = rsa.generate_private_key(public_exponent=65537, key_size=2048, backend=default_backend())
pem = privkey.private_bytes(encoding=serialization.Encoding.PEM,format=serialization.PrivateFormat.TraditionalOpenSSL,encryption_algorithm=serialization.NoEncryption())
cursor.execute("INSERT INTO key (key,time) VALUES('"+str(pem.decode("utf-8"))+"',"+str(int(time.time()))+")")
conn.commit()
print("New key generated, as database was empty!")
else:
print("Database has key ready!")
elif sys.argv[1] == "drop":
cursor.execute("DROP TABLE key")
conn.commit()
print("Dropped old keys")
else:
print("Invalid option! Try 'drop', 'generate' or 'generate_if_needed'...") | [
"psycopg2.connect",
"cryptography.hazmat.primitives.serialization.NoEncryption",
"sys.exit",
"time.time",
"cryptography.hazmat.backends.default_backend"
] | [((343, 354), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (351, 354), False, 'import sys\n'), ((361, 439), 'psycopg2.connect', 'psycopg2.connect', (['"""dbname=\'auth_db\' user=\'auth_db\' host=\'authdb\' [redacted-2]"""'], {}), '("dbname=\'auth_db\' user=\'auth_db\' host=\'authdb\' [redacted-2]")\n', (377, 439), False, 'import psycopg2\n'), ((781, 798), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (796, 798), False, 'from cryptography.hazmat.backends import default_backend\n'), ((951, 979), 'cryptography.hazmat.primitives.serialization.NoEncryption', 'serialization.NoEncryption', ([], {}), '()\n', (977, 979), False, 'from cryptography.hazmat.primitives import serialization, hashes\n'), ((1622, 1639), 'cryptography.hazmat.backends.default_backend', 'default_backend', ([], {}), '()\n', (1637, 1639), False, 'from cryptography.hazmat.backends import default_backend\n'), ((1796, 1824), 'cryptography.hazmat.primitives.serialization.NoEncryption', 'serialization.NoEncryption', ([], {}), '()\n', (1822, 1824), False, 'from cryptography.hazmat.primitives import serialization, hashes\n'), ((1084, 1095), 'time.time', 'time.time', ([], {}), '()\n', (1093, 1095), False, 'import time\n'), ((1933, 1944), 'time.time', 'time.time', ([], {}), '()\n', (1942, 1944), False, 'import time\n')] |
import panel as pn
import param
from awesome_panel_extensions.frameworks.fast import FastTemplate, FastTextInput
WIDGETS = {
"some_text": {"type": FastTextInput, "readonly": True, "sizing_mode": "fixed", "width": 400}
}
class ParameterizedApp(param.Parameterized):
some_text = param.String(default="This is some text")
view = param.Parameter()
def __init__(self, **params):
super().__init__(**params)
self.view = pn.Param(self, parameters=["some_text"], widgets=WIDGETS)
parameterized_app = ParameterizedApp()
parameterized_template = FastTemplate(main=[parameterized_app.view])
parameterized_template.servable()
| [
"param.Parameter",
"panel.Param",
"awesome_panel_extensions.frameworks.fast.FastTemplate",
"param.String"
] | [((575, 618), 'awesome_panel_extensions.frameworks.fast.FastTemplate', 'FastTemplate', ([], {'main': '[parameterized_app.view]'}), '(main=[parameterized_app.view])\n', (587, 618), False, 'from awesome_panel_extensions.frameworks.fast import FastTemplate, FastTextInput\n'), ((289, 330), 'param.String', 'param.String', ([], {'default': '"""This is some text"""'}), "(default='This is some text')\n", (301, 330), False, 'import param\n'), ((342, 359), 'param.Parameter', 'param.Parameter', ([], {}), '()\n', (357, 359), False, 'import param\n'), ((451, 508), 'panel.Param', 'pn.Param', (['self'], {'parameters': "['some_text']", 'widgets': 'WIDGETS'}), "(self, parameters=['some_text'], widgets=WIDGETS)\n", (459, 508), True, 'import panel as pn\n')] |
# Importing section
import json
import requests
import argparse
import hashlib
import time
from http import HTTPStatus
# Main
if __name__ == "__main__":
arg_parser = argparse.ArgumentParser()
args = arg_parser.parse_args()
set_cmd = 'updateSla'
params = {
'idx': 'sla04',
'start': 3000,
'end': 3900
}
cmd_url = 'http://localhost:9119/%s' % set_cmd
headers = {'Content-Type': 'application/json', 'Accept': 'application/json'}
print('COMMAND: %s' % cmd_url)
print('PARAMS: %s' % params)
r = requests.post(cmd_url, headers=headers, json=params)
data = json.loads(r.text)
print('RESPONSE: %s\n' % data)
# Wait some seconds to be sure that the transaction has been handled
time.sleep(5)
check_tx_url = 'http://localhost:9119/checkTx/%s' % data['tx_hash']
print('CHECK TX: %s' % check_tx_url)
r = requests.get(check_tx_url)
data = json.loads(r.text)
print('RESPONSE: %s\n' % data)
| [
"json.loads",
"requests.post",
"argparse.ArgumentParser",
"requests.get",
"time.sleep"
] | [((173, 198), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (196, 198), False, 'import argparse\n'), ((594, 646), 'requests.post', 'requests.post', (['cmd_url'], {'headers': 'headers', 'json': 'params'}), '(cmd_url, headers=headers, json=params)\n', (607, 646), False, 'import requests\n'), ((658, 676), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (668, 676), False, 'import json\n'), ((790, 803), 'time.sleep', 'time.sleep', (['(5)'], {}), '(5)\n', (800, 803), False, 'import time\n'), ((926, 952), 'requests.get', 'requests.get', (['check_tx_url'], {}), '(check_tx_url)\n', (938, 952), False, 'import requests\n'), ((965, 983), 'json.loads', 'json.loads', (['r.text'], {}), '(r.text)\n', (975, 983), False, 'import json\n')] |
"""
Test harness for smp.py
"""
import sys
import os
sys.path.append('/Users/ptendick/open-source-workspace/cannr Image/source/cannr/lib')
os.environ['PATH'] = '/Library/Frameworks/Python.framework/Versions/3.7/bin:' + os.environ['PATH']
import cannr
import smp
# Test openProcess by opening a Flask process
def test_openProcess1():
return smp.openProcess(
{"processInfo": "processInfo"},
['python', '/Users/ptendick/open-source-workspace/cannr Image/test/flaskSample.py', '5000', '1'])
# Test openProcess by opening a Plumber process
def test_openProcess2():
return smp.openProcess(
{"processInfo": "processInfo"},
['Rscript', '--vanilla', '/Users/ptendick/open-source-workspace/cannr Image/source/cannr/runApp.R',
'/Users/ptendick/open-source-workspace/cannr Image/test/hello.R', '5001', '2'])
# Test countPorts
def test_countPorts():
projectFilePath = '/Users/ptendick/open-source-workspace/MyRTAM Service/working/project1/project.json'
project = cannr.readJSONFile(projectFilePath)
return smp.countPorts(project)
| [
"cannr.readJSONFile",
"smp.openProcess",
"sys.path.append",
"smp.countPorts"
] | [((53, 143), 'sys.path.append', 'sys.path.append', (['"""/Users/ptendick/open-source-workspace/cannr Image/source/cannr/lib"""'], {}), "(\n '/Users/ptendick/open-source-workspace/cannr Image/source/cannr/lib')\n", (68, 143), False, 'import sys\n'), ((351, 504), 'smp.openProcess', 'smp.openProcess', (["{'processInfo': 'processInfo'}", "['python',\n '/Users/ptendick/open-source-workspace/cannr Image/test/flaskSample.py',\n '5000', '1']"], {}), "({'processInfo': 'processInfo'}, ['python',\n '/Users/ptendick/open-source-workspace/cannr Image/test/flaskSample.py',\n '5000', '1'])\n", (366, 504), False, 'import smp\n'), ((604, 843), 'smp.openProcess', 'smp.openProcess', (["{'processInfo': 'processInfo'}", "['Rscript', '--vanilla',\n '/Users/ptendick/open-source-workspace/cannr Image/source/cannr/runApp.R',\n '/Users/ptendick/open-source-workspace/cannr Image/test/hello.R',\n '5001', '2']"], {}), "({'processInfo': 'processInfo'}, ['Rscript', '--vanilla',\n '/Users/ptendick/open-source-workspace/cannr Image/source/cannr/runApp.R',\n '/Users/ptendick/open-source-workspace/cannr Image/test/hello.R',\n '5001', '2'])\n", (619, 843), False, 'import smp\n'), ((1030, 1065), 'cannr.readJSONFile', 'cannr.readJSONFile', (['projectFilePath'], {}), '(projectFilePath)\n', (1048, 1065), False, 'import cannr\n'), ((1082, 1105), 'smp.countPorts', 'smp.countPorts', (['project'], {}), '(project)\n', (1096, 1105), False, 'import smp\n')] |
# Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Version-independent api tests"""
import httplib2
from oslo_serialization import jsonutils
from six.moves import http_client
from glance.tests import functional
# TODO(rosmaita): all the EXPERIMENTAL stuff in this file can be ripped out
# when v2.6 becomes CURRENT in Queens
def _generate_v1_versions(url):
v1_versions = {'versions': [
{
'id': 'v1.1',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
{
'id': 'v1.0',
'status': 'DEPRECATED',
'links': [{'rel': 'self', 'href': url % '1'}],
},
]}
return v1_versions
def _generate_v2_versions(url):
version_list = []
version_list.extend([
{
'id': 'v2.6',
'status': 'CURRENT',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.5',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.4',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.3',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.2',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.1',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
},
{
'id': 'v2.0',
'status': 'SUPPORTED',
'links': [{'rel': 'self', 'href': url % '2'}],
}
])
v2_versions = {'versions': version_list}
return v2_versions
def _generate_all_versions(url):
v1 = _generate_v1_versions(url)
v2 = _generate_v2_versions(url)
all_versions = {'versions': v2['versions'] + v1['versions']}
return all_versions
class TestApiVersions(functional.FunctionalTest):
def test_version_configurations(self):
"""Test that versioning is handled properly through all channels"""
# v1 and v2 api enabled
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = _generate_all_versions(url)
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(versions, content)
def test_v2_api_configuration(self):
self.api_server.enable_v1_api = False
self.api_server.enable_v2_api = True
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = _generate_v2_versions(url)
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(versions, content)
def test_v1_api_configuration(self):
self.api_server.enable_v1_api = True
self.api_server.enable_v2_api = False
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
versions = _generate_v1_versions(url)
# Verify version choices returned.
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(versions, content)
class TestApiPaths(functional.FunctionalTest):
def setUp(self):
super(TestApiPaths, self).setUp()
self.start_servers(**self.__dict__.copy())
url = 'http://127.0.0.1:%d/v%%s/' % self.api_port
self.versions = _generate_all_versions(url)
images = {'images': []}
self.images_json = jsonutils.dumps(images)
def test_get_root_path(self):
"""Assert GET / with `no Accept:` header.
Verify version choices returned.
Bug lp:803260 no Accept header causes a 500 in glance-api
"""
path = 'http://%s:%d' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_images_path(self):
"""Assert GET /images with `no Accept:` header.
Verify version choices returned.
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_v1_images_path(self):
"""GET /v1/images with `no Accept:` header.
Verify empty images list returned.
"""
path = 'http://%s:%d/v1/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.OK, response.status)
def test_get_root_path_with_unknown_header(self):
"""Assert GET / with Accept: unknown header
Verify version choices returned. Verify message in API log about
unknown accept header.
"""
path = 'http://%s:%d/' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'unknown'}
response, content_json = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_root_path_with_openstack_header(self):
"""Assert GET / with an Accept: application/vnd.openstack.images-v1
Verify empty image list returned
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v1'}
response, content = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.OK, response.status)
self.assertEqual(self.images_json, content.decode())
def test_get_images_path_with_openstack_header(self):
"""Assert GET /images with a
`Accept: application/vnd.openstack.compute-v1` header.
Verify version choices returned. Verify message in API log
about unknown accept header.
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.compute-v1'}
response, content_json = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_v10_images_path(self):
"""Assert GET /v1.0/images with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
def test_get_v1a_images_path(self):
"""Assert GET /v1.a/images with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/v1.a/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
def test_get_va1_images_path(self):
"""Assert GET /va.1/images with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/va.1/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_versions_path(self):
"""Assert GET /versions with no Accept: header
Verify version choices returned
"""
path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.OK, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_versions_path_with_openstack_header(self):
"""Assert GET /versions with the
`Accept: application/vnd.openstack.images-v1` header.
Verify version choices returned.
"""
path = 'http://%s:%d/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v1'}
response, content_json = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.OK, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_v1_versions_path(self):
"""Assert GET /v1/versions with `no Accept:` header
Verify 404 returned
"""
path = 'http://%s:%d/v1/versions' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content = http.request(path, 'GET')
self.assertEqual(http_client.NOT_FOUND, response.status)
def test_get_versions_choices(self):
"""Verify version choices returned"""
path = 'http://%s:%d/v10' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_images_path_with_openstack_v2_header(self):
"""Assert GET /images with a
`Accept: application/vnd.openstack.compute-v2` header.
Verify version choices returned. Verify message in API log
about unknown version in accept header.
"""
path = 'http://%s:%d/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
headers = {'Accept': 'application/vnd.openstack.images-v10'}
response, content_json = http.request(path, 'GET', headers=headers)
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
def test_get_v12_images_path(self):
"""Assert GET /v1.2/images with `no Accept:` header
Verify version choices returned
"""
path = 'http://%s:%d/v1.2/images' % ('127.0.0.1', self.api_port)
http = httplib2.Http()
response, content_json = http.request(path, 'GET')
self.assertEqual(http_client.MULTIPLE_CHOICES, response.status)
content = jsonutils.loads(content_json.decode())
self.assertEqual(self.versions, content)
| [
"httplib2.Http",
"oslo_serialization.jsonutils.dumps"
] | [((3116, 3131), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (3129, 3131), False, 'import httplib2\n'), ((3773, 3788), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (3786, 3788), False, 'import httplib2\n'), ((4430, 4445), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (4443, 4445), False, 'import httplib2\n'), ((5011, 5034), 'oslo_serialization.jsonutils.dumps', 'jsonutils.dumps', (['images'], {}), '(images)\n', (5026, 5034), False, 'from oslo_serialization import jsonutils\n'), ((5316, 5331), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (5329, 5331), False, 'import httplib2\n'), ((5798, 5813), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (5811, 5813), False, 'import httplib2\n'), ((6284, 6299), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (6297, 6299), False, 'import httplib2\n'), ((6712, 6727), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (6725, 6727), False, 'import httplib2\n'), ((7291, 7306), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (7304, 7306), False, 'import httplib2\n'), ((7923, 7938), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (7936, 7938), False, 'import httplib2\n'), ((8501, 8516), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (8514, 8516), False, 'import httplib2\n'), ((8882, 8897), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (8895, 8897), False, 'import httplib2\n'), ((9263, 9278), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (9276, 9278), False, 'import httplib2\n'), ((9747, 9762), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (9760, 9762), False, 'import httplib2\n'), ((10288, 10303), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (10301, 10303), False, 'import httplib2\n'), ((10842, 10857), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (10855, 10857), False, 'import httplib2\n'), ((11145, 11160), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (11158, 11160), False, 'import httplib2\n'), ((11770, 11785), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (11783, 11785), False, 'import httplib2\n'), ((12350, 12365), 'httplib2.Http', 'httplib2.Http', ([], {}), '()\n', (12363, 12365), False, 'import httplib2\n')] |
# Copyright 2016 Quora, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Module with assertion helpers.
The advantages of using a method like
assert_eq(expected, actual)
instead of
assert expected == actual
include:
1 - On failures, assert_eq prints an informative message of the actual
values compared (e.g. AssertionError: 1 != 2) for free, which makes it
faster and easier to iterate on tests.
2 - In the context of refactors, basic asserts incorrectly shift the burden of
adding printouts and writing good test code to people refactoring code
rather than the person who initially wrote the code.
"""
__all__ = [
"assert_is",
"assert_is_not",
"assert_is_instance",
"assert_eq",
"assert_dict_eq",
"assert_ne",
"assert_gt",
"assert_ge",
"assert_lt",
"assert_le",
"assert_in",
"assert_not_in",
"assert_in_with_tolerance",
"assert_unordered_list_eq",
"assert_raises",
"AssertRaises",
# Strings
"assert_is_substring",
"assert_is_not_substring",
"assert_startswith",
"assert_endswith",
]
# The unittest.py testing framework checks for this variable in a module to
# filter out stack frames from that module from the test output, in order to
# make the output more concise.
# __unittest = 1
import traceback
from .inspection import get_full_name
_number_types = (int, float, complex)
def _assert_fail_message(message, expected, actual, comparison_str, extra):
if message:
return message
if extra:
return "%a %s %a (%s)" % (expected, comparison_str, actual, extra)
return "%a %s %a" % (expected, comparison_str, actual)
def assert_is(expected, actual, message=None, extra=None):
"""Raises an AssertionError if expected is not actual."""
assert expected is actual, _assert_fail_message(
message, expected, actual, "is not", extra
)
def assert_is_not(expected, actual, message=None, extra=None):
"""Raises an AssertionError if expected is actual."""
assert expected is not actual, _assert_fail_message(
message, expected, actual, "is", extra
)
def assert_is_instance(value, types, message=None, extra=None):
"""Raises an AssertionError if value is not an instance of type(s)."""
assert isinstance(value, types), _assert_fail_message(
message, value, types, "is not an instance of", extra
)
def assert_eq(expected, actual, message=None, tolerance=None, extra=None):
"""Raises an AssertionError if expected != actual.
If tolerance is specified, raises an AssertionError if either
- expected or actual isn't a number, or
- the difference between expected and actual is larger than the tolerance.
"""
if tolerance is None:
assert expected == actual, _assert_fail_message(
message, expected, actual, "!=", extra
)
else:
assert isinstance(tolerance, _number_types), (
"tolerance parameter to assert_eq must be a number: %a" % tolerance
)
assert isinstance(expected, _number_types) and isinstance(
actual, _number_types
), "parameters must be numbers when tolerance is specified: %a, %a" % (
expected,
actual,
)
diff = abs(expected - actual)
assert diff <= tolerance, _assert_fail_message(
message, expected, actual, "is more than %a away from" % tolerance, extra
)
def _dict_path_string(path):
if len(path) == 0:
return "(root)"
return "->".join(map(ascii, path))
def assert_dict_eq(expected, actual, number_tolerance=None, dict_path=[]):
"""Asserts that two dictionaries are equal, producing a custom message if they are not."""
assert_is_instance(expected, dict)
assert_is_instance(actual, dict)
expected_keys = set(expected.keys())
actual_keys = set(actual.keys())
assert expected_keys <= actual_keys, "Actual dict at %s is missing keys: %a" % (
_dict_path_string(dict_path),
expected_keys - actual_keys,
)
assert actual_keys <= expected_keys, "Actual dict at %s has extra keys: %a" % (
_dict_path_string(dict_path),
actual_keys - expected_keys,
)
for k in expected_keys:
key_path = dict_path + [k]
assert_is_instance(
actual[k],
type(expected[k]),
extra="Types don't match for %s" % _dict_path_string(key_path),
)
assert_is_instance(
expected[k],
type(actual[k]),
extra="Types don't match for %s" % _dict_path_string(key_path),
)
if isinstance(actual[k], dict):
assert_dict_eq(
expected[k],
actual[k],
number_tolerance=number_tolerance,
dict_path=key_path,
)
elif isinstance(actual[k], _number_types):
assert_eq(
expected[k],
actual[k],
extra="Value doesn't match for %s" % _dict_path_string(key_path),
tolerance=number_tolerance,
)
else:
assert_eq(
expected[k],
actual[k],
extra="Value doesn't match for %s" % _dict_path_string(key_path),
)
def assert_ne(expected, actual, message=None, tolerance=None, extra=None):
"""Raises an AssertionError if expected == actual.
If tolerance is specified, raises an AssertionError if either
- expected or actual isn't a number, or
- the difference between expected and actual is smaller than the tolerance.
"""
if tolerance is None:
assert expected != actual, _assert_fail_message(
message, expected, actual, "==", extra
)
else:
assert isinstance(tolerance, _number_types), (
"tolerance parameter to assert_eq must be a number: %a" % tolerance
)
assert isinstance(expected, _number_types) and isinstance(
actual, _number_types
), "parameters must be numbers when tolerance is specified: %a, %a" % (
expected,
actual,
)
diff = abs(expected - actual)
assert diff > tolerance, _assert_fail_message(
message, expected, actual, "is less than %a away from" % tolerance, extra
)
def assert_gt(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand <= right_hand."""
assert left > right, _assert_fail_message(message, left, right, "<=", extra)
def assert_ge(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand < right_hand."""
assert left >= right, _assert_fail_message(message, left, right, "<", extra)
def assert_lt(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand >= right_hand."""
assert left < right, _assert_fail_message(message, left, right, ">=", extra)
def assert_le(left, right, message=None, extra=None):
"""Raises an AssertionError if left_hand > right_hand."""
assert left <= right, _assert_fail_message(message, left, right, ">", extra)
def assert_in(obj, seq, message=None, extra=None):
"""Raises an AssertionError if obj is not in seq."""
assert obj in seq, _assert_fail_message(message, obj, seq, "is not in", extra)
def assert_not_in(obj, seq, message=None, extra=None):
"""Raises an AssertionError if obj is in iter."""
# for very long strings, provide a truncated error
if isinstance(seq, str) and obj in seq and len(seq) > 200:
index = seq.find(obj)
start_index = index - 50
if start_index > 0:
truncated = "(truncated) ..."
else:
truncated = ""
start_index = 0
end_index = index + len(obj) + 50
truncated += seq[start_index:end_index]
if end_index < len(seq):
truncated += "... (truncated)"
assert False, _assert_fail_message(message, obj, truncated, "is in", extra)
assert obj not in seq, _assert_fail_message(message, obj, seq, "is in", extra)
def assert_in_with_tolerance(obj, seq, tolerance, message=None, extra=None):
"""Raises an AssertionError if obj is not in seq using assert_eq cmp."""
for i in seq:
try:
assert_eq(obj, i, tolerance=tolerance, message=message, extra=extra)
return
except AssertionError:
pass
assert False, _assert_fail_message(message, obj, seq, "is not in", extra)
def assert_unordered_list_eq(expected, actual, message=None):
"""Raises an AssertionError if the objects contained
in expected are not equal to the objects contained
in actual without regard to their order.
    This takes quadratic time in the number of elements in actual; don't use it for very long lists.
"""
missing_in_actual = []
missing_in_expected = list(actual)
for x in expected:
try:
missing_in_expected.remove(x)
except ValueError:
missing_in_actual.append(x)
if missing_in_actual or missing_in_expected:
if not message:
message = (
"%a not equal to %a; missing items: %a in expected, %a in actual."
% (expected, actual, missing_in_expected, missing_in_actual)
)
assert False, message
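# For example (illustrative only): assert_unordered_list_eq([1, 2, 2], [2, 1, 2]) passes,
# while assert_unordered_list_eq([1, 2], [1, 2, 2]) raises because the lists differ by an extra 2.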
def assert_raises(fn, *expected_exception_types):
"""Raises an AssertionError if calling fn does not raise one of the expected_exception-types."""
with AssertRaises(*expected_exception_types):
fn()
class AssertRaises(object):
"""With-context that asserts that the code within the context raises the specified exception."""
def __init__(self, *expected_exception_types, **kwargs):
# when you don't specify the exception expected, it's easy to write buggy tests that appear
# to pass but actually throw an exception different from the expected one
assert (
len(expected_exception_types) >= 1
), "You must specify the exception type when using AssertRaises"
self.expected_exception_types = set(expected_exception_types)
self.expected_exception_found = None
self.extra = kwargs.pop("extra", None)
assert_eq({}, kwargs)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type in self.expected_exception_types:
# Return True to suppress the Exception if the type matches. For details,
# see: http://docs.python.org/release/2.5.2/lib/typecontextmanager.html
self.expected_exception_found = exc_val
return True
for t in self.expected_exception_types:
if isinstance(exc_val, t):
self.expected_exception_found = exc_val
return True
expected = ", ".join(map(get_full_name, self.expected_exception_types))
if exc_type is None:
message = "No exception raised, but expected: %s" % expected
if self.extra is not None:
message += " (%s)" % self.extra
else:
template = (
"{TYPE}: {VAL} is raised, but expected:"
" {EXPECTED}{EXTRA_STR}\n\n{STACK}"
)
message = template.format(
TYPE=get_full_name(exc_type),
VAL=exc_val,
EXPECTED=expected,
STACK="".join(traceback.format_tb(exc_tb)),
EXTRA_STR=(" (%s)" % self.extra) if self.extra is not None else "",
)
raise AssertionError(message)
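# Example usage (illustrative only):
#   with AssertRaises(ValueError):
#       int("not a number")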
# ===================================================
# Strings
# ===================================================
def assert_is_substring(substring, subject, message=None, extra=None):
"""Raises an AssertionError if substring is not a substring of subject."""
assert (
(subject is not None)
and (substring is not None)
and (subject.find(substring) != -1)
), _assert_fail_message(message, substring, subject, "is not in", extra)
def assert_is_not_substring(substring, subject, message=None, extra=None):
"""Raises an AssertionError if substring is a substring of subject."""
assert (
(subject is not None)
and (substring is not None)
and (subject.find(substring) == -1)
), _assert_fail_message(message, substring, subject, "is in", extra)
def assert_startswith(prefix, subject, message=None, extra=None):
"""Raises an AssertionError if the subject string does not start with prefix."""
assert (
(type(subject) is str)
and (type(prefix) is str)
and (subject.startswith(prefix))
), _assert_fail_message(message, subject, prefix, "does not start with", extra)
def assert_endswith(suffix, subject, message=None, extra=None):
"""Raises an AssertionError if the subject string does not end with suffix."""
assert (
(type(subject) is str) and (type(suffix) is str) and (subject.endswith(suffix))
), _assert_fail_message(message, subject, suffix, "does not end with", extra)
| [
"traceback.format_tb"
] | [((11990, 12017), 'traceback.format_tb', 'traceback.format_tb', (['exc_tb'], {}), '(exc_tb)\n', (12009, 12017), False, 'import traceback\n')] |
import os
import random
from typing import Any, Dict, List, Union
import numpy as np
import torch
from colorama import Fore, Style
from sklearn.metrics import f1_score
from sklearn.metrics import precision_recall_fscore_support as score
from sklearn.metrics import precision_score, recall_score
def highlight(input_: Any) -> str:
input_ = str(input_)
return str(Fore.YELLOW + str(input_) + Style.RESET_ALL)
def get_intent_labels(args: Any) -> List[str]:
return [
label.strip()
for label in open(
os.path.join(args.data_dir, args.intent_label_file), "r", encoding="utf-8"
)
]
def get_slot_labels(args: Any) -> List[str]:
return [
label.strip()
for label in open(
os.path.join(args.data_dir, args.slot_label_file), "r", encoding="utf-8"
)
]
def get_pos_labels(args: Any) -> List[str]:
return [
label.strip()
for label in open(
os.path.join(args.data_dir, args.pos_label_file), "r", encoding="utf-8"
)
]
def set_torch_seed(seed: Any, no_cuda: bool) -> None:
random.seed(seed)
np.random.seed(seed)
torch.manual_seed(seed) # type: ignore
if not no_cuda and torch.cuda.is_available():
torch.cuda.manual_seed_all(seed) # type: ignore
def compute_metrics(
intent_preds: List[str],
intent_labels: List[str],
slot_preds: List[List[str]],
slot_labels: List[List[str]],
) -> Dict[Any, Any]:
assert (
len(intent_preds) == len(intent_labels) == len(slot_preds) == len(slot_labels)
)
results: Dict[Any, Any] = {}
intent_result = get_intent_acc(intent_preds, intent_labels)
slot_result = get_slot_metrics(slot_preds, slot_labels)
sementic_result = get_sentence_frame_acc(
intent_preds, intent_labels, slot_preds, slot_labels
)
# New metrics added following Dan's request.
slot_simple_result = get_slot_simple_metrics(slot_preds, slot_labels)
partial_match_result = get_partial_match_metrics(slot_preds, slot_labels)
results.update(intent_result)
results.update(slot_result)
results.update(sementic_result)
results.update(slot_simple_result)
results.update(partial_match_result)
return results
def simplify_tokens(preds: List[str]) -> List[str]:
simple_preds = []
for p in preds:
if p.endswith("TERM"):
simple_preds.append("TERM")
elif p.endswith("DEF"):
simple_preds.append("DEF")
else:
simple_preds.append(p)
return simple_preds
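# For example (hypothetical tags): simplify_tokens(["O", "B-TERM", "I-TERM", "B-DEF", "I-DEF"])
# returns ["O", "TERM", "TERM", "DEF", "DEF"], i.e. the B-/I- prefixes are collapsed into the
# 'virtual tags' TERM and DEF used by get_slot_simple_metrics below.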
def get_partial_match_metrics(
preds: List[List[str]], labels: List[List[str]]
) -> Dict[Any, Any]:
"""
    Suppose there are N such pairs in the gold data and the system predicts M such pairs. A 'partial match'
    happens when the system predicts a pair <term, defn> and there is some overlap (at least one token)
    between the predicted and gold term spans AND some overlap between the predicted and gold definition
    spans. Let P be the number of partial matches. Then:
    Partial match precision = P/M
    Partial match recall = P/N
"""
assert len(preds) == len(labels)
both_in_preds, both_in_labels = [], []
partial_matches, exact_matches = [], []
for pred_sent, label_sent in zip(preds, labels):
simple_pred_sent = simplify_tokens(pred_sent)
simple_label_sent = simplify_tokens(label_sent)
# check whether term/def exist together
both_in_pred = "TERM" in simple_pred_sent and "DEF" in simple_pred_sent
both_in_label = "TERM" in simple_label_sent and "DEF" in simple_label_sent
both_in_preds.append(both_in_pred)
both_in_labels.append(both_in_label)
partial_match = False
exact_match = False
match: List[Union[str, bool]] = []
if both_in_pred and both_in_label:
for p, l in zip(simple_pred_sent, simple_label_sent):
if p == l:
match.append(p)
else:
match.append(False)
if "TERM" in match and "DEF" in match:
partial_match = True
if False not in match:
exact_match = True
partial_matches.append(partial_match)
exact_matches.append(exact_match)
    count_both_in_preds = sum(both_in_preds)  # M (predicted pairs)
    count_both_in_labels = sum(both_in_labels)  # N (gold pairs)
count_partial_matches = sum(partial_matches) # P
count_exact_matches = sum(exact_matches) # E
partial_precision = count_partial_matches / count_both_in_preds
partial_recall = count_partial_matches / count_both_in_labels
partial_fscore = (
2 * partial_precision * partial_recall / (partial_precision + partial_recall)
)
exact_precision = count_exact_matches / count_both_in_preds
exact_recall = count_exact_matches / count_both_in_labels
exact_fscore = 2 * exact_precision * exact_recall / (exact_precision + exact_recall)
return {
"partial_match_precision": partial_precision,
"partial_match_recall": partial_recall,
"partial_match_f1": partial_fscore,
"exact_match_precision": exact_precision,
"excat_match_recall": exact_recall,
"excat_match_f1": exact_fscore,
}
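# Worked example (hypothetical counts): if the gold data contains N = 10 sentences with both a term
# and a definition, the system predicts M = 8 such sentences, and P = 6 of those predictions overlap
# the gold spans for both the term and the definition, then
# partial_match_precision = 6 / 8 = 0.75 and partial_match_recall = 6 / 10 = 0.6.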
def get_slot_simple_metrics(
preds: List[List[str]], labels: List[List[str]]
) -> Dict[Any, Any]:
"""
Conceptually, define the following new types of ‘virtual tags’
TERM = B-term OR I-Term (ie the union of those two tags)
DEF = B-Def OR I-Def
    Now compute the P, R & F1 numbers for TERM and DEF. (These matter because users may just care about the
    accuracy of term and definition matching, while the macro-averaged scores conflate other things such as
    recall on these metrics and precision on O. Likewise, the current macro average treats missing the first
    word of a definition differently from skipping the last word.)
"""
assert len(preds) == len(labels)
# flatten
preds_flattened = [p for ps in preds for p in ps]
labels_flattened = [l for ls in labels for l in ls]
# simplify by replacing {B,I}-TERM to TERM and {B,I}-DEF to DEF
simple_preds = simplify_tokens(preds_flattened)
simple_labels = simplify_tokens(labels_flattened)
assert len(simple_preds) == len(simple_labels)
label_names = ["O", "TERM", "DEF"]
p, r, f, s = score(simple_labels, simple_preds, average=None, labels=label_names)
s = [int(si) for si in s]
p = [round(float(pi), 3) for pi in p]
r = [round(float(pi), 3) for pi in r]
f = [round(float(pi), 3) for pi in f]
per_class = {"p": list(p), "r": list(r), "f": list(f), "s": list(s)}
# pprint(per_class)
return {
"slot_merged_TERM_precision": per_class["p"][1],
"slot_merged_TERM_recall": per_class["r"][1],
"slot_merged_TERM_f1": per_class["f"][1],
"slot_merged_DEFINITION_precision": per_class["p"][2],
"slot_merged_DEFINITION_recall": per_class["r"][2],
"slot_merged_DEFINITION_f1": per_class["f"][2],
}
def get_slot_metrics(preds: List[List[str]], labels: List[List[str]]) -> Dict[Any, Any]:
assert len(preds) == len(labels)
# flatten
preds_flattened = [p for ps in preds for p in ps]
labels_flattened = [l for ls in labels for l in ls]
macro_f1 = f1_score(labels_flattened, preds_flattened, average="macro")
micro_f1 = f1_score(labels_flattened, preds_flattened, average="micro")
macro_p = precision_score(labels_flattened, preds_flattened, average="macro")
micro_p = precision_score(labels_flattened, preds_flattened, average="micro")
macro_r = recall_score(labels_flattened, preds_flattened, average="macro")
micro_r = recall_score(labels_flattened, preds_flattened, average="micro")
label_names = ["O", "B-TERM", "I-TERM", "B-DEF", "I-DEF"]
p, r, f, s = score(
labels_flattened, preds_flattened, average=None, labels=label_names
)
s = [int(si) for si in s]
p = [round(float(pi), 3) for pi in p]
r = [round(float(pi), 3) for pi in r]
f = [round(float(pi), 3) for pi in f]
per_class = {"p": list(p), "r": list(r), "f": list(f), "s": list(s)}
# print(per_class)
return {
"slot_precision_macro": macro_p,
"slot_recall_macro": macro_r,
"slot_f1_macro": macro_f1,
"slot_precision_micro": micro_p,
"slot_recall_micro": micro_r,
"slot_f1_micro": micro_f1,
"slot_precision_per_label": per_class["p"],
"slot_recal_per_label": per_class["r"],
"slot_f1_per_label": per_class["f"],
"slot_num_per_label": per_class["s"],
}
def get_intent_acc(preds: List[str], labels: List[str]) -> Dict[Any, Any]:
acc = (preds == labels).mean()
return {"intent_acc": acc}
def read_prediction_text(args: Any) -> List[str]:
return [
text.strip()
for text in open(
os.path.join(args.pred_dir, args.pred_input_file), "r", encoding="utf-8"
)
]
def get_sentence_frame_acc(
intent_preds: List[str],
intent_labels: List[str],
slot_preds: List[List[str]],
slot_labels: List[List[str]],
) -> Dict[Any, Any]:
"""For the cases that intent and all the slots are correct (in one sentence)"""
# Get the intent comparison result
intent_result = intent_preds == intent_labels
# Get the slot comparision result
slot_result = []
for preds, labels in zip(slot_preds, slot_labels):
assert len(preds) == len(labels)
one_sent_result = True
for p, l in zip(preds, labels):
if p != l:
one_sent_result = False
break
slot_result.append(one_sent_result)
slot_result = np.array(slot_result)
sementic_acc = np.multiply(intent_result, slot_result).mean()
return {"sementic_frame_acc": sementic_acc}
| [
"torch.cuda.manual_seed_all",
"torch.manual_seed",
"numpy.multiply",
"sklearn.metrics.f1_score",
"sklearn.metrics.precision_recall_fscore_support",
"os.path.join",
"random.seed",
"sklearn.metrics.precision_score",
"sklearn.metrics.recall_score",
"numpy.array",
"torch.cuda.is_available",
"numpy.random.seed"
] | [((1110, 1127), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (1121, 1127), False, 'import random\n'), ((1132, 1152), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (1146, 1152), True, 'import numpy as np\n'), ((1157, 1180), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (1174, 1180), False, 'import torch\n'), ((6364, 6432), 'sklearn.metrics.precision_recall_fscore_support', 'score', (['simple_labels', 'simple_preds'], {'average': 'None', 'labels': 'label_names'}), '(simple_labels, simple_preds, average=None, labels=label_names)\n', (6369, 6432), True, 'from sklearn.metrics import precision_recall_fscore_support as score\n'), ((7315, 7375), 'sklearn.metrics.f1_score', 'f1_score', (['labels_flattened', 'preds_flattened'], {'average': '"""macro"""'}), "(labels_flattened, preds_flattened, average='macro')\n", (7323, 7375), False, 'from sklearn.metrics import f1_score\n'), ((7391, 7451), 'sklearn.metrics.f1_score', 'f1_score', (['labels_flattened', 'preds_flattened'], {'average': '"""micro"""'}), "(labels_flattened, preds_flattened, average='micro')\n", (7399, 7451), False, 'from sklearn.metrics import f1_score\n'), ((7466, 7533), 'sklearn.metrics.precision_score', 'precision_score', (['labels_flattened', 'preds_flattened'], {'average': '"""macro"""'}), "(labels_flattened, preds_flattened, average='macro')\n", (7481, 7533), False, 'from sklearn.metrics import precision_score, recall_score\n'), ((7548, 7615), 'sklearn.metrics.precision_score', 'precision_score', (['labels_flattened', 'preds_flattened'], {'average': '"""micro"""'}), "(labels_flattened, preds_flattened, average='micro')\n", (7563, 7615), False, 'from sklearn.metrics import precision_score, recall_score\n'), ((7630, 7694), 'sklearn.metrics.recall_score', 'recall_score', (['labels_flattened', 'preds_flattened'], {'average': '"""macro"""'}), "(labels_flattened, preds_flattened, average='macro')\n", (7642, 7694), False, 'from sklearn.metrics import precision_score, recall_score\n'), ((7709, 7773), 'sklearn.metrics.recall_score', 'recall_score', (['labels_flattened', 'preds_flattened'], {'average': '"""micro"""'}), "(labels_flattened, preds_flattened, average='micro')\n", (7721, 7773), False, 'from sklearn.metrics import precision_score, recall_score\n'), ((7854, 7928), 'sklearn.metrics.precision_recall_fscore_support', 'score', (['labels_flattened', 'preds_flattened'], {'average': 'None', 'labels': 'label_names'}), '(labels_flattened, preds_flattened, average=None, labels=label_names)\n', (7859, 7928), True, 'from sklearn.metrics import precision_recall_fscore_support as score\n'), ((9714, 9735), 'numpy.array', 'np.array', (['slot_result'], {}), '(slot_result)\n', (9722, 9735), True, 'import numpy as np\n'), ((1220, 1245), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (1243, 1245), False, 'import torch\n'), ((1255, 1287), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['seed'], {}), '(seed)\n', (1281, 1287), False, 'import torch\n'), ((9756, 9795), 'numpy.multiply', 'np.multiply', (['intent_result', 'slot_result'], {}), '(intent_result, slot_result)\n', (9767, 9795), True, 'import numpy as np\n'), ((541, 592), 'os.path.join', 'os.path.join', (['args.data_dir', 'args.intent_label_file'], {}), '(args.data_dir, args.intent_label_file)\n', (553, 592), False, 'import os\n'), ((753, 802), 'os.path.join', 'os.path.join', (['args.data_dir', 'args.slot_label_file'], {}), '(args.data_dir, args.slot_label_file)\n', (765, 802), False, 'import os\n'), ((962, 1010), 'os.path.join', 'os.path.join', (['args.data_dir', 'args.pos_label_file'], {}), '(args.data_dir, args.pos_label_file)\n', (974, 1010), False, 'import os\n'), ((8901, 8950), 'os.path.join', 'os.path.join', (['args.pred_dir', 'args.pred_input_file'], {}), '(args.pred_dir, args.pred_input_file)\n', (8913, 8950), False, 'import os\n')]
"""
PROBLEM
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers
is 9009 = 91 × 99.
Find the largest palindrome made from the product of two 3-digit numbers.
ANSWER:
906609
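(the product of 913 and 993)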
Solve time ~ 0.760 seconds
"""
from itertools import product
import unittest
from util.utils import timeit
class Problem4:
    def __init__(self, num_digits):
        # smallest num_digits-digit number, and one past the largest (range() end is exclusive)
        self.lower = 10 ** (num_digits - 1)
        self.upper = 10 ** num_digits
@staticmethod
def is_palindrome(num):
return str(num) == str(num)[::-1]
@timeit
def solve(self):
pds = []
for i, j in product(range(self.lower, self.upper), repeat=2):
if self.is_palindrome(i * j):
pds.append(i * j)
return max(pds)
class Solution4(unittest.TestCase):
def setUp(self):
self.problem = Problem4(3)
def test_solution(self):
self.assertEqual(906609, self.problem.solve())
if __name__ == '__main__':
unittest.main()
| [
"unittest.main"
] | [((1013, 1028), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1026, 1028), False, 'import unittest\n')] |
import sys
import json
import logging
import argparse
import warnings
import requests
from indexclient import errors
# DEPRECATED 11/2019 -- interacts with old `/alias/` endpoint.
# For creating aliases for indexd records, prefer using
# the `add_alias` function, which interacts with the new
# `/index/{GUID}/aliases` endpoint.
def info(host, port, name, **kwargs):
"""
Retrieve info by name.
"""
warnings.warn(
(
"This function is deprecated. For creating aliases for indexd "
"records, prefer using the `add_alias_for_did` function, which "
"interacts with the new `/index/{GUID}/aliases` endpoint."
),
DeprecationWarning,
)
resource = "http://{host}:{port}/alias/{name}".format(
host=host, port=port, name=name
)
res = requests.get(resource)
try:
res.raise_for_status()
except Exception as err:
raise errors.BaseIndexError(res.status_code, res.text)
try:
doc = res.json()
except ValueError as err:
reason = json.dumps({"error": "invalid json payload returned"})
raise errors.BaseIndexError(res.status_code, reason)
sys.stdout.write(json.dumps(doc))
def config(parser):
"""
Configure the info command.
"""
parser.set_defaults(func=info)
parser.add_argument("name", help="name of information to retrieve")
| [
"warnings.warn",
"indexclient.errors.BaseIndexError",
"json.dumps",
"requests.get"
] | [((418, 644), 'warnings.warn', 'warnings.warn', (['"""This function is deprecated. For creating aliases for indexd records, prefer using the `add_alias_for_did` function, which interacts with the new `/index/{GUID}/aliases` endpoint."""', 'DeprecationWarning'], {}), "(\n 'This function is deprecated. For creating aliases for indexd records, prefer using the `add_alias_for_did` function, which interacts with the new `/index/{GUID}/aliases` endpoint.'\n , DeprecationWarning)\n", (431, 644), False, 'import warnings\n'), ((828, 850), 'requests.get', 'requests.get', (['resource'], {}), '(resource)\n', (840, 850), False, 'import requests\n'), ((1204, 1219), 'json.dumps', 'json.dumps', (['doc'], {}), '(doc)\n', (1214, 1219), False, 'import json\n'), ((935, 983), 'indexclient.errors.BaseIndexError', 'errors.BaseIndexError', (['res.status_code', 'res.text'], {}), '(res.status_code, res.text)\n', (956, 983), False, 'from indexclient import errors\n'), ((1066, 1120), 'json.dumps', 'json.dumps', (["{'error': 'invalid json payload returned'}"], {}), "({'error': 'invalid json payload returned'})\n", (1076, 1120), False, 'import json\n'), ((1135, 1181), 'indexclient.errors.BaseIndexError', 'errors.BaseIndexError', (['res.status_code', 'reason'], {}), '(res.status_code, reason)\n', (1156, 1181), False, 'from indexclient import errors\n')] |
###############################################################################
# @todo add Pilot2-splash-app disclaimer
###############################################################################
""" Get's KRAS states """
import MDAnalysis as mda
from MDAnalysis.analysis import align
from MDAnalysis.lib.mdamath import make_whole
import os
import numpy as np
import math
############## Below section needs to be uncommented ############
import mummi_core
import mummi_ras
from mummi_core.utils import Naming
# # The logger has to be initialized as the first thing in the script
from logging import getLogger
LOGGER = getLogger(__name__)
# # Initialize MuMMI if it has not been done before
# MUMMI_ROOT = mummi.init(True)
# This is needed so the Naming works below
#@TODO fix this so we don't have these on import; make them part of an init
mummi_core.init()
dirKRASStates = Naming.dir_res('states')
dirKRASStructures = Naming.dir_res('structures')
# #RAS_ONLY_macrostate = np.loadtxt(os.path.join(dirKRASStates, "RAS-ONLY.microstates.txt"))
RAS_ONLY_macrostate = np.loadtxt(os.path.join(dirKRASStates, "ras-states.txt"),comments='#')
# #RAS_RAF_macrostate = np.loadtxt(os.path.join(dirKRASStates, "RAS-RAF.microstates.txt"))
RAS_RAF_macrostate = np.loadtxt(os.path.join(dirKRASStates, "ras-raf-states.txt"),comments='#') # Note different number of columns, so indices change below
# TODO: CS, my edits to test
# RAS_ONLY_macrostate = np.loadtxt('ras-states.txt')
# RAS_RAF_macrostate = np.loadtxt('ras-raf-states.txt')
############## above section needs to be uncommented ############
# TODO: CS, my edits to test
# TODO: TSC, The reference structure has to currently be set as the 'RAS-ONLY-reference-structure.gro'
# TODO: TSC, path to the reference structure is: mummi_resources/structures/
kras_ref_universe = mda.Universe(os.path.join(dirKRASStructures, "RAS-ONLY-reference-structure.gro"))
# kras_ref_universe = mda.Universe("RAS-ONLY-reference-structure.gro")
# kras_ref_universe = mda.Universe('AA_pfpatch_000000004641_RAS_RAF2_411.gro')
# TODO: CS, not using these for x4 proteins; instead using protein_systems below to set num_res
######### Below hard codes the number of residues within RAS-only and RAS-RAF ##########
RAS_only_num_res = 184
RAS_RAF_num_res = 320
######### Above hard codes the number of residues within RAS-only and RAS-RAF ##########
####### This can be removed
# def get_kras(syst, kras_start):
# """Gets all atoms for a KRAS protein starting at 'kras_start'."""
# return syst.atoms[kras_start:kras_start+428]
####### This can be removed
def get_segids(u):
"""Identifies the list of segments within the system. Only needs to be called x1 time"""
segs = u.segments
segs = segs.segids
ras_segids = []
rasraf_segids = []
for i in range(len(segs)):
# print(segs[i])
if segs[i][-3:] == 'RAS':
ras_segids.append(segs[i])
if segs[i][-3:] == 'RAF':
rasraf_segids.append(segs[i])
return ras_segids, rasraf_segids
def get_protein_info(u,tag):
"""Uses the segments identified in get_segids to make a list of all proteins in the systems.\
Outputs a list of the first residue number of the protein, and whether it is 'RAS-ONLY', or 'RAS-RAF'.\
The 'tag' input defines what is used to identify the first residue of the protein. i.e. 'resname ACE1 and name BB'.\
Only needs to be called x1 time"""
ras_segids, rasraf_segids = get_segids(u)
if len(ras_segids) > 0:
RAS = u.select_atoms('segid '+ras_segids[0]+' and '+str(tag))
else:
RAS = []
if len(rasraf_segids) > 0:
RAF = u.select_atoms('segid '+rasraf_segids[0]+' and '+str(tag))
else:
RAF = []
protein_info = []#np.empty([len(RAS)+len(RAF),2])
for i in range(len(RAS)):
protein_info.append((RAS[i].resid,'RAS-ONLY'))
for i in range(len(RAF)):
protein_info.append((RAF[i].resid,'RAS-RAF'))
######## sort protein info
protein_info = sorted(protein_info)
######## sort protein info
return protein_info
def get_ref_kras():
"""Gets the reference KRAS struct. Only called x1 time when class is loaded"""
start_of_g_ref = kras_ref_universe.residues[0].resid
ref_selection = 'resid '+str(start_of_g_ref)+':'+str(start_of_g_ref+24)+' ' +\
str(start_of_g_ref+38)+':'+str(start_of_g_ref+54)+' ' +\
str(start_of_g_ref+67)+':'+str(start_of_g_ref+164)+' ' +\
'and (name CA or name BB)'
r2_26r40_56r69_166_ref = kras_ref_universe.select_atoms(str(ref_selection))
return kras_ref_universe.select_atoms(str(ref_selection)).positions - kras_ref_universe.select_atoms(str(ref_selection)).center_of_mass()
# Load inital ref frames (only need to do this once)
ref0 = get_ref_kras()
def getKRASstates(u,kras_indices):
"""Gets states for all KRAS proteins in path."""
# res_shift = 8
# all_glycine = u.select_atoms("resname GLY")
# kras_indices = []
# for i in range(0, len(all_glycine), 26):
# kras_indices.append(all_glycine[i].index)
########## Below is taken out of the function so it is only done once #########
# kras_indices = get_protein_info(u,'resname ACE1 and name BB')
########## Above is taken out of the function so it is only done once #########
# CS, for x4 cases:
# [{protein_x4: (protein_type, num_res)}]
protein_systems = [{'ras4a': ('RAS-ONLY', 185),
'ras4araf': ('RAS-RAF', 321),
'ras': ('RAS-ONLY', 184),
'rasraf': ('RAS-RAF', 320)}]
ALLOUT = []
for k in range(len(kras_indices)):
start_of_g = kras_indices[k][0]
protein_x4 = str(kras_indices[k][1])
try:
protein_type = [item[protein_x4] for item in protein_systems][0][0] # 'RAS-ONLY' OR 'RAS-RAF'
num_res = [item[protein_x4] for item in protein_systems][0][1]
except:
LOGGER.error('Check KRas naming between modules')
raise Exception('Error: unknown KRas name')
# TODO: CS, replacing this comment section with the above, to handle x4 protein types
# ---------------------------------------
# ALLOUT = []
# for k in range(len(kras_indices)):
# start_of_g = kras_indices[k][0]
# protein_type = str(kras_indices[k][1])
# ########## BELOW SECTION TO DETERMINE WHICH RESIDUES ARE PART OF THE PROTEIN GROUP - NEEDED FOR PBC REMOVAL ##############
# ########## POTENTIALLY REDO WITH A 'HARD-CODED' NUMBER OF RESIDUES PER PROTEIN GROUP (WHETHER RAS-ONLY OR RAS-RAF) #######
# ########## HAS BEEN REDONE WITH A 'HARD-CODED' NUMBER OF RESIDUES PER PROTEIN GROUP (WHETHER RAS-ONLY OR RAS-RAF) ########
# # if len(kras_indices) == 1:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(len(u.residues))+' and name BB') ####### HAS TO BE FIXED FOR BACKBONE ATOMS FOR SPECIFIC PROTEIN
# # elif len(kras_indices) > 1:
# # if k == len(kras_indices)-1:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(len(u.residues))+' and name BB')
# # else:
# # krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(kras_indices[k+1][0])+' and name BB')
# ########## ABOVE SECTION TO DETERMINE WHICH RESIDUES ARE PART OF THE PROTEIN GROUP - NEEDED FOR PBC REMOVAL ##############
#
# ########## Below hard codes the number of residues/beads in the RAS-ONLY and RAS-RAF simulations #########################
# if protein_type == 'RAS-ONLY':
# num_res = RAS_only_num_res
# elif protein_type == 'RAS-RAF':
# num_res = RAS_RAF_num_res
# ########## Above hard codes the number of residues/beads in the RAS-ONLY and RAS-RAF simulations #########################
# ---------------------------------------
# TODO: TSC, I changed the selection below, which can be used for the make_whole...
# krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+num_res)+' and (name CA or name BB)')
krases0_BB = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+num_res))
krases0_BB.guess_bonds()
r2_26r40_56r69_166 = u.select_atoms('resid '+str(start_of_g)+':'+str(start_of_g+24)+' ' +\
str(start_of_g+38)+':'+str(start_of_g+54)+' ' +\
str(start_of_g+67)+':'+str(start_of_g+164)+\
' and (name CA or name BB)')
u_selection = \
'resid '+str(start_of_g)+':'+str(start_of_g+24)+' '+str(start_of_g+38)+':'+str(start_of_g+54)+' ' +\
str(start_of_g+67)+':'+str(start_of_g+164)+' and (name CA or name BB)'
mobile0 = u.select_atoms(str(u_selection)).positions - u.select_atoms(str(u_selection)).center_of_mass()
# TODO: CS, something wrong with ref0 from get_kras_ref()
# just making ref0 = mobile0 to test for now
# ref0 = mobile0
# TSC removed this
R, RMSD_junk = align.rotation_matrix(mobile0, ref0)
######## TODO: TSC, Adjusted for AA lipid names ########
# lipids = u.select_atoms('resname POPX POPC PAPC POPE DIPE DPSM PAPS PAP6 CHOL')
lipids = u.select_atoms('resname POPC PAPC POPE DIPE SSM PAPS SAPI CHL1')
coords = ref0
RotMat = []
OS = []
r152_165 = krases0_BB.select_atoms('resid '+str(start_of_g+150)+':'+str(start_of_g+163)+' and (name CA or name BB)')
r65_74 = krases0_BB.select_atoms('resid '+str(start_of_g+63)+':'+str(start_of_g+72)+' and (name CA or name BB)')
timeframes = []
# TODO: CS, for AA need bonds to run make_whole()
# krases0_BB.guess_bonds()
# TODO: CS, turn off for now to test beyond this point
''' *** for AA, need to bring that back on once all else runs ***
'''
# @Tim and <NAME>. this was commented out - please check.
#make_whole(krases0_BB)
j, rmsd_junk = mda.analysis.align.rotation_matrix((r2_26r40_56r69_166.positions-r2_26r40_56r69_166.center_of_mass()), coords)
RotMat.append(j)
OS.append(r65_74.center_of_mass()-r152_165.center_of_mass())
timeframes.append(u.trajectory.time)
if protein_type == 'RAS-RAF':
z_pos = []
############### NEED TO CONFIRM THE SELECTION OF THE RAF LOOP RESIDUES BELOW ####################
############### TODO: TSC, zshifting is set to -1 (instead of -2), as there are ACE caps that are separate residues in AA
#zshifting=-1
if protein_x4 == 'rasraf':
zshifting = -1
elif protein_x4 == 'ras4araf':
zshifting = 0
else:
zshifting = 0
LOGGER.error('Found unsupported protein_x4 type')
raf_loops_selection = u.select_atoms('resid '+str(start_of_g+zshifting+291)+':'+str(start_of_g+zshifting+294)+' ' +\
str(start_of_g+zshifting+278)+':'+str(start_of_g+zshifting+281)+' ' +\
' and (name CA or name BB)')
############### NEED TO CONFIRM THE SELECTION OF THE RAF LOOP RESIDUES ABOVE ####################
diff = (lipids.center_of_mass()[2]-raf_loops_selection.center_of_mass(unwrap=True)[2])/10
if diff < 0:
diff = diff+(u.dimensions[2]/10)
z_pos.append(diff)
z_pos = np.array(z_pos)
RotMatNP = np.array(RotMat)
OS = np.array(OS)
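        # OA below: normalised third row of each rotation matrix; OWAS: its angle from the lab z-axis in degrees (the tilt)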
OA = RotMatNP[:, 2, :]/(((RotMatNP[:, 2, 0]**2)+(RotMatNP[:, 2, 1]**2)+(RotMatNP[:, 2, 2]**2))**0.5)[:, None]
OWAS = np.arccos(RotMatNP[:, 2, 2])*180/math.pi
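        # OC: projection of OS onto OA; ORS: normalised component of OS perpendicular to OA (used for the rotation angle)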
OC_temp = np.concatenate((OA, OS), axis=1)
t = ((OC_temp[:, 0]*OC_temp[:, 3])+(OC_temp[:, 1]*OC_temp[:, 4]) +
(OC_temp[:, 2]*OC_temp[:, 5]))/((OC_temp[:, 0]**2)+(OC_temp[:, 1]**2)+(OC_temp[:, 2]**2))
OC = OA*t[:, None]
ORS_tp = np.concatenate((OC, OS), axis=1)
ORS_norm = (((ORS_tp[:, 3]-ORS_tp[:, 0])**2)+((ORS_tp[:, 4]-ORS_tp[:, 1])**2)+((ORS_tp[:, 5]-ORS_tp[:, 2])**2))**0.5
ORS = (OS - OC)/ORS_norm[:, None]
OACRS = np.cross(OA, ORS)
OZCA = OA * OA[:, 2][:, None]
Z_unit = np.full([len(OZCA), 3], 1)
Z_adjust = np.array([0, 0, 1])
Z_unit = Z_unit*Z_adjust
Z_OZCA = Z_unit-OZCA
OZPACB = Z_OZCA/((Z_OZCA[:, 0]**2+Z_OZCA[:, 1]**2+Z_OZCA[:, 2]**2)**0.5)[:, None]
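        # OROTNOTSIGNED: unsigned angle in degrees between ORS and OZPACB (the lab z-axis projected into the plane normal to OA); the sign is assigned below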
OROTNOTSIGNED = np.zeros([len(ORS)])
for i in range(len(ORS)):
OROTNOTSIGNED[i] = np.arccos(np.dot(OZPACB[i, :], ORS[i, :]) /
(np.sqrt(np.dot(OZPACB[i, :], OZPACB[i, :]))) *
(np.sqrt(np.dot(ORS[i, :], ORS[i, :]))))*180/math.pi
OZPACBCRS_cross = np.cross(OZPACB, ORS)
OZPACBCRS = OZPACBCRS_cross/((OZPACBCRS_cross[:, 0]**2+OZPACBCRS_cross[:, 1]**2+OZPACBCRS_cross[:, 2]**2)**0.5)[:, None]
OFORSIGN_temp = (OA - OZPACBCRS)**2
OFORSIGN = OFORSIGN_temp[:, 0]+OFORSIGN_temp[:, 1]+OFORSIGN_temp[:, 2]
OROT = OROTNOTSIGNED
for i in range(len(OROT)):
if OROT[i] < 0:
OROT[i] = -(OROT[i])
for i in range(len(OROT)):
if OFORSIGN[i] < 0.25:
OROT[i] = -(OROT[i])
###### Below introduces new shift to account for upper vs. lower leaflet #####
for i in range(len(OWAS)):
OWAS[i] = abs(-(OWAS[i])+180) # made this an absolute value so that the tilt remains positive
for i in range(len(OROT)):
if OROT[i] < 0:
OROT[i] = OROT[i]+180
elif OROT[i] > 0:
OROT[i] = OROT[i]-180
###### Above introduces new shift to account for upper vs. lower leaflet #####
###### Below might have to be updated to take into account the periodic nature of the rotation ######
if protein_type == 'RAS-ONLY':
states = np.zeros(len(OROT))
for j in range(len(OROT)):
diff0 = []
for i in range(len(RAS_ONLY_macrostate)):
#diff0.append([((RAS_ONLY_macrostate[i,0]-OWAS[j])**2+(RAS_ONLY_macrostate[i,1]-OROT[j])**2)**0.5, RAS_ONLY_macrostate[i,6]])
diff0.append([((RAS_ONLY_macrostate[i,1]-OWAS[j])**2+(RAS_ONLY_macrostate[i,0]-OROT[j])**2)**0.5, RAS_ONLY_macrostate[i,5]])
diff0.sort()
states[j] = diff0[0][1]
elif protein_type == 'RAS-RAF':
states = np.zeros(len(OROT))
for j in range(len(OROT)):
### below: adding in the requirements for the 'high-z' state ###
if (OROT[j] < -45 or OROT[j] > 140) and z_pos[j] > 4.8:
states[j] = 3
else:
### above: adding in the requirements for the 'high-z' state ###
diff0 = []
for i in range(len(RAS_RAF_macrostate)):
#diff0.append([((RAS_RAF_macrostate[i,0]-OWAS[j])**2+(RAS_RAF_macrostate[i,1]-OROT[j])**2)**0.5, RAS_RAF_macrostate[i,6]])
diff0.append([((RAS_RAF_macrostate[i,1]-OWAS[j])**2+(RAS_RAF_macrostate[i,0]-OROT[j])**2)**0.5, RAS_RAF_macrostate[i,4]])
diff0.sort()
states[j] = diff0[0][1]
###### Above might have to be updated to take into account the periodic nature of the rotation ######
###### Assume we want to remove this? Where is the code that reads this information? i.e. will there be knock-on effects? ######
###### If feedback code needs index 5 (two_states) from the output, deleting this four_states will shift that to index 4 #######
# four_states = np.zeros(len(OROT))
# for j in range(len(OROT)):
# diff0 = []
# for i in range(len(macrostate4)):
# diff0.append([((macrostate4[i,0]-OWAS[j])**2+(macrostate4[i,1]-OROT[j])**2)**0.5, macrostate4[i,6]])
# diff0.sort()
# four_states[j] = diff0[0][1]+1
###### below: old output details.... ######################################
###### Updated - RAS-only to NOT HAVE the Z-distance ######################
###### Updated - Added in the protein 'tag', i.e. RAS-ONLY or RAS-RAF #####
# OUTPUT = np.zeros([len(OROT), 6])
# for i in range(len(OROT)):
# OUTPUT[i] = timeframes[i], OWAS[i], OROT[i], z_pos[i], four_states[i], two_states[i]
###### above: old output details.... ######################################
###### below: NEW output details.... ######################################
if protein_type == 'RAS-ONLY':
OUTPUT = np.zeros([len(OROT), 6]).astype(object)
for i in range(len(OROT)):
OUTPUT[i] = str(protein_type), timeframes[i], OWAS[i], OROT[i], 'n/a', int(states[i])
elif protein_type == 'RAS-RAF':
OUTPUT = np.zeros([len(OROT), 6]).astype(object)
for i in range(len(OROT)):
OUTPUT[i] = str(protein_type), timeframes[i], OWAS[i], OROT[i], z_pos[i], int(states[i])
ALLOUT.append(OUTPUT)
return np.asarray(ALLOUT)
#np.savetxt(str(tpr)+"_tilt_rot_z_state.KRAS_"+str(k+1)+".txt", OUTPUT, fmt=['%i','%10.3f','%10.3f','%10.3f','%i','%i'], delimiter=' ')
| [
"logging.getLogger",
"MDAnalysis.analysis.align.rotation_matrix",
"mummi_core.utils.Naming.dir_res",
"numpy.arccos",
"numpy.cross",
"numpy.asarray",
"os.path.join",
"numpy.array",
"numpy.dot",
"mummi_core.init",
"numpy.concatenate"
] | [((621, 640), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (630, 640), False, 'from logging import getLogger\n'), ((840, 857), 'mummi_core.init', 'mummi_core.init', ([], {}), '()\n', (855, 857), False, 'import mummi_core\n'), ((875, 899), 'mummi_core.utils.Naming.dir_res', 'Naming.dir_res', (['"""states"""'], {}), "('states')\n", (889, 899), False, 'from mummi_core.utils import Naming\n'), ((920, 948), 'mummi_core.utils.Naming.dir_res', 'Naming.dir_res', (['"""structures"""'], {}), "('structures')\n", (934, 948), False, 'from mummi_core.utils import Naming\n'), ((1075, 1120), 'os.path.join', 'os.path.join', (['dirKRASStates', '"""ras-states.txt"""'], {}), "(dirKRASStates, 'ras-states.txt')\n", (1087, 1120), False, 'import os\n'), ((1259, 1308), 'os.path.join', 'os.path.join', (['dirKRASStates', '"""ras-raf-states.txt"""'], {}), "(dirKRASStates, 'ras-raf-states.txt')\n", (1271, 1308), False, 'import os\n'), ((1830, 1897), 'os.path.join', 'os.path.join', (['dirKRASStructures', '"""RAS-ONLY-reference-structure.gro"""'], {}), "(dirKRASStructures, 'RAS-ONLY-reference-structure.gro')\n", (1842, 1897), False, 'import os\n'), ((17304, 17322), 'numpy.asarray', 'np.asarray', (['ALLOUT'], {}), '(ALLOUT)\n', (17314, 17322), True, 'import numpy as np\n'), ((9165, 9201), 'MDAnalysis.analysis.align.rotation_matrix', 'align.rotation_matrix', (['mobile0', 'ref0'], {}), '(mobile0, ref0)\n', (9186, 9201), False, 'from MDAnalysis.analysis import align\n'), ((11647, 11663), 'numpy.array', 'np.array', (['RotMat'], {}), '(RotMat)\n', (11655, 11663), True, 'import numpy as np\n'), ((11678, 11690), 'numpy.array', 'np.array', (['OS'], {}), '(OS)\n', (11686, 11690), True, 'import numpy as np\n'), ((11883, 11915), 'numpy.concatenate', 'np.concatenate', (['(OA, OS)'], {'axis': '(1)'}), '((OA, OS), axis=1)\n', (11897, 11915), True, 'import numpy as np\n'), ((12138, 12170), 'numpy.concatenate', 'np.concatenate', (['(OC, OS)'], {'axis': '(1)'}), '((OC, OS), axis=1)\n', (12152, 12170), True, 'import numpy as np\n'), ((12354, 12371), 'numpy.cross', 'np.cross', (['OA', 'ORS'], {}), '(OA, ORS)\n', (12362, 12371), True, 'import numpy as np\n'), ((12473, 12492), 'numpy.array', 'np.array', (['[0, 0, 1]'], {}), '([0, 0, 1])\n', (12481, 12492), True, 'import numpy as np\n'), ((13009, 13030), 'numpy.cross', 'np.cross', (['OZPACB', 'ORS'], {}), '(OZPACB, ORS)\n', (13017, 13030), True, 'import numpy as np\n'), ((11611, 11626), 'numpy.array', 'np.array', (['z_pos'], {}), '(z_pos)\n', (11619, 11626), True, 'import numpy as np\n'), ((11824, 11852), 'numpy.arccos', 'np.arccos', (['RotMatNP[:, 2, 2]'], {}), '(RotMatNP[:, 2, 2])\n', (11833, 11852), True, 'import numpy as np\n'), ((12765, 12796), 'numpy.dot', 'np.dot', (['OZPACB[i, :]', 'ORS[i, :]'], {}), '(OZPACB[i, :], ORS[i, :])\n', (12771, 12796), True, 'import numpy as np\n'), ((12938, 12966), 'numpy.dot', 'np.dot', (['ORS[i, :]', 'ORS[i, :]'], {}), '(ORS[i, :], ORS[i, :])\n', (12944, 12966), True, 'import numpy as np\n'), ((12849, 12883), 'numpy.dot', 'np.dot', (['OZPACB[i, :]', 'OZPACB[i, :]'], {}), '(OZPACB[i, :], OZPACB[i, :])\n', (12855, 12883), True, 'import numpy as np\n')] |
"""
homeassistant.components.switch.hikvision
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Support turning on/off motion detection on Hikvision cameras.
Note: Currently works using default https port only.
CGI API Guide: http://bit.ly/1RuyUuF
Configuration:
To use the Hikvision motion detection switch you will need to add something
like the following to your config/configuration.yaml
switch:
platform: hikvisioncam
name: Hikvision Cam 1 Motion Detection
host: 192.168.1.32
username: YOUR_USERNAME
password: <PASSWORD>
Variables:
host
*Required
This is the IP address of your Hikvision camera. Example: 192.168.1.32
username
*Required
Your Hikvision camera username.
password
*Required
Your Hikvision camera password.
name
*Optional
The name to use when displaying this switch instance.
"""
from homeassistant.helpers.entity import ToggleEntity
from homeassistant.const import STATE_ON, STATE_OFF
from homeassistant.const import CONF_HOST, CONF_USERNAME, CONF_PASSWORD
import logging
try:
import hikvision.api
from hikvision.error import HikvisionError, MissingParamError
except ImportError:
hikvision.api = None
_LOGGING = logging.getLogger(__name__)
REQUIREMENTS = ['hikvision==0.4']
# pylint: disable=too-many-arguments
# pylint: disable=too-many-instance-attributes
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
""" Setup Hikvision Camera config. """
host = config.get(CONF_HOST, None)
port = config.get('port', "80")
name = config.get('name', "Hikvision Camera Motion Detection")
username = config.get(CONF_USERNAME, "admin")
password = config.get(CONF_PASSWORD, "<PASSWORD>")
if hikvision.api is None:
_LOGGING.error((
"Failed to import hikvision. Did you maybe not install the "
"'hikvision' dependency?"))
return False
try:
hikvision_cam = hikvision.api.CreateDevice(
host, port=port, username=username,
password=password, is_https=False)
except MissingParamError as param_err:
_LOGGING.error("Missing required param: %s", param_err)
return False
except HikvisionError as conn_err:
_LOGGING.error("Unable to connect: %s", conn_err)
return False
add_devices_callback([
HikvisionMotionSwitch(name, hikvision_cam)
])
class HikvisionMotionSwitch(ToggleEntity):
""" Provides a switch to toggle on/off motion detection. """
def __init__(self, name, hikvision_cam):
self._name = name
self._hikvision_cam = hikvision_cam
self._state = STATE_OFF
@property
def should_poll(self):
""" Poll for status regularly. """
return True
@property
def name(self):
""" Returns the name of the device if any. """
return self._name
@property
def state(self):
""" Returns the state of the device if any. """
return self._state
@property
def is_on(self):
""" True if device is on. """
return self._state == STATE_ON
def turn_on(self, **kwargs):
""" Turn the device on. """
_LOGGING.info("Turning on Motion Detection ")
self._hikvision_cam.enable_motion_detection()
def turn_off(self, **kwargs):
""" Turn the device off. """
_LOGGING.info("Turning off Motion Detection ")
self._hikvision_cam.disable_motion_detection()
def update(self):
""" Update Motion Detection state """
enabled = self._hikvision_cam.is_motion_detection_enabled()
_LOGGING.info('enabled: %s', enabled)
self._state = STATE_ON if enabled else STATE_OFF
| [
"logging.getLogger"
] | [((1149, 1176), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1166, 1176), False, 'import logging\n')] |
from typing import List, Optional, TYPE_CHECKING
import weakref
from PyQt5.QtCore import QEvent, Qt
from PyQt5.QtWidgets import (QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit,
QVBoxLayout, QWidget)
from electrumsv.app_state import app_state
from electrumsv.bitcoin import script_template_to_string
from electrumsv.constants import PaymentFlag, RECEIVING_SUBPATH
from electrumsv.i18n import _
from electrumsv.logs import logs
from electrumsv.wallet_database.tables import KeyInstanceRow
from electrumsv import web
from .amountedit import AmountEdit, BTCAmountEdit
from .constants import expiration_values
if TYPE_CHECKING:
from .main_window import ElectrumWindow
from .qrcodewidget import QRCodeWidget
from .qrwindow import QR_Window
from .request_list import RequestList
from .table_widgets import TableTopButtonLayout
from .util import ButtonsLineEdit, EnterButton, HelpLabel
class ReceiveView(QWidget):
_qr_window: Optional[QR_Window] = None
def __init__(self, main_window: 'ElectrumWindow', account_id: int) -> None:
super().__init__(main_window)
self._main_window = weakref.proxy(main_window)
self._account_id = account_id
self._account = main_window._wallet.get_account(account_id)
self._logger = logs.get_logger(f"receive-view[{self._account_id}]")
self._receive_key_id: Optional[int] = None
self._request_list_toolbar_layout = TableTopButtonLayout()
self._request_list_toolbar_layout.refresh_signal.connect(
self._main_window.refresh_wallet_display)
self._request_list_toolbar_layout.filter_signal.connect(self._filter_request_list)
form_layout = self.create_form_layout()
self._request_list = RequestList(self, main_window)
request_container = self.create_request_list_container()
vbox = QVBoxLayout(self)
vbox.addLayout(form_layout)
vbox.addSpacing(20)
vbox.addWidget(request_container, 1)
self.setLayout(vbox)
def clean_up(self) -> None:
# If there are no accounts there won't be a receive QR code object created yet.
if self._receive_qr is not None:
self._receive_qr.clean_up()
if self._qr_window is not None:
self._qr_window.close()
def create_form_layout(self) -> QHBoxLayout:
# A 4-column grid layout. All the stretch is in the last column.
# The exchange rate plugin adds a fiat widget in column 2
grid = QGridLayout()
grid.setSpacing(8)
grid.setColumnStretch(3, 1)
self._receive_destination_e = ButtonsLineEdit()
self._receive_destination_e.addCopyButton(app_state.app)
self._receive_destination_e.setReadOnly(True)
msg = _('Bitcoin SV payment destination where the payment should be received. '
'Note that each payment request uses a different Bitcoin SV payment destination.')
receive_address_label = HelpLabel(_('Receiving destination'), msg)
self._receive_destination_e.textChanged.connect(self._update_receive_qr)
self._receive_destination_e.setFocusPolicy(Qt.NoFocus)
grid.addWidget(receive_address_label, 0, 0)
grid.addWidget(self._receive_destination_e, 0, 1, 1, -1)
self._receive_message_e = QLineEdit()
grid.addWidget(QLabel(_('Description')), 1, 0)
grid.addWidget(self._receive_message_e, 1, 1, 1, -1)
self._receive_message_e.textChanged.connect(self._update_receive_qr)
self._receive_amount_e = BTCAmountEdit()
grid.addWidget(QLabel(_('Requested amount')), 2, 0)
grid.addWidget(self._receive_amount_e, 2, 1)
self._receive_amount_e.textChanged.connect(self._update_receive_qr)
self._fiat_receive_e = AmountEdit(app_state.fx.get_currency if app_state.fx else '')
if not app_state.fx or not app_state.fx.is_enabled():
self._fiat_receive_e.setVisible(False)
grid.addWidget(self._fiat_receive_e, 2, 2, Qt.AlignLeft)
self._main_window.connect_fields(self._receive_amount_e, self._fiat_receive_e)
self._expires_combo = QComboBox()
self._expires_combo.addItems([i[0] for i in expiration_values])
self._expires_combo.setCurrentIndex(3)
self._expires_combo.setFixedWidth(self._receive_amount_e.width())
msg = ' '.join([
_('Expiration date of your request.'),
_('This information is seen by the recipient if you send them '
'a signed payment request.'),
_('Expired requests have to be deleted manually from your list, '
'in order to free the corresponding Bitcoin SV addresses.'),
_('The Bitcoin SV address never expires and will always be part '
'of this ElectrumSV wallet.'),
])
grid.addWidget(HelpLabel(_('Request expires'), msg), 3, 0)
grid.addWidget(self._expires_combo, 3, 1)
self._expires_label = QLineEdit('')
self._expires_label.setReadOnly(1)
self._expires_label.setFocusPolicy(Qt.NoFocus)
self._expires_label.hide()
grid.addWidget(self._expires_label, 3, 1)
self._save_request_button = EnterButton(_('Save request'), self._save_form_as_request)
self._new_request_button = EnterButton(_('New'), self._new_payment_request)
self._receive_qr = QRCodeWidget(fixedSize=200)
self._receive_qr.link_to_window(self._toggle_qr_window)
buttons = QHBoxLayout()
buttons.addStretch(1)
buttons.addWidget(self._save_request_button)
buttons.addWidget(self._new_request_button)
grid.addLayout(buttons, 4, 1, 1, 2)
vbox_g = QVBoxLayout()
vbox_g.addLayout(grid)
vbox_g.addStretch()
hbox = QHBoxLayout()
hbox.addLayout(vbox_g)
hbox.addWidget(self._receive_qr)
return hbox
def create_request_list_container(self) -> QGroupBox:
layout = QVBoxLayout()
layout.setSpacing(0)
layout.setContentsMargins(6, 0, 6, 6)
layout.addLayout(self._request_list_toolbar_layout)
layout.addWidget(self._request_list)
request_box = QGroupBox()
request_box.setTitle(_('Requests'))
request_box.setAlignment(Qt.AlignCenter)
request_box.setContentsMargins(0, 0, 0, 0)
request_box.setLayout(layout)
return request_box
def update_widgets(self) -> None:
self._request_list.update()
def update_destination(self) -> None:
text = ""
if self._receive_key_id is not None:
script_template = self._account.get_script_template_for_id(self._receive_key_id)
if script_template is not None:
text = script_template_to_string(script_template)
self._receive_destination_e.setText(text)
def update_contents(self) -> None:
self._expires_label.hide()
self._expires_combo.show()
if self._account.is_deterministic():
fresh_key = self._account.get_fresh_keys(RECEIVING_SUBPATH, 1)[0]
self.set_receive_key(fresh_key)
def update_for_fx_quotes(self) -> None:
if self._account_id is not None:
edit = (self._fiat_receive_e
if self._fiat_receive_e.is_last_edited else self._receive_amount_e)
edit.textEdited.emit(edit.text())
# Bound to text fields in `_create_receive_form_layout`.
def _update_receive_qr(self) -> None:
if self._receive_key_id is None:
return
amount = self._receive_amount_e.get_amount()
message = self._receive_message_e.text()
self._save_request_button.setEnabled((amount is not None) or (message != ""))
script_template = self._account.get_script_template_for_id(self._receive_key_id)
address_text = script_template_to_string(script_template)
uri = web.create_URI(address_text, amount, message)
self._receive_qr.setData(uri)
if self._qr_window and self._qr_window.isVisible():
self._qr_window.set_content(self._receive_destination_e.text(), amount,
message, uri)
def _toggle_qr_window(self, event: QEvent) -> None:
if self._receive_key_id is None:
self.show_message(_("No available receiving destination."))
return
if not self._qr_window:
self._qr_window = QR_Window(self)
self._qr_window.setVisible(True)
self._qr_window_geometry = self._qr_window.geometry()
else:
if not self._qr_window.isVisible():
self._qr_window.setVisible(True)
self._qr_window.setGeometry(self._qr_window_geometry)
else:
self._qr_window_geometry = self._qr_window.geometry()
self._qr_window.setVisible(False)
self._update_receive_qr()
def set_fiat_ccy_enabled(self, flag: bool) -> None:
self._fiat_receive_e.setVisible(flag)
def get_bsv_edits(self) -> List[BTCAmountEdit]:
return [ self._receive_amount_e ]
def _save_form_as_request(self) -> None:
if not self._receive_key_id:
self._main_window.show_error(_('No receiving payment destination'))
return
amount = self._receive_amount_e.get_amount()
message = self._receive_message_e.text()
if not message and not amount:
self._main_window.show_error(_('No message or amount'))
return
def callback(exc_value: Optional[Exception]=None) -> None:
if exc_value is not None:
raise exc_value # pylint: disable=raising-bad-type
self._request_list.update_signal.emit()
i = self._expires_combo.currentIndex()
expiration = [x[1] for x in expiration_values][i]
row = self._account.requests.get_request_for_key_id(self._receive_key_id)
if row is None:
row = self._account.requests.create_request(self._receive_key_id,
PaymentFlag.UNPAID, amount, expiration, message, callback)
else:
# Expiration is just a label, so we don't use the value.
self._account.requests.update_request(row.paymentrequest_id, row.state, amount,
row.expiration, message, callback)
self._save_request_button.setEnabled(False)
def _new_payment_request(self) -> None:
keyinstances: List[KeyInstanceRow] = []
if self._account.is_deterministic():
keyinstances = self._account.get_fresh_keys(RECEIVING_SUBPATH, 1)
if not len(keyinstances):
if not self._account.is_deterministic():
msg = [
_('No more payment destinations in your wallet.'),
_('You are using a non-deterministic account, which '
'cannot create new payment destinations.'),
_('If you want to create new payment destinations, '
'use a deterministic account instead.')
]
self._main_window.show_message(' '.join(msg))
return
self._main_window.show_message(
_('Your wallet is broken and could not allocate a new payment destination.'))
self.update_contents()
self._new_request_button.setEnabled(False)
self._receive_message_e.setFocus(1)
def get_receive_key_id(self) -> Optional[int]:
return self._receive_key_id
# Only called from key list menu.
def receive_at_id(self, key_id: int) -> None:
self._receive_key_id = key_id
self._new_request_button.setEnabled(True)
self.update_destination()
self._main_window.show_receive_tab()
def set_receive_key_id(self, key_id: int) -> None:
self._receive_key_id = key_id
def set_receive_key(self, keyinstance: KeyInstanceRow) -> None:
self._receive_key_id = keyinstance.keyinstance_id
self._receive_message_e.setText("")
self._receive_amount_e.setAmount(None)
self.update_destination()
def set_form_contents(self, address_text: str, value: int, description: Optional[str]=None,
expires_description: str="") -> None:
self._receive_destination_e.setText(address_text)
self._receive_message_e.setText(description or "")
self._receive_amount_e.setAmount(value)
self._expires_combo.hide()
self._expires_label.show()
self._expires_label.setText(expires_description)
self._new_request_button.setEnabled(True)
def set_new_button_enabled(self, flag: bool) -> None:
self._new_request_button.setEnabled(flag)
def _filter_request_list(self, text: str) -> None:
self._request_list.filter(text)
| [
"electrumsv.i18n._",
"PyQt5.QtWidgets.QComboBox",
"electrumsv.web.create_URI",
"PyQt5.QtWidgets.QHBoxLayout",
"PyQt5.QtWidgets.QGridLayout",
"weakref.proxy",
"electrumsv.logs.logs.get_logger",
"PyQt5.QtWidgets.QGroupBox",
"electrumsv.app_state.app_state.fx.is_enabled",
"PyQt5.QtWidgets.QVBoxLayout",
"electrumsv.bitcoin.script_template_to_string",
"PyQt5.QtWidgets.QLineEdit"
] | [((1129, 1155), 'weakref.proxy', 'weakref.proxy', (['main_window'], {}), '(main_window)\n', (1142, 1155), False, 'import weakref\n'), ((1285, 1337), 'electrumsv.logs.logs.get_logger', 'logs.get_logger', (['f"""receive-view[{self._account_id}]"""'], {}), "(f'receive-view[{self._account_id}]')\n", (1300, 1337), False, 'from electrumsv.logs import logs\n'), ((1859, 1876), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', (['self'], {}), '(self)\n', (1870, 1876), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((2498, 2511), 'PyQt5.QtWidgets.QGridLayout', 'QGridLayout', ([], {}), '()\n', (2509, 2511), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((2765, 2923), 'electrumsv.i18n._', '_', (['"""Bitcoin SV payment destination where the payment should be received. Note that each payment request uses a different Bitcoin SV payment destination."""'], {}), "('Bitcoin SV payment destination where the payment should be received. Note that each payment request uses a different Bitcoin SV payment destination.'\n )\n", (2766, 2923), False, 'from electrumsv.i18n import _\n'), ((3309, 3320), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', ([], {}), '()\n', (3318, 3320), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((4143, 4154), 'PyQt5.QtWidgets.QComboBox', 'QComboBox', ([], {}), '()\n', (4152, 4154), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((4978, 4991), 'PyQt5.QtWidgets.QLineEdit', 'QLineEdit', (['""""""'], {}), "('')\n", (4987, 4991), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5494, 5507), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5505, 5507), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5705, 5718), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (5716, 5718), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5794, 5807), 'PyQt5.QtWidgets.QHBoxLayout', 'QHBoxLayout', ([], {}), '()\n', (5805, 5807), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((5977, 5990), 'PyQt5.QtWidgets.QVBoxLayout', 'QVBoxLayout', ([], {}), '()\n', (5988, 5990), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((6194, 6205), 'PyQt5.QtWidgets.QGroupBox', 'QGroupBox', ([], {}), '()\n', (6203, 6205), False, 'from PyQt5.QtWidgets import QComboBox, QGridLayout, QGroupBox, QHBoxLayout, QLabel, QLineEdit, QVBoxLayout, QWidget\n'), ((7849, 7891), 'electrumsv.bitcoin.script_template_to_string', 'script_template_to_string', (['script_template'], {}), '(script_template)\n', (7874, 7891), False, 'from electrumsv.bitcoin import script_template_to_string\n'), ((7907, 7952), 'electrumsv.web.create_URI', 'web.create_URI', (['address_text', 'amount', 'message'], {}), '(address_text, amount, message)\n', (7921, 7952), False, 'from electrumsv import web\n'), ((2980, 3006), 'electrumsv.i18n._', '_', (['"""Receiving destination"""'], {}), "('Receiving destination')\n", (2981, 
3006), False, 'from electrumsv.i18n import _\n'), ((5224, 5241), 'electrumsv.i18n._', '_', (['"""Save request"""'], {}), "('Save request')\n", (5225, 5241), False, 'from electrumsv.i18n import _\n'), ((5318, 5326), 'electrumsv.i18n._', '_', (['"""New"""'], {}), "('New')\n", (5319, 5326), False, 'from electrumsv.i18n import _\n'), ((6235, 6248), 'electrumsv.i18n._', '_', (['"""Requests"""'], {}), "('Requests')\n", (6236, 6248), False, 'from electrumsv.i18n import _\n'), ((3351, 3367), 'electrumsv.i18n._', '_', (['"""Description"""'], {}), "('Description')\n", (3352, 3367), False, 'from electrumsv.i18n import _\n'), ((3594, 3615), 'electrumsv.i18n._', '_', (['"""Requested amount"""'], {}), "('Requested amount')\n", (3595, 3615), False, 'from electrumsv.i18n import _\n'), ((3882, 3907), 'electrumsv.app_state.app_state.fx.is_enabled', 'app_state.fx.is_enabled', ([], {}), '()\n', (3905, 3907), False, 'from electrumsv.app_state import app_state\n'), ((4385, 4422), 'electrumsv.i18n._', '_', (['"""Expiration date of your request."""'], {}), "('Expiration date of your request.')\n", (4386, 4422), False, 'from electrumsv.i18n import _\n'), ((4436, 4530), 'electrumsv.i18n._', '_', (['"""This information is seen by the recipient if you send them a signed payment request."""'], {}), "('This information is seen by the recipient if you send them a signed payment request.'\n )\n", (4437, 4530), False, 'from electrumsv.i18n import _\n'), ((4556, 4683), 'electrumsv.i18n._', '_', (['"""Expired requests have to be deleted manually from your list, in order to free the corresponding Bitcoin SV addresses."""'], {}), "('Expired requests have to be deleted manually from your list, in order to free the corresponding Bitcoin SV addresses.'\n )\n", (4557, 4683), False, 'from electrumsv.i18n import _\n'), ((4709, 4806), 'electrumsv.i18n._', '_', (['"""The Bitcoin SV address never expires and will always be part of this ElectrumSV wallet."""'], {}), "('The Bitcoin SV address never expires and will always be part of this ElectrumSV wallet.'\n )\n", (4710, 4806), False, 'from electrumsv.i18n import _\n'), ((4864, 4884), 'electrumsv.i18n._', '_', (['"""Request expires"""'], {}), "('Request expires')\n", (4865, 4884), False, 'from electrumsv.i18n import _\n'), ((6756, 6798), 'electrumsv.bitcoin.script_template_to_string', 'script_template_to_string', (['script_template'], {}), '(script_template)\n', (6781, 6798), False, 'from electrumsv.bitcoin import script_template_to_string\n'), ((8316, 8356), 'electrumsv.i18n._', '_', (['"""No available receiving destination."""'], {}), "('No available receiving destination.')\n", (8317, 8356), False, 'from electrumsv.i18n import _\n'), ((9243, 9280), 'electrumsv.i18n._', '_', (['"""No receiving payment destination"""'], {}), "('No receiving payment destination')\n", (9244, 9280), False, 'from electrumsv.i18n import _\n'), ((9484, 9509), 'electrumsv.i18n._', '_', (['"""No message or amount"""'], {}), "('No message or amount')\n", (9485, 9509), False, 'from electrumsv.i18n import _\n'), ((11239, 11315), 'electrumsv.i18n._', '_', (['"""Your wallet is broken and could not allocate a new payment destination."""'], {}), "('Your wallet is broken and could not allocate a new payment destination.')\n", (11240, 11315), False, 'from electrumsv.i18n import _\n'), ((10745, 10794), 'electrumsv.i18n._', '_', (['"""No more payment destinations in your wallet."""'], {}), "('No more payment destinations in your wallet.')\n", (10746, 10794), False, 'from electrumsv.i18n import _\n'), ((10816, 10914), 
'electrumsv.i18n._', '_', (['"""You are using a non-deterministic account, which cannot create new payment destinations."""'], {}), "('You are using a non-deterministic account, which cannot create new payment destinations.'\n )\n", (10817, 10914), False, 'from electrumsv.i18n import _\n'), ((10956, 11050), 'electrumsv.i18n._', '_', (['"""If you want to create new payment destinations, use a deterministic account instead."""'], {}), "('If you want to create new payment destinations, use a deterministic account instead.'\n )\n", (10957, 11050), False, 'from electrumsv.i18n import _\n')] |
import os
import sys
if sys.version_info[0] == 2:
_ENCODE = sys.getfilesystemencoding()
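    # Python 2: arguments are byte strings, so decode to unicode, join, and re-encode the result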
def path_join(*args):
bin_args = map(lambda a: a.decode(_ENCODE), args)
return os.path.join(*bin_args).encode(_ENCODE)
def str_join(s, l):
bin_args = map(lambda a: a.decode(_ENCODE), l)
b = s.decode(_ENCODE)
return b.join(bin_args).encode(_ENCODE)
logfile_open = open
else:
path_join = os.path.join
str_join = str.join
def logfile_open(*args):
return open(*args, errors='replace')
| [
"sys.getfilesystemencoding",
"os.path.join"
] | [((63, 90), 'sys.getfilesystemencoding', 'sys.getfilesystemencoding', ([], {}), '()\n', (88, 90), False, 'import sys\n'), ((181, 204), 'os.path.join', 'os.path.join', (['*bin_args'], {}), '(*bin_args)\n', (193, 204), False, 'import os\n')] |
#!/usr/bin/env python
""" VAPI test """
import unittest
import os
import signal
from framework import VppTestCase, running_extended_tests, \
VppTestRunner, Worker
@unittest.skipUnless(running_extended_tests(), "part of extended tests")
class VOMTestCase(VppTestCase):
""" VPP Object Model Test """
def test_vom_cpp(self):
""" run C++ VOM tests """
var = "TEST_DIR"
built_root = os.getenv(var, None)
self.assertIsNotNone(built_root,
"Environment variable `%s' not set" % var)
executable = "%s/build/vom_test/vom_test" % built_root
worker = Worker(
[executable, "vpp object model", self.shm_prefix], self.logger)
worker.start()
timeout = 120
worker.join(timeout)
self.logger.info("Worker result is `%s'" % worker.result)
error = False
if worker.result is None:
try:
error = True
self.logger.error(
"Timeout! Worker did not finish in %ss" % timeout)
os.killpg(os.getpgid(worker.process.pid), signal.SIGTERM)
worker.join()
except:
raise Exception("Couldn't kill worker-spawned process")
if error:
raise Exception(
"Timeout! Worker did not finish in %ss" % timeout)
self.assert_equal(worker.result, 0, "Binary test return code")
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)
| [
"os.getpgid",
"os.getenv",
"framework.Worker",
"framework.running_extended_tests",
"unittest.main"
] | [((191, 215), 'framework.running_extended_tests', 'running_extended_tests', ([], {}), '()\n', (213, 215), False, 'from framework import VppTestCase, running_extended_tests, VppTestRunner, Worker\n'), ((1478, 1517), 'unittest.main', 'unittest.main', ([], {'testRunner': 'VppTestRunner'}), '(testRunner=VppTestRunner)\n', (1491, 1517), False, 'import unittest\n'), ((418, 438), 'os.getenv', 'os.getenv', (['var', 'None'], {}), '(var, None)\n', (427, 438), False, 'import os\n'), ((632, 702), 'framework.Worker', 'Worker', (["[executable, 'vpp object model', self.shm_prefix]", 'self.logger'], {}), "([executable, 'vpp object model', self.shm_prefix], self.logger)\n", (638, 702), False, 'from framework import VppTestCase, running_extended_tests, VppTestRunner, Worker\n'), ((1090, 1120), 'os.getpgid', 'os.getpgid', (['worker.process.pid'], {}), '(worker.process.pid)\n', (1100, 1120), False, 'import os\n')] |
from importlib import import_module
import re
from copy import deepcopy
from collections import OrderedDict
from astropy.utils.data_info import MixinInfo
from .column import Column
from .table import Table, QTable, has_info_class
from astropy.units.quantity import QuantityInfo
__construct_mixin_classes = ('astropy.time.core.Time',
'astropy.time.core.TimeDelta',
'astropy.units.quantity.Quantity',
'astropy.coordinates.angles.Latitude',
'astropy.coordinates.angles.Longitude',
'astropy.coordinates.angles.Angle',
'astropy.coordinates.distances.Distance',
'astropy.coordinates.earth.EarthLocation',
'astropy.coordinates.sky_coordinate.SkyCoord',
'astropy.table.table.NdarrayMixin',
'astropy.table.column.MaskedColumn')
class SerializedColumn(dict):
"""
Subclass of dict that is a used in the representation to contain the name
(and possible other info) for a mixin attribute (either primary data or an
array-like attribute) that is serialized as a column in the table.
Normally contains the single key ``name`` with the name of the column in the
table.
"""
pass
def _represent_mixin_as_column(col, name, new_cols, mixin_cols,
exclude_classes=()):
"""Carry out processing needed to serialize ``col`` in an output table
consisting purely of plain ``Column`` or ``MaskedColumn`` columns. This
relies on the object to determine if any transformation is required and may
depend on the ``serialize_method`` and ``serialize_context`` context
variables. For instance a ``MaskedColumn`` may be stored directly to
FITS, but can also be serialized as separate data and mask columns.
This function builds up a list of plain columns in the ``new_cols`` arg (which
is passed as a persistent list). This includes both plain columns from the
original table and plain columns that represent data from serialized columns
(e.g. ``jd1`` and ``jd2`` arrays from a ``Time`` column).
For serialized columns the ``mixin_cols`` dict is updated with required
attributes and information to subsequently reconstruct the table.
Table mixin columns are always serialized and get represented by one
or more data columns. In earlier versions of the code *only* mixin
columns were serialized, hence the use within this code of "mixin"
to imply serialization. Starting with version 3.1, the non-mixin
``MaskedColumn`` can also be serialized.
"""
obj_attrs = col.info._represent_as_dict()
ordered_keys = col.info._represent_as_dict_attrs
# If serialization is not required (see function docstring above)
# or explicitly specified as excluded, then treat as a normal column.
if not obj_attrs or col.__class__ in exclude_classes:
new_cols.append(col)
return
# Subtlety here is handling mixin info attributes. The basic list of such
# attributes is: 'name', 'unit', 'dtype', 'format', 'description', 'meta'.
# - name: handled directly [DON'T store]
# - unit: DON'T store if this is a parent attribute
# - dtype: captured in plain Column if relevant [DON'T store]
# - format: possibly irrelevant but settable post-object creation [DO store]
# - description: DO store
# - meta: DO store
info = {}
for attr, nontrivial, xform in (('unit', lambda x: x is not None and x != '', str),
('format', lambda x: x is not None, None),
('description', lambda x: x is not None, None),
('meta', lambda x: x, None)):
col_attr = getattr(col.info, attr)
if nontrivial(col_attr):
info[attr] = xform(col_attr) if xform else col_attr
data_attrs = [key for key in ordered_keys if key in obj_attrs and
getattr(obj_attrs[key], 'shape', ())[:1] == col.shape[:1]]
for data_attr in data_attrs:
data = obj_attrs[data_attr]
# New column name combines the old name and attribute
        # (e.g. skycoord.ra, skycoord.dec), unless it is the primary data
# attribute for the column (e.g. value for Quantity or data
# for MaskedColumn)
if data_attr == col.info._represent_as_dict_primary_data:
new_name = name
else:
new_name = name + '.' + data_attr
if not has_info_class(data, MixinInfo):
new_cols.append(Column(data, name=new_name, **info))
obj_attrs[data_attr] = SerializedColumn({'name': new_name})
else:
# recurse. This will define obj_attrs[new_name].
_represent_mixin_as_column(data, new_name, new_cols, obj_attrs)
obj_attrs[data_attr] = SerializedColumn(obj_attrs.pop(new_name))
# Strip out from info any attributes defined by the parent
for attr in col.info.attrs_from_parent:
if attr in info:
del info[attr]
if info:
obj_attrs['__info__'] = info
# Store the fully qualified class name
obj_attrs['__class__'] = col.__module__ + '.' + col.__class__.__name__
mixin_cols[name] = obj_attrs
def represent_mixins_as_columns(tbl, exclude_classes=()):
"""Represent input Table ``tbl`` using only `~astropy.table.Column`
or `~astropy.table.MaskedColumn` objects.
This function represents any mixin columns like `~astropy.time.Time` in
``tbl`` to one or more plain ``~astropy.table.Column`` objects and returns
a new Table. A single mixin column may be split into multiple column
components as needed for fully representing the column. This includes the
possibility of recursive splitting, as shown in the example below. The
new column names are formed as ``<column_name>.<component>``, e.g.
``sc.ra`` for a `~astropy.coordinates.SkyCoord` column named ``sc``.
In addition to splitting columns, this function updates the table ``meta``
dictionary to include a dict named ``__serialized_columns__`` which provides
additional information needed to construct the original mixin columns from
the split columns.
This function is used by astropy I/O when writing tables to ECSV, FITS,
HDF5 formats.
Note that if the table does not include any mixin columns then the original
table is returned with no update to ``meta``.
Parameters
----------
tbl : `~astropy.table.Table` or subclass
Table to represent mixins as Columns
exclude_classes : tuple of classes
        Exclude any mixin columns which are instances of any classes in the tuple
Returns
-------
tbl : `~astropy.table.Table`
New Table with updated columns, or else the original input ``tbl``
Examples
--------
>>> from astropy.table import Table, represent_mixins_as_columns
>>> from astropy.time import Time
>>> from astropy.coordinates import SkyCoord
>>> x = [100.0, 200.0]
>>> obstime = Time([1999.0, 2000.0], format='jyear')
>>> sc = SkyCoord([1, 2], [3, 4], unit='deg', obstime=obstime)
>>> tbl = Table([sc, x], names=['sc', 'x'])
>>> represent_mixins_as_columns(tbl)
<Table length=2>
sc.ra sc.dec sc.obstime.jd1 sc.obstime.jd2 x
deg deg
float64 float64 float64 float64 float64
------- ------- -------------- -------------- -------
1.0 3.0 2451180.0 -0.25 100.0
2.0 4.0 2451545.0 0.0 200.0
"""
# Dict of metadata for serializing each column, keyed by column name.
# Gets filled in place by _represent_mixin_as_column().
mixin_cols = {}
# List of columns for the output table. For plain Column objects
# this will just be the original column object.
new_cols = []
# Go through table columns and represent each column as one or more
# plain Column objects (in new_cols) + metadata (in mixin_cols).
for col in tbl.itercols():
_represent_mixin_as_column(col, col.info.name, new_cols, mixin_cols,
exclude_classes=exclude_classes)
# If no metadata was created then just return the original table.
if not mixin_cols:
return tbl
meta = deepcopy(tbl.meta)
meta['__serialized_columns__'] = mixin_cols
out = Table(new_cols, meta=meta, copy=False)
return out
def _construct_mixin_from_obj_attrs_and_info(obj_attrs, info):
cls_full_name = obj_attrs.pop('__class__')
# If this is a supported class then import the class and run
    # the _construct_from_dict method. Prevent accidentally running
# untrusted code by only importing known astropy classes.
if cls_full_name not in __construct_mixin_classes:
raise ValueError('unsupported class for construct {}'.format(cls_full_name))
mod_name, cls_name = re.match(r'(.+)\.(\w+)', cls_full_name).groups()
module = import_module(mod_name)
cls = getattr(module, cls_name)
for attr, value in info.items():
if attr in cls.info.attrs_from_parent:
obj_attrs[attr] = value
mixin = cls.info._construct_from_dict(obj_attrs)
for attr, value in info.items():
if attr not in obj_attrs:
setattr(mixin.info, attr, value)
return mixin
class _TableLite(OrderedDict):
"""
Minimal table-like object for _construct_mixin_from_columns. This allows
manipulating the object like a Table but without the actual overhead
for a full Table.
More pressing, there is an issue with constructing MaskedColumn, where the
encoded Column components (data, mask) are turned into a MaskedColumn.
When this happens in a real table then all other columns are immediately
Masked and a warning is issued. This is not desirable.
"""
def add_column(self, col, index=0):
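        # Insert ``col`` at position ``index``: append it, then move the columns that were at or after ``index`` to the end.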
colnames = self.colnames
self[col.info.name] = col
for ii, name in enumerate(colnames):
if ii >= index:
self.move_to_end(name)
@property
def colnames(self):
return list(self.keys())
def itercols(self):
return self.values()
def _construct_mixin_from_columns(new_name, obj_attrs, out):
data_attrs_map = {}
for name, val in obj_attrs.items():
if isinstance(val, SerializedColumn):
if 'name' in val:
data_attrs_map[val['name']] = name
else:
_construct_mixin_from_columns(name, val, out)
data_attrs_map[name] = name
for name in data_attrs_map.values():
del obj_attrs[name]
# Get the index where to add new column
idx = min(out.colnames.index(name) for name in data_attrs_map)
# Name is the column name in the table (e.g. "coord.ra") and
# data_attr is the object attribute name (e.g. "ra"). A different
# example would be a formatted time object that would have (e.g.)
# "time_col" and "value", respectively.
for name, data_attr in data_attrs_map.items():
col = out[name]
obj_attrs[data_attr] = col
del out[name]
info = obj_attrs.pop('__info__', {})
if len(data_attrs_map) == 1:
# col is the first and only serialized column; in that case, use info
# stored on the column.
for attr, nontrivial in (('unit', lambda x: x not in (None, '')),
('format', lambda x: x is not None),
('description', lambda x: x is not None),
('meta', lambda x: x)):
col_attr = getattr(col.info, attr)
if nontrivial(col_attr):
info[attr] = col_attr
info['name'] = new_name
col = _construct_mixin_from_obj_attrs_and_info(obj_attrs, info)
out.add_column(col, index=idx)
def _construct_mixins_from_columns(tbl):
if '__serialized_columns__' not in tbl.meta:
return tbl
meta = tbl.meta.copy()
mixin_cols = meta.pop('__serialized_columns__')
out = _TableLite(tbl.columns)
for new_name, obj_attrs in mixin_cols.items():
_construct_mixin_from_columns(new_name, obj_attrs, out)
# If no quantity subclasses are in the output then output as Table.
# For instance ascii.read(file, format='ecsv') doesn't specify an
# output class and should return the minimal table class that
# represents the table file.
has_quantities = any(isinstance(col.info, QuantityInfo)
for col in out.itercols())
out_cls = QTable if has_quantities else Table
return out_cls(list(out.values()), names=out.colnames, copy=False, meta=meta)
| [
"re.match",
"importlib.import_module",
"copy.deepcopy"
] | [((8503, 8521), 'copy.deepcopy', 'deepcopy', (['tbl.meta'], {}), '(tbl.meta)\n', (8511, 8521), False, 'from copy import deepcopy\n'), ((9171, 9194), 'importlib.import_module', 'import_module', (['mod_name'], {}), '(mod_name)\n', (9184, 9194), False, 'from importlib import import_module\n'), ((9109, 9149), 're.match', 're.match', (['"""(.+)\\\\.(\\\\w+)"""', 'cls_full_name'], {}), "('(.+)\\\\.(\\\\w+)', cls_full_name)\n", (9117, 9149), False, 'import re\n')] |
'''
Created on Jun 18, 2013
@author: <NAME>
All rights reserved.
'''
import time
from multiprocessing.pool import Pool
parallelSolve = False
infinity = 1 << 30
def solve(par):
H, U, D, F = par
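    # Day-by-day simulation: the climber gains U units reduced by F percent per
    # elapsed day, then slips back D units overnight; success once the height
    # exceeds H, failure once it drops below zero.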
day = 0
amountRise = U
currH = 0
while True:
amountRise = U * (1 - 0.01 * F * day)
currH += amountRise
if currH > H:
return 'success on day %d' % (day + 1)
currH -= D
if currH < 0:
return 'failure on day %d' % (day + 1)
day += 1
class Solver:
def getInput(self):
self.input = []
self.numOfTests = 0
while True:
H, U, D, F = map(int, self.fIn.readline().strip().split())
if H == 0:
break
self.numOfTests += 1
self.input.append((H, U, D, F))
def __init__(self):
self.fIn = open('input.txt')
self.fOut = open('output.txt', 'w')
self.results = []
def parallel(self):
self.getInput()
p = Pool(4)
millis1 = int(round(time.time() * 1000))
self.results = p.map(solve, self.input)
millis2 = int(round(time.time() * 1000))
print("Time in milliseconds: %d " % (millis2 - millis1))
self.makeOutput()
def sequential(self):
self.getInput()
millis1 = int(round(time.time() * 1000))
for i in self.input:
self.results.append(solve(i))
millis2 = int(round(time.time() * 1000))
print("Time in milliseconds: %d " % (millis2 - millis1))
self.makeOutput()
def makeOutput(self):
for test in range(self.numOfTests):
self.fOut.write("Case #%d: %s\n" % (test + 1, self.results[test]))
self.fIn.close()
self.fOut.close()
if __name__ == '__main__':
solver = Solver()
if parallelSolve:
solver.parallel()
else:
solver.sequential()
| [
"time.time",
"multiprocessing.pool.Pool"
] | [((1018, 1025), 'multiprocessing.pool.Pool', 'Pool', (['(4)'], {}), '(4)\n', (1022, 1025), False, 'from multiprocessing.pool import Pool\n'), ((1054, 1065), 'time.time', 'time.time', ([], {}), '()\n', (1063, 1065), False, 'import time\n'), ((1151, 1162), 'time.time', 'time.time', ([], {}), '()\n', (1160, 1162), False, 'import time\n'), ((1342, 1353), 'time.time', 'time.time', ([], {}), '()\n', (1351, 1353), False, 'import time\n'), ((1462, 1473), 'time.time', 'time.time', ([], {}), '()\n', (1471, 1473), False, 'import time\n')] |
from typing import Dict, Optional, List, Any
import torch
import torch.nn.functional as F
from allennlp.data import Vocabulary
from allennlp.models.model import Model
from allennlp.modules import FeedForward, TextFieldEmbedder, Seq2SeqEncoder
from allennlp.nn import InitializerApplicator, RegularizerApplicator
from allennlp.nn import util
from allennlp.training.metrics import CategoricalAccuracy, F1Measure
from overrides import overrides
@Model.register("text_classifier")
class TextClassifier(Model):
"""
Implements a basic text classifier:
1) Embed tokens using `text_field_embedder`
2) Seq2SeqEncoder, e.g. BiLSTM
3) Append the first and last encoder states
4) Final feedforward layer
Optimized with CrossEntropyLoss. Evaluated with CategoricalAccuracy & F1.
"""
def __init__(self, vocab: Vocabulary,
text_field_embedder: TextFieldEmbedder,
text_encoder: Seq2SeqEncoder,
classifier_feedforward: FeedForward,
                 verbose_metrics: bool = False,
initializer: InitializerApplicator = InitializerApplicator(),
regularizer: Optional[RegularizerApplicator] = None,
) -> None:
super(TextClassifier, self).__init__(vocab, regularizer)
self.text_field_embedder = text_field_embedder
self.num_classes = self.vocab.get_vocab_size("labels")
self.text_encoder = text_encoder
self.classifier_feedforward = classifier_feedforward
        self.prediction_layer = torch.nn.Linear(self.classifier_feedforward.get_output_dim(), self.num_classes)
self.label_accuracy = CategoricalAccuracy()
self.label_f1_metrics = {}
self.verbose_metrics = verbose_metrics
for i in range(self.num_classes):
self.label_f1_metrics[vocab.get_token_from_index(index=i, namespace="labels")] = F1Measure(positive_label=i)
self.loss = torch.nn.CrossEntropyLoss()
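        # Pool a sequence by taking the final encoder states (forward and backward
        # concatenated), assuming the Seq2SeqEncoder is bidirectional.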
self.pool = lambda text, mask: util.get_final_encoder_states(text, mask, bidirectional=True)
initializer(self)
@overrides
def forward(self,
text: Dict[str, torch.LongTensor],
label: torch.IntTensor = None,
metadata: List[Dict[str, Any]] = None) -> Dict[str, torch.Tensor]:
"""
Parameters
----------
text : Dict[str, torch.LongTensor]
From a ``TextField``
label : torch.IntTensor, optional (default = None)
From a ``LabelField``
metadata : ``List[Dict[str, Any]]``, optional, (default = None)
Metadata containing the original tokenization of the premise and
hypothesis with 'premise_tokens' and 'hypothesis_tokens' keys respectively.
Returns
-------
An output dictionary consisting of:
label_logits : torch.FloatTensor
A tensor of shape ``(batch_size, num_labels)`` representing unnormalised log probabilities of the label.
label_probs : torch.FloatTensor
A tensor of shape ``(batch_size, num_labels)`` representing probabilities of the label.
loss : torch.FloatTensor, optional
A scalar loss to be optimised.
"""
embedded_text = self.text_field_embedder(text)
mask = util.get_text_field_mask(text)
encoded_text = self.text_encoder(embedded_text, mask)
pooled = self.pool(encoded_text, mask)
ff_hidden = self.classifier_feedforward(pooled)
logits = self.prediction_layer(ff_hidden)
class_probs = F.softmax(logits, dim=1)
output_dict = {"logits": logits}
if label is not None:
loss = self.loss(logits, label)
output_dict["loss"] = loss
# compute F1 per label
for i in range(self.num_classes):
metric = self.label_f1_metrics[self.vocab.get_token_from_index(index=i, namespace="labels")]
metric(class_probs, label)
self.label_accuracy(logits, label)
return output_dict
@overrides
def decode(self, output_dict: Dict[str, torch.Tensor]) -> Dict[str, torch.Tensor]:
class_probabilities = F.softmax(output_dict['logits'], dim=-1)
output_dict['class_probs'] = class_probabilities
return output_dict
def get_metrics(self, reset: bool = False) -> Dict[str, float]:
metric_dict = {}
sum_f1 = 0.0
for name, metric in self.label_f1_metrics.items():
metric_val = metric.get_metric(reset)
if self.verbose_metrics:
metric_dict[name + '_P'] = metric_val[0]
metric_dict[name + '_R'] = metric_val[1]
metric_dict[name + '_F1'] = metric_val[2]
sum_f1 += metric_val[2]
names = list(self.label_f1_metrics.keys())
total_len = len(names)
average_f1 = sum_f1 / total_len
metric_dict['average_F1'] = average_f1
metric_dict['accuracy'] = self.label_accuracy.get_metric(reset)
return metric_dict
| [
"torch.nn.CrossEntropyLoss",
"allennlp.nn.InitializerApplicator",
"allennlp.training.metrics.CategoricalAccuracy",
"allennlp.training.metrics.F1Measure",
"allennlp.models.model.Model.register",
"allennlp.nn.util.get_final_encoder_states",
"torch.nn.functional.softmax",
"allennlp.nn.util.get_text_field_mask"
] | [((446, 479), 'allennlp.models.model.Model.register', 'Model.register', (['"""text_classifier"""'], {}), "('text_classifier')\n", (460, 479), False, 'from allennlp.models.model import Model\n'), ((1102, 1125), 'allennlp.nn.InitializerApplicator', 'InitializerApplicator', ([], {}), '()\n', (1123, 1125), False, 'from allennlp.nn import InitializerApplicator, RegularizerApplicator\n'), ((1656, 1677), 'allennlp.training.metrics.CategoricalAccuracy', 'CategoricalAccuracy', ([], {}), '()\n', (1675, 1677), False, 'from allennlp.training.metrics import CategoricalAccuracy, F1Measure\n'), ((1945, 1972), 'torch.nn.CrossEntropyLoss', 'torch.nn.CrossEntropyLoss', ([], {}), '()\n', (1970, 1972), False, 'import torch\n'), ((3321, 3351), 'allennlp.nn.util.get_text_field_mask', 'util.get_text_field_mask', (['text'], {}), '(text)\n', (3345, 3351), False, 'from allennlp.nn import util\n'), ((3589, 3613), 'torch.nn.functional.softmax', 'F.softmax', (['logits'], {'dim': '(1)'}), '(logits, dim=1)\n', (3598, 3613), True, 'import torch.nn.functional as F\n'), ((4210, 4250), 'torch.nn.functional.softmax', 'F.softmax', (["output_dict['logits']"], {'dim': '(-1)'}), "(output_dict['logits'], dim=-1)\n", (4219, 4250), True, 'import torch.nn.functional as F\n'), ((1897, 1924), 'allennlp.training.metrics.F1Measure', 'F1Measure', ([], {'positive_label': 'i'}), '(positive_label=i)\n', (1906, 1924), False, 'from allennlp.training.metrics import CategoricalAccuracy, F1Measure\n'), ((2013, 2074), 'allennlp.nn.util.get_final_encoder_states', 'util.get_final_encoder_states', (['text', 'mask'], {'bidirectional': '(True)'}), '(text, mask, bidirectional=True)\n', (2042, 2074), False, 'from allennlp.nn import util\n')] |
#
# -*- coding: utf-8 -*-
"""Development related tasks to be run with 'invoke'"""
import os
import pathlib
import shutil
import invoke
TASK_ROOT = pathlib.Path(__file__).resolve().parent
TASK_ROOT_STR = str(TASK_ROOT)
# shared function
def rmrf(items, verbose=True):
"""Silently remove a list of directories or files"""
if isinstance(items, str):
items = [items]
for item in items:
if verbose:
print("Removing {}".format(item))
shutil.rmtree(item, ignore_errors=True)
# rmtree doesn't remove bare files
try:
os.remove(item)
except FileNotFoundError:
pass
# create namespaces
namespace = invoke.Collection()
namespace_clean = invoke.Collection('clean')
namespace.add_collection(namespace_clean, 'clean')
#####
#
# pytest, pylint, and codecov
#
#####
@invoke.task
def pytest(context, junit=False, pty=True, append_cov=False):
"""Run tests and code coverage using pytest"""
ROOT_PATH = TASK_ROOT.parent.parent
with context.cd(str(ROOT_PATH)):
command_str = 'pytest --cov=cmd2_myplugin --cov-report=term --cov-report=html'
if append_cov:
command_str += ' --cov-append'
if junit:
command_str += ' --junitxml=junit/test-results.xml'
command_str += ' ' + str((TASK_ROOT / 'tests').relative_to(ROOT_PATH))
context.run(command_str, pty=pty)
namespace.add_task(pytest)
@invoke.task
def pytest_clean(context):
"""Remove pytest cache and code coverage files and directories"""
# pylint: disable=unused-argument
with context.cd(TASK_ROOT_STR):
dirs = ['.pytest_cache', '.cache', '.coverage']
rmrf(dirs)
namespace_clean.add_task(pytest_clean, 'pytest')
@invoke.task
def pylint(context):
"""Check code quality using pylint"""
context.run('pylint --rcfile=cmd2_myplugin/pylintrc cmd2_myplugin')
namespace.add_task(pylint)
@invoke.task
def pylint_tests(context):
"""Check code quality of test suite using pylint"""
context.run('pylint --rcfile=tests/pylintrc tests')
namespace.add_task(pylint_tests)
#####
#
# build and distribute
#
#####
BUILDDIR = 'build'
DISTDIR = 'dist'
@invoke.task
def build_clean(context):
"""Remove the build directory"""
# pylint: disable=unused-argument
rmrf(BUILDDIR)
namespace_clean.add_task(build_clean, 'build')
@invoke.task
def dist_clean(context):
"""Remove the dist directory"""
# pylint: disable=unused-argument
rmrf(DISTDIR)
namespace_clean.add_task(dist_clean, 'dist')
@invoke.task
def eggs_clean(context):
"""Remove egg directories"""
# pylint: disable=unused-argument
dirs = set()
dirs.add('.eggs')
for name in os.listdir(os.curdir):
if name.endswith('.egg-info'):
dirs.add(name)
if name.endswith('.egg'):
dirs.add(name)
rmrf(dirs)
namespace_clean.add_task(eggs_clean, 'eggs')
@invoke.task
def bytecode_clean(context):
"""Remove __pycache__ directories and *.pyc files"""
# pylint: disable=unused-argument
dirs = set()
for root, dirnames, files in os.walk(os.curdir):
if '__pycache__' in dirnames:
dirs.add(os.path.join(root, '__pycache__'))
for file in files:
if file.endswith(".pyc"):
dirs.add(os.path.join(root, file))
print("Removing __pycache__ directories and .pyc files")
rmrf(dirs, verbose=False)
namespace_clean.add_task(bytecode_clean, 'bytecode')
#
# make a dummy clean task which runs all the tasks in the clean namespace
clean_tasks = list(namespace_clean.tasks.values())
@invoke.task(pre=list(namespace_clean.tasks.values()), default=True)
def clean_all(context):
"""Run all clean tasks"""
# pylint: disable=unused-argument
pass
namespace_clean.add_task(clean_all, 'all')
@invoke.task(pre=[clean_all])
def sdist(context):
"""Create a source distribution"""
context.run('python setup.py sdist')
namespace.add_task(sdist)
@invoke.task(pre=[clean_all])
def wheel(context):
"""Build a wheel distribution"""
context.run('python setup.py bdist_wheel')
namespace.add_task(wheel)
#
# these two tasks are commented out so you don't
# accidentally run them and upload this template to pypi
#
# @invoke.task(pre=[sdist, wheel])
# def pypi(context):
# """Build and upload a distribution to pypi"""
# context.run('twine upload dist/*')
# namespace.add_task(pypi)
# @invoke.task(pre=[sdist, wheel])
# def pypi_test(context):
# """Build and upload a distribution to https://test.pypi.org"""
# context.run('twine upload --repository-url https://test.pypi.org/legacy/ dist/*')
# namespace.add_task(pypi_test)
| [
"os.listdir",
"pathlib.Path",
"invoke.Collection",
"os.path.join",
"shutil.rmtree",
"invoke.task",
"os.walk",
"os.remove"
] | [((691, 710), 'invoke.Collection', 'invoke.Collection', ([], {}), '()\n', (708, 710), False, 'import invoke\n'), ((729, 755), 'invoke.Collection', 'invoke.Collection', (['"""clean"""'], {}), "('clean')\n", (746, 755), False, 'import invoke\n'), ((3856, 3884), 'invoke.task', 'invoke.task', ([], {'pre': '[clean_all]'}), '(pre=[clean_all])\n', (3867, 3884), False, 'import invoke\n'), ((4016, 4044), 'invoke.task', 'invoke.task', ([], {'pre': '[clean_all]'}), '(pre=[clean_all])\n', (4027, 4044), False, 'import invoke\n'), ((2731, 2752), 'os.listdir', 'os.listdir', (['os.curdir'], {}), '(os.curdir)\n', (2741, 2752), False, 'import os\n'), ((3132, 3150), 'os.walk', 'os.walk', (['os.curdir'], {}), '(os.curdir)\n', (3139, 3150), False, 'import os\n'), ((482, 521), 'shutil.rmtree', 'shutil.rmtree', (['item'], {'ignore_errors': '(True)'}), '(item, ignore_errors=True)\n', (495, 521), False, 'import shutil\n'), ((150, 172), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (162, 172), False, 'import pathlib\n'), ((590, 605), 'os.remove', 'os.remove', (['item'], {}), '(item)\n', (599, 605), False, 'import os\n'), ((3211, 3244), 'os.path.join', 'os.path.join', (['root', '"""__pycache__"""'], {}), "(root, '__pycache__')\n", (3223, 3244), False, 'import os\n'), ((3336, 3360), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (3348, 3360), False, 'import os\n')] |
"""Support for Epson Workforce Printer."""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import CONF_HOST, CONF_MONITORED_CONDITIONS
from homeassistant.exceptions import PlatformNotReady
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
REQUIREMENTS = ['epsonprinter==0.0.8']
_LOGGER = logging.getLogger(__name__)
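# Each entry maps a cartridge key to [sensor name, unit of measurement, icon].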
MONITORED_CONDITIONS = {
'black': ['Inklevel Black', '%', 'mdi:water'],
'magenta': ['Inklevel Magenta', '%', 'mdi:water'],
'cyan': ['Inklevel Cyan', '%', 'mdi:water'],
'yellow': ['Inklevel Yellow', '%', 'mdi:water'],
'clean': ['Inklevel Cleaning', '%', 'mdi:water'],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_MONITORED_CONDITIONS):
vol.All(cv.ensure_list, [vol.In(MONITORED_CONDITIONS)]),
})
SCAN_INTERVAL = timedelta(minutes=60)
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Set up the cartridge sensor."""
host = config.get(CONF_HOST)
from epsonprinter_pkg.epsonprinterapi import EpsonPrinterAPI
api = EpsonPrinterAPI(host)
if not api.available:
raise PlatformNotReady()
sensors = [EpsonPrinterCartridge(api, condition)
for condition in config[CONF_MONITORED_CONDITIONS]]
add_devices(sensors, True)
class EpsonPrinterCartridge(Entity):
"""Representation of a cartridge sensor."""
def __init__(self, api, cartridgeidx):
"""Initialize a cartridge sensor."""
self._api = api
self._id = cartridgeidx
self._name = MONITORED_CONDITIONS[self._id][0]
self._unit = MONITORED_CONDITIONS[self._id][1]
self._icon = MONITORED_CONDITIONS[self._id][2]
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
@property
def unit_of_measurement(self):
"""Return the unit the value is expressed in."""
return self._unit
@property
def state(self):
"""Return the state of the device."""
return self._api.getSensorValue(self._id)
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._api.available
def update(self):
"""Get the latest data from the Epson printer."""
self._api.update()
| [
"logging.getLogger",
"voluptuous.Required",
"datetime.timedelta",
"epsonprinter_pkg.epsonprinterapi.EpsonPrinterAPI",
"homeassistant.exceptions.PlatformNotReady",
"voluptuous.In"
] | [((451, 478), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (468, 478), False, 'import logging\n'), ((980, 1001), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(60)'}), '(minutes=60)\n', (989, 1001), False, 'from datetime import timedelta\n'), ((1220, 1241), 'epsonprinter_pkg.epsonprinterapi.EpsonPrinterAPI', 'EpsonPrinterAPI', (['host'], {}), '(host)\n', (1235, 1241), False, 'from epsonprinter_pkg.epsonprinterapi import EpsonPrinterAPI\n'), ((815, 838), 'voluptuous.Required', 'vol.Required', (['CONF_HOST'], {}), '(CONF_HOST)\n', (827, 838), True, 'import voluptuous as vol\n'), ((855, 894), 'voluptuous.Required', 'vol.Required', (['CONF_MONITORED_CONDITIONS'], {}), '(CONF_MONITORED_CONDITIONS)\n', (867, 894), True, 'import voluptuous as vol\n'), ((1282, 1300), 'homeassistant.exceptions.PlatformNotReady', 'PlatformNotReady', ([], {}), '()\n', (1298, 1300), False, 'from homeassistant.exceptions import PlatformNotReady\n'), ((929, 957), 'voluptuous.In', 'vol.In', (['MONITORED_CONDITIONS'], {}), '(MONITORED_CONDITIONS)\n', (935, 957), True, 'import voluptuous as vol\n')] |
# -*- coding: utf-8 ; test-case-name: bridgedb.test.test_runner -*-
#
# This file is part of BridgeDB, a Tor bridge distribution system.
#
# :authors: <NAME> 0xA3ADB67A2CDB8B35 <<EMAIL>>
# please also see AUTHORS file
# :copyright: (c) 2007-2015, The Tor Project, Inc.
# (c) 2007-2015, all entities within the AUTHORS file
# (c) 2012-2015, Isis Lovecruft
# :license: 3-clause BSD, see included LICENSE for information
"""Classes for running components and servers, as well as daemonisation.
** Module Overview: **
"""
from __future__ import print_function
import logging
import sys
import os
from twisted.python import procutils
def find(filename):
"""Find the executable ``filename``.
:param string filename: The executable to search for. Must be in the
effective user ID's $PATH.
:rtype: string
:returns: The location of the executable, if found. Otherwise, returns
None.
"""
executable = None
logging.debug("Searching for installed '%s'..." % filename)
which = procutils.which(filename, os.X_OK)
if len(which) > 0:
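        # Only accept a match that is owned by the effective user ID.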
for that in which:
if os.stat(that).st_uid == os.geteuid():
executable = that
break
if not executable:
return None
logging.debug("Found installed script at '%s'" % executable)
return executable
def generateDescriptors(count=None, rundir=None):
"""Run a script which creates fake bridge descriptors for testing purposes.
This will run Leekspin_ to create bridge server descriptors, bridge
    extra-info descriptors, and a networkstatus document.
.. warning: This function can take a very long time to run, especially in
headless environments where entropy sources are minimal, because it
creates the keys for each mocked OR, which are embedded in the server
descriptors, used to calculate the OR fingerprints, and sign the
descriptors, among other things.
.. _Leekspin: https://gitweb.torproject.org/user/isis/leekspin.git
:param integer count: Number of mocked bridges to generate descriptor
for. (default: 3)
:type rundir: string or None
:param rundir: If given, use this directory as the current working
directory for the bridge descriptor generator script to run in. The
directory MUST already exist, and the descriptor files will be created
        in it. If None, use whatever directory we are currently in.
"""
import subprocess
import os.path
proc = None
statuscode = 0
script = 'leekspin'
    rundir = rundir if rundir and os.path.isdir(rundir) else None
count = count if count else 3
try:
proc = subprocess.Popen([script, '-n', str(count)],
close_fds=True, cwd=rundir)
finally:
if proc is not None:
proc.wait()
if proc.returncode:
print("There was an error generating bridge descriptors.",
"(Returncode: %d)" % proc.returncode)
statuscode = proc.returncode
else:
print("Sucessfully generated %s descriptors." % str(count))
del subprocess
return statuscode
def doDumpBridges(config):
"""Dump bridges by assignment to a file.
This function handles the commandline '--dump-bridges' option.
:type config: :class:`bridgedb.Main.Conf`
:param config: The current configuration.
"""
import bridgedb.Bucket as bucket
bucketManager = bucket.BucketManager(config)
bucketManager.assignBridgesToBuckets()
bucketManager.dumpBridges()
| [
"bridgedb.Bucket.BucketManager",
"logging.debug",
"os.geteuid",
"os.path.isdir",
"os.stat",
"twisted.python.procutils.which"
] | [((984, 1043), 'logging.debug', 'logging.debug', (['("Searching for installed \'%s\'..." % filename)'], {}), '("Searching for installed \'%s\'..." % filename)\n', (997, 1043), False, 'import logging\n'), ((1056, 1090), 'twisted.python.procutils.which', 'procutils.which', (['filename', 'os.X_OK'], {}), '(filename, os.X_OK)\n', (1071, 1090), False, 'from twisted.python import procutils\n'), ((1299, 1359), 'logging.debug', 'logging.debug', (['("Found installed script at \'%s\'" % executable)'], {}), '("Found installed script at \'%s\'" % executable)\n', (1312, 1359), False, 'import logging\n'), ((3541, 3569), 'bridgedb.Bucket.BucketManager', 'bucket.BucketManager', (['config'], {}), '(config)\n', (3561, 3569), True, 'import bridgedb.Bucket as bucket\n'), ((2625, 2646), 'os.path.isdir', 'os.path.isdir', (['rundir'], {}), '(rundir)\n', (2638, 2646), False, 'import os\n'), ((1181, 1193), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (1191, 1193), False, 'import os\n'), ((1157, 1170), 'os.stat', 'os.stat', (['that'], {}), '(that)\n', (1164, 1170), False, 'import os\n')] |
from flask import request
from flask_jwt import jwt_required
from flask_restful import Resource
from main.server import app, cache, db
from main.server.models import Message, MessageSchema
messages_schema = MessageSchema(many=True)
message_schema = MessageSchema()
@app.after_request
def add_header(response):
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Credentials'] = 'true'
response.headers['Access-Control-Allow-Methods'] = 'GET, POST'
response.headers[
'Access-Control-Allow-Headers'] = 'Access-Control-Allow-Headers, Origin,Accept, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers'
return response
class MessageCount(Resource):
@cache.cached(timeout=100)
def get(self):
"""Gets the number of messages available on the server"""
return {'status': 'success', 'count': Message.query.count()}, 200
class MessageListRangeResource(Resource):
@cache.cached(timeout=100)
def get(self, lower, upper):
"""Gets a range of messages on the server"""
if int(lower) < 1:
return {'status': 'fail', 'messages': 'Invalid index: ' + str(lower)}, 400
if int(lower) > int(upper):
return {'status': 'fail',
'messages': 'Upper range cannot be less than lower range: ' + str(lower) + '>' + str(upper)}, 400
        messages = Message.query.filter(Message.messageID >= int(lower)).filter(Message.messageID <= int(upper)).all()
        if not messages:
return {'status': 'fail',
'messages': 'Out of range: ' + str(lower) + ' - ' + str(upper) + ' does not exist'}, 404
messages = messages_schema.dump(messages)
        if not Message.query.filter_by(messageID=upper).first():  # the upper bound of the range does not exist yet
return {'status': 'success', 'messages': messages}, 206 # Partial Content Served
return {'status': 'success', 'messages': messages}, 200
class MessageListResource(Resource):
@cache.cached(timeout=100)
def get(self):
"""Gets all messages on the server"""
messages = Message.query.all()
messages = messages_schema.dump(messages)
if not messages:
return {'status': 'success', 'messages': messages}, 206 # Partial Content Served
return {'status': 'success', 'messages': messages}, 200
@jwt_required()
def post(self):
"""Add message"""
json_data = request.get_json(force=True)
if not json_data:
return {'status': 'fail', 'message': 'No input data'}, 400
errors = message_schema.validate(json_data)
if errors:
return {'status': 'fail', 'message': 'Error handling request'}, 422
data = message_schema.load(json_data)
message = Message.query.filter_by(orig_msg=data.get('orig_msg')).first()
if message:
return {'status': 'fail', 'message': 'Message already exists'}, 400
message = Message(orig_msg=data.get('orig_msg'),
tl_msg=data.get('tl_msg'),
country=data.get('country'),
username=data.get('username'))
db.session.add(message)
db.session.commit()
return {'status': 'success', 'message': 'Message successfully created'}, 201
class MessageResource(Resource):
@cache.cached(timeout=100)
def get(self, messageID):
""""Get a message by message ID"""
message = Message.query.filter_by(messageID=messageID)
if not message.first():
return {'status': 'fail', 'message': 'No message with ID ' + str(messageID) + ' exists'}, 404
message = messages_schema.dump(message)
return {'status': 'success', 'messages': message}, 200
@jwt_required()
def delete(self, messageID):
"""delete a message by ID"""
message = Message.query.filter_by(messageID=messageID)
if not message.first():
return {'status': 'fail', 'message': 'No message with ID ' + str(messageID) + ' exists'}, 404
message.delete()
db.session.commit()
        return {'status': 'success', 'message': 'Message Deleted'}, 200
| [
"main.server.models.Message.query.all",
"main.server.models.Message.query.count",
"main.server.db.session.commit",
"main.server.cache.cached",
"flask.request.get_json",
"main.server.models.MessageSchema",
"main.server.db.session.add",
"flask_jwt.jwt_required",
"main.server.models.Message.query.filter_by"
] | [((210, 234), 'main.server.models.MessageSchema', 'MessageSchema', ([], {'many': '(True)'}), '(many=True)\n', (223, 234), False, 'from main.server.models import Message, MessageSchema\n'), ((252, 267), 'main.server.models.MessageSchema', 'MessageSchema', ([], {}), '()\n', (265, 267), False, 'from main.server.models import Message, MessageSchema\n'), ((768, 793), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (780, 793), False, 'from main.server import app, cache, db\n'), ((1002, 1027), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (1014, 1027), False, 'from main.server import app, cache, db\n'), ((2055, 2080), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (2067, 2080), False, 'from main.server import app, cache, db\n'), ((2426, 2440), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (2438, 2440), False, 'from flask_jwt import jwt_required\n'), ((3427, 3452), 'main.server.cache.cached', 'cache.cached', ([], {'timeout': '(100)'}), '(timeout=100)\n', (3439, 3452), False, 'from main.server import app, cache, db\n'), ((3846, 3860), 'flask_jwt.jwt_required', 'jwt_required', ([], {}), '()\n', (3858, 3860), False, 'from flask_jwt import jwt_required\n'), ((2165, 2184), 'main.server.models.Message.query.all', 'Message.query.all', ([], {}), '()\n', (2182, 2184), False, 'from main.server.models import Message, MessageSchema\n'), ((2507, 2535), 'flask.request.get_json', 'request.get_json', ([], {'force': '(True)'}), '(force=True)\n', (2523, 2535), False, 'from flask import request\n'), ((3249, 3272), 'main.server.db.session.add', 'db.session.add', (['message'], {}), '(message)\n', (3263, 3272), False, 'from main.server import app, cache, db\n'), ((3281, 3300), 'main.server.db.session.commit', 'db.session.commit', ([], {}), '()\n', (3298, 3300), False, 'from main.server import app, cache, db\n'), ((3544, 3588), 'main.server.models.Message.query.filter_by', 'Message.query.filter_by', ([], {'messageID': 'messageID'}), '(messageID=messageID)\n', (3567, 3588), False, 'from main.server.models import Message, MessageSchema\n'), ((3950, 3994), 'main.server.models.Message.query.filter_by', 'Message.query.filter_by', ([], {'messageID': 'messageID'}), '(messageID=messageID)\n', (3973, 3994), False, 'from main.server.models import Message, MessageSchema\n'), ((4168, 4187), 'main.server.db.session.commit', 'db.session.commit', ([], {}), '()\n', (4185, 4187), False, 'from main.server import app, cache, db\n'), ((925, 946), 'main.server.models.Message.query.count', 'Message.query.count', ([], {}), '()\n', (944, 946), False, 'from main.server.models import Message, MessageSchema\n'), ((1773, 1813), 'main.server.models.Message.query.filter_by', 'Message.query.filter_by', ([], {'messageID': 'upper'}), '(messageID=upper)\n', (1796, 1813), False, 'from main.server.models import Message, MessageSchema\n')] |
# -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
#
# Copyright 2017, Battelle Memorial Institute.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This material was prepared as an account of work sponsored by an agency of
# the United States Government. Neither the United States Government nor the
# United States Department of Energy, nor Battelle, nor any of their
# employees, nor any jurisdiction or organization that has cooperated in the
# development of these materials, makes any warranty, express or
# implied, or assumes any legal liability or responsibility for the accuracy,
# completeness, or usefulness or any information, apparatus, product,
# software, or process disclosed, or represents that its use would not infringe
# privately owned rights. Reference herein to any specific commercial product,
# process, or service by trade name, trademark, manufacturer, or otherwise
# does not necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors expressed
# herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY operated by
# BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# }}}
import os
import weakref
from datetime import datetime
from .base import SubsystemBase
from volttron.platform.messaging.headers import TIMESTAMP
from volttron.platform.agent.utils import (get_aware_utc_now,
format_timestamp)
from volttron.platform.scheduling import periodic
from ..errors import Unreachable, VIPError
"""The heartbeat subsystem adds an optional periodic publish to all agents.
Heartbeats can be started with agents and toggled on and off at runtime.
"""
__docformat__ = 'reStructuredText'
__version__ = '1.0'
class Heartbeat(SubsystemBase):
def __init__(self, owner, core, rpc, pubsub, heartbeat_autostart,
heartbeat_period):
self.owner = owner
self.core = weakref.ref(core)
self.pubsub = weakref.ref(pubsub)
self.autostart = heartbeat_autostart
self.period = heartbeat_period
self.enabled = False
self.connect_error = False
def onsetup(sender, **kwargs):
rpc.export(self.start, 'heartbeat.start')
rpc.export(self.start_with_period, 'heartbeat.start_with_period')
rpc.export(self.stop, 'heartbeat.stop')
rpc.export(self.restart, 'heartbeat.restart')
rpc.export(self.set_period, 'heartbeat.set_period')
def onstart(sender, **kwargs):
if self.autostart:
self.start()
core.onsetup.connect(onsetup, self)
core.onstart.connect(onstart, self)
core.onconnected.connect(self.reconnect)
def start(self):
"""RPC method
Starts an agent's heartbeat.
"""
if not self.enabled:
self.scheduled = self.core().schedule(periodic(self.period), self.publish)
self.enabled = True
def start_with_period(self, period):
"""RPC method
Set period and start heartbeat.
:param period: Time in seconds between publishes.
"""
self.set_period(period)
self.start()
def reconnect(self, sender, **kwargs):
if self.connect_error:
self.restart()
self.connect_error = False
def stop(self):
"""RPC method
Stop an agent's heartbeat.
"""
if self.enabled:
# Trap the fact that scheduled may not have been
# set yet if the start hasn't been called.
try:
self.scheduled.cancel()
except AttributeError:
pass
self.enabled = False
def restart(self):
"""RPC method
        Restart the heartbeat with the current period. A heartbeat is
        published to the message bus immediately after the restart.
"""
self.stop()
self.start()
def set_period(self, period):
"""RPC method
Set heartbeat period.
:param period: Time in seconds between publishes.
"""
if self.enabled:
self.stop()
self.period = period
self.start()
else:
self.period = period
def publish(self):
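        # Publish the agent's current health status to 'heartbeat/<identity>'
        # with a UTC timestamp header.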
topic = 'heartbeat/' + self.core().identity
headers = {TIMESTAMP: format_timestamp(get_aware_utc_now())}
message = self.owner.vip.health.get_status_value()
try:
self.pubsub().publish('pubsub', topic, headers, message)
except Unreachable as exc:
self.connect_error = True
self.stop()
| [
"volttron.platform.scheduling.periodic",
"weakref.ref",
"volttron.platform.agent.utils.get_aware_utc_now"
] | [((2660, 2677), 'weakref.ref', 'weakref.ref', (['core'], {}), '(core)\n', (2671, 2677), False, 'import weakref\n'), ((2700, 2719), 'weakref.ref', 'weakref.ref', (['pubsub'], {}), '(pubsub)\n', (2711, 2719), False, 'import weakref\n'), ((3626, 3647), 'volttron.platform.scheduling.periodic', 'periodic', (['self.period'], {}), '(self.period)\n', (3634, 3647), False, 'from volttron.platform.scheduling import periodic\n'), ((5119, 5138), 'volttron.platform.agent.utils.get_aware_utc_now', 'get_aware_utc_now', ([], {}), '()\n', (5136, 5138), False, 'from volttron.platform.agent.utils import get_aware_utc_now, format_timestamp\n')] |
"""Ni-Superalloy dataset.
Scientific Machine Learning Benchmark
A benchmark of regression models in chem- and materials informatics.
2019, <NAME>, Citrine Informatics.
See class NiSuperalloyDataset for details.
"""
import os
import json
import zipfile
from typing import List, Optional, Tuple, Union
import numpy as np
from smlb.exceptions import InvalidParameterError
from smlb.parameters import params
from smlb.tabular_data import TabularData
class NiSuperalloyDataset(TabularData):
"""
Ni-Superalloy dataset.
Based on:
<NAME>, <NAME>, <NAME>, <NAME>:
Design of a nickel-base superalloy using a neural network, Materials & Design 131: 358-365,
Elsevier, 2017. DOI 10.1016/j.matdes.2017.06.007
The dataset was downloaded from the Citrination platform (https://citrination.com),
dataset identifier #153493, Version 10.
There are 2800 rows.
The data have columns for composition (25 elements are present in at least one row),
whether the alloy was powder processed (0 or 1), whether it was pressure treated (0 or 1),
    heat treatment time (hours) and temperature (degrees Celsius) for up to 4 heat treatment steps,
the total time spent in heat treatment (hours), the maximum heat treatment temperature
    (degrees Celsius), and the area under the time-temperature curve (degrees Celsius * hours).
A value of 0 generally implies that the heat treatment step was not done, but there
are some missing values. The total time and max temperature are generally more reliable
than the individual heating steps. The total compositions do not always add up to 100%,
but with about a dozen exceptions they always add up to somewhere between 95% and 105%.
There are also three columns for a pressure treatment step (temperature, time, pressure),
but since only 51 rows have non-zero entries, this information is not used.
There are 5 labels: ultimate tensile strength (MPa), elongation (unitless), stress rupture
stress (MPa), stress rupture time (hours), and yield strength (MPa). Tensile strength and
elongation occur together in 898 rows, stress rupture stress and time occur together in
856 rows, and yield strength occurs in 1046 rows. 898+856+1046=2800, so every row has exactly
one output set. The other values are denoted as NaN.
"""
DEFAULT_PATH = os.path.split(os.path.realpath(__file__))[0] + "/ni_superalloys_3.json.zip"
POWDER_PROCESSED_NO = 0
POWDER_PROCESSED_YES = 1
def __init__(
self, labels_to_load: Optional[Union[str, List[str]]] = None, ignore_dubious: bool = False
):
"""Initialize Ni-superalloy dataset with specified labels.
Parameters:
labels_to_load (str or List[str]): which labels to load. Options are
'Yield Strength', 'Ultimate Tensile Strength', 'Stress Rupture Time',
'Stress Rupture Stress', and 'Elongation'.
If None, then all labels are loaded.
ignore_dubious: whether or not to ignore samples that have something
questionable about them
"""
labels_to_load = params.optional_(
labels_to_load,
lambda arg: params.any_(
arg, params.string, lambda arg: params.sequence(arg, type_=str),
),
)
ignore_dubious = params.boolean(ignore_dubious)
filepath = self.DEFAULT_PATH
data, labels = self._load_data_and_labels(filepath, labels_to_load, ignore_dubious)
super().__init__(data=data, labels=labels)
def _load_data_and_labels(
self,
filepath: str,
labels_to_load: Optional[List[str]] = None,
ignore_dubious: bool = False,
):
"""Load data and labels from .json file."""
raw = self._unzip_json_file(filepath)
if ignore_dubious:
raw = [e for e in raw if self._filter_dubious(e)]
# dtype=object is necessary because this is a mixed-type array (float and string)
data = np.array([self._parse_json_data(e) for e in raw], dtype=object)
labels = np.array([self._parse_json_labels(e, labels_to_load) for e in raw], dtype=float)
return data, labels
@staticmethod
def _unzip_json_file(filepath: str):
"""Open and read zipped json file."""
filename = os.path.basename(filepath)
assert (
filename[-4:] == ".zip"
), f"File path must point to a .zip file, instead got '{filepath}'"
with zipfile.ZipFile(filepath) as zf:
unzipped_filename = filename[:-4]
with zf.open(unzipped_filename) as fp:
raw = json.load(fp)
return raw
@staticmethod
def _extract_raw_composition(entry: dict) -> List[dict]:
"""Get composition in its raw form."""
raw_composition = entry.get("composition")
if raw_composition is None or not isinstance(raw_composition, list):
raise InvalidParameterError(
expected="Chemical composition as a list", got=raw_composition
)
return raw_composition
@staticmethod
def _filter_dubious(entry: dict) -> bool:
"""
Determine whether or not a json entry has something questionable about it.
Currently, the only thing filtered on is if the composition has an asterisk in it,
which occurs for 6 samples.
Parameters:
entry (dict): A json entry corresponding to a row in the dataset.
Returns: bool
            True if the entry should be kept, i.e. its composition contains no asterisk.
"""
raw_composition = NiSuperalloyDataset._extract_raw_composition(entry)
composition_dict = NiSuperalloyDataset._parse_composition_as_dict(raw_composition)
composition_dict_float, exception_caught = NiSuperalloyDataset._dict_values_to_float(
composition_dict
)
return not exception_caught
def _parse_json_data(self, entry: dict):
"""
Helper function to parse data in a single row from the raw json.
Parameters:
entry (dict): A json entry corresponding to a row in the dataset.
Returns: array
Array of data in this row.
"""
assert entry["category"] == "system.chemical"
raw_composition = NiSuperalloyDataset._extract_raw_composition(entry)
composition: str = self._parse_composition(raw_composition)
properties = entry.get("properties")
if properties is None or not isinstance(properties, list):
raise InvalidParameterError(
expected="A list of dictionaries, one for each property", got=properties
)
heat_treatment_1_time = self._get_scalar_property(
properties, "Heat treatment 1 Time", units="hours", default_value=0
)
heat_treatment_1_temp = self._get_scalar_property(
properties, "Heat treatment 1 Temperature", units="$^{\\circ}$C", default_value=0
)
heat_treatment_2_time = self._get_scalar_property(
properties, "Heat treatment 2 Time", units="hours", default_value=0
)
heat_treatment_2_temp = self._get_scalar_property(
properties, "Heat treatment 2 Temperature", units="$^{\\circ}$C", default_value=0
)
heat_treatment_3_time = self._get_scalar_property(
properties, "Heat treatment 3 Time", units="hours", default_value=0
)
heat_treatment_3_temp = self._get_scalar_property(
properties, "Heat treatment 3 Temperature", units="$^{\\circ}$C", default_value=0
)
heat_treatment_4_time = self._get_scalar_property(
properties, "Heat treatment 4 Time", units="hours", default_value=0
)
heat_treatment_4_temp = self._get_scalar_property(
properties, "Heat treatment 4 Temperature", units="$^{\\circ}$C", default_value=0
)
total_heat_treatment_time = self._get_scalar_property(
properties, "Total heat treatment time", units="hours"
)
max_heat_treatment_temp = self._get_scalar_property(
properties, "Max Heat Treatment Temperature", units="$^{\\circ}$C"
)
area_under_heat_treatment_curve = self._get_scalar_property(
properties, "Area under heat treatment curve", units="$^{\\circ}$C * hours"
)
powder_processed_dict = {"No": self.POWDER_PROCESSED_NO, "Yes": self.POWDER_PROCESSED_YES}
powder_processed = self._get_categorical_property(
properties, "Powder processed", categories_dict=powder_processed_dict
)
data_array = [
composition,
heat_treatment_1_time,
heat_treatment_1_temp,
heat_treatment_2_time,
heat_treatment_2_temp,
heat_treatment_3_time,
heat_treatment_3_temp,
heat_treatment_4_time,
heat_treatment_4_temp,
total_heat_treatment_time,
max_heat_treatment_temp,
area_under_heat_treatment_curve,
powder_processed,
]
return data_array
def _parse_json_labels(self, entry: dict, labels_to_load: Optional[List[str]] = None):
"""
Helper function to parse labels in a single row from the raw json.
Parameters:
entry (dict): A json entry corresponding to a row in the dataset.
labels_to_load (List[str]): Optional list of labels to load.
Returns: array
Array of labels in this row that we are interested in.
"""
if labels_to_load is None:
labels_to_load = [
"Yield Strength",
"Ultimate Tensile Strength",
"Stress Rupture Time",
"Stress Rupture Stress",
"Elongation",
]
properties = entry.get("properties")
if properties is None or not isinstance(properties, list):
raise InvalidParameterError(
expected="A list of dictionaries, one for each property", got=properties
)
labels_array = []
for label in labels_to_load:
labels_array.append(self._get_scalar_property(properties, label, default_value=None))
return labels_array
@staticmethod
def _parse_composition(raw_composition: List[dict]) -> str:
"""
Helper function to parse composition as a string.
Parameters:
raw_composition (List[dict]): A list, each entry of which corresponds to an element.
An entry is a dict with an 'element' key and an 'idealWeightPercent' key.
The element is a string (e.g., 'Cu') and the weight percent is another dict
with a single key, 'value', pointing to a floating point number.
The values are in percentage points, and add up to ~100.
Returns: str
Chemical composition as string, e.g. 'Al5.5Ni94.0W0.5'
"""
composition_dict = NiSuperalloyDataset._parse_composition_as_dict(raw_composition)
composition_dict_float, _ = NiSuperalloyDataset._dict_values_to_float(composition_dict)
composition_str: str = ""
for element_name, element_amount in composition_dict_float.items():
if element_amount > 0:
composition_str += element_name + str(element_amount)
return composition_str
@staticmethod
def _parse_composition_as_dict(raw_composition: List[dict]) -> dict:
"""
Helper function to parse composition as a dictionary.
Parameters:
raw_composition (List[dict]): A list, each entry of which corresponds to an element.
An entry is a dict with an 'element' key and an 'idealWeightPercent' key.
The element is a string (e.g., 'Cu') and the weight percent is another dict
with a single key, 'value', pointing to a floating point number.
The values are in percentage points, and add up to ~100 (but not exactly).
Returns: dict
Chemical composition as a dictionary with the elements as keys
and their raw amounts as values
"""
composition_dict = dict()
for entry in raw_composition:
try:
element_name = entry["element"]
element_amount = entry["idealWeightPercent"]["value"]
except KeyError:
raise InvalidParameterError(
expected="Element amount as a dictionary of the form\n"
"{'element': <element name>,"
"'idealWeightPercent': "
"{'value': <element amount>}}",
got=entry,
)
composition_dict[element_name] = element_amount
return composition_dict
@staticmethod
def _dict_values_to_float(d: dict) -> Tuple[dict, bool]:
"""
Convert a dictionary's values to their floating point representations, if possible.
Parameters:
d: a dictionary
Returns: dict, bool
A modified version of `d`, and a boolean flag indicating whether or not
an Exception was caught
"""
d_copy = dict()
exception_caught = False
for key, value in d.items():
try:
value_float = float(value)
except ValueError:
exception_caught = True
value_float = NiSuperalloyDataset._parse_peculiar_amount(value)
d_copy[key] = value_float
return d_copy, exception_caught
@staticmethod
def _parse_peculiar_amount(x: str) -> float:
"""
Deals with dataset-specific-peculiarities in composition amounts.
Some composition amounts have a trailing asterisk, e.g., '2*'. The meaning is unclear.
Perhaps it denotes that the amount is imprecise. In any case, they only occur in 6
samples. The trailing asterisk will be ignored.
"""
if x[-1] == "*":
x = x[:-1]
try:
return float(x)
except ValueError:
raise InvalidParameterError("Amount as a float", x)
def _get_scalar_property(
self,
properties: List[dict],
property_name: str,
units: Optional[str] = None,
default_value: Optional[float] = None,
) -> float:
"""
A helper function to get a single scalar property.
This calls _get_single_property and then checks that the result can be
turned into a float.
Parameters:
properties: A list of dicts, each of which is a single property.
property_name: The name of the property to get the value of.
units: Optional expected units string.
default_value: Value to return if `property_name` is not present.
Raises:
InvalidParameterError: if the value cannot be expressed as a float
Returns: float
The value of the desired property.
"""
try:
val = self._get_single_property(properties, property_name, units, default_value)
if val is None:
return None
return float(val)
except ValueError:
raise InvalidParameterError(
expected=f"Property {property_name} should have a value "
f"that can be expressed as a float",
got=properties,
)
def _get_categorical_property(
self, properties: List[dict], property_name: str, categories_dict: dict
) -> int:
"""
Helper function to get a single categorical property as an int.
Parameters:
properties: A list of dicts, each of which is a single property.
property_name: The name of the property to get the value of.
categories_dict: Dict from the categorical property (string) to a unique integer value.
Raises:
InvalidParameterError: if the value is not in the expected list of possible categories
as given by the keys in `categories_dict`
Returns: int
An integer that corresponds to the value of the desired property.
"""
category = self._get_single_property(properties, property_name)
try:
return categories_dict[category]
except KeyError:
raise InvalidParameterError(
f"A value in the array: {categories_dict.keys()}", category
)
@staticmethod
def _get_single_property(
properties: List[dict], property_name: str, units: Optional[str] = None, default_value=None
):
"""
Helper function to get a single property.
Parameters:
properties: A list of dicts, each of which is a single property. Each entry is expected
to have a 'name' field that corresponds to the property name and a `scalars` field
that is a list with one entry, a dict of the form {'value': <property value>}.
It may also have a 'units' field.
property_name: The name of the property to get the value of. `properties` is expected
to have exactly one entry with the 'name' field equal to `property_name`.
units: Optional expected value of 'units' field. If specified, then there must be a
'units' field and its value must correspond to `units`.
default_value: Value to return if `property_name` is not present.
Raises:
InvalidParameterError: if `properties` does not conform to the expected structure
Returns:
The value of the property `property_name`
"""
matching_props = [prop for prop in properties if prop.get("name") == property_name]
if len(matching_props) == 0:
return default_value
elif len(matching_props) > 1:
raise InvalidParameterError(
expected=f"Only one entry in properties should have name" f" '{property_name}'",
got=properties,
)
matching_prop = matching_props[0]
try:
scalars = matching_prop["scalars"]
assert len(scalars) == 1
val = scalars[0]["value"]
if units is not None:
assert matching_prop["units"] == units
except (KeyError, AssertionError):
units_str = "" if units is None else f", 'units': {units}"
raise InvalidParameterError(
expected="Property as a dictionary of the form\n"
"{'name': <property name>, 'scalars': "
"[{'value': <property value>}]" + units_str + "}",
got=matching_prop,
)
return val
| [
"zipfile.ZipFile",
"os.path.realpath",
"smlb.parameters.params.sequence",
"smlb.exceptions.InvalidParameterError",
"os.path.basename",
"json.load",
"smlb.parameters.params.boolean"
] | [((3363, 3393), 'smlb.parameters.params.boolean', 'params.boolean', (['ignore_dubious'], {}), '(ignore_dubious)\n', (3377, 3393), False, 'from smlb.parameters import params\n'), ((4348, 4374), 'os.path.basename', 'os.path.basename', (['filepath'], {}), '(filepath)\n', (4364, 4374), False, 'import os\n'), ((4518, 4543), 'zipfile.ZipFile', 'zipfile.ZipFile', (['filepath'], {}), '(filepath)\n', (4533, 4543), False, 'import zipfile\n'), ((4976, 5066), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""Chemical composition as a list"""', 'got': 'raw_composition'}), "(expected='Chemical composition as a list', got=\n raw_composition)\n", (4997, 5066), False, 'from smlb.exceptions import InvalidParameterError\n'), ((6578, 6678), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""A list of dictionaries, one for each property"""', 'got': 'properties'}), "(expected=\n 'A list of dictionaries, one for each property', got=properties)\n", (6599, 6678), False, 'from smlb.exceptions import InvalidParameterError\n'), ((10031, 10131), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""A list of dictionaries, one for each property"""', 'got': 'properties'}), "(expected=\n 'A list of dictionaries, one for each property', got=properties)\n", (10052, 10131), False, 'from smlb.exceptions import InvalidParameterError\n'), ((2377, 2403), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (2393, 2403), False, 'import os\n'), ((4670, 4683), 'json.load', 'json.load', (['fp'], {}), '(fp)\n', (4679, 4683), False, 'import json\n'), ((14260, 14305), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', (['"""Amount as a float"""', 'x'], {}), "('Amount as a float', x)\n", (14281, 14305), False, 'from smlb.exceptions import InvalidParameterError\n'), ((15407, 15545), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': 'f"""Property {property_name} should have a value that can be expressed as a float"""', 'got': 'properties'}), "(expected=\n f'Property {property_name} should have a value that can be expressed as a float'\n , got=properties)\n", (15428, 15545), False, 'from smlb.exceptions import InvalidParameterError\n'), ((18089, 18213), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': 'f"""Only one entry in properties should have name \'{property_name}\'"""', 'got': 'properties'}), '(expected=\n f"Only one entry in properties should have name \'{property_name}\'", got\n =properties)\n', (18110, 18213), False, 'from smlb.exceptions import InvalidParameterError\n'), ((18654, 18842), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '(\n """Property as a dictionary of the form\n{\'name\': <property name>, \'scalars\': [{\'value\': <property value>}]"""\n + units_str + \'}\')', 'got': 'matching_prop'}), '(expected=\n """Property as a dictionary of the form\n{\'name\': <property name>, \'scalars\': [{\'value\': <property value>}]"""\n + units_str + \'}\', got=matching_prop)\n', (18675, 18842), False, 'from smlb.exceptions import InvalidParameterError\n'), ((12544, 12723), 'smlb.exceptions.InvalidParameterError', 'InvalidParameterError', ([], {'expected': '"""Element amount as a dictionary of the form\n{\'element\': <element name>,\'idealWeightPercent\': {\'value\': <element amount>}}"""', 'got': 'entry'}), '(expected=\n """Element amount as a dictionary of the 
form\n{\'element\': <element name>,\'idealWeightPercent\': {\'value\': <element amount>}}"""\n , got=entry)\n', (12565, 12723), False, 'from smlb.exceptions import InvalidParameterError\n'), ((3280, 3311), 'smlb.parameters.params.sequence', 'params.sequence', (['arg'], {'type_': 'str'}), '(arg, type_=str)\n', (3295, 3311), False, 'from smlb.parameters import params\n')] |
import metricbeat
import os
import pytest
import sys
import unittest
class Test(metricbeat.BaseTest):
COMPOSE_SERVICES = ['postgresql']
def common_checks(self, output):
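        # Shared assertions for every metricset test: no errors or warnings were
        # logged, each event carries the expected top-level fields, and every
        # emitted field is documented.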
# Ensure no errors or warnings exist in the log.
self.assert_no_logged_warnings()
for evt in output:
top_level_fields = metricbeat.COMMON_FIELDS + ["postgresql"]
self.assertCountEqual(self.de_dot(top_level_fields), evt.keys())
self.assert_fields_are_documented(evt)
def get_hosts(self):
username = "postgres"
host = self.compose_host()
dsn = "postgres://{}?sslmode=disable".format(host)
return (
[dsn],
username,
os.getenv("POSTGRESQL_PASSWORD"),
)
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_activity(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["activity"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["activity"]["database"]
assert "oid" in evt["postgresql"]["activity"]["database"]
assert "state" in evt["postgresql"]["activity"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_database(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["database"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["database"]
assert "oid" in evt["postgresql"]["database"]
assert "blocks" in evt["postgresql"]["database"]
assert "rows" in evt["postgresql"]["database"]
assert "conflicts" in evt["postgresql"]["database"]
assert "deadlocks" in evt["postgresql"]["database"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_bgwriter(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["bgwriter"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "checkpoints" in evt["postgresql"]["bgwriter"]
assert "buffers" in evt["postgresql"]["bgwriter"]
assert "stats_reset" in evt["postgresql"]["bgwriter"]
| [
"pytest.mark.tag",
"unittest.skipUnless",
"os.getenv"
] | [((779, 848), 'unittest.skipUnless', 'unittest.skipUnless', (['metricbeat.INTEGRATION_TESTS', '"""integration test"""'], {}), "(metricbeat.INTEGRATION_TESTS, 'integration test')\n", (798, 848), False, 'import unittest\n'), ((854, 884), 'pytest.mark.tag', 'pytest.mark.tag', (['"""integration"""'], {}), "('integration')\n", (869, 884), False, 'import pytest\n'), ((1728, 1797), 'unittest.skipUnless', 'unittest.skipUnless', (['metricbeat.INTEGRATION_TESTS', '"""integration test"""'], {}), "(metricbeat.INTEGRATION_TESTS, 'integration test')\n", (1747, 1797), False, 'import unittest\n'), ((1803, 1833), 'pytest.mark.tag', 'pytest.mark.tag', (['"""integration"""'], {}), "('integration')\n", (1818, 1833), False, 'import pytest\n'), ((2841, 2910), 'unittest.skipUnless', 'unittest.skipUnless', (['metricbeat.INTEGRATION_TESTS', '"""integration test"""'], {}), "(metricbeat.INTEGRATION_TESTS, 'integration test')\n", (2860, 2910), False, 'import unittest\n'), ((2916, 2946), 'pytest.mark.tag', 'pytest.mark.tag', (['"""integration"""'], {}), "('integration')\n", (2931, 2946), False, 'import pytest\n'), ((729, 761), 'os.getenv', 'os.getenv', (['"""POSTGRESQL_PASSWORD"""'], {}), "('POSTGRESQL_PASSWORD')\n", (738, 761), False, 'import os\n')] |
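
Editor's note: the integration tests above assert on events read back from a running Metricbeat. A minimal, self-contained sketch of the same field checks against a hypothetical event document (field values invented for illustration):

event = {
    "metricset": {"name": "activity"},
    "postgresql": {
        "activity": {
            "database": {"name": "postgres", "oid": 13395},
            "state": "active",
        }
    },
}
assert "name" in event["postgresql"]["activity"]["database"]
assert "oid" in event["postgresql"]["activity"]["database"]
assert "state" in event["postgresql"]["activity"]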
from django.contrib import admin
from books.models import Genre, Author, Book, TBR
# Register your models here.
admin.site.register(Genre)
admin.site.register(Author)
admin.site.register(Book)
admin.site.register(TBR) | [
"django.contrib.admin.site.register"
] | [((114, 140), 'django.contrib.admin.site.register', 'admin.site.register', (['Genre'], {}), '(Genre)\n', (133, 140), False, 'from django.contrib import admin\n'), ((141, 168), 'django.contrib.admin.site.register', 'admin.site.register', (['Author'], {}), '(Author)\n', (160, 168), False, 'from django.contrib import admin\n'), ((169, 194), 'django.contrib.admin.site.register', 'admin.site.register', (['Book'], {}), '(Book)\n', (188, 194), False, 'from django.contrib import admin\n'), ((195, 219), 'django.contrib.admin.site.register', 'admin.site.register', (['TBR'], {}), '(TBR)\n', (214, 219), False, 'from django.contrib import admin\n')] |
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from packaging import version
from datadog_checks.base.utils.common import get_docker_hostname
HERE = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.dirname(os.path.dirname(HERE))
RABBITMQ_VERSION_RAW = os.environ['RABBITMQ_VERSION']
RABBITMQ_VERSION = version.parse(RABBITMQ_VERSION_RAW)
CHECK_NAME = 'rabbitmq'
HOST = get_docker_hostname()
PORT = 15672
URL = 'http://{}:{}/api/'.format(HOST, PORT)
CONFIG = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': '<PASSWORD>',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
}
CONFIG_NO_NODES = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': '<PASSWORD>',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
'collect_node_metrics': False,
}
CONFIG_REGEX = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': '<PASSWORD>',
'queues_regexes': [r'test\d+'],
'exchanges_regexes': [r'test\d+'],
}
CONFIG_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': '<PASSWORD>',
'vhosts': ['/', 'myvhost'],
}
CONFIG_WITH_FAMILY = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': '<PASSWORD>',
'tag_families': True,
'queues_regexes': [r'(test)\d+'],
'exchanges_regexes': [r'(test)\d+'],
}
CONFIG_DEFAULT_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': '<PASSWORD>',
'vhosts': ['/', 'test'],
}
CONFIG_TEST_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': '<PASSWORD>',
'vhosts': ['test', 'test2'],
}
EXCHANGE_MESSAGE_STATS = {
'ack': 1.0,
'ack_details': {'rate': 1.0},
'confirm': 1.0,
'confirm_details': {'rate': 1.0},
'deliver_get': 1.0,
'deliver_get_details': {'rate': 1.0},
'publish': 1.0,
'publish_details': {'rate': 1.0},
'publish_in': 1.0,
'publish_in_details': {'rate': 1.0},
'publish_out': 1.0,
'publish_out_details': {'rate': 1.0},
'return_unroutable': 1.0,
'return_unroutable_details': {'rate': 1.0},
'redeliver': 1.0,
'redeliver_details': {'rate': 1.0},
}
| [
"os.path.abspath",
"os.path.dirname",
"packaging.version.parse",
"datadog_checks.base.utils.common.get_docker_hostname"
] | [((394, 429), 'packaging.version.parse', 'version.parse', (['RABBITMQ_VERSION_RAW'], {}), '(RABBITMQ_VERSION_RAW)\n', (407, 429), False, 'from packaging import version\n'), ((463, 484), 'datadog_checks.base.utils.common.get_docker_hostname', 'get_docker_hostname', ([], {}), '()\n', (482, 484), False, 'from datadog_checks.base.utils.common import get_docker_hostname\n'), ((247, 272), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (262, 272), False, 'import os\n'), ((297, 318), 'os.path.dirname', 'os.path.dirname', (['HERE'], {}), '(HERE)\n', (312, 318), False, 'import os\n')] |
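
Editor's note: the queues_regexes / exchanges_regexes options above hold regular expressions that the check matches against queue and exchange names. A standalone illustration of what the patterns used in these configs match (the real matching happens inside the check; this only demonstrates the patterns themselves):

import re

queue_pattern = re.compile(r"test\d+")
assert queue_pattern.match("test1")
assert queue_pattern.match("test42")
assert not queue_pattern.match("other")

# CONFIG_WITH_FAMILY uses a capture group whose value becomes the "family" tag.
family_pattern = re.compile(r"(test)\d+")
match = family_pattern.match("test7")
assert match and match.group(1) == "test"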
"""
Binary serialization
NPY format
==========
A simple format for saving numpy arrays to disk with the full
information about them.
The ``.npy`` format is the standard binary file format in NumPy for
persisting a *single* arbitrary NumPy array on disk. The format stores all
of the shape and dtype information necessary to reconstruct the array
correctly even on another machine with a different architecture.
The format is designed to be as simple as possible while achieving
its limited goals.
The ``.npz`` format is the standard format for persisting *multiple* NumPy
arrays on disk. A ``.npz`` file is a zip file containing multiple ``.npy``
files, one for each array.
Capabilities
------------
- Can represent all NumPy arrays including nested record arrays and
object arrays.
- Represents the data in its native binary form.
- Supports Fortran-contiguous arrays directly.
- Stores all of the necessary information to reconstruct the array
including shape and dtype on a machine of a different
architecture. Both little-endian and big-endian arrays are
supported, and a file with little-endian numbers will yield
a little-endian array on any machine reading the file. The
types are described in terms of their actual sizes. For example,
if a machine with a 64-bit C "long int" writes out an array with
"long ints", a reading machine with 32-bit C "long ints" will yield
an array with 64-bit integers.
- Is straightforward to reverse engineer. Datasets often live longer than
the programs that created them. A competent developer should be
able to create a solution in their preferred programming language to
read most ``.npy`` files that they have been given without much
documentation.
- Allows memory-mapping of the data. See `open_memmap`.
- Can be read from a filelike stream object instead of an actual file.
- Stores object arrays, i.e. arrays containing elements that are arbitrary
Python objects. Files with object arrays are not to be mmapable, but
can be read and written to disk.
Limitations
-----------
- Arbitrary subclasses of numpy.ndarray are not completely preserved.
Subclasses will be accepted for writing, but only the array data will
be written out. A regular numpy.ndarray object will be created
upon reading the file.
.. warning::
Due to limitations in the interpretation of structured dtypes, dtypes
with fields with empty names will have the names replaced by 'f0', 'f1',
etc. Such arrays will not round-trip through the format entirely
accurately. The data is intact; only the field names will differ. We are
working on a fix for this. This fix will not require a change in the
file format. The arrays with such structures can still be saved and
restored, and the correct dtype may be restored by using the
``loadedarray.view(correct_dtype)`` method.
File extensions
---------------
We recommend using the ``.npy`` and ``.npz`` extensions for files saved
in this format. This is by no means a requirement; applications may wish
to use these file formats but use an extension specific to the
application. In the absence of an obvious alternative, however,
we suggest using ``.npy`` and ``.npz``.
Version numbering
-----------------
The version numbering of these formats is independent of NumPy version
numbering. If the format is upgraded, the code in `numpy.io` will still
be able to read and write Version 1.0 files.
Format Version 1.0
------------------
The first 6 bytes are a magic string: exactly ``\\x93NUMPY``.
The next 1 byte is an unsigned byte: the major version number of the file
format, e.g. ``\\x01``.
The next 1 byte is an unsigned byte: the minor version number of the file
format, e.g. ``\\x00``. Note: the version of the file format is not tied
to the version of the numpy package.
The next 2 bytes form a little-endian unsigned short int: the length of
the header data HEADER_LEN.
The next HEADER_LEN bytes form the header data describing the array's
format. It is an ASCII string which contains a Python literal expression
of a dictionary. It is terminated by a newline (``\\n``) and padded with
spaces (``\\x20``) to make the total of
``len(magic string) + 2 + len(length) + HEADER_LEN`` be evenly divisible
by 64 for alignment purposes.
The dictionary contains three keys:
"descr" : dtype.descr
An object that can be passed as an argument to the `numpy.dtype`
constructor to create the array's dtype.
"fortran_order" : bool
Whether the array data is Fortran-contiguous or not. Since
Fortran-contiguous arrays are a common form of non-C-contiguity,
we allow them to be written directly to disk for efficiency.
"shape" : tuple of int
The shape of the array.
For repeatability and readability, the dictionary keys are sorted in
alphabetic order. This is for convenience only. A writer SHOULD implement
this if possible. A reader MUST NOT depend on this.
Following the header comes the array data. If the dtype contains Python
objects (i.e. ``dtype.hasobject is True``), then the data is a Python
pickle of the array. Otherwise the data is the contiguous (either C-
or Fortran-, depending on ``fortran_order``) bytes of the array.
Consumers can figure out the number of bytes by multiplying the number
of elements given by the shape (noting that ``shape=()`` means there is
1 element) by ``dtype.itemsize``.
Format Version 2.0
------------------
The version 1.0 format only allowed the array header to have a total size of
65535 bytes. This can be exceeded by structured arrays with a large number of
columns. The version 2.0 format extends the header size to 4 GiB.
`numpy.save` will automatically save in 2.0 format if the data requires it,
else it will always use the more compatible 1.0 format.
The description of the fourth element of the header therefore has become:
"The next 4 bytes form a little-endian unsigned int: the length of the header
data HEADER_LEN."
Format Version 3.0
------------------
This version replaces the ASCII string (which in practice was latin1) with
a utf8-encoded string, so supports structured types with any unicode field
names.
Notes
-----
The ``.npy`` format, including motivation for creating it and a comparison of
alternatives, is described in the
:doc:`"npy-format" NEP <neps:nep-0001-npy-format>`, however details have
evolved with time and this document is more current.
"""
import numpy
import io
import warnings
from numpy.lib.utils import safe_eval
from numpy.compat import (
isfileobj, os_fspath, pickle
)
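# Editor's note (illustrative comment, not part of the original module): for a
# small array such as numpy.zeros(3, dtype='<f8') saved in format version 1.0,
# the file begins with
#     b'\x93NUMPY'    magic string
#     b'\x01\x00'     major = 1, minor = 0
#     b'\x76\x00'     little-endian uint16 header length (118 here, padded so
#                     that len(magic) + 2 + header length is a multiple of 64)
#     b"{'descr': '<f8', 'fortran_order': False, 'shape': (3,), }" padded with
#     spaces and terminated by b'\n',
# followed by 3 * 8 bytes of raw little-endian float64 data.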
__all__ = []
EXPECTED_KEYS = {'descr', 'fortran_order', 'shape'}
MAGIC_PREFIX = b'\x93NUMPY'
MAGIC_LEN = len(MAGIC_PREFIX) + 2
ARRAY_ALIGN = 64 # plausible values are powers of 2 between 16 and 4096
BUFFER_SIZE = 2**18 # size of buffer for reading npz files in bytes
# difference between version 1.0 and 2.0 is a 4 byte (I) header length
# instead of 2 bytes (H) allowing storage of large structured arrays
_header_size_info = {
(1, 0): ('<H', 'latin1'),
(2, 0): ('<I', 'latin1'),
(3, 0): ('<I', 'utf8'),
}
def _check_version(version):
if version not in [(1, 0), (2, 0), (3, 0), None]:
msg = "we only support format version (1,0), (2,0), and (3,0), not %s"
raise ValueError(msg % (version,))
def magic(major, minor):
""" Return the magic string for the given file format version.
Parameters
----------
major : int in [0, 255]
minor : int in [0, 255]
Returns
-------
magic : str
Raises
------
ValueError if the version cannot be formatted.
"""
if major < 0 or major > 255:
raise ValueError("major version must be 0 <= major < 256")
if minor < 0 or minor > 255:
raise ValueError("minor version must be 0 <= minor < 256")
return MAGIC_PREFIX + bytes([major, minor])
def read_magic(fp):
""" Read the magic string to get the version of the file format.
Parameters
----------
fp : filelike object
Returns
-------
major : int
minor : int
"""
magic_str = _read_bytes(fp, MAGIC_LEN, "magic string")
if magic_str[:-2] != MAGIC_PREFIX:
msg = "the magic string is not correct; expected %r, got %r"
raise ValueError(msg % (MAGIC_PREFIX, magic_str[:-2]))
major, minor = magic_str[-2:]
return major, minor
def _has_metadata(dt):
if dt.metadata is not None:
return True
elif dt.names is not None:
return any(_has_metadata(dt[k]) for k in dt.names)
elif dt.subdtype is not None:
return _has_metadata(dt.base)
else:
return False
def dtype_to_descr(dtype):
"""
Get a serializable descriptor from the dtype.
The .descr attribute of a dtype object cannot be round-tripped through
the dtype() constructor. Simple types, like dtype('float32'), have
a descr which looks like a record array with one field with '' as
a name. The dtype() constructor interprets this as a request to give
a default name. Instead, we construct descriptor that can be passed to
dtype().
Parameters
----------
dtype : dtype
The dtype of the array that will be written to disk.
Returns
-------
descr : object
An object that can be passed to `numpy.dtype()` in order to
replicate the input dtype.
"""
if _has_metadata(dtype):
warnings.warn("metadata on a dtype may be saved or ignored, but will "
"raise if saved when read. Use another form of storage.",
UserWarning, stacklevel=2)
if dtype.names is not None:
# This is a record array. The .descr is fine. XXX: parts of the
# record array with an empty name, like padding bytes, still get
# fiddled with. This needs to be fixed in the C implementation of
# dtype().
return dtype.descr
else:
return dtype.str
def descr_to_dtype(descr):
"""
Returns a dtype based off the given description.
This is essentially the reverse of `dtype_to_descr()`. It will remove
the valueless padding fields created by, i.e. simple fields like
dtype('float32'), and then convert the description to its corresponding
dtype.
Parameters
----------
descr : object
        The object retrieved by dtype.descr. Can be passed to
`numpy.dtype()` in order to replicate the input dtype.
Returns
-------
dtype : dtype
The dtype constructed by the description.
"""
if isinstance(descr, str):
# No padding removal needed
return numpy.dtype(descr)
elif isinstance(descr, tuple):
# subtype, will always have a shape descr[1]
dt = descr_to_dtype(descr[0])
return numpy.dtype((dt, descr[1]))
titles = []
names = []
formats = []
offsets = []
offset = 0
for field in descr:
if len(field) == 2:
name, descr_str = field
dt = descr_to_dtype(descr_str)
else:
name, descr_str, shape = field
dt = numpy.dtype((descr_to_dtype(descr_str), shape))
# Ignore padding bytes, which will be void bytes with '' as name
# Once support for blank names is removed, only "if name == ''" needed)
is_pad = (name == '' and dt.type is numpy.void and dt.names is None)
if not is_pad:
title, name = name if isinstance(name, tuple) else (None, name)
titles.append(title)
names.append(name)
formats.append(dt)
offsets.append(offset)
offset += dt.itemsize
return numpy.dtype({'names': names, 'formats': formats, 'titles': titles,
'offsets': offsets, 'itemsize': offset})
def header_data_from_array_1_0(array):
""" Get the dictionary of header metadata from a numpy.ndarray.
Parameters
----------
array : numpy.ndarray
Returns
-------
d : dict
This has the appropriate entries for writing its string representation
to the header of the file.
"""
d = {'shape': array.shape}
if array.flags.c_contiguous:
d['fortran_order'] = False
elif array.flags.f_contiguous:
d['fortran_order'] = True
else:
# Totally non-contiguous data. We will have to make it C-contiguous
# before writing. Note that we need to test for C_CONTIGUOUS first
# because a 1-D array is both C_CONTIGUOUS and F_CONTIGUOUS.
d['fortran_order'] = False
d['descr'] = dtype_to_descr(array.dtype)
return d
def _wrap_header(header, version):
"""
Takes a stringified header, and attaches the prefix and padding to it
"""
import struct
assert version is not None
fmt, encoding = _header_size_info[version]
if not isinstance(header, bytes): # always true on python 3
header = header.encode(encoding)
hlen = len(header) + 1
padlen = ARRAY_ALIGN - ((MAGIC_LEN + struct.calcsize(fmt) + hlen) % ARRAY_ALIGN)
try:
header_prefix = magic(*version) + struct.pack(fmt, hlen + padlen)
except struct.error:
msg = "Header length {} too big for version={}".format(hlen, version)
raise ValueError(msg) from None
# Pad the header with spaces and a final newline such that the magic
# string, the header-length short and the header are aligned on a
# ARRAY_ALIGN byte boundary. This supports memory mapping of dtypes
# aligned up to ARRAY_ALIGN on systems like Linux where mmap()
# offset must be page-aligned (i.e. the beginning of the file).
return header_prefix + header + b' '*padlen + b'\n'
def _wrap_header_guess_version(header):
"""
Like `_wrap_header`, but chooses an appropriate version given the contents
"""
try:
return _wrap_header(header, (1, 0))
except ValueError:
pass
try:
ret = _wrap_header(header, (2, 0))
except UnicodeEncodeError:
pass
else:
        warnings.warn("Stored array in format 2.0. It can only be "
                      "read by NumPy >= 1.9", UserWarning, stacklevel=2)
return ret
header = _wrap_header(header, (3, 0))
warnings.warn("Stored array in format 3.0. It can only be "
"read by NumPy >= 1.17", UserWarning, stacklevel=2)
return header
def _write_array_header(fp, d, version=None):
""" Write the header for an array and returns the version used
Parameters
----------
fp : filelike object
d : dict
This has the appropriate entries for writing its string representation
to the header of the file.
version: tuple or None
None means use oldest that works
explicit version will raise a ValueError if the format does not
allow saving this data. Default: None
"""
header = ["{"]
for key, value in sorted(d.items()):
# Need to use repr here, since we eval these when reading
header.append("'%s': %s, " % (key, repr(value)))
header.append("}")
header = "".join(header)
if version is None:
header = _wrap_header_guess_version(header)
else:
header = _wrap_header(header, version)
fp.write(header)
def write_array_header_1_0(fp, d):
""" Write the header for an array using the 1.0 format.
Parameters
----------
fp : filelike object
d : dict
This has the appropriate entries for writing its string
representation to the header of the file.
"""
_write_array_header(fp, d, (1, 0))
def write_array_header_2_0(fp, d):
""" Write the header for an array using the 2.0 format.
The 2.0 format allows storing very large structured arrays.
.. versionadded:: 1.9.0
Parameters
----------
fp : filelike object
d : dict
This has the appropriate entries for writing its string
representation to the header of the file.
"""
_write_array_header(fp, d, (2, 0))
def read_array_header_1_0(fp):
"""
Read an array header from a filelike object using the 1.0 file format
version.
This will leave the file object located just after the header.
Parameters
----------
fp : filelike object
A file object or something with a `.read()` method like a file.
Returns
-------
shape : tuple of int
The shape of the array.
fortran_order : bool
The array data will be written out directly if it is either
C-contiguous or Fortran-contiguous. Otherwise, it will be made
contiguous before writing it out.
dtype : dtype
The dtype of the file's data.
Raises
------
ValueError
If the data is invalid.
"""
return _read_array_header(fp, version=(1, 0))
def read_array_header_2_0(fp):
"""
Read an array header from a filelike object using the 2.0 file format
version.
This will leave the file object located just after the header.
.. versionadded:: 1.9.0
Parameters
----------
fp : filelike object
A file object or something with a `.read()` method like a file.
Returns
-------
shape : tuple of int
The shape of the array.
fortran_order : bool
The array data will be written out directly if it is either
C-contiguous or Fortran-contiguous. Otherwise, it will be made
contiguous before writing it out.
dtype : dtype
The dtype of the file's data.
Raises
------
ValueError
If the data is invalid.
"""
return _read_array_header(fp, version=(2, 0))
def _filter_header(s):
"""Clean up 'L' in npz header ints.
Cleans up the 'L' in strings representing integers. Needed to allow npz
headers produced in Python2 to be read in Python3.
Parameters
----------
s : string
Npy file header.
Returns
-------
header : str
Cleaned up header.
"""
import tokenize
from io import StringIO
tokens = []
last_token_was_number = False
for token in tokenize.generate_tokens(StringIO(s).readline):
token_type = token[0]
token_string = token[1]
if (last_token_was_number and
token_type == tokenize.NAME and
token_string == "L"):
continue
else:
tokens.append(token)
last_token_was_number = (token_type == tokenize.NUMBER)
return tokenize.untokenize(tokens)
def _read_array_header(fp, version):
"""
see read_array_header_1_0
"""
# Read an unsigned, little-endian short int which has the length of the
# header.
import struct
hinfo = _header_size_info.get(version)
if hinfo is None:
raise ValueError("Invalid version {!r}".format(version))
hlength_type, encoding = hinfo
hlength_str = _read_bytes(fp, struct.calcsize(hlength_type), "array header length")
header_length = struct.unpack(hlength_type, hlength_str)[0]
header = _read_bytes(fp, header_length, "array header")
header = header.decode(encoding)
# The header is a pretty-printed string representation of a literal
# Python dictionary with trailing newlines padded to a ARRAY_ALIGN byte
# boundary. The keys are strings.
# "shape" : tuple of int
# "fortran_order" : bool
# "descr" : dtype.descr
# Versions (2, 0) and (1, 0) could have been created by a Python 2
# implementation before header filtering was implemented.
if version <= (2, 0):
header = _filter_header(header)
try:
d = safe_eval(header)
except SyntaxError as e:
msg = "Cannot parse header: {!r}"
raise ValueError(msg.format(header)) from e
if not isinstance(d, dict):
msg = "Header is not a dictionary: {!r}"
raise ValueError(msg.format(d))
if EXPECTED_KEYS != d.keys():
keys = sorted(d.keys())
msg = "Header does not contain the correct keys: {!r}"
raise ValueError(msg.format(keys))
# Sanity-check the values.
if (not isinstance(d['shape'], tuple) or
not all(isinstance(x, int) for x in d['shape'])):
msg = "shape is not valid: {!r}"
raise ValueError(msg.format(d['shape']))
if not isinstance(d['fortran_order'], bool):
msg = "fortran_order is not a valid bool: {!r}"
raise ValueError(msg.format(d['fortran_order']))
try:
dtype = descr_to_dtype(d['descr'])
except TypeError as e:
msg = "descr is not a valid dtype descriptor: {!r}"
raise ValueError(msg.format(d['descr'])) from e
return d['shape'], d['fortran_order'], dtype
def write_array(fp, array, version=None, allow_pickle=True, pickle_kwargs=None):
"""
Write an array to an NPY file, including a header.
If the array is neither C-contiguous nor Fortran-contiguous AND the
file_like object is not a real file object, this function will have to
copy data in memory.
Parameters
----------
fp : file_like object
An open, writable file object, or similar object with a
``.write()`` method.
array : ndarray
The array to write to disk.
version : (int, int) or None, optional
The version number of the format. None means use the oldest
supported version that is able to store the data. Default: None
allow_pickle : bool, optional
Whether to allow writing pickled data. Default: True
pickle_kwargs : dict, optional
Additional keyword arguments to pass to pickle.dump, excluding
'protocol'. These are only useful when pickling objects in object
arrays on Python 3 to Python 2 compatible format.
Raises
------
ValueError
If the array cannot be persisted. This includes the case of
allow_pickle=False and array being an object array.
Various other errors
If the array contains Python objects as part of its dtype, the
process of pickling them may raise various errors if the objects
are not picklable.
"""
_check_version(version)
_write_array_header(fp, header_data_from_array_1_0(array), version)
if array.itemsize == 0:
buffersize = 0
else:
# Set buffer size to 16 MiB to hide the Python loop overhead.
buffersize = max(16 * 1024 ** 2 // array.itemsize, 1)
if array.dtype.hasobject:
# We contain Python objects so we cannot write out the data
# directly. Instead, we will pickle it out
if not allow_pickle:
raise ValueError("Object arrays cannot be saved when "
"allow_pickle=False")
if pickle_kwargs is None:
pickle_kwargs = {}
pickle.dump(array, fp, protocol=3, **pickle_kwargs)
elif array.flags.f_contiguous and not array.flags.c_contiguous:
if isfileobj(fp):
array.T.tofile(fp)
else:
for chunk in numpy.nditer(
array, flags=['external_loop', 'buffered', 'zerosize_ok'],
buffersize=buffersize, order='F'):
fp.write(chunk.tobytes('C'))
else:
if isfileobj(fp):
array.tofile(fp)
else:
for chunk in numpy.nditer(
array, flags=['external_loop', 'buffered', 'zerosize_ok'],
buffersize=buffersize, order='C'):
fp.write(chunk.tobytes('C'))
def read_array(fp, allow_pickle=False, pickle_kwargs=None):
"""
Read an array from an NPY file.
Parameters
----------
fp : file_like object
If this is not a real file object, then this may take extra memory
and time.
allow_pickle : bool, optional
Whether to allow writing pickled data. Default: False
.. versionchanged:: 1.16.3
Made default False in response to CVE-2019-6446.
pickle_kwargs : dict
Additional keyword arguments to pass to pickle.load. These are only
useful when loading object arrays saved on Python 2 when using
Python 3.
Returns
-------
array : ndarray
The array from the data on disk.
Raises
------
ValueError
If the data is invalid, or allow_pickle=False and the file contains
an object array.
"""
version = read_magic(fp)
_check_version(version)
shape, fortran_order, dtype = _read_array_header(fp, version)
if len(shape) == 0:
count = 1
else:
count = numpy.multiply.reduce(shape, dtype=numpy.int64)
# Now read the actual data.
if dtype.hasobject:
# The array contained Python objects. We need to unpickle the data.
if not allow_pickle:
raise ValueError("Object arrays cannot be loaded when "
"allow_pickle=False")
if pickle_kwargs is None:
pickle_kwargs = {}
try:
array = pickle.load(fp, **pickle_kwargs)
except UnicodeError as err:
# Friendlier error message
raise UnicodeError("Unpickling a python object failed: %r\n"
"You may need to pass the encoding= option "
"to numpy.load" % (err,)) from err
else:
if isfileobj(fp):
# We can use the fast fromfile() function.
array = numpy.fromfile(fp, dtype=dtype, count=count)
else:
# This is not a real file. We have to read it the
# memory-intensive way.
# crc32 module fails on reads greater than 2 ** 32 bytes,
# breaking large reads from gzip streams. Chunk reads to
# BUFFER_SIZE bytes to avoid issue and reduce memory overhead
# of the read. In non-chunked case count < max_read_count, so
# only one read is performed.
# Use np.ndarray instead of np.empty since the latter does
# not correctly instantiate zero-width string dtypes; see
# https://github.com/numpy/numpy/pull/6430
array = numpy.ndarray(count, dtype=dtype)
if dtype.itemsize > 0:
# If dtype.itemsize == 0 then there's nothing more to read
max_read_count = BUFFER_SIZE // min(BUFFER_SIZE, dtype.itemsize)
for i in range(0, count, max_read_count):
read_count = min(max_read_count, count - i)
read_size = int(read_count * dtype.itemsize)
data = _read_bytes(fp, read_size, "array data")
array[i:i+read_count] = numpy.frombuffer(data, dtype=dtype,
count=read_count)
if fortran_order:
array.shape = shape[::-1]
array = array.transpose()
else:
array.shape = shape
return array
def open_memmap(filename, mode='r+', dtype=None, shape=None,
fortran_order=False, version=None):
"""
Open a .npy file as a memory-mapped array.
This may be used to read an existing file or create a new one.
Parameters
----------
filename : str or path-like
The name of the file on disk. This may *not* be a file-like
object.
mode : str, optional
The mode in which to open the file; the default is 'r+'. In
addition to the standard file modes, 'c' is also accepted to mean
"copy on write." See `memmap` for the available mode strings.
dtype : data-type, optional
The data type of the array if we are creating a new file in "write"
mode, if not, `dtype` is ignored. The default value is None, which
results in a data-type of `float64`.
shape : tuple of int
The shape of the array if we are creating a new file in "write"
mode, in which case this parameter is required. Otherwise, this
parameter is ignored and is thus optional.
fortran_order : bool, optional
Whether the array should be Fortran-contiguous (True) or
C-contiguous (False, the default) if we are creating a new file in
"write" mode.
version : tuple of int (major, minor) or None
If the mode is a "write" mode, then this is the version of the file
format used to create the file. None means use the oldest
supported version that is able to store the data. Default: None
Returns
-------
marray : memmap
The memory-mapped array.
Raises
------
ValueError
If the data or the mode is invalid.
IOError
If the file is not found or cannot be opened correctly.
See Also
--------
numpy.memmap
"""
if isfileobj(filename):
raise ValueError("Filename must be a string or a path-like object."
" Memmap cannot use existing file handles.")
if 'w' in mode:
# We are creating the file, not reading it.
# Check if we ought to create the file.
_check_version(version)
# Ensure that the given dtype is an authentic dtype object rather
# than just something that can be interpreted as a dtype object.
dtype = numpy.dtype(dtype)
if dtype.hasobject:
msg = "Array can't be memory-mapped: Python objects in dtype."
raise ValueError(msg)
d = dict(
descr=dtype_to_descr(dtype),
fortran_order=fortran_order,
shape=shape,
)
# If we got here, then it should be safe to create the file.
with open(os_fspath(filename), mode+'b') as fp:
_write_array_header(fp, d, version)
offset = fp.tell()
else:
# Read the header of the file first.
with open(os_fspath(filename), 'rb') as fp:
version = read_magic(fp)
_check_version(version)
shape, fortran_order, dtype = _read_array_header(fp, version)
if dtype.hasobject:
msg = "Array can't be memory-mapped: Python objects in dtype."
raise ValueError(msg)
offset = fp.tell()
if fortran_order:
order = 'F'
else:
order = 'C'
# We need to change a write-only mode to a read-write mode since we've
# already written data to the file.
if mode == 'w+':
mode = 'r+'
marray = numpy.memmap(filename, dtype=dtype, shape=shape, order=order,
mode=mode, offset=offset)
return marray
def _read_bytes(fp, size, error_template="ran out of data"):
"""
Read from file-like object until size bytes are read.
    Raises ValueError if EOF is encountered before size bytes are read.
Non-blocking objects only supported if they derive from io objects.
Required as e.g. ZipExtFile in python 2.6 can return less data than
requested.
"""
data = bytes()
while True:
# io files (default in python3) return None or raise on
# would-block, python2 file will truncate, probably nothing can be
# done about that. note that regular files can't be non-blocking
try:
r = fp.read(size - len(data))
data += r
if len(r) == 0 or len(data) == size:
break
except io.BlockingIOError:
pass
if len(data) != size:
msg = "EOF: reading %s, expected %d bytes got %d"
raise ValueError(msg % (error_template, size, len(data)))
else:
return data
| [
"struct.calcsize",
"numpy.compat.pickle.dump",
"numpy.fromfile",
"tokenize.untokenize",
"numpy.frombuffer",
"numpy.multiply.reduce",
"numpy.compat.isfileobj",
"numpy.nditer",
"numpy.memmap",
"io.StringIO",
"struct.pack",
"struct.unpack",
"numpy.ndarray",
"numpy.compat.os_fspath",
"warnings.warn",
"numpy.compat.pickle.load",
"numpy.dtype",
"numpy.lib.utils.safe_eval"
] | [((11608, 11719), 'numpy.dtype', 'numpy.dtype', (["{'names': names, 'formats': formats, 'titles': titles, 'offsets': offsets,\n 'itemsize': offset}"], {}), "({'names': names, 'formats': formats, 'titles': titles,\n 'offsets': offsets, 'itemsize': offset})\n", (11619, 11719), False, 'import numpy\n'), ((14171, 14288), 'warnings.warn', 'warnings.warn', (['"""Stored array in format 3.0. It can only be read by NumPy >= 1.17"""', 'UserWarning'], {'stacklevel': '(2)'}), "(\n 'Stored array in format 3.0. It can only be read by NumPy >= 1.17',\n UserWarning, stacklevel=2)\n", (14184, 14288), False, 'import warnings\n'), ((18411, 18438), 'tokenize.untokenize', 'tokenize.untokenize', (['tokens'], {}), '(tokens)\n', (18430, 18438), False, 'import tokenize\n'), ((28671, 28690), 'numpy.compat.isfileobj', 'isfileobj', (['filename'], {}), '(filename)\n', (28680, 28690), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((30329, 30420), 'numpy.memmap', 'numpy.memmap', (['filename'], {'dtype': 'dtype', 'shape': 'shape', 'order': 'order', 'mode': 'mode', 'offset': 'offset'}), '(filename, dtype=dtype, shape=shape, order=order, mode=mode,\n offset=offset)\n', (30341, 30420), False, 'import numpy\n'), ((9372, 9534), 'warnings.warn', 'warnings.warn', (['"""metadata on a dtype may be saved or ignored, but will raise if saved when read. Use another form of storage."""', 'UserWarning'], {'stacklevel': '(2)'}), "(\n 'metadata on a dtype may be saved or ignored, but will raise if saved when read. Use another form of storage.'\n , UserWarning, stacklevel=2)\n", (9385, 9534), False, 'import warnings\n'), ((10584, 10602), 'numpy.dtype', 'numpy.dtype', (['descr'], {}), '(descr)\n', (10595, 10602), False, 'import numpy\n'), ((13973, 14083), 'warnings.warn', 'warnings.warn', (['"""Stored array in format 2.0. It can only beread by NumPy >= 1.9"""', 'UserWarning'], {'stacklevel': '(2)'}), "('Stored array in format 2.0. 
It can only beread by NumPy >= 1.9',\n UserWarning, stacklevel=2)\n", (13986, 14083), False, 'import warnings\n'), ((18832, 18861), 'struct.calcsize', 'struct.calcsize', (['hlength_type'], {}), '(hlength_type)\n', (18847, 18861), False, 'import struct\n'), ((18906, 18946), 'struct.unpack', 'struct.unpack', (['hlength_type', 'hlength_str'], {}), '(hlength_type, hlength_str)\n', (18919, 18946), False, 'import struct\n'), ((19546, 19563), 'numpy.lib.utils.safe_eval', 'safe_eval', (['header'], {}), '(header)\n', (19555, 19563), False, 'from numpy.lib.utils import safe_eval\n'), ((22694, 22745), 'numpy.compat.pickle.dump', 'pickle.dump', (['array', 'fp'], {'protocol': '(3)'}), '(array, fp, protocol=3, **pickle_kwargs)\n', (22705, 22745), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((24465, 24512), 'numpy.multiply.reduce', 'numpy.multiply.reduce', (['shape'], {'dtype': 'numpy.int64'}), '(shape, dtype=numpy.int64)\n', (24486, 24512), False, 'import numpy\n'), ((25236, 25249), 'numpy.compat.isfileobj', 'isfileobj', (['fp'], {}), '(fp)\n', (25245, 25249), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((29155, 29173), 'numpy.dtype', 'numpy.dtype', (['dtype'], {}), '(dtype)\n', (29166, 29173), False, 'import numpy\n'), ((10744, 10771), 'numpy.dtype', 'numpy.dtype', (['(dt, descr[1])'], {}), '((dt, descr[1]))\n', (10755, 10771), False, 'import numpy\n'), ((13049, 13080), 'struct.pack', 'struct.pack', (['fmt', '(hlen + padlen)'], {}), '(fmt, hlen + padlen)\n', (13060, 13080), False, 'import struct\n'), ((18059, 18070), 'io.StringIO', 'StringIO', (['s'], {}), '(s)\n', (18067, 18070), False, 'from io import StringIO\n'), ((22825, 22838), 'numpy.compat.isfileobj', 'isfileobj', (['fp'], {}), '(fp)\n', (22834, 22838), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((23124, 23137), 'numpy.compat.isfileobj', 'isfileobj', (['fp'], {}), '(fp)\n', (23133, 23137), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((24892, 24924), 'numpy.compat.pickle.load', 'pickle.load', (['fp'], {}), '(fp, **pickle_kwargs)\n', (24903, 24924), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((25326, 25370), 'numpy.fromfile', 'numpy.fromfile', (['fp'], {'dtype': 'dtype', 'count': 'count'}), '(fp, dtype=dtype, count=count)\n', (25340, 25370), False, 'import numpy\n'), ((26029, 26062), 'numpy.ndarray', 'numpy.ndarray', (['count'], {'dtype': 'dtype'}), '(count, dtype=dtype)\n', (26042, 26062), False, 'import numpy\n'), ((22910, 23019), 'numpy.nditer', 'numpy.nditer', (['array'], {'flags': "['external_loop', 'buffered', 'zerosize_ok']", 'buffersize': 'buffersize', 'order': '"""F"""'}), "(array, flags=['external_loop', 'buffered', 'zerosize_ok'],\n buffersize=buffersize, order='F')\n", (22922, 23019), False, 'import numpy\n'), ((23207, 23316), 'numpy.nditer', 'numpy.nditer', (['array'], {'flags': "['external_loop', 'buffered', 'zerosize_ok']", 'buffersize': 'buffersize', 'order': '"""C"""'}), "(array, flags=['external_loop', 'buffered', 'zerosize_ok'],\n buffersize=buffersize, order='C')\n", (23219, 23316), False, 'import numpy\n'), ((29533, 29552), 'numpy.compat.os_fspath', 'os_fspath', (['filename'], {}), '(filename)\n', (29542, 29552), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((29723, 29742), 'numpy.compat.os_fspath', 'os_fspath', (['filename'], {}), '(filename)\n', (29732, 29742), False, 'from numpy.compat import isfileobj, os_fspath, pickle\n'), ((12954, 12974), 'struct.calcsize', 'struct.calcsize', 
(['fmt'], {}), '(fmt)\n', (12969, 12974), False, 'import struct\n'), ((26555, 26608), 'numpy.frombuffer', 'numpy.frombuffer', (['data'], {'dtype': 'dtype', 'count': 'read_count'}), '(data, dtype=dtype, count=read_count)\n', (26571, 26608), False, 'import numpy\n')] |
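
Editor's note: a short, self-contained sketch of reading an .npy header by hand, following only the layout documented in the module above (numpy.lib.format already provides read_magic/read_array for real use):

import ast
import struct

import numpy as np

np.save("example.npy", np.arange(6, dtype="<i4").reshape(2, 3))

with open("example.npy", "rb") as fp:
    assert fp.read(6) == b"\x93NUMPY"      # magic string
    major, minor = fp.read(2)               # format version bytes
    if (major, minor) == (1, 0):
        (hlen,) = struct.unpack("<H", fp.read(2))
    else:                                    # versions 2.0 / 3.0 use a 4-byte length
        (hlen,) = struct.unpack("<I", fp.read(4))
    header = ast.literal_eval(fp.read(hlen).decode("latin1"))
    data = np.fromfile(fp, dtype=np.dtype(header["descr"]))
    order = "F" if header["fortran_order"] else "C"
    array = data.reshape(header["shape"], order=order)

assert array.shape == (2, 3) and array[1, 2] == 5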
#!/usr/bin/env python
from zoneinfo import ZoneInfo
import flask
from dateutil.parser import parse
from flask_assets import Bundle, Environment
from logzero import logger, setup_logger
from webassets.filter import get_filter
from config import cfg
from apis import calendar as gcal
setup_logger(name=__name__)
app = flask.Flask(__name__)
libsass = get_filter(
"libsass",
as_output=True,
style="compressed",
)
assets = Environment(app) # create an Environment instance
bundles = { # define nested Bundle
"style": Bundle(
"scss/*.scss",
        filters=libsass,
output="style.css",
)
}
assets.register(bundles)
@app.route("/")
def events():
return flask.render_template(
"index.html",
calendar=gcal.load_calendar(
service=gcal.build_service(),
calendar_id=cfg.calendar_id,
),
)
@app.template_filter()
def parse_tz_datetime(datetime_str):
return parse(datetime_str).replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def replace_tz(datetime_obj):
return datetime_obj.replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def hex2rgb(hex, alpha=None):
    """Convert a '#rrggbb' hex colour string to a CSS rgb()/rgba() value."""
if not hex.startswith("#"):
return hex
h = hex.lstrip("#")
try:
rgb = tuple(int(h[i : i + 2], 16) for i in (0, 2, 4)) # noqa
except Exception as err:
logger.exception(f"unable to convert {hex=} to rgb: {err}")
return h
if alpha is None:
return f"rgb({rgb[0]}, {rgb[1]}, {rgb[2]})"
else:
return f"rgba({rgb[0]}, {rgb[1]}, {rgb[2]}, {alpha})"
def get_base_url():
if prefix := cfg.gcs_bucket_prefix:
return f"https://{cfg.hostname}/{prefix}"
return f"https://{cfg.hostname}"
def create_app():
cfg.load()
# TODO: do this default settings thing better?
default_app_config = dict(
display_timezone=cfg.display_timezone,
FREEZER_BASE_URL=get_base_url(),
FREEZER_STATIC_IGNORE=["*.scss", ".webassets-cache/*", ".DS_Store"],
FREEZER_RELATIVE_URLS=False,
FREEZER_REMOVE_EXTRA_FILES=True,
)
logger.info(f"create_app() => {default_app_config=}")
app.config.update(default_app_config)
return app
if __name__ == "__main__":
app = create_app()
app.run(
host="0.0.0.0",
debug=True,
)
| [
"dateutil.parser.parse",
"logzero.logger.info",
"flask.Flask",
"zoneinfo.ZoneInfo",
"config.cfg.load",
"apis.calendar.build_service",
"flask_assets.Environment",
"logzero.setup_logger",
"webassets.filter.get_filter",
"logzero.logger.exception",
"flask_assets.Bundle"
] | [((286, 313), 'logzero.setup_logger', 'setup_logger', ([], {'name': '__name__'}), '(name=__name__)\n', (298, 313), False, 'from logzero import logger, setup_logger\n'), ((321, 342), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (332, 342), False, 'import flask\n'), ((353, 410), 'webassets.filter.get_filter', 'get_filter', (['"""libsass"""'], {'as_output': '(True)', 'style': '"""compressed"""'}), "('libsass', as_output=True, style='compressed')\n", (363, 410), False, 'from webassets.filter import get_filter\n'), ((435, 451), 'flask_assets.Environment', 'Environment', (['app'], {}), '(app)\n', (446, 451), False, 'from flask_assets import Bundle, Environment\n'), ((535, 593), 'flask_assets.Bundle', 'Bundle', (['"""scss/*.scss"""'], {'filters': 'libsass', 'output': '"""style.css"""'}), "('scss/*.scss', filters=libsass, output='style.css')\n", (541, 593), False, 'from flask_assets import Bundle, Environment\n'), ((1847, 1857), 'config.cfg.load', 'cfg.load', ([], {}), '()\n', (1855, 1857), False, 'from config import cfg\n'), ((2194, 2267), 'logzero.logger.info', 'logger.info', (['f"""create_app() => default_app_config={default_app_config!r}"""'], {}), "(f'create_app() => default_app_config={default_app_config!r}')\n", (2205, 2267), False, 'from logzero import logger, setup_logger\n'), ((952, 971), 'dateutil.parser.parse', 'parse', (['datetime_str'], {}), '(datetime_str)\n', (957, 971), False, 'from dateutil.parser import parse\n'), ((987, 1027), 'zoneinfo.ZoneInfo', 'ZoneInfo', (["app.config['display_timezone']"], {}), "(app.config['display_timezone'])\n", (995, 1027), False, 'from zoneinfo import ZoneInfo\n'), ((1123, 1163), 'zoneinfo.ZoneInfo', 'ZoneInfo', (["app.config['display_timezone']"], {}), "(app.config['display_timezone'])\n", (1131, 1163), False, 'from zoneinfo import ZoneInfo\n'), ((1451, 1515), 'logzero.logger.exception', 'logger.exception', (['f"""unable to convert hex={hex!r} to rgb: {err}"""'], {}), "(f'unable to convert hex={hex!r} to rgb: {err}')\n", (1467, 1515), False, 'from logzero import logger, setup_logger\n'), ((799, 819), 'apis.calendar.build_service', 'gcal.build_service', ([], {}), '()\n', (817, 819), True, 'from apis import calendar as gcal\n')] |
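
Editor's note: expected behaviour of the hex2rgb template filter defined above, assuming the function from this module is in scope (inputs invented for illustration):

assert hex2rgb("#ff8800") == "rgb(255, 136, 0)"
assert hex2rgb("#ff8800", alpha=0.25) == "rgba(255, 136, 0, 0.25)"
assert hex2rgb("tomato") == "tomato"  # values without a leading "#" pass through unchanged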
from typing import List
from ddq.taxonomy.reference import Reference
from ddq.topics.topic import Topic
class Logic(Topic):
def references(self) -> List[Reference]:
return [
Reference("Classical and Nonclassical Logics",
[("Eric", "Schechter")])
]
| [
"ddq.taxonomy.reference.Reference"
] | [((200, 271), 'ddq.taxonomy.reference.Reference', 'Reference', (['"""Classical and Nonclassical Logics"""', "[('Eric', 'Schechter')]"], {}), "('Classical and Nonclassical Logics', [('Eric', 'Schechter')])\n", (209, 271), False, 'from ddq.taxonomy.reference import Reference\n')] |
# Copyright (c) Facebook, Inc. and its affiliates.
import base64
import logging
import os
import sys
from tensorboardX import SummaryWriter
from pythia.utils.distributed_utils import is_main_process
from pythia.utils.general import (ckpt_name_from_core_args,
foldername_from_config_override)
from pythia.utils.timer import Timer
class Logger:
def __init__(self, config):
self.logger = None
self.summary_writer = None
if not is_main_process():
return
self.timer = Timer()
self.config = config
self.save_dir = config.training_parameters.save_dir
self.log_folder = ckpt_name_from_core_args(config)
self.log_folder += foldername_from_config_override(config)
time_format = "%Y-%m-%dT%H:%M:%S"
self.log_filename = ckpt_name_from_core_args(config) + "_"
self.log_filename += self.timer.get_time_hhmmss(None, format=time_format)
self.log_filename += ".log"
self.log_folder = os.path.join(self.save_dir, self.log_folder, "logs")
arg_log_dir = self.config.get("log_dir", None)
if arg_log_dir:
self.log_folder = arg_log_dir
if not os.path.exists(self.log_folder):
os.makedirs(self.log_folder)
tensorboard_folder = os.path.join(self.log_folder, "tensorboard")
self.summary_writer = SummaryWriter(tensorboard_folder)
self.log_filename = os.path.join(self.log_folder, self.log_filename)
print("Logging to:", self.log_filename)
logging.captureWarnings(True)
self.logger = logging.getLogger(__name__)
self._file_only_logger = logging.getLogger(__name__)
warnings_logger = logging.getLogger("py.warnings")
# Set level
level = config["training_parameters"].get("logger_level", "info")
self.logger.setLevel(getattr(logging, level.upper()))
self._file_only_logger.setLevel(getattr(logging, level.upper()))
formatter = logging.Formatter(
"%(asctime)s %(levelname)s: %(message)s", datefmt="%Y-%m-%dT%H:%M:%S"
)
# Add handler to file
channel = logging.FileHandler(filename=self.log_filename, mode="a")
channel.setFormatter(formatter)
self.logger.addHandler(channel)
self._file_only_logger.addHandler(channel)
warnings_logger.addHandler(channel)
# Add handler to stdout
channel = logging.StreamHandler(sys.stdout)
channel.setFormatter(formatter)
self.logger.addHandler(channel)
warnings_logger.addHandler(channel)
should_not_log = self.config["training_parameters"]["should_not_log"]
self.should_log = not should_not_log
# Single log wrapper map
self._single_log_map = set()
def __del__(self):
if getattr(self, "summary_writer", None) is not None:
self.summary_writer.close()
def write(self, x, level="info", donot_print=False):
if self.logger is None:
return
# if it should not log then just print it
if self.should_log:
if hasattr(self.logger, level):
if donot_print:
getattr(self._file_only_logger, level)(str(x))
else:
getattr(self.logger, level)(str(x))
else:
self.logger.error("Unknown log level type: %s" % level)
else:
print(str(x) + "\n")
    def single_write(self, x, level="info"):
        if x + "_" + level in self._single_log_map:
            return
        else:
            self.write(x, level)
            # Remember the message so later calls with the same text are skipped.
            self._single_log_map.add(x + "_" + level)
def add_scalar(self, key, value, iteration):
if self.summary_writer is None:
return
self.summary_writer.add_scalar(key, value, iteration)
def add_scalars(self, scalar_dict, iteration):
if self.summary_writer is None:
return
for key, val in scalar_dict.items():
self.summary_writer.add_scalar(key, val, iteration)
def add_histogram_for_model(self, model, iteration):
if self.summary_writer is None:
return
for name, param in model.named_parameters():
np_param = param.clone().cpu().data.numpy()
self.summary_writer.add_histogram(name, np_param, iteration)
| [
"logging.getLogger",
"os.path.exists",
"logging.StreamHandler",
"pythia.utils.timer.Timer",
"pythia.utils.general.foldername_from_config_override",
"tensorboardX.SummaryWriter",
"logging.captureWarnings",
"os.makedirs",
"logging.Formatter",
"pythia.utils.distributed_utils.is_main_process",
"os.path.join",
"logging.FileHandler",
"pythia.utils.general.ckpt_name_from_core_args"
] | [((551, 558), 'pythia.utils.timer.Timer', 'Timer', ([], {}), '()\n', (556, 558), False, 'from pythia.utils.timer import Timer\n'), ((674, 706), 'pythia.utils.general.ckpt_name_from_core_args', 'ckpt_name_from_core_args', (['config'], {}), '(config)\n', (698, 706), False, 'from pythia.utils.general import ckpt_name_from_core_args, foldername_from_config_override\n'), ((734, 773), 'pythia.utils.general.foldername_from_config_override', 'foldername_from_config_override', (['config'], {}), '(config)\n', (765, 773), False, 'from pythia.utils.general import ckpt_name_from_core_args, foldername_from_config_override\n'), ((1028, 1080), 'os.path.join', 'os.path.join', (['self.save_dir', 'self.log_folder', '"""logs"""'], {}), "(self.save_dir, self.log_folder, 'logs')\n", (1040, 1080), False, 'import os\n'), ((1323, 1367), 'os.path.join', 'os.path.join', (['self.log_folder', '"""tensorboard"""'], {}), "(self.log_folder, 'tensorboard')\n", (1335, 1367), False, 'import os\n'), ((1398, 1431), 'tensorboardX.SummaryWriter', 'SummaryWriter', (['tensorboard_folder'], {}), '(tensorboard_folder)\n', (1411, 1431), False, 'from tensorboardX import SummaryWriter\n'), ((1461, 1509), 'os.path.join', 'os.path.join', (['self.log_folder', 'self.log_filename'], {}), '(self.log_folder, self.log_filename)\n', (1473, 1509), False, 'import os\n'), ((1568, 1597), 'logging.captureWarnings', 'logging.captureWarnings', (['(True)'], {}), '(True)\n', (1591, 1597), False, 'import logging\n'), ((1621, 1648), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1638, 1648), False, 'import logging\n'), ((1682, 1709), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1699, 1709), False, 'import logging\n'), ((1736, 1768), 'logging.getLogger', 'logging.getLogger', (['"""py.warnings"""'], {}), "('py.warnings')\n", (1753, 1768), False, 'import logging\n'), ((2020, 2113), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)s: %(message)s"""'], {'datefmt': '"""%Y-%m-%dT%H:%M:%S"""'}), "('%(asctime)s %(levelname)s: %(message)s', datefmt=\n '%Y-%m-%dT%H:%M:%S')\n", (2037, 2113), False, 'import logging\n'), ((2180, 2237), 'logging.FileHandler', 'logging.FileHandler', ([], {'filename': 'self.log_filename', 'mode': '"""a"""'}), "(filename=self.log_filename, mode='a')\n", (2199, 2237), False, 'import logging\n'), ((2465, 2498), 'logging.StreamHandler', 'logging.StreamHandler', (['sys.stdout'], {}), '(sys.stdout)\n', (2486, 2498), False, 'import logging\n'), ((491, 508), 'pythia.utils.distributed_utils.is_main_process', 'is_main_process', ([], {}), '()\n', (506, 508), False, 'from pythia.utils.distributed_utils import is_main_process\n'), ((844, 876), 'pythia.utils.general.ckpt_name_from_core_args', 'ckpt_name_from_core_args', (['config'], {}), '(config)\n', (868, 876), False, 'from pythia.utils.general import ckpt_name_from_core_args, foldername_from_config_override\n'), ((1219, 1250), 'os.path.exists', 'os.path.exists', (['self.log_folder'], {}), '(self.log_folder)\n', (1233, 1250), False, 'import os\n'), ((1264, 1292), 'os.makedirs', 'os.makedirs', (['self.log_folder'], {}), '(self.log_folder)\n', (1275, 1292), False, 'import os\n')] |
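
Editor's note: both self.logger and self._file_only_logger above are obtained with logging.getLogger(__name__), so they refer to the same logger object and the "donot_print" path still reaches the stdout handler. A minimal sketch of one way to wire a genuinely file-only logger (hypothetical names, not the Pythia API):

import logging
import sys

main_logger = logging.getLogger("trainer")             # console + file
file_only = logging.getLogger("trainer.file_only")    # file only
file_only.propagate = False  # keep its records away from the parent's console handler

file_handler = logging.FileHandler("run.log")
main_logger.addHandler(file_handler)
main_logger.addHandler(logging.StreamHandler(sys.stdout))
file_only.addHandler(file_handler)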
import sys
import os
import sphinx_rtd_theme
source_path = os.path.normpath(
os.path.join(
os.path.abspath(
os.path.split(__file__)[0])))
try:
from conf_base import *
except ImportError:
sys.path.append(source_path)
from conf_base import *
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
templates_path = [os.path.join(source_path, 'phdoc_static')]
html_static_path = [os.path.join(source_path, 'phdoc_static')]
if not os.path.exists(templates_path[0]):
raise FileNotFoundError(templates_path[0])
blog_root = "http://www.xavierdupre.fr/app/ensae_teaching_cs/helpsphinx3/"
| [
"sphinx_rtd_theme.get_html_theme_path",
"os.path.exists",
"os.path.join",
"os.path.split",
"sys.path.append"
] | [((331, 369), 'sphinx_rtd_theme.get_html_theme_path', 'sphinx_rtd_theme.get_html_theme_path', ([], {}), '()\n', (367, 369), False, 'import sphinx_rtd_theme\n'), ((389, 430), 'os.path.join', 'os.path.join', (['source_path', '"""phdoc_static"""'], {}), "(source_path, 'phdoc_static')\n", (401, 430), False, 'import os\n'), ((452, 493), 'os.path.join', 'os.path.join', (['source_path', '"""phdoc_static"""'], {}), "(source_path, 'phdoc_static')\n", (464, 493), False, 'import os\n'), ((503, 536), 'os.path.exists', 'os.path.exists', (['templates_path[0]'], {}), '(templates_path[0])\n', (517, 536), False, 'import os\n'), ((222, 250), 'sys.path.append', 'sys.path.append', (['source_path'], {}), '(source_path)\n', (237, 250), False, 'import sys\n'), ((134, 157), 'os.path.split', 'os.path.split', (['__file__'], {}), '(__file__)\n', (147, 157), False, 'import os\n')] |
# Generated by Django 3.1.2 on 2020-10-11 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='task',
name='author',
field=models.CharField(default='Anonymous', max_length=100),
),
migrations.AlterField(
model_name='task',
name='deadline',
field=models.DateTimeField(default='2020-10-11 10:53'),
),
]
| [
"django.db.models.DateTimeField",
"django.db.models.CharField"
] | [((319, 372), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""Anonymous"""', 'max_length': '(100)'}), "(default='Anonymous', max_length=100)\n", (335, 372), False, 'from django.db import migrations, models\n'), ((494, 542), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'default': '"""2020-10-11 10:53"""'}), "(default='2020-10-11 10:53')\n", (514, 542), False, 'from django.db import migrations, models\n')] |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
import socket
from collections.abc import Iterable
from typing import Union, Tuple
from mongoengine import connect
from notification_service.event_storage import BaseEventStorage
from notification_service.base_notification import BaseEvent
from notification_service.mongo_notification import MongoEvent
class MongoEventStorage(BaseEventStorage):
def __init__(self, *args, **kwargs):
self.db_conn = self.setup_connection(**kwargs)
self.server_ip = socket.gethostbyname(socket.gethostname())
def setup_connection(self, **kwargs):
db_conf = {
"host": kwargs.get("host"),
"port": kwargs.get("port"),
"db": kwargs.get("db"),
}
username = kwargs.get("username", None)
password = kwargs.get("password", None)
authentication_source = kwargs.get("authentication_source", "admin")
if (username or password) and not (username and password):
raise Exception("Please provide valid username and password")
if username and password:
db_conf.update({
"username": username,
"password": password,
"authentication_source": authentication_source
})
return connect(**db_conf)
def get_latest_version(self, key: str, namespace: str = None):
mongo_events = MongoEvent.get_by_key(key, 0, 1, "-version")
if not mongo_events:
return 0
return mongo_events[0].version
def add_event(self, event: BaseEvent, uuid: str):
kwargs = {
"server_ip": self.server_ip,
"create_time": int(time.time() * 1000),
"event_type": event.event_type,
"key": event.key,
"value": event.value,
"context": event.context,
"namespace": event.namespace,
"sender": event.sender,
"uuid": uuid
}
mongo_event = MongoEvent(**kwargs)
mongo_event.save()
mongo_event.reload()
event.create_time = mongo_event.create_time
event.version = mongo_event.version
return event
def list_events(self,
key: Union[str, Tuple[str]],
version: int = None,
event_type: str = None,
start_time: int = None,
namespace: str = None,
sender: str = None):
key = None if key == "" else key
version = None if version == 0 else version
event_type = None if event_type == "" else event_type
namespace = None if namespace == "" else namespace
sender = None if sender == "" else sender
if isinstance(key, str):
key = (key,)
elif isinstance(key, Iterable):
key = tuple(key)
res = MongoEvent.get_base_events(key, version, event_type, start_time, namespace, sender)
return res
def list_all_events(self, start_time: int):
res = MongoEvent.get_base_events_by_time(start_time)
return res
def list_all_events_from_version(self, start_version: int, end_version: int = None):
res = MongoEvent.get_base_events_by_version(start_version, end_version)
return res
def clean_up(self):
MongoEvent.delete_by_client(self.server_ip)
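
# Illustrative usage sketch of the storage class above: connect, store an event, then read
# back the latest version for its key. The host/port/db values are placeholders and the
# BaseEvent(key=..., value=...) constructor signature is an assumption inferred from the
# attributes add_event() reads; a reachable MongoDB instance is required, so this only runs
# when the module is executed directly.
if __name__ == '__main__':
    storage = MongoEventStorage(host="127.0.0.1", port=27017, db="notification_test")
    stored = storage.add_event(BaseEvent(key="model_update", value="v1"), uuid="uuid-0001")
    print(stored.version, storage.get_latest_version(key="model_update"))
    storage.clean_up()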
| [
"notification_service.mongo_notification.MongoEvent.get_base_events",
"time.time",
"notification_service.mongo_notification.MongoEvent.get_base_events_by_time",
"notification_service.mongo_notification.MongoEvent",
"mongoengine.connect",
"notification_service.mongo_notification.MongoEvent.get_base_events_by_version",
"notification_service.mongo_notification.MongoEvent.get_by_key",
"socket.gethostname",
"notification_service.mongo_notification.MongoEvent.delete_by_client"
] | [((2045, 2063), 'mongoengine.connect', 'connect', ([], {}), '(**db_conf)\n', (2052, 2063), False, 'from mongoengine import connect\n'), ((2155, 2199), 'notification_service.mongo_notification.MongoEvent.get_by_key', 'MongoEvent.get_by_key', (['key', '(0)', '(1)', '"""-version"""'], {}), "(key, 0, 1, '-version')\n", (2176, 2199), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((2737, 2757), 'notification_service.mongo_notification.MongoEvent', 'MongoEvent', ([], {}), '(**kwargs)\n', (2747, 2757), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((3625, 3712), 'notification_service.mongo_notification.MongoEvent.get_base_events', 'MongoEvent.get_base_events', (['key', 'version', 'event_type', 'start_time', 'namespace', 'sender'], {}), '(key, version, event_type, start_time, namespace,\n sender)\n', (3651, 3712), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((3791, 3837), 'notification_service.mongo_notification.MongoEvent.get_base_events_by_time', 'MongoEvent.get_base_events_by_time', (['start_time'], {}), '(start_time)\n', (3825, 3837), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((3961, 4026), 'notification_service.mongo_notification.MongoEvent.get_base_events_by_version', 'MongoEvent.get_base_events_by_version', (['start_version', 'end_version'], {}), '(start_version, end_version)\n', (3998, 4026), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((4079, 4122), 'notification_service.mongo_notification.MongoEvent.delete_by_client', 'MongoEvent.delete_by_client', (['self.server_ip'], {}), '(self.server_ip)\n', (4106, 4122), False, 'from notification_service.mongo_notification import MongoEvent\n'), ((1288, 1308), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (1306, 1308), False, 'import socket\n'), ((2435, 2446), 'time.time', 'time.time', ([], {}), '()\n', (2444, 2446), False, 'import time\n')] |
import vtk
# Read the file (to test that it was written correctly)
reader = vtk.vtkXMLImageDataReader()
reader.SetFileName("../data/wind_image.vti")
reader.Update()
print(reader.GetOutput())
# Convert the image to a polydata
imageDataGeometryFilter = vtk.vtkImageDataGeometryFilter()
imageDataGeometryFilter.SetInputConnection(reader.GetOutputPort())
imageDataGeometryFilter.Update()
scalarRange = reader.GetOutput().GetPointData().GetScalars().GetRange(-1)
contoursFilter = vtk.vtkContourFilter()
contoursFilter.SetInputConnection(imageDataGeometryFilter.GetOutputPort())
contoursFilter.GenerateValues(60, scalarRange)
contoursMapper = vtk.vtkPolyDataMapper()
contoursMapper.SetInputConnection(contoursFilter.GetOutputPort())
contoursMapper.SetColorModeToMapScalars()
contoursMapper.ScalarVisibilityOn()
contoursMapper.SelectColorArray("JPEGImage")
contoursMapper.SetScalarRange(scalarRange)
contoursActor = vtk.vtkActor()
contoursActor.SetMapper(contoursMapper)
actor = vtk.vtkActor()
actor.SetMapper(contoursMapper)
# Setup rendering
renderer = vtk.vtkRenderer()
renderer.AddActor(actor)
renderer.SetBackground(1,1,1)
renderer.ResetCamera()
renderWindow = vtk.vtkRenderWindow()
renderWindow.AddRenderer(renderer)
renderWindowInteractor = vtk.vtkRenderWindowInteractor()
renderWindowInteractor.SetRenderWindow(renderWindow)
renderWindowInteractor.Start()
| [
"vtk.vtkContourFilter",
"vtk.vtkXMLImageDataReader",
"vtk.vtkImageDataGeometryFilter",
"vtk.vtkRenderWindow",
"vtk.vtkRenderWindowInteractor",
"vtk.vtkPolyDataMapper",
"vtk.vtkActor",
"vtk.vtkRenderer"
] | [((77, 104), 'vtk.vtkXMLImageDataReader', 'vtk.vtkXMLImageDataReader', ([], {}), '()\n', (102, 104), False, 'import vtk\n'), ((252, 284), 'vtk.vtkImageDataGeometryFilter', 'vtk.vtkImageDataGeometryFilter', ([], {}), '()\n', (282, 284), False, 'import vtk\n'), ((477, 499), 'vtk.vtkContourFilter', 'vtk.vtkContourFilter', ([], {}), '()\n', (497, 499), False, 'import vtk\n'), ((640, 663), 'vtk.vtkPolyDataMapper', 'vtk.vtkPolyDataMapper', ([], {}), '()\n', (661, 663), False, 'import vtk\n'), ((913, 927), 'vtk.vtkActor', 'vtk.vtkActor', ([], {}), '()\n', (925, 927), False, 'import vtk\n'), ((977, 991), 'vtk.vtkActor', 'vtk.vtkActor', ([], {}), '()\n', (989, 991), False, 'import vtk\n'), ((1055, 1072), 'vtk.vtkRenderer', 'vtk.vtkRenderer', ([], {}), '()\n', (1070, 1072), False, 'import vtk\n'), ((1168, 1189), 'vtk.vtkRenderWindow', 'vtk.vtkRenderWindow', ([], {}), '()\n', (1187, 1189), False, 'import vtk\n'), ((1252, 1283), 'vtk.vtkRenderWindowInteractor', 'vtk.vtkRenderWindowInteractor', ([], {}), '()\n', (1281, 1283), False, 'import vtk\n')] |
# -*- encoding: utf-8 -*-
# Copyright (c) 2019 European Organization for Nuclear Research (CERN)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from unittest import mock
from watcher.common import exception
from watcher.decision_engine.datasources import manager
from watcher.decision_engine.model import model_root
from watcher.decision_engine.strategy import strategies
from watcher.tests import base
from watcher.tests.decision_engine.model import faker_cluster_state
class TestBaseStrategy(base.TestCase):
def setUp(self):
super(TestBaseStrategy, self).setUp()
# fake cluster
self.fake_c_cluster = faker_cluster_state.FakerModelCollector()
p_c_model = mock.patch.object(
strategies.BaseStrategy, "compute_model",
new_callable=mock.PropertyMock)
self.m_c_model = p_c_model.start()
self.addCleanup(p_c_model.stop)
p_audit_scope = mock.patch.object(
strategies.BaseStrategy, "audit_scope",
new_callable=mock.PropertyMock)
self.m_audit_scope = p_audit_scope.start()
self.addCleanup(p_audit_scope.stop)
self.m_audit_scope.return_value = mock.Mock()
self.m_c_model.return_value = model_root.ModelRoot()
self.strategy = strategies.DummyStrategy(config=mock.Mock())
class TestBaseStrategyDatasource(TestBaseStrategy):
def setUp(self):
super(TestBaseStrategyDatasource, self).setUp()
self.strategy = strategies.DummyStrategy(
config=mock.Mock(datasources=None))
@mock.patch.object(strategies.BaseStrategy, 'osc', None)
@mock.patch.object(manager, 'DataSourceManager')
@mock.patch.object(strategies.base, 'CONF')
def test_global_preference(self, m_conf, m_manager):
"""Test if the global preference is used"""
m_conf.watcher_datasources.datasources = \
['gnocchi', 'monasca', 'ceilometer']
# Make sure we access the property and not the underlying function.
m_manager.return_value.get_backend.return_value = \
mock.NonCallableMock()
# Access the property so that the configuration is read in order to
# get the correct datasource
self.strategy.datasource_backend
m_manager.assert_called_once_with(
config=m_conf.watcher_datasources, osc=None)
@mock.patch.object(strategies.BaseStrategy, 'osc', None)
@mock.patch.object(manager, 'DataSourceManager')
@mock.patch.object(strategies.base, 'CONF')
def test_global_preference_reverse(self, m_conf, m_manager):
"""Test if the global preference is used with another order"""
m_conf.watcher_datasources.datasources = \
['ceilometer', 'monasca', 'gnocchi']
# Make sure we access the property and not the underlying function.
m_manager.return_value.get_backend.return_value = \
mock.NonCallableMock()
# Access the property so that the configuration is read in order to
# get the correct datasource
self.strategy.datasource_backend
m_manager.assert_called_once_with(
config=m_conf.watcher_datasources, osc=None)
@mock.patch.object(strategies.BaseStrategy, 'osc', None)
@mock.patch.object(manager, 'DataSourceManager')
@mock.patch.object(strategies.base, 'CONF')
def test_strategy_preference_override(self, m_conf, m_manager):
"""Test if the global preference can be overridden"""
datasources = mock.Mock(datasources=['ceilometer'])
self.strategy = strategies.DummyStrategy(
config=datasources)
m_conf.watcher_datasources.datasources = \
['ceilometer', 'monasca', 'gnocchi']
# Access the property so that the configuration is read in order to
# get the correct datasource
self.strategy.datasource_backend
m_manager.assert_called_once_with(
config=datasources, osc=None)
class TestBaseStrategyException(TestBaseStrategy):
def setUp(self):
super(TestBaseStrategyException, self).setUp()
def test_exception_model(self):
self.m_c_model.return_value = None
self.assertRaises(
exception.ClusterStateNotDefined, self.strategy.execute)
def test_exception_stale_cdm(self):
self.fake_c_cluster.set_cluster_data_model_as_stale()
self.m_c_model.return_value = self.fake_c_cluster.cluster_data_model
self.assertRaises(
# TODO(Dantali0n) This should return ClusterStale,
# improve set_cluster_data_model_as_stale().
exception.ClusterStateNotDefined,
self.strategy.execute)
| [
"unittest.mock.Mock",
"watcher.decision_engine.strategy.strategies.DummyStrategy",
"unittest.mock.NonCallableMock",
"watcher.decision_engine.model.model_root.ModelRoot",
"unittest.mock.patch.object",
"watcher.tests.decision_engine.model.faker_cluster_state.FakerModelCollector"
] | [((2053, 2108), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""osc"""', 'None'], {}), "(strategies.BaseStrategy, 'osc', None)\n", (2070, 2108), False, 'from unittest import mock\n'), ((2114, 2161), 'unittest.mock.patch.object', 'mock.patch.object', (['manager', '"""DataSourceManager"""'], {}), "(manager, 'DataSourceManager')\n", (2131, 2161), False, 'from unittest import mock\n'), ((2167, 2209), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.base', '"""CONF"""'], {}), "(strategies.base, 'CONF')\n", (2184, 2209), False, 'from unittest import mock\n'), ((2854, 2909), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""osc"""', 'None'], {}), "(strategies.BaseStrategy, 'osc', None)\n", (2871, 2909), False, 'from unittest import mock\n'), ((2915, 2962), 'unittest.mock.patch.object', 'mock.patch.object', (['manager', '"""DataSourceManager"""'], {}), "(manager, 'DataSourceManager')\n", (2932, 2962), False, 'from unittest import mock\n'), ((2968, 3010), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.base', '"""CONF"""'], {}), "(strategies.base, 'CONF')\n", (2985, 3010), False, 'from unittest import mock\n'), ((3682, 3737), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""osc"""', 'None'], {}), "(strategies.BaseStrategy, 'osc', None)\n", (3699, 3737), False, 'from unittest import mock\n'), ((3743, 3790), 'unittest.mock.patch.object', 'mock.patch.object', (['manager', '"""DataSourceManager"""'], {}), "(manager, 'DataSourceManager')\n", (3760, 3790), False, 'from unittest import mock\n'), ((3796, 3838), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.base', '"""CONF"""'], {}), "(strategies.base, 'CONF')\n", (3813, 3838), False, 'from unittest import mock\n'), ((1133, 1174), 'watcher.tests.decision_engine.model.faker_cluster_state.FakerModelCollector', 'faker_cluster_state.FakerModelCollector', ([], {}), '()\n', (1172, 1174), False, 'from watcher.tests.decision_engine.model import faker_cluster_state\n'), ((1196, 1292), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""compute_model"""'], {'new_callable': 'mock.PropertyMock'}), "(strategies.BaseStrategy, 'compute_model', new_callable=\n mock.PropertyMock)\n", (1213, 1292), False, 'from unittest import mock\n'), ((1421, 1515), 'unittest.mock.patch.object', 'mock.patch.object', (['strategies.BaseStrategy', '"""audit_scope"""'], {'new_callable': 'mock.PropertyMock'}), "(strategies.BaseStrategy, 'audit_scope', new_callable=mock\n .PropertyMock)\n", (1438, 1515), False, 'from unittest import mock\n'), ((1674, 1685), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1683, 1685), False, 'from unittest import mock\n'), ((1725, 1747), 'watcher.decision_engine.model.model_root.ModelRoot', 'model_root.ModelRoot', ([], {}), '()\n', (1745, 1747), False, 'from watcher.decision_engine.model import model_root\n'), ((2569, 2591), 'unittest.mock.NonCallableMock', 'mock.NonCallableMock', ([], {}), '()\n', (2589, 2591), False, 'from unittest import mock\n'), ((3397, 3419), 'unittest.mock.NonCallableMock', 'mock.NonCallableMock', ([], {}), '()\n', (3417, 3419), False, 'from unittest import mock\n'), ((3992, 4029), 'unittest.mock.Mock', 'mock.Mock', ([], {'datasources': "['ceilometer']"}), "(datasources=['ceilometer'])\n", (4001, 4029), False, 'from unittest import mock\n'), ((4055, 4099), 'watcher.decision_engine.strategy.strategies.DummyStrategy', 'strategies.DummyStrategy', 
([], {'config': 'datasources'}), '(config=datasources)\n', (4079, 4099), False, 'from watcher.decision_engine.strategy import strategies\n'), ((1804, 1815), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (1813, 1815), False, 'from unittest import mock\n'), ((2018, 2045), 'unittest.mock.Mock', 'mock.Mock', ([], {'datasources': 'None'}), '(datasources=None)\n', (2027, 2045), False, 'from unittest import mock\n')] |
import glob
import os
import sys
import utils
from recorder import StreamRec
OUTDIR = ""
def parse_args(a):
global OUTDIR
i = 1
while i < len(a):
if a[i] in ["-h", "--help", "/?"]:
usage()
if a[i] in ["-d", "--dir"]:
OUTDIR = a[i + 1]
i += 1
i += 1
def usage():
print("Record your favorite Twitch streams!")
print("Check an example of .stream file in data/ to see how to add a stream to record")
print()
print("Usage: %s [Options]" % (os.path.basename(sys.argv[0])))
print()
print("Options :")
print(" -d, --dir : Output directory")
print(" -h, --help : Help")
sys.exit(1)
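
# Example invocation (illustrative only; the script name and output directory below are
# placeholders, not names required by this project):
#   python recorder_main.py --dir /tmp/twitch-recordings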
def load_streams():
all_inst = []
stream_files = glob.glob('data/**/*.stream', recursive=True)
for stream_file in stream_files:
inst = StreamRec(stream_file, OUTDIR)
all_inst.append(inst)
for inst in all_inst:
inst.start()
for inst in all_inst:
inst.join()
def main():
utils.welcome()
parse_args(sys.argv)
utils.make_directory(OUTDIR)
load_streams()
if __name__ == '__main__':
main()
| [
"utils.welcome",
"utils.make_directory",
"os.path.basename",
"sys.exit",
"recorder.StreamRec",
"glob.glob"
] | [((681, 692), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (689, 692), False, 'import sys\n'), ((752, 797), 'glob.glob', 'glob.glob', (['"""data/**/*.stream"""'], {'recursive': '(True)'}), "('data/**/*.stream', recursive=True)\n", (761, 797), False, 'import glob\n'), ((1022, 1037), 'utils.welcome', 'utils.welcome', ([], {}), '()\n', (1035, 1037), False, 'import utils\n'), ((1067, 1095), 'utils.make_directory', 'utils.make_directory', (['OUTDIR'], {}), '(OUTDIR)\n', (1087, 1095), False, 'import utils\n'), ((850, 880), 'recorder.StreamRec', 'StreamRec', (['stream_file', 'OUTDIR'], {}), '(stream_file, OUTDIR)\n', (859, 880), False, 'from recorder import StreamRec\n'), ((529, 558), 'os.path.basename', 'os.path.basename', (['sys.argv[0]'], {}), '(sys.argv[0])\n', (545, 558), False, 'import os\n')] |
# Copyright (c) 2017 Huawei Technologies Co., Ltd.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from karbor.policies import base
GET_POLICY = 'protectable:get'
GET_ALL_POLICY = 'protectable:get_all'
INSTANCES_GET_POLICY = 'protectable:instance_get'
INSTANCES_GET_ALL_POLICY = 'protectable:instance_get_all'
protectables_policies = [
policy.DocumentedRuleDefault(
name=GET_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='Show a protectable type.',
operations=[
{
'method': 'GET',
'path': '/protectables/{protectable_type}'
}
]),
policy.DocumentedRuleDefault(
name=GET_ALL_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='List protectable types.',
operations=[
{
'method': 'GET',
'path': '/protectables'
}
]),
policy.DocumentedRuleDefault(
name=INSTANCES_GET_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='Show a protectable instance.',
operations=[
{
'method': 'GET',
'path': '/protectables/{protectable_type}/'
'instances/{resource_id}'
}
]),
policy.DocumentedRuleDefault(
name=INSTANCES_GET_ALL_POLICY,
check_str=base.RULE_ADMIN_OR_OWNER,
description='List protectable instances.',
operations=[
{
'method': 'GET',
'path': '/protectables/{protectable_type}/instances'
}
]),
]
def list_rules():
return protectables_policies
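
# A minimal sketch of how these rules are typically consumed with oslo.policy. The config
# object and the note about shared base rules are assumptions for illustration only, not
# part of karbor's actual policy plumbing.
def _example_enforcer():
    from oslo_config import cfg
    enforcer = policy.Enforcer(cfg.CONF)
    # Register the protectable rules defined above; the shared rules they reference through
    # RULE_ADMIN_OR_OWNER (defined in karbor.policies.base) must also be registered before
    # enforcer.authorize(...) is called.
    enforcer.register_defaults(list_rules())
    return enforcer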
| [
"oslo_policy.policy.DocumentedRuleDefault"
] | [((925, 1132), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'GET_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""Show a protectable type."""', 'operations': "[{'method': 'GET', 'path': '/protectables/{protectable_type}'}]"}), "(name=GET_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='Show a protectable type.', operations\n =[{'method': 'GET', 'path': '/protectables/{protectable_type}'}])\n", (953, 1132), False, 'from oslo_policy import policy\n'), ((1229, 1420), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'GET_ALL_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""List protectable types."""', 'operations': "[{'method': 'GET', 'path': '/protectables'}]"}), "(name=GET_ALL_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='List protectable types.', operations=\n [{'method': 'GET', 'path': '/protectables'}])\n", (1257, 1420), False, 'from oslo_policy import policy\n'), ((1517, 1765), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'INSTANCES_GET_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""Show a protectable instance."""', 'operations': "[{'method': 'GET', 'path':\n '/protectables/{protectable_type}/instances/{resource_id}'}]"}), "(name=INSTANCES_GET_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='Show a protectable instance.',\n operations=[{'method': 'GET', 'path':\n '/protectables/{protectable_type}/instances/{resource_id}'}])\n", (1545, 1765), False, 'from oslo_policy import policy\n'), ((1886, 2123), 'oslo_policy.policy.DocumentedRuleDefault', 'policy.DocumentedRuleDefault', ([], {'name': 'INSTANCES_GET_ALL_POLICY', 'check_str': 'base.RULE_ADMIN_OR_OWNER', 'description': '"""List protectable instances."""', 'operations': "[{'method': 'GET', 'path': '/protectables/{protectable_type}/instances'}]"}), "(name=INSTANCES_GET_ALL_POLICY, check_str=base.\n RULE_ADMIN_OR_OWNER, description='List protectable instances.',\n operations=[{'method': 'GET', 'path':\n '/protectables/{protectable_type}/instances'}])\n", (1914, 2123), False, 'from oslo_policy import policy\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# routers are dictionaries of URL routing parameters.
#
# For each request, the effective router is:
# the built-in default base router (shown below),
# updated by the BASE router in routes.py routers,
# updated by the app-specific router in routes.py routers (if any),
#    updated by the app-specific router from applications/app/routes.py routers (if any)
#
#
# Router members:
#
# default_application: default application name
# applications: list of all recognized applications, or 'ALL' to use all currently installed applications
# Names in applications are always treated as an application names when they appear first in an incoming URL.
# Set applications to None to disable the removal of application names from outgoing URLs.
# domains: optional dict mapping domain names to application names
# The domain name can include a port number: domain.com:8080
# The application name can include a controller: appx/ctlrx
# path_prefix: a path fragment that is prefixed to all outgoing URLs and stripped from all incoming URLs
#
# Note: default_application, applications, domains & path_prefix are permitted only in the BASE router,
# and domain makes sense only in an application-specific router.
# The remaining members can appear in the BASE router (as defaults for all applications)
# or in application-specific routers.
#
# default_controller: name of default controller
# default_function: name of default function (all controllers)
# controllers: list of valid controllers in selected app
# or "DEFAULT" to use all controllers in the selected app plus 'static'
# or None to disable controller-name removal.
# Names in controllers are always treated as controller names when they appear in an incoming URL after
# the (optional) application and language names.
# languages: list of all supported languages
#       Names in languages are always treated as language names when they appear in an incoming URL after
# the (optional) application name.
# default_language
# The language code (for example: en, it-it) optionally appears in the URL following
# the application (which may be omitted). For incoming URLs, the code is copied to
# request.language; for outgoing URLs it is taken from request.language.
# If languages=None, language support is disabled.
# The default_language, if any, is omitted from the URL.
# root_static: list of static files accessed from root
# (mapped to the current application's static/ directory)
# Each application has its own root-static files.
# domain: the domain that maps to this application (alternative to using domains in the BASE router)
# map_hyphen: If True (default), hyphens in incoming /a/c/f fields are converted to underscores,
# and back to hyphens in outgoing URLs. Language, args and the query string are not affected.
# map_static: By default, the default application is not stripped from static URLs. Set map_static=True
# to override this policy.
# acfe_match: regex for valid application, controller, function, extension /a/c/f.e
# file_match: regex for valid file (used for static file names)
# args_match: regex for valid args
#       If it is changed, the application should perform its own validation.
# If it is changed, the application perform its own validation.
#
#
# The built-in default router supplies default values (undefined members are None):
#
# default_router = dict(
# default_application = 'init',
# applications = 'ALL',
# default_controller = 'default',
# controllers = 'DEFAULT',
# default_function = 'index',
# default_language = None,
# languages = None,
# root_static = ['favicon.ico', 'robots.txt'],
# domains = None,
# map_hyphen = True,
# acfe_match = r'\w+$', # legal app/ctlr/fcn/ext
# file_match = r'(\w+[-=./]?)+$', # legal file (path) name
# args_match = r'([\w@ -]+[=.]?)+$', # legal arg in args
# )
#
# See rewrite.map_url_in() and rewrite.map_url_out() for implementation details.
# This simple router set overrides only the default application name,
# but provides full rewrite functionality.
routers = dict(
# base router
BASE = dict(
default_application = 'welcome',
),
# 'admin' application router
admin = dict(
controllers = [], # don't remove controller names from admin URLs
map_hyphen = False, # don't map hyphens to underscores
),
)
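
# Illustrative only: a BASE router that also uses the domains, path_prefix and language
# members documented above. The domain names, prefix and language codes are placeholders,
# not values required by web2py; uncomment and adapt as needed.
#
# routers = dict(
#     BASE = dict(
#         default_application = 'welcome',
#         domains = {'example.com': 'welcome', 'admin.example.com:8000': 'admin'},
#         path_prefix = 'myprefix',
#         languages = ['en', 'it-it'],
#         default_language = 'en',
#     ),
# )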
# Error-handling redirects all HTTP errors (status codes >= 400) to a specified
# path. If you wish to use error-handling redirects, uncomment the tuple
# below. You can customize responses by adding a tuple entry with the first
# value in 'appName/HTTPstatusCode' format. ( Only HTTP codes >= 400 are
# routed. ) and the value as a path to redirect the user to. You may also use
# '*' as a wildcard.
#
# The error handling page is also passed the error code and ticket as
# variables. Traceback information will be stored in the ticket.
#
# routes_onerror = [
# (r'init/400', r'/init/default/login')
# ,(r'init/*', r'/init/static/fail.html')
# ,(r'*/404', r'/init/static/cantfind.html')
# ,(r'*/*', r'/init/error/index')
# ]
# specify action in charge of error handling
#
# error_handler = dict(application='error',
# controller='default',
# function='index')
# In the event that the error-handling page itself returns an error, web2py will
# fall back to its old static responses. You can customize them here.
# ErrorMessageTicket takes a string format dictionary containing (only) the
# "ticket" key.
# error_message = '<html><body><h1>Invalid request</h1></body></html>'
# error_message_ticket = '<html><body><h1>Internal error</h1>Ticket issued: <a href="/admin/default/ticket/%(ticket)s" target="_blank">%(ticket)s</a></body></html>'
def __routes_doctest():
'''
Dummy function for doctesting routes.py.
Use filter_url() to test incoming or outgoing routes;
filter_err() for error redirection.
filter_url() accepts overrides for method and remote host:
filter_url(url, method='get', remote='0.0.0.0', out=False)
filter_err() accepts overrides for application and ticket:
filter_err(status, application='app', ticket='tkt')
>>> import os
>>> import gluon.main
>>> from gluon.rewrite import load, filter_url, filter_err, get_effective_router
>>> load(routes=os.path.basename(__file__))
>>> filter_url('http://domain.com/abc', app=True)
'welcome'
>>> filter_url('http://domain.com/welcome', app=True)
'welcome'
>>> os.path.relpath(filter_url('http://domain.com/favicon.ico'))
'applications/welcome/static/favicon.ico'
>>> filter_url('http://domain.com/abc')
'/welcome/default/abc'
>>> filter_url('http://domain.com/index/abc')
"/welcome/default/index ['abc']"
>>> filter_url('http://domain.com/default/abc.css')
'/welcome/default/abc.css'
>>> filter_url('http://domain.com/default/index/abc')
"/welcome/default/index ['abc']"
>>> filter_url('http://domain.com/default/index/a bc')
"/welcome/default/index ['a bc']"
>>> filter_url('http://domain.com/admin/bad!ctl')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid controller]
>>> filter_url('http://domain.com/admin/ctl/bad!fcn')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid function]
>>> filter_url('http://domain.com/admin/ctl/fcn.bad!ext')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid extension]
>>> filter_url('http://domain.com/admin/ctl/fcn/bad!arg')
Traceback (most recent call last):
...
HTTP: 400 BAD REQUEST [invalid arg <bad!arg>]
>>> filter_url('https://domain.com/app/ctr/fcn', out=True)
'/app/ctr/fcn'
>>> filter_url('https://domain.com/welcome/ctr/fcn', out=True)
'/ctr/fcn'
>>> filter_url('https://domain.com/welcome/default/fcn', out=True)
'/fcn'
>>> filter_url('https://domain.com/welcome/default/index', out=True)
'/'
>>> filter_url('https://domain.com/welcome/appadmin/index', out=True)
'/appadmin'
>>> filter_url('http://domain.com/welcome/default/fcn?query', out=True)
'/fcn?query'
>>> filter_url('http://domain.com/welcome/default/fcn#anchor', out=True)
'/fcn#anchor'
>>> filter_url('http://domain.com/welcome/default/fcn?query#anchor', out=True)
'/fcn?query#anchor'
>>> filter_url('http://domain.com/appadmin/fcn-1')
'/welcome/appadmin/fcn_1'
>>> filter_url('http://domain.com/welcome/appadmin/fcn_1', out=True)
'/appadmin/fcn-1'
>>> filter_url('http://domain.com/examples/appadmin/fcn-1')
'/examples/appadmin/fcn_1'
>>> filter_url('http://domain.com/examples/appadmin/fcn_1', out=True)
'/examples/appadmin/fcn-1'
>>> filter_url('http://domain.com/app/static/filename-with_underscore', out=True)
'/app/static/filename-with_underscore'
>>> os.path.relpath(filter_url('http://domain.com/admin/static/filename-with_underscore'))
'applications/admin/static/filename-with_underscore'
>>> filter_err(200)
200
>>> filter_err(399)
399
>>> filter_err(400)
400
'''
pass
if __name__ == '__main__':
import doctest
doctest.testmod()
| [
"doctest.testmod"
] | [((9489, 9506), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (9504, 9506), False, 'import doctest\n')] |
# ________
# /
# \ /
# \ /
# \/
import random
import textwrap
import emd_mean
import AdvEMDpy
import emd_basis
import emd_utils
import numpy as np
import pandas as pd
import cvxpy as cvx
import seaborn as sns
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from scipy.ndimage import gaussian_filter
from emd_utils import time_extension, Utility
from scipy.interpolate import CubicSpline
from emd_hilbert import Hilbert, hilbert_spectrum
from emd_preprocess import Preprocess
from emd_mean import Fluctuation
from AdvEMDpy import EMD
# alternate packages
from PyEMD import EMD as pyemd0215
import emd as emd040
sns.set(style='darkgrid')
pseudo_alg_time = np.linspace(0, 2 * np.pi, 1001)
pseudo_alg_time_series = np.sin(pseudo_alg_time) + np.sin(5 * pseudo_alg_time)
pseudo_utils = Utility(time=pseudo_alg_time, time_series=pseudo_alg_time_series)
# plot 0 - addition
fig = plt.figure(figsize=(9, 4))
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('First Iteration of Sifting Algorithm')
plt.plot(pseudo_alg_time, pseudo_alg_time_series, label=r'$h_{(1,0)}(t)$', zorder=1)
plt.scatter(pseudo_alg_time[pseudo_utils.max_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.max_bool_func_1st_order_fd()],
c='r', label=r'$M(t_i)$', zorder=2)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) + 1, '--', c='r', label=r'$\tilde{h}_{(1,0)}^M(t)$', zorder=4)
plt.scatter(pseudo_alg_time[pseudo_utils.min_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.min_bool_func_1st_order_fd()],
c='c', label=r'$m(t_j)$', zorder=3)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) - 1, '--', c='c', label=r'$\tilde{h}_{(1,0)}^m(t)$', zorder=5)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time), '--', c='purple', label=r'$\tilde{h}_{(1,0)}^{\mu}(t)$', zorder=5)
plt.yticks(ticks=[-2, -1, 0, 1, 2])
plt.xticks(ticks=[0, np.pi, 2 * np.pi],
labels=[r'0', r'$\pi$', r'$2\pi$'])
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.95, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/pseudo_algorithm.png')
plt.show()
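
# The figure above illustrates one sifting step: splines through the maxima M(t_i) and the
# minima m(t_j) form upper and lower envelopes whose pointwise average is the local mean
# subtracted from h_(1,0). A minimal numerical sketch of that mean follows; it uses SciPy's
# CubicSpline purely for illustration, not the B-spline envelopes AdvEMDpy itself fits.
pseudo_max_bool = pseudo_utils.max_bool_func_1st_order_fd()
pseudo_min_bool = pseudo_utils.min_bool_func_1st_order_fd()
upper_envelope = CubicSpline(pseudo_alg_time[pseudo_max_bool],
                            pseudo_alg_time_series[pseudo_max_bool])(pseudo_alg_time)
lower_envelope = CubicSpline(pseudo_alg_time[pseudo_min_bool],
                            pseudo_alg_time_series[pseudo_min_bool])(pseudo_alg_time)
local_mean_estimate = (upper_envelope + lower_envelope) / 2  # close to sin(t) away from the edges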
knots = np.arange(12)
time = np.linspace(0, 11, 1101)
basis = emd_basis.Basis(time=time, time_series=time)
b_spline_basis = basis.cubic_b_spline(knots)
chsi_basis = basis.chsi_basis(knots)
# plot 1
plt.title('Non-Natural Cubic B-Spline Bases at Boundary')
plt.plot(time[500:], b_spline_basis[2, 500:].T, '--', label=r'$ B_{-3,4}(t) $')
plt.plot(time[500:], b_spline_basis[3, 500:].T, '--', label=r'$ B_{-2,4}(t) $')
plt.plot(time[500:], b_spline_basis[4, 500:].T, '--', label=r'$ B_{-1,4}(t) $')
plt.plot(time[500:], b_spline_basis[5, 500:].T, '--', label=r'$ B_{0,4}(t) $')
plt.plot(time[500:], b_spline_basis[6, 500:].T, '--', label=r'$ B_{1,4}(t) $')
plt.xticks([5, 6], [r'$ \tau_0 $', r'$ \tau_1 $'])
plt.xlim(4.4, 6.6)
plt.plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.legend(loc='upper left')
plt.savefig('jss_figures/boundary_bases.png')
plt.show()
# plot 1a - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
knots_uniform = np.linspace(0, 2 * np.pi, 51)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs = emd.empirical_mode_decomposition(knots=knots_uniform, edge_effect='anti-symmetric', verbose=False)[0]
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Uniform Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, Linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Uniform Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], Linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Uniform Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], Linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots_uniform)):
axs[i].plot(knots_uniform[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_uniform.png')
plt.show()
# plot 1b - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=1, verbose=False)
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Statically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, Linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Statically Optimised Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], Linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Statically Optimised Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], Linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots)):
axs[i].plot(knots[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_1.png')
plt.show()
# plot 1c - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=2, verbose=False)
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Dynamically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, Linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Dynamically Optimised Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], Linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Dynamically Optimised Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], Linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[1][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[2][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots[i])):
axs[i].plot(knots[i][j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_2.png')
plt.show()
# plot 1d - addition
window = 81
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Filtering Demonstration')
axs[1].set_title('Zoomed Region')
preprocess_time = pseudo_alg_time.copy()
np.random.seed(1)
random.seed(1)
preprocess_time_series = pseudo_alg_time_series + np.random.normal(0, 0.1, len(preprocess_time))
for i in random.sample(range(1000), 500):
preprocess_time_series[i] += np.random.normal(0, 1)
preprocess = Preprocess(time=preprocess_time, time_series=preprocess_time_series)
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[0].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[0].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[0].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple', label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[1].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[1].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[1].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_filter.png')
plt.show()
# plot 1e - addition
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Smoothing Demonstration')
axs[1].set_title('Zoomed Region')
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[0].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
downsampled_and_decimated = preprocess.downsample()
axs[0].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 11))
downsampled = preprocess.downsample(decimate=False)
axs[0].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[1].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
axs[1].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 13))
axs[1].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.06, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.06, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_smooth.png')
plt.show()
# plot 2
fig, axs = plt.subplots(1, 2, sharey=True)
axs[0].set_title('Cubic B-Spline Bases')
axs[0].plot(time, b_spline_basis[2, :].T, '--', label='Basis 1')
axs[0].plot(time, b_spline_basis[3, :].T, '--', label='Basis 2')
axs[0].plot(time, b_spline_basis[4, :].T, '--', label='Basis 3')
axs[0].plot(time, b_spline_basis[5, :].T, '--', label='Basis 4')
axs[0].legend(loc='upper left')
axs[0].plot(5 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].plot(6 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].set_xticks([5, 6])
axs[0].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[0].set_xlim(4.5, 6.5)
axs[1].set_title('Cubic Hermite Spline Bases')
axs[1].plot(time, chsi_basis[10, :].T, '--')
axs[1].plot(time, chsi_basis[11, :].T, '--')
axs[1].plot(time, chsi_basis[12, :].T, '--')
axs[1].plot(time, chsi_basis[13, :].T, '--')
axs[1].plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].set_xticks([5, 6])
axs[1].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[1].set_xlim(4.5, 6.5)
plt.savefig('jss_figures/comparing_bases.png')
plt.show()
# plot 3
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_dash = maxima_y[-1] * np.ones_like(max_dash_time)
min_dash_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_dash = minima_y[-1] * np.ones_like(min_dash_time)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
max_discard = maxima_y[-1]
max_discard_time = minima_x[-1] - maxima_x[-1] + minima_x[-1]
max_discard_dash_time = np.linspace(max_discard_time - width, max_discard_time + width, 101)
max_discard_dash = max_discard * np.ones_like(max_discard_dash_time)
dash_2_time = np.linspace(minima_x[-1], max_discard_time, 101)
dash_2 = np.linspace(minima_y[-1], max_discard, 101)
end_point_time = time[-1]
end_point = time_series[-1]
time_reflect = np.linspace((5 - a) * np.pi, (5 + a) * np.pi, 101)
time_series_reflect = np.flip(np.cos(np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)) + np.cos(5 * np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)))
time_series_anti_reflect = time_series_reflect[0] - time_series_reflect
utils = emd_utils.Utility(time=time, time_series=time_series_anti_reflect)
anti_max_bool = utils.max_bool_func_1st_order_fd()
anti_max_point_time = time_reflect[anti_max_bool]
anti_max_point = time_series_anti_reflect[anti_max_bool]
utils = emd_utils.Utility(time=time, time_series=time_series_reflect)
no_anchor_max_time = time_reflect[utils.max_bool_func_1st_order_fd()]
no_anchor_max = time_series_reflect[utils.max_bool_func_1st_order_fd()]
point_1 = 5.4
length_distance = np.linspace(maxima_y[-1], minima_y[-1], 101)
length_distance_time = point_1 * np.pi * np.ones_like(length_distance)
length_time = np.linspace(point_1 * np.pi - width, point_1 * np.pi + width, 101)
length_top = maxima_y[-1] * np.ones_like(length_time)
length_bottom = minima_y[-1] * np.ones_like(length_time)
point_2 = 5.2
length_distance_2 = np.linspace(time_series[-1], minima_y[-1], 101)
length_distance_time_2 = point_2 * np.pi * np.ones_like(length_distance_2)
length_time_2 = np.linspace(point_2 * np.pi - width, point_2 * np.pi + width, 101)
length_top_2 = time_series[-1] * np.ones_like(length_time_2)
length_bottom_2 = minima_y[-1] * np.ones_like(length_time_2)
symmetry_axis_1_time = minima_x[-1] * np.ones(101)
symmetry_axis_2_time = time[-1] * np.ones(101)
symmetry_axis = np.linspace(-2, 2, 101)
end_time = np.linspace(time[-1] - width, time[-1] + width, 101)
end_signal = time_series[-1] * np.ones_like(end_time)
anti_symmetric_time = np.linspace(time[-1] - 0.5, time[-1] + 0.5, 101)
anti_symmetric_signal = time_series[-1] * np.ones_like(anti_symmetric_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, LineWidth=2, label='Signal')
plt.title('Symmetry Edge Effects Example')
plt.plot(time_reflect, time_series_reflect, 'g--', LineWidth=2, label=textwrap.fill('Symmetric signal', 10))
plt.plot(time_reflect[:51], time_series_anti_reflect[:51], '--', c='purple', LineWidth=2,
label=textwrap.fill('Anti-symmetric signal', 10))
plt.plot(max_dash_time, max_dash, 'k-')
plt.plot(min_dash_time, min_dash, 'k-')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(length_distance_time, length_distance, 'k--')
plt.plot(length_distance_time_2, length_distance_2, 'k--')
plt.plot(length_time, length_top, 'k-')
plt.plot(length_time, length_bottom, 'k-')
plt.plot(length_time_2, length_top_2, 'k-')
plt.plot(length_time_2, length_bottom_2, 'k-')
plt.plot(end_time, end_signal, 'k-')
plt.plot(symmetry_axis_1_time, symmetry_axis, 'r--', zorder=1)
plt.plot(anti_symmetric_time, anti_symmetric_signal, 'r--', zorder=1)
plt.plot(symmetry_axis_2_time, symmetry_axis, 'r--', label=textwrap.fill('Axes of symmetry', 10), zorder=1)
plt.text(5.1 * np.pi, -0.7, r'$\beta$L')
plt.text(5.34 * np.pi, -0.05, 'L')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(max_discard_time, max_discard, c='purple', zorder=4, label=textwrap.fill('Symmetric Discard maxima', 10))
plt.scatter(end_point_time, end_point, c='orange', zorder=4, label=textwrap.fill('Symmetric Anchor maxima', 10))
plt.scatter(anti_max_point_time, anti_max_point, c='green', zorder=4, label=textwrap.fill('Anti-Symmetric maxima', 10))
plt.scatter(no_anchor_max_time, no_anchor_max, c='gray', zorder=4, label=textwrap.fill('Symmetric maxima', 10))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_symmetry_anti.png')
plt.show()
# plot 4
a = 0.21
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_1 = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_dash_2 = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_dash_time_1 = maxima_x[-1] * np.ones_like(max_dash_1)
max_dash_time_2 = maxima_x[-2] * np.ones_like(max_dash_1)
min_dash_1 = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_dash_2 = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_dash_time_1 = minima_x[-1] * np.ones_like(min_dash_1)
min_dash_time_2 = minima_x[-2] * np.ones_like(min_dash_1)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
dash_2_time = np.linspace(maxima_x[-1], minima_x[-2], 101)
dash_2 = np.linspace(maxima_y[-1], minima_y[-2], 101)
s1 = (minima_y[-2] - maxima_y[-1]) / (minima_x[-2] - maxima_x[-1])
slope_based_maximum_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
slope_based_maximum = minima_y[-1] + (slope_based_maximum_time - minima_x[-1]) * s1
max_dash_time_3 = slope_based_maximum_time * np.ones_like(max_dash_1)
max_dash_3 = np.linspace(slope_based_maximum - width, slope_based_maximum + width, 101)
dash_3_time = np.linspace(minima_x[-1], slope_based_maximum_time, 101)
dash_3 = np.linspace(minima_y[-1], slope_based_maximum, 101)
s2 = (minima_y[-1] - maxima_y[-1]) / (minima_x[-1] - maxima_x[-1])
slope_based_minimum_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
slope_based_minimum = slope_based_maximum - (slope_based_maximum_time - slope_based_minimum_time) * s2
min_dash_time_3 = slope_based_minimum_time * np.ones_like(min_dash_1)
min_dash_3 = np.linspace(slope_based_minimum - width, slope_based_minimum + width, 101)
dash_4_time = np.linspace(slope_based_maximum_time, slope_based_minimum_time)
dash_4 = np.linspace(slope_based_maximum, slope_based_minimum)
maxima_dash = np.linspace(2.5 - width, 2.5 + width, 101)
maxima_dash_time_1 = maxima_x[-2] * np.ones_like(maxima_dash)
maxima_dash_time_2 = maxima_x[-1] * np.ones_like(maxima_dash)
maxima_dash_time_3 = slope_based_maximum_time * np.ones_like(maxima_dash)
maxima_line_dash_time = np.linspace(maxima_x[-2], slope_based_maximum_time, 101)
maxima_line_dash = 2.5 * np.ones_like(maxima_line_dash_time)
minima_dash = np.linspace(-3.4 - width, -3.4 + width, 101)
minima_dash_time_1 = minima_x[-2] * np.ones_like(minima_dash)
minima_dash_time_2 = minima_x[-1] * np.ones_like(minima_dash)
minima_dash_time_3 = slope_based_minimum_time * np.ones_like(minima_dash)
minima_line_dash_time = np.linspace(minima_x[-2], slope_based_minimum_time, 101)
minima_line_dash = -3.4 * np.ones_like(minima_line_dash_time)
# slightly edit signal to make difference between slope-based method and improved slope-based method more clear
time_series[time >= minima_x[-1]] = 1.5 * (time_series[time >= minima_x[-1]] - time_series[time == minima_x[-1]]) + \
time_series[time == minima_x[-1]]
improved_slope_based_maximum_time = time[-1]
improved_slope_based_maximum = time_series[-1]
improved_slope_based_minimum_time = slope_based_minimum_time
improved_slope_based_minimum = improved_slope_based_maximum + s2 * (improved_slope_based_minimum_time -
improved_slope_based_maximum_time)
min_dash_4 = np.linspace(improved_slope_based_minimum - width, improved_slope_based_minimum + width, 101)
min_dash_time_4 = improved_slope_based_minimum_time * np.ones_like(min_dash_4)
dash_final_time = np.linspace(improved_slope_based_maximum_time, improved_slope_based_minimum_time, 101)
dash_final = np.linspace(improved_slope_based_maximum, improved_slope_based_minimum, 101)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, LineWidth=2, label='Signal')
plt.title('Slope-Based Edge Effects Example')
plt.plot(max_dash_time_1, max_dash_1, 'k-')
plt.plot(max_dash_time_2, max_dash_2, 'k-')
plt.plot(max_dash_time_3, max_dash_3, 'k-')
plt.plot(min_dash_time_1, min_dash_1, 'k-')
plt.plot(min_dash_time_2, min_dash_2, 'k-')
plt.plot(min_dash_time_3, min_dash_3, 'k-')
plt.plot(min_dash_time_4, min_dash_4, 'k-')
plt.plot(maxima_dash_time_1, maxima_dash, 'k-')
plt.plot(maxima_dash_time_2, maxima_dash, 'k-')
plt.plot(maxima_dash_time_3, maxima_dash, 'k-')
plt.plot(minima_dash_time_1, minima_dash, 'k-')
plt.plot(minima_dash_time_2, minima_dash, 'k-')
plt.plot(minima_dash_time_3, minima_dash, 'k-')
plt.text(4.34 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.74 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.12 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.50 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.30 * np.pi, 0.35, r'$s_1$')
plt.text(4.43 * np.pi, -0.20, r'$s_2$')
plt.text(4.30 * np.pi + (minima_x[-1] - minima_x[-2]), 0.35 + (minima_y[-1] - minima_y[-2]), r'$s_1$')
plt.text(4.43 * np.pi + (slope_based_minimum_time - minima_x[-1]),
-0.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.text(4.50 * np.pi + (slope_based_minimum_time - minima_x[-1]),
1.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.plot(minima_line_dash_time, minima_line_dash, 'k--')
plt.plot(maxima_line_dash_time, maxima_line_dash, 'k--')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(dash_3_time, dash_3, 'k--')
plt.plot(dash_4_time, dash_4, 'k--')
plt.plot(dash_final_time, dash_final, 'k--')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(slope_based_maximum_time, slope_based_maximum, c='orange', zorder=4,
label=textwrap.fill('Slope-based maximum', 11))
plt.scatter(slope_based_minimum_time, slope_based_minimum, c='purple', zorder=4,
label=textwrap.fill('Slope-based minimum', 11))
plt.scatter(improved_slope_based_maximum_time, improved_slope_based_maximum, c='deeppink', zorder=4,
label=textwrap.fill('Improved slope-based maximum', 11))
plt.scatter(improved_slope_based_minimum_time, improved_slope_based_minimum, c='dodgerblue', zorder=4,
label=textwrap.fill('Improved slope-based minimum', 11))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-3, -2, -1, 0, 1, 2), ('-3', '-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_slope_based.png')
plt.show()
# plot 5
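# Characteristic-wave edge effects: Huang and Coughlin characteristic waves, and
# simple averages of the final two maxima and minima, are constructed near the
# right boundary of cos(t) + cos(5t) and compared against the true extrema.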
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
A2 = np.abs(maxima_y[-2] - minima_y[-2]) / 2
A1 = np.abs(maxima_y[-1] - minima_y[-1]) / 2
P2 = 2 * np.abs(maxima_x[-2] - minima_x[-2])
P1 = 2 * np.abs(maxima_x[-1] - minima_x[-1])
Huang_time = (P1 / P2) * (time[time >= maxima_x[-2]] - time[time == maxima_x[-2]]) + maxima_x[-1]
Huang_wave = (A1 / A2) * (time_series[time >= maxima_x[-2]] - time_series[time == maxima_x[-2]]) + maxima_y[-1]
Coughlin_time = Huang_time
Coughlin_wave = A1 * np.cos(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))
Average_max_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
Average_max = (maxima_y[-2] + maxima_y[-1]) / 2
Average_min_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
Average_min = (minima_y[-2] + minima_y[-1]) / 2
utils_Huang = emd_utils.Utility(time=time, time_series=Huang_wave)
Huang_max_bool = utils_Huang.max_bool_func_1st_order_fd()
Huang_min_bool = utils_Huang.min_bool_func_1st_order_fd()
utils_Coughlin = emd_utils.Utility(time=time, time_series=Coughlin_wave)
Coughlin_max_bool = utils_Coughlin.max_bool_func_1st_order_fd()
Coughlin_min_bool = utils_Coughlin.min_bool_func_1st_order_fd()
Huang_max_time = Huang_time[Huang_max_bool]
Huang_max = Huang_wave[Huang_max_bool]
Huang_min_time = Huang_time[Huang_min_bool]
Huang_min = Huang_wave[Huang_min_bool]
Coughlin_max_time = Coughlin_time[Coughlin_max_bool]
Coughlin_max = Coughlin_wave[Coughlin_max_bool]
Coughlin_min_time = Coughlin_time[Coughlin_min_bool]
Coughlin_min = Coughlin_wave[Coughlin_min_bool]
max_2_x_time = np.linspace(maxima_x[-2] - width, maxima_x[-2] + width, 101)
max_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
max_2_x = maxima_y[-2] * np.ones_like(max_2_x_time)
min_2_x_time = np.linspace(minima_x[-2] - width, minima_x[-2] + width, 101)
min_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
min_2_x = minima_y[-2] * np.ones_like(min_2_x_time)
dash_max_min_2_x = np.linspace(minima_y[-2], maxima_y[-2], 101)
dash_max_min_2_x_time = 5.3 * np.pi * np.ones_like(dash_max_min_2_x)
max_2_y = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
max_2_y_time = maxima_x[-2] * np.ones_like(max_2_y)
min_2_y = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
min_2_y_time = minima_x[-2] * np.ones_like(min_2_y)
dash_max_min_2_y_time = np.linspace(minima_x[-2], maxima_x[-2], 101)
dash_max_min_2_y = -1.8 * np.ones_like(dash_max_min_2_y_time)
max_1_x_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
max_1_x = maxima_y[-1] * np.ones_like(max_1_x_time)
min_1_x_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
min_1_x = minima_y[-1] * np.ones_like(min_1_x_time)
dash_max_min_1_x = np.linspace(minima_y[-1], maxima_y[-1], 101)
dash_max_min_1_x_time = 5.4 * np.pi * np.ones_like(dash_max_min_1_x)
max_1_y = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
max_1_y_time = maxima_x[-1] * np.ones_like(max_1_y)
min_1_y = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
min_1_y_time = minima_x[-1] * np.ones_like(min_1_y)
dash_max_min_1_y_time = np.linspace(minima_x[-1], maxima_x[-1], 101)
dash_max_min_1_y = -2.1 * np.ones_like(dash_max_min_1_y_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Characteristic Wave Effects Example')
plt.plot(time, time_series, linewidth=2, label='Signal')
plt.scatter(Huang_max_time, Huang_max, c='magenta', zorder=4, label=textwrap.fill('Huang maximum', 10))
plt.scatter(Huang_min_time, Huang_min, c='lime', zorder=4, label=textwrap.fill('Huang minimum', 10))
plt.scatter(Coughlin_max_time, Coughlin_max, c='darkorange', zorder=4,
label=textwrap.fill('Coughlin maximum', 14))
plt.scatter(Coughlin_min_time, Coughlin_min, c='dodgerblue', zorder=4,
label=textwrap.fill('Coughlin minimum', 14))
plt.scatter(Average_max_time, Average_max, c='orangered', zorder=4,
label=textwrap.fill('Average maximum', 14))
plt.scatter(Average_min_time, Average_min, c='cyan', zorder=4,
label=textwrap.fill('Average minimum', 14))
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.plot(Huang_time, Huang_wave, '--', c='darkviolet', label=textwrap.fill('Huang Characteristic Wave', 14))
plt.plot(Coughlin_time, Coughlin_wave, '--', c='darkgreen', label=textwrap.fill('Coughlin Characteristic Wave', 14))
plt.plot(max_2_x_time, max_2_x, 'k-')
plt.plot(max_2_x_time_side, max_2_x, 'k-')
plt.plot(min_2_x_time, min_2_x, 'k-')
plt.plot(min_2_x_time_side, min_2_x, 'k-')
plt.plot(dash_max_min_2_x_time, dash_max_min_2_x, 'k--')
plt.text(5.16 * np.pi, 0.85, r'$2a_2$')
plt.plot(max_2_y_time, max_2_y, 'k-')
plt.plot(max_2_y_time, max_2_y_side, 'k-')
plt.plot(min_2_y_time, min_2_y, 'k-')
plt.plot(min_2_y_time, min_2_y_side, 'k-')
plt.plot(dash_max_min_2_y_time, dash_max_min_2_y, 'k--')
plt.text(4.08 * np.pi, -2.2, r'$\frac{p_2}{2}$')
plt.plot(max_1_x_time, max_1_x, 'k-')
plt.plot(max_1_x_time_side, max_1_x, 'k-')
plt.plot(min_1_x_time, min_1_x, 'k-')
plt.plot(min_1_x_time_side, min_1_x, 'k-')
plt.plot(dash_max_min_1_x_time, dash_max_min_1_x, 'k--')
plt.text(5.42 * np.pi, -0.1, r'$2a_1$')
plt.plot(max_1_y_time, max_1_y, 'k-')
plt.plot(max_1_y_time, max_1_y_side, 'k-')
plt.plot(min_1_y_time, min_1_y, 'k-')
plt.plot(min_1_y_time, min_1_y_side, 'k-')
plt.plot(dash_max_min_1_y_time, dash_max_min_1_y, 'k--')
plt.text(4.48 * np.pi, -2.5, r'$\frac{p_1}{2}$')
plt.xlim(3.9 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_characteristic_wave.png')
plt.show()
# plot 6
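# Single-neuron network edge effect: the signal is extended past both boundaries
# with a one-neuron autoregressive model (forward by gradient descent, backward by
# convex optimisation) so that extrapolated extrema are available at the edges.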
t = np.linspace(5, 95, 100)
signal_orig = np.cos(2 * np.pi * t / 50) + 0.6 * np.cos(2 * np.pi * t / 25) + 0.5 * np.sin(2 * np.pi * t / 200)
util_nn = emd_utils.Utility(time=t, time_series=signal_orig)
maxima = signal_orig[util_nn.max_bool_func_1st_order_fd()]
minima = signal_orig[util_nn.min_bool_func_1st_order_fd()]
cs_max = CubicSpline(t[util_nn.max_bool_func_1st_order_fd()], maxima)
cs_min = CubicSpline(t[util_nn.min_bool_func_1st_order_fd()], minima)
time = np.linspace(0, 5 * np.pi, 1001)
lsq_signal = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 101)
time_extended = time_extension(time)
time_series_extended = np.full_like(time_extended, np.nan)  # NaN-filled container for the extended signal
time_series_extended[int(len(lsq_signal) - 1):int(2 * (len(lsq_signal) - 1) + 1)] = lsq_signal
neural_network_m = 200
neural_network_k = 100
# forward ->
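# Build the training set for the forward extension: each column of P holds
# neural_network_k consecutive lagged samples (plus a constant in the last row),
# and the targets t are the final neural_network_m samples of the signal.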
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[(-(neural_network_m + neural_network_k - col)):(-(neural_network_m - col))]
P[-1, col] = 1 # for additive constant
t = lsq_signal[-neural_network_m:]
# test - top
seed_weights = np.ones(neural_network_k) / neural_network_k
weights = 0 * seed_weights.copy()
train_input = P[:-1, :]
lr = 0.01
for iterations in range(1000):
output = np.matmul(weights, train_input)
error = (t - output)
gradients = error * (- train_input)
# guess average gradients
average_gradients = np.mean(gradients, axis=1)
# steepest descent
max_gradient_vector = average_gradients * (np.abs(average_gradients) == max(np.abs(average_gradients)))
adjustment = - lr * average_gradients
# adjustment = - lr * max_gradient_vector
weights += adjustment
# test - bottom
weights_right = np.hstack((weights, 0))
max_count_right = 0
min_count_right = 0
i_right = 0
while ((max_count_right < 1) or (min_count_right < 1)) and (i_right < len(lsq_signal) - 1):
time_series_extended[int(2 * (len(lsq_signal) - 1) + 1 + i_right)] = \
sum(weights_right * np.hstack((time_series_extended[
int(2 * (len(lsq_signal) - 1) + 1 - neural_network_k + i_right):
int(2 * (len(lsq_signal) - 1) + 1 + i_right)], 1)))
i_right += 1
if i_right > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_right += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_right += 1
# backward <-
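# Backward extension: the weights are instead obtained by solving a convex
# least-squares problem in cvxpy (ECOS solver) on the first neural_network_m
# samples, then the signal is extrapolated to the left until extrema appear.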
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[int(col + 1):int(col + neural_network_k + 1)]
P[-1, col] = 1 # for additive constant
t = lsq_signal[:neural_network_m]
vx = cvx.Variable(int(neural_network_k + 1))
objective = cvx.Minimize(cvx.norm((2 * (vx * P) + 1 - t), 2)) # linear activation function is arbitrary
prob = cvx.Problem(objective)
result = prob.solve(verbose=True, solver=cvx.ECOS)
weights_left = np.array(vx.value)
max_count_left = 0
min_count_left = 0
i_left = 0
while ((max_count_left < 1) or (min_count_left < 1)) and (i_left < len(lsq_signal) - 1):
time_series_extended[int(len(lsq_signal) - 2 - i_left)] = \
2 * sum(weights_left * np.hstack((time_series_extended[int(len(lsq_signal) - 1 - i_left):
int(len(lsq_signal) - 1 - i_left + neural_network_k)],
1))) + 1
i_left += 1
if i_left > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_left += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_left += 1
lsq_utils = emd_utils.Utility(time=time, time_series=lsq_signal)
utils_extended = emd_utils.Utility(time=time_extended, time_series=time_series_extended)
maxima = lsq_signal[lsq_utils.max_bool_func_1st_order_fd()]
maxima_time = time[lsq_utils.max_bool_func_1st_order_fd()]
maxima_extrapolate = time_series_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
maxima_extrapolate_time = time_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
minima = lsq_signal[lsq_utils.min_bool_func_1st_order_fd()]
minima_time = time[lsq_utils.min_bool_func_1st_order_fd()]
minima_extrapolate = time_series_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
minima_extrapolate_time = time_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Single Neuron Neural Network Example')
plt.plot(time, lsq_signal, zorder=2, label='Signal')
plt.plot(time_extended, time_series_extended, c='g', zorder=1, label=textwrap.fill('Extrapolated signal', 12))
plt.scatter(maxima_time, maxima, c='r', zorder=3, label='Maxima')
plt.scatter(minima_time, minima, c='b', zorder=3, label='Minima')
plt.scatter(maxima_extrapolate_time, maxima_extrapolate, c='magenta', zorder=3,
label=textwrap.fill('Extrapolated maxima', 12))
plt.scatter(minima_extrapolate_time, minima_extrapolate, c='cyan', zorder=4,
label=textwrap.fill('Extrapolated minima', 12))
plt.plot(((time[-302] + time[-301]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k',
label=textwrap.fill('Neural network inputs', 13))
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='k')
plt.plot(((time_extended[-1001] + time_extended[-1002]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k')
plt.plot(((time[-202] + time[-201]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray', linestyle='dashed',
label=textwrap.fill('Neural network targets', 13))
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='gray')
plt.plot(((time_extended[-1001] + time_extended[-1000]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray',
linestyle='dashed')
plt.xlim(3.4 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/neural_network.png')
plt.show()
# plot 6a
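# Trend extraction with different knot sequences: the same noisy sinusoid is
# decomposed using 51, 31 and 11 knots, and the trends recovered from each knot
# sequence are compared (printed values are the detrended-fluctuation variances).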
np.random.seed(0)
time = np.linspace(0, 5 * np.pi, 1001)
knots_51 = np.linspace(0, 5 * np.pi, 51)
time_series = np.cos(2 * time) + np.cos(4 * time) + np.cos(8 * time)
noise = np.random.normal(0, 1, len(time_series))
time_series += noise
advemdpy = EMD(time=time, time_series=time_series)
imfs_51, hts_51, ifs_51 = advemdpy.empirical_mode_decomposition(knots=knots_51, max_imfs=3,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_31 = np.linspace(0, 5 * np.pi, 31)
imfs_31, hts_31, ifs_31 = advemdpy.empirical_mode_decomposition(knots=knots_31, max_imfs=2,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_11 = np.linspace(0, 5 * np.pi, 11)
imfs_11, hts_11, ifs_11 = advemdpy.empirical_mode_decomposition(knots=knots_11, max_imfs=1,
edge_effect='symmetric_anchor', verbose=False)[:3]
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
print(f'DFA fluctuation with 51 knots: {np.round(np.var(time_series - (imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :])), 3)}')
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[0].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[0].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
print(f'DFA fluctuation with 31 knots: {np.round(np.var(time_series - (imfs_31[1, :] + imfs_31[2, :])), 3)}')
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[1].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[1].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
print(f'DFA fluctuation with 11 knots: {np.round(np.var(time_series - imfs_51[3, :]), 3)}')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[2].set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$', r'$5\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[2].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[2].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
plt.savefig('jss_figures/DFA_different_trends.png')
plt.show()
# plot 6b
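# Same knot-sequence comparison as above, restricted to the zoomed region
# (roughly 0.95*pi to 1.55*pi) marked in the previous figure.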
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences Zoomed Region', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[0].set_ylim(-5.5, 5.5)
axs[0].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].set_ylim(-5.5, 5.5)
axs[1].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([np.pi, (3 / 2) * np.pi])
axs[2].set_xticklabels([r'$\pi$', r'$\frac{3}{2}\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].set_ylim(-5.5, 5.5)
axs[2].set_xlim(0.95 * np.pi, 1.55 * np.pi)
plt.savefig('jss_figures/DFA_different_trends_zoomed.png')
plt.show()
hs_outputs = hilbert_spectrum(time, imfs_51, hts_51, ifs_51, max_frequency=12, plot=False)
# plot 6c
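# Gaussian-filtered Hilbert spectrum of the 51-knot decomposition above, with
# reference lines at the true component frequencies of 2, 4 and 8 rad/s.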
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Seres with Added Noise', 50))
x_hs, y, z = hs_outputs
z_min, z_max = 0, np.abs(z).max()
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.plot(x_hs[0, :], 8 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 8$', linewidth=3)
ax.plot(x_hs[0, :], 4 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 4$', linewidth=3)
ax.plot(x_hs[0, :], 2 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 2$', linewidth=3)
ax.set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi])
ax.set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$'])
plt.ylabel(r'Frequency (rad.s$^{-1}$)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.85, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/DFA_hilbert_spectrum.png')
plt.show()
# plot 6c
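# Envelope behaviour when the Schoenberg-Whitney conditions are not satisfied:
# unsmoothed and smoothed maxima/minima envelopes on 51 knots are compared.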
time = np.linspace(0, 5 * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 51)
fluc = Fluctuation(time=time, time_series=time_series)
max_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=False)
max_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=True)
min_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=False)
min_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=True)
util = Utility(time=time, time_series=time_series)
maxima = util.max_bool_func_1st_order_fd()
minima = util.min_bool_func_1st_order_fd()
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title(textwrap.fill('Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied', 50))
plt.plot(time, time_series, label='Time series', zorder=2, linewidth=2)
plt.scatter(time[maxima], time_series[maxima], c='r', label='Maxima', zorder=10)
plt.scatter(time[minima], time_series[minima], c='b', label='Minima', zorder=10)
plt.plot(time, max_unsmoothed[0], label=textwrap.fill('Unsmoothed maxima envelope', 10), c='darkorange')
plt.plot(time, max_smoothed[0], label=textwrap.fill('Smoothed maxima envelope', 10), c='red')
plt.plot(time, min_unsmoothed[0], label=textwrap.fill('Unsmoothed minima envelope', 10), c='cyan')
plt.plot(time, min_smoothed[0], label=textwrap.fill('Smoothed minima envelope', 10), c='blue')
for knot in knots[:-1]:
plt.plot(knot * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', zorder=1)
plt.plot(knots[-1] * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', label='Knots', zorder=1)
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi),
(r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$', r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Schoenberg_Whitney_Conditions.png')
plt.show()
# plot 7
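# Detrended fluctuation analysis example: local mean estimates from EMD, SEMD and
# EEMD envelopes are compared with inflection-point and binomial-average envelopes
# and with the true mean cos(t).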
a = 0.25
width = 0.2
time = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 1001)
knots = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 11)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
inflection_bool = utils.inflection_point()
inflection_x = time[inflection_bool]
inflection_y = time_series[inflection_bool]
fluctuation = emd_mean.Fluctuation(time=time, time_series=time_series)
maxima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
maxima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
inflection_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='inflection_points')[0]
binomial_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='binomial_average', order=21,
increment=20)[0]
derivative_of_lsq = utils.derivative_forward_diff()
derivative_time = time[:-1]
derivative_knots = np.linspace(knots[0], knots[-1], 31)
# change (1) detrended_fluctuation_technique and (2) max_internal_iter and (3) debug (confusing with external debugging)
emd = AdvEMDpy.EMD(time=derivative_time, time_series=derivative_of_lsq)
imf_1_of_derivative = emd.empirical_mode_decomposition(knots=derivative_knots,
knot_time=derivative_time, text=False, verbose=False)[0][1, :]
utils = emd_utils.Utility(time=time[:-1], time_series=imf_1_of_derivative)
optimal_maxima = np.r_[False, utils.derivative_forward_diff() < 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
optimal_minima = np.r_[False, utils.derivative_forward_diff() > 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
EEMD_maxima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'maxima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
EEMD_minima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'minima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Detrended Fluctuation Analysis Examples')
plt.plot(time, time_series, linewidth=2, label='Time series')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(time[optimal_maxima], time_series[optimal_maxima], c='darkred', zorder=4,
label=textwrap.fill('Optimal maxima', 10))
plt.scatter(time[optimal_minima], time_series[optimal_minima], c='darkblue', zorder=4,
label=textwrap.fill('Optimal minima', 10))
plt.scatter(inflection_x, inflection_y, c='magenta', zorder=4, label=textwrap.fill('Inflection points', 10))
plt.plot(time, maxima_envelope, c='darkblue', label=textwrap.fill('EMD envelope', 10))
plt.plot(time, minima_envelope, c='darkblue')
plt.plot(time, (maxima_envelope + minima_envelope) / 2, c='darkblue')
plt.plot(time, maxima_envelope_smooth, c='darkred', label=textwrap.fill('SEMD envelope', 10))
plt.plot(time, minima_envelope_smooth, c='darkred')
plt.plot(time, (maxima_envelope_smooth + minima_envelope_smooth) / 2, c='darkred')
plt.plot(time, EEMD_maxima_envelope, c='darkgreen', label=textwrap.fill('EEMD envelope', 10))
plt.plot(time, EEMD_minima_envelope, c='darkgreen')
plt.plot(time, (EEMD_maxima_envelope + EEMD_minima_envelope) / 2, c='darkgreen')
plt.plot(time, inflection_points_envelope, c='darkorange', label=textwrap.fill('Inflection point envelope', 10))
plt.plot(time, binomial_points_envelope, c='deeppink', label=textwrap.fill('Binomial average envelope', 10))
plt.plot(time, np.cos(time), c='black', label='True mean')
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$',
r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/detrended_fluctuation_analysis.png')
plt.show()
# Duffing Equation Example
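# Duffing oscillator example: dx/dt = v, dv/dt = x - x**3 + 0.1*cos(2*pi*t/25) is
# integrated with odeint over t in [0, 150]; the displacement is then decomposed
# and its Hilbert spectrum compared across packages below.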
def duffing_equation(xy, ts):
gamma = 0.1
epsilon = 1
omega = ((2 * np.pi) / 25)
return [xy[1], xy[0] - epsilon * xy[0] ** 3 + gamma * np.cos(omega * ts)]
t = np.linspace(0, 150, 1501)
XY0 = [1, 1]
solution = odeint(duffing_equation, XY0, t)
x = solution[:, 0]
dxdt = solution[:, 1]
x_points = [0, 50, 100, 150]
x_names = [0, 50, 100, 150]  # tick labels (list rather than set, so the order is guaranteed)
y_points_1 = [-2, 0, 2]
y_points_2 = [-1, 0, 1]
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.2)
axs[0].plot(t, x)
axs[0].set_title('Duffing Equation Displacement')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, dxdt)
axs[1].set_title('Duffing Equation Velocity')
axs[1].set_ylim([-1.5, 1.5])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel('x(t)')
ax.set_yticks(y_points_1)
if axis == 1:
ax.set_ylabel(r'$ \dfrac{dx(t)}{dt} $')
ax.set(xlabel='t')
ax.set_yticks(y_points_2)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation.png')
plt.show()
# compare other packages Duffing - top
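# Hilbert spectra of the Duffing displacement using PyEMD 0.2.10 and emd 0.3.3,
# each Gaussian-smoothed, with the Hamiltonian frequency approximation (0.124 Hz)
# and the driving-function frequency (0.04 Hz) overlaid for reference.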
pyemd = pyemd0215()
py_emd = pyemd(x)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd.T, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(x)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_emd.png')
plt.show()
# compare other packages Duffing - bottom
emd_duffing = AdvEMDpy.EMD(time=t, time_series=x)
emd_duff, emd_ht_duff, emd_if_duff, _, _, _, _ = emd_duffing.empirical_mode_decomposition(verbose=False)
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.3)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
axs[0].plot(t, emd_duff[1, :], label='AdvEMDpy')
axs[0].plot(t, py_emd[0, :], '--', label='PyEMD 0.2.10')
axs[0].plot(t, emd_sift[:, 0], '--', label='emd 0.3.3')
axs[0].set_title('IMF 1')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, emd_duff[2, :], label='AdvEMDpy')
print(f'AdvEMDpy driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_duff[2, :])), 3)}')
axs[1].plot(t, py_emd[1, :], '--', label='PyEMD 0.2.10')
print(f'PyEMD driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - py_emd[1, :])), 3)}')
axs[1].plot(t, emd_sift[:, 1], '--', label='emd 0.3.3')
print(f'emd driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_sift[:, 1])), 3)}')
axs[1].plot(t, 0.1 * np.cos(0.04 * 2 * np.pi * t), '--', label=r'$0.1$cos$(0.08{\pi}t)$')
axs[1].set_title('IMF 2')
axs[1].set_ylim([-0.2, 0.4])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel(r'$\gamma_1(t)$')
ax.set_yticks([-2, 0, 2])
if axis == 1:
ax.set_ylabel(r'$\gamma_2(t)$')
ax.set_yticks([-0.2, 0, 0.2])
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation_imfs.png')
plt.show()
hs_outputs = hilbert_spectrum(t, emd_duff, emd_ht_duff, emd_if_duff, max_frequency=1.3, plot=False)
ax = plt.subplot(111)
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy', 40))
x, y, z = hs_outputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht.png')
plt.show()
# Carbon Dioxide Concentration Example
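# Monthly mean atmospheric CO2 concentrations are loaded and decomposed; the
# annual cycle recovered by each package is compared against a one-cycle-per-year
# reference line in the Hilbert spectra that follow.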
CO2_data = pd.read_csv('Data/co2_mm_mlo.csv', header=51)
plt.plot(CO2_data['month'], CO2_data['decimal date'])
plt.title(textwrap.fill('Mean Monthly Concentration of Carbon Dioxide in the Atmosphere', 35))
plt.ylabel('Parts per million')
plt.xlabel('Time (years)')
plt.savefig('jss_figures/CO2_concentration.png')
plt.show()
signal = CO2_data['decimal date']
signal = np.asarray(signal)
time = CO2_data['month']
time = np.asarray(time)
# compare other packages Carbon Dioxide - top
pyemd = pyemd0215()
py_emd = pyemd(signal)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd[:2, :].T, 12, 'hilbert')
print(f'PyEMD annual frequency error: {np.round(sum(np.abs(IF[:, 0] - np.ones_like(IF[:, 0]))), 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(signal)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift[:, :1], 12, 'hilbert')
print(f'emd annual frequency error: {np.round(sum(np.abs(IF - np.ones_like(IF)))[0], 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_emd.png')
plt.show()
# compare other packages Carbon Dioxide - bottom
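# AdvEMDpy decomposition of the CO2 series on 200 knots: the smoothed series,
# IMF 1 (compared against the annual cycle) and the residual are plotted, followed
# by the Gaussian-filtered Hilbert spectrum of IMF 1.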
knots = np.linspace(time[0], time[-1], 200)
emd_example = AdvEMDpy.EMD(time=time, time_series=signal)
imfs, hts, ifs, _, _, _, _ = \
emd_example.empirical_mode_decomposition(knots=knots, knot_time=time, verbose=False)
print(f'AdvEMDpy annual frequency error: {np.round(sum(np.abs(ifs[1, :] / (2 * np.pi) - np.ones_like(ifs[1, :]))), 3)}')
fig, axs = plt.subplots(2, 2)
plt.subplots_adjust(hspace=0.5)
axs[0, 0].plot(time, signal)
axs[0, 1].plot(time, signal)
axs[0, 1].plot(time, imfs[0, :], label='Smoothed')
axs[0, 1].legend(loc='lower right')
axs[1, 0].plot(time, imfs[1, :])
axs[1, 1].plot(time, imfs[2, :])
axis = 0
for ax in axs.flat:
if axis == 0:
        ax.set(ylabel=r'CO$_2$ concentration')
if axis == 1:
pass
if axis == 2:
        ax.set(ylabel=r'CO$_2$ concentration')
ax.set(xlabel='Time (years)')
if axis == 3:
ax.set(xlabel='Time (years)')
axis += 1
plt.gcf().subplots_adjust(bottom=0.15)
axs[0, 0].set_title(r'Original CO$_2$ Concentration')
axs[0, 1].set_title('Smoothed CO$_2$ Concentration')
axs[1, 0].set_title('IMF 1')
axs[1, 1].set_title('Residual')
plt.gcf().subplots_adjust(bottom=0.15)
plt.savefig('jss_figures/CO2_EMD.png')
plt.show()
hs_outputs = hilbert_spectrum(time, imfs, hts, ifs, max_frequency=10, which_imfs=[1], plot=False)
x_hs, y, z = hs_outputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.7
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.set_title(textwrap.fill(r'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using AdvEMDpy', 40))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.plot(x_hs[0, :], np.ones_like(x_hs[0, :]), 'k--', label=textwrap.fill('Annual cycle', 10))
ax.axis([x_hs.min(), x_hs.max(), y.min(), y.max()])
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert.png')
plt.show()
width)', '(101)'], {}), '(slope_based_maximum - width, slope_based_maximum + width, 101)\n', (23391, 23454), True, 'import numpy as np\n'), ((23470, 23526), 'numpy.linspace', 'np.linspace', (['minima_x[-1]', 'slope_based_maximum_time', '(101)'], {}), '(minima_x[-1], slope_based_maximum_time, 101)\n', (23481, 23526), True, 'import numpy as np\n'), ((23536, 23587), 'numpy.linspace', 'np.linspace', (['minima_y[-1]', 'slope_based_maximum', '(101)'], {}), '(minima_y[-1], slope_based_maximum, 101)\n', (23547, 23587), True, 'import numpy as np\n'), ((23916, 23990), 'numpy.linspace', 'np.linspace', (['(slope_based_minimum - width)', '(slope_based_minimum + width)', '(101)'], {}), '(slope_based_minimum - width, slope_based_minimum + width, 101)\n', (23927, 23990), True, 'import numpy as np\n'), ((24006, 24069), 'numpy.linspace', 'np.linspace', (['slope_based_maximum_time', 'slope_based_minimum_time'], {}), '(slope_based_maximum_time, slope_based_minimum_time)\n', (24017, 24069), True, 'import numpy as np\n'), ((24079, 24132), 'numpy.linspace', 'np.linspace', (['slope_based_maximum', 'slope_based_minimum'], {}), '(slope_based_maximum, slope_based_minimum)\n', (24090, 24132), True, 'import numpy as np\n'), ((24148, 24190), 'numpy.linspace', 'np.linspace', (['(2.5 - width)', '(2.5 + width)', '(101)'], {}), '(2.5 - width, 2.5 + width, 101)\n', (24159, 24190), True, 'import numpy as np\n'), ((24413, 24469), 'numpy.linspace', 'np.linspace', (['maxima_x[-2]', 'slope_based_maximum_time', '(101)'], {}), '(maxima_x[-2], slope_based_maximum_time, 101)\n', (24424, 24469), True, 'import numpy as np\n'), ((24546, 24590), 'numpy.linspace', 'np.linspace', (['(-3.4 - width)', '(-3.4 + width)', '(101)'], {}), '(-3.4 - width, -3.4 + width, 101)\n', (24557, 24590), True, 'import numpy as np\n'), ((24813, 24869), 'numpy.linspace', 'np.linspace', (['minima_x[-2]', 'slope_based_minimum_time', '(101)'], {}), '(minima_x[-2], slope_based_minimum_time, 101)\n', (24824, 24869), True, 'import numpy as np\n'), ((25610, 25707), 'numpy.linspace', 'np.linspace', (['(improved_slope_based_minimum - width)', '(improved_slope_based_minimum + width)', '(101)'], {}), '(improved_slope_based_minimum - width, \n improved_slope_based_minimum + width, 101)\n', (25621, 25707), True, 'import numpy as np\n'), ((25801, 25891), 'numpy.linspace', 'np.linspace', (['improved_slope_based_maximum_time', 'improved_slope_based_minimum_time', '(101)'], {}), '(improved_slope_based_maximum_time,\n improved_slope_based_minimum_time, 101)\n', (25812, 25891), True, 'import numpy as np\n'), ((25901, 25977), 'numpy.linspace', 'np.linspace', (['improved_slope_based_maximum', 'improved_slope_based_minimum', '(101)'], {}), '(improved_slope_based_maximum, improved_slope_based_minimum, 101)\n', (25912, 25977), True, 'import numpy as np\n'), ((25984, 26000), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (25995, 26000), True, 'import matplotlib.pyplot as plt\n'), ((26164, 26220), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'time_series'], {'LineWidth': '(2)', 'label': '"""Signal"""'}), "(time, time_series, LineWidth=2, label='Signal')\n", (26172, 26220), True, 'import matplotlib.pyplot as plt\n'), ((26221, 26266), 'matplotlib.pyplot.title', 'plt.title', (['"""Slope-Based Edge Effects Example"""'], {}), "('Slope-Based Edge Effects Example')\n", (26230, 26266), True, 'import matplotlib.pyplot as plt\n'), ((26267, 26310), 'matplotlib.pyplot.plot', 'plt.plot', (['max_dash_time_1', 'max_dash_1', '"""k-"""'], {}), "(max_dash_time_1, max_dash_1, 
'k-')\n", (26275, 26310), True, 'import matplotlib.pyplot as plt\n'), ((26311, 26354), 'matplotlib.pyplot.plot', 'plt.plot', (['max_dash_time_2', 'max_dash_2', '"""k-"""'], {}), "(max_dash_time_2, max_dash_2, 'k-')\n", (26319, 26354), True, 'import matplotlib.pyplot as plt\n'), ((26355, 26398), 'matplotlib.pyplot.plot', 'plt.plot', (['max_dash_time_3', 'max_dash_3', '"""k-"""'], {}), "(max_dash_time_3, max_dash_3, 'k-')\n", (26363, 26398), True, 'import matplotlib.pyplot as plt\n'), ((26399, 26442), 'matplotlib.pyplot.plot', 'plt.plot', (['min_dash_time_1', 'min_dash_1', '"""k-"""'], {}), "(min_dash_time_1, min_dash_1, 'k-')\n", (26407, 26442), True, 'import matplotlib.pyplot as plt\n'), ((26443, 26486), 'matplotlib.pyplot.plot', 'plt.plot', (['min_dash_time_2', 'min_dash_2', '"""k-"""'], {}), "(min_dash_time_2, min_dash_2, 'k-')\n", (26451, 26486), True, 'import matplotlib.pyplot as plt\n'), ((26487, 26530), 'matplotlib.pyplot.plot', 'plt.plot', (['min_dash_time_3', 'min_dash_3', '"""k-"""'], {}), "(min_dash_time_3, min_dash_3, 'k-')\n", (26495, 26530), True, 'import matplotlib.pyplot as plt\n'), ((26531, 26574), 'matplotlib.pyplot.plot', 'plt.plot', (['min_dash_time_4', 'min_dash_4', '"""k-"""'], {}), "(min_dash_time_4, min_dash_4, 'k-')\n", (26539, 26574), True, 'import matplotlib.pyplot as plt\n'), ((26575, 26622), 'matplotlib.pyplot.plot', 'plt.plot', (['maxima_dash_time_1', 'maxima_dash', '"""k-"""'], {}), "(maxima_dash_time_1, maxima_dash, 'k-')\n", (26583, 26622), True, 'import matplotlib.pyplot as plt\n'), ((26623, 26670), 'matplotlib.pyplot.plot', 'plt.plot', (['maxima_dash_time_2', 'maxima_dash', '"""k-"""'], {}), "(maxima_dash_time_2, maxima_dash, 'k-')\n", (26631, 26670), True, 'import matplotlib.pyplot as plt\n'), ((26671, 26718), 'matplotlib.pyplot.plot', 'plt.plot', (['maxima_dash_time_3', 'maxima_dash', '"""k-"""'], {}), "(maxima_dash_time_3, maxima_dash, 'k-')\n", (26679, 26718), True, 'import matplotlib.pyplot as plt\n'), ((26719, 26766), 'matplotlib.pyplot.plot', 'plt.plot', (['minima_dash_time_1', 'minima_dash', '"""k-"""'], {}), "(minima_dash_time_1, minima_dash, 'k-')\n", (26727, 26766), True, 'import matplotlib.pyplot as plt\n'), ((26767, 26814), 'matplotlib.pyplot.plot', 'plt.plot', (['minima_dash_time_2', 'minima_dash', '"""k-"""'], {}), "(minima_dash_time_2, minima_dash, 'k-')\n", (26775, 26814), True, 'import matplotlib.pyplot as plt\n'), ((26815, 26862), 'matplotlib.pyplot.plot', 'plt.plot', (['minima_dash_time_3', 'minima_dash', '"""k-"""'], {}), "(minima_dash_time_3, minima_dash, 'k-')\n", (26823, 26862), True, 'import matplotlib.pyplot as plt\n'), ((26863, 26917), 'matplotlib.pyplot.text', 'plt.text', (['(4.34 * np.pi)', '(-3.2)', '"""$\\\\Delta{t^{min}_{m}}$"""'], {}), "(4.34 * np.pi, -3.2, '$\\\\Delta{t^{min}_{m}}$')\n", (26871, 26917), True, 'import matplotlib.pyplot as plt\n'), ((26918, 26972), 'matplotlib.pyplot.text', 'plt.text', (['(4.74 * np.pi)', '(-3.2)', '"""$\\\\Delta{t^{min}_{m}}$"""'], {}), "(4.74 * np.pi, -3.2, '$\\\\Delta{t^{min}_{m}}$')\n", (26926, 26972), True, 'import matplotlib.pyplot as plt\n'), ((26973, 27024), 'matplotlib.pyplot.text', 'plt.text', (['(4.12 * np.pi)', '(2)', '"""$\\\\Delta{t^{max}_{M}}$"""'], {}), "(4.12 * np.pi, 2, '$\\\\Delta{t^{max}_{M}}$')\n", (26981, 27024), True, 'import matplotlib.pyplot as plt\n'), ((27025, 27075), 'matplotlib.pyplot.text', 'plt.text', (['(4.5 * np.pi)', '(2)', '"""$\\\\Delta{t^{max}_{M}}$"""'], {}), "(4.5 * np.pi, 2, '$\\\\Delta{t^{max}_{M}}$')\n", (27033, 27075), True, 'import 
matplotlib.pyplot as plt\n'), ((27077, 27113), 'matplotlib.pyplot.text', 'plt.text', (['(4.3 * np.pi)', '(0.35)', '"""$s_1$"""'], {}), "(4.3 * np.pi, 0.35, '$s_1$')\n", (27085, 27113), True, 'import matplotlib.pyplot as plt\n'), ((27116, 27153), 'matplotlib.pyplot.text', 'plt.text', (['(4.43 * np.pi)', '(-0.2)', '"""$s_2$"""'], {}), "(4.43 * np.pi, -0.2, '$s_2$')\n", (27124, 27153), True, 'import matplotlib.pyplot as plt\n'), ((27156, 27260), 'matplotlib.pyplot.text', 'plt.text', (['(4.3 * np.pi + (minima_x[-1] - minima_x[-2]))', '(0.35 + (minima_y[-1] - minima_y[-2]))', '"""$s_1$"""'], {}), "(4.3 * np.pi + (minima_x[-1] - minima_x[-2]), 0.35 + (minima_y[-1] -\n minima_y[-2]), '$s_1$')\n", (27164, 27260), True, 'import matplotlib.pyplot as plt\n'), ((27259, 27384), 'matplotlib.pyplot.text', 'plt.text', (['(4.43 * np.pi + (slope_based_minimum_time - minima_x[-1]))', '(-0.2 + (slope_based_minimum - minima_y[-1]))', '"""$s_2$"""'], {}), "(4.43 * np.pi + (slope_based_minimum_time - minima_x[-1]), -0.2 + (\n slope_based_minimum - minima_y[-1]), '$s_2$')\n", (27267, 27384), True, 'import matplotlib.pyplot as plt\n'), ((27391, 27514), 'matplotlib.pyplot.text', 'plt.text', (['(4.5 * np.pi + (slope_based_minimum_time - minima_x[-1]))', '(1.2 + (slope_based_minimum - minima_y[-1]))', '"""$s_2$"""'], {}), "(4.5 * np.pi + (slope_based_minimum_time - minima_x[-1]), 1.2 + (\n slope_based_minimum - minima_y[-1]), '$s_2$')\n", (27399, 27514), True, 'import matplotlib.pyplot as plt\n'), ((27522, 27578), 'matplotlib.pyplot.plot', 'plt.plot', (['minima_line_dash_time', 'minima_line_dash', '"""k--"""'], {}), "(minima_line_dash_time, minima_line_dash, 'k--')\n", (27530, 27578), True, 'import matplotlib.pyplot as plt\n'), ((27579, 27635), 'matplotlib.pyplot.plot', 'plt.plot', (['maxima_line_dash_time', 'maxima_line_dash', '"""k--"""'], {}), "(maxima_line_dash_time, maxima_line_dash, 'k--')\n", (27587, 27635), True, 'import matplotlib.pyplot as plt\n'), ((27636, 27672), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_1_time', 'dash_1', '"""k--"""'], {}), "(dash_1_time, dash_1, 'k--')\n", (27644, 27672), True, 'import matplotlib.pyplot as plt\n'), ((27673, 27709), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_2_time', 'dash_2', '"""k--"""'], {}), "(dash_2_time, dash_2, 'k--')\n", (27681, 27709), True, 'import matplotlib.pyplot as plt\n'), ((27710, 27746), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_3_time', 'dash_3', '"""k--"""'], {}), "(dash_3_time, dash_3, 'k--')\n", (27718, 27746), True, 'import matplotlib.pyplot as plt\n'), ((27747, 27783), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_4_time', 'dash_4', '"""k--"""'], {}), "(dash_4_time, dash_4, 'k--')\n", (27755, 27783), True, 'import matplotlib.pyplot as plt\n'), ((27784, 27828), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_final_time', 'dash_final', '"""k--"""'], {}), "(dash_final_time, dash_final, 'k--')\n", (27792, 27828), True, 'import matplotlib.pyplot as plt\n'), ((27829, 27893), 'matplotlib.pyplot.scatter', 'plt.scatter', (['maxima_x', 'maxima_y'], {'c': '"""r"""', 'zorder': '(4)', 'label': '"""Maxima"""'}), "(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')\n", (27840, 27893), True, 'import matplotlib.pyplot as plt\n'), ((27894, 27958), 'matplotlib.pyplot.scatter', 'plt.scatter', (['minima_x', 'minima_y'], {'c': '"""b"""', 'zorder': '(4)', 'label': '"""Minima"""'}), "(minima_x, minima_y, c='b', zorder=4, label='Minima')\n", (27905, 27958), True, 'import matplotlib.pyplot as plt\n'), ((28583, 28617), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(3.9 
* np.pi)', '(5.5 * np.pi)'], {}), '(3.9 * np.pi, 5.5 * np.pi)\n', (28591, 28617), True, 'import matplotlib.pyplot as plt\n'), ((28618, 28676), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(4 * np.pi, 5 * np.pi)', "('4$\\\\pi$', '5$\\\\pi$')"], {}), "((4 * np.pi, 5 * np.pi), ('4$\\\\pi$', '5$\\\\pi$'))\n", (28628, 28676), True, 'import matplotlib.pyplot as plt\n'), ((28677, 28745), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-3, -2, -1, 0, 1, 2)', "('-3', '-2', '-1', '0', '1', '2')"], {}), "((-3, -2, -1, 0, 1, 2), ('-3', '-2', '-1', '0', '1', '2'))\n", (28687, 28745), True, 'import matplotlib.pyplot as plt\n'), ((28905, 28960), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/edge_effects_slope_based.png"""'], {}), "('jss_figures/edge_effects_slope_based.png')\n", (28916, 28960), True, 'import matplotlib.pyplot as plt\n'), ((28961, 28971), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (28969, 28971), True, 'import matplotlib.pyplot as plt\n'), ((29010, 29047), 'numpy.linspace', 'np.linspace', (['(0)', '((5 - a) * np.pi)', '(1001)'], {}), '(0, (5 - a) * np.pi, 1001)\n', (29021, 29047), True, 'import numpy as np\n'), ((29102, 29155), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (29119, 29155), False, 'import emd_utils\n'), ((30112, 30164), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time', 'time_series': 'Huang_wave'}), '(time=time, time_series=Huang_wave)\n', (30129, 30164), False, 'import emd_utils\n'), ((30298, 30353), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time', 'time_series': 'Coughlin_wave'}), '(time=time, time_series=Coughlin_wave)\n', (30315, 30353), False, 'import emd_utils\n'), ((30868, 30928), 'numpy.linspace', 'np.linspace', (['(maxima_x[-2] - width)', '(maxima_x[-2] + width)', '(101)'], {}), '(maxima_x[-2] - width, maxima_x[-2] + width, 101)\n', (30879, 30928), True, 'import numpy as np\n'), ((30949, 31007), 'numpy.linspace', 'np.linspace', (['(5.3 * np.pi - width)', '(5.3 * np.pi + width)', '(101)'], {}), '(5.3 * np.pi - width, 5.3 * np.pi + width, 101)\n', (30960, 31007), True, 'import numpy as np\n'), ((31076, 31136), 'numpy.linspace', 'np.linspace', (['(minima_x[-2] - width)', '(minima_x[-2] + width)', '(101)'], {}), '(minima_x[-2] - width, minima_x[-2] + width, 101)\n', (31087, 31136), True, 'import numpy as np\n'), ((31157, 31215), 'numpy.linspace', 'np.linspace', (['(5.3 * np.pi - width)', '(5.3 * np.pi + width)', '(101)'], {}), '(5.3 * np.pi - width, 5.3 * np.pi + width, 101)\n', (31168, 31215), True, 'import numpy as np\n'), ((31288, 31332), 'numpy.linspace', 'np.linspace', (['minima_y[-2]', 'maxima_y[-2]', '(101)'], {}), '(minima_y[-2], maxima_y[-2], 101)\n', (31299, 31332), True, 'import numpy as np\n'), ((31413, 31473), 'numpy.linspace', 'np.linspace', (['(maxima_y[-2] - width)', '(maxima_y[-2] + width)', '(101)'], {}), '(maxima_y[-2] - width, maxima_y[-2] + width, 101)\n', (31424, 31473), True, 'import numpy as np\n'), ((31489, 31533), 'numpy.linspace', 'np.linspace', (['(-1.8 - width)', '(-1.8 + width)', '(101)'], {}), '(-1.8 - width, -1.8 + width, 101)\n', (31500, 31533), True, 'import numpy as np\n'), ((31597, 31657), 'numpy.linspace', 'np.linspace', (['(minima_y[-2] - width)', '(minima_y[-2] + width)', '(101)'], {}), '(minima_y[-2] - width, minima_y[-2] + width, 101)\n', (31608, 31657), True, 'import numpy as np\n'), ((31673, 31717), 'numpy.linspace', 'np.linspace', (['(-1.8 - width)', '(-1.8 + width)', '(101)'], 
{}), '(-1.8 - width, -1.8 + width, 101)\n', (31684, 31717), True, 'import numpy as np\n'), ((31795, 31839), 'numpy.linspace', 'np.linspace', (['minima_x[-2]', 'maxima_x[-2]', '(101)'], {}), '(minima_x[-2], maxima_x[-2], 101)\n', (31806, 31839), True, 'import numpy as np\n'), ((31918, 31978), 'numpy.linspace', 'np.linspace', (['(maxima_x[-1] - width)', '(maxima_x[-1] + width)', '(101)'], {}), '(maxima_x[-1] - width, maxima_x[-1] + width, 101)\n', (31929, 31978), True, 'import numpy as np\n'), ((31999, 32057), 'numpy.linspace', 'np.linspace', (['(5.4 * np.pi - width)', '(5.4 * np.pi + width)', '(101)'], {}), '(5.4 * np.pi - width, 5.4 * np.pi + width, 101)\n', (32010, 32057), True, 'import numpy as np\n'), ((32126, 32186), 'numpy.linspace', 'np.linspace', (['(minima_x[-1] - width)', '(minima_x[-1] + width)', '(101)'], {}), '(minima_x[-1] - width, minima_x[-1] + width, 101)\n', (32137, 32186), True, 'import numpy as np\n'), ((32207, 32265), 'numpy.linspace', 'np.linspace', (['(5.4 * np.pi - width)', '(5.4 * np.pi + width)', '(101)'], {}), '(5.4 * np.pi - width, 5.4 * np.pi + width, 101)\n', (32218, 32265), True, 'import numpy as np\n'), ((32338, 32382), 'numpy.linspace', 'np.linspace', (['minima_y[-1]', 'maxima_y[-1]', '(101)'], {}), '(minima_y[-1], maxima_y[-1], 101)\n', (32349, 32382), True, 'import numpy as np\n'), ((32463, 32523), 'numpy.linspace', 'np.linspace', (['(maxima_y[-1] - width)', '(maxima_y[-1] + width)', '(101)'], {}), '(maxima_y[-1] - width, maxima_y[-1] + width, 101)\n', (32474, 32523), True, 'import numpy as np\n'), ((32539, 32583), 'numpy.linspace', 'np.linspace', (['(-2.1 - width)', '(-2.1 + width)', '(101)'], {}), '(-2.1 - width, -2.1 + width, 101)\n', (32550, 32583), True, 'import numpy as np\n'), ((32647, 32707), 'numpy.linspace', 'np.linspace', (['(minima_y[-1] - width)', '(minima_y[-1] + width)', '(101)'], {}), '(minima_y[-1] - width, minima_y[-1] + width, 101)\n', (32658, 32707), True, 'import numpy as np\n'), ((32723, 32767), 'numpy.linspace', 'np.linspace', (['(-2.1 - width)', '(-2.1 + width)', '(101)'], {}), '(-2.1 - width, -2.1 + width, 101)\n', (32734, 32767), True, 'import numpy as np\n'), ((32845, 32889), 'numpy.linspace', 'np.linspace', (['minima_x[-1]', 'maxima_x[-1]', '(101)'], {}), '(minima_x[-1], maxima_x[-1], 101)\n', (32856, 32889), True, 'import numpy as np\n'), ((32958, 32974), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (32969, 32974), True, 'import matplotlib.pyplot as plt\n'), ((33014, 33062), 'matplotlib.pyplot.title', 'plt.title', (['"""Characteristic Wave Effects Example"""'], {}), "('Characteristic Wave Effects Example')\n", (33023, 33062), True, 'import matplotlib.pyplot as plt\n'), ((33063, 33119), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'time_series'], {'LineWidth': '(2)', 'label': '"""Signal"""'}), "(time, time_series, LineWidth=2, label='Signal')\n", (33071, 33119), True, 'import matplotlib.pyplot as plt\n'), ((33824, 33888), 'matplotlib.pyplot.scatter', 'plt.scatter', (['maxima_x', 'maxima_y'], {'c': '"""r"""', 'zorder': '(4)', 'label': '"""Maxima"""'}), "(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')\n", (33835, 33888), True, 'import matplotlib.pyplot as plt\n'), ((33889, 33953), 'matplotlib.pyplot.scatter', 'plt.scatter', (['minima_x', 'minima_y'], {'c': '"""b"""', 'zorder': '(4)', 'label': '"""Minima"""'}), "(minima_x, minima_y, c='b', zorder=4, label='Minima')\n", (33900, 33953), True, 'import matplotlib.pyplot as plt\n'), ((34181, 34218), 'matplotlib.pyplot.plot', 'plt.plot', 
(['max_2_x_time', 'max_2_x', '"""k-"""'], {}), "(max_2_x_time, max_2_x, 'k-')\n", (34189, 34218), True, 'import matplotlib.pyplot as plt\n'), ((34219, 34261), 'matplotlib.pyplot.plot', 'plt.plot', (['max_2_x_time_side', 'max_2_x', '"""k-"""'], {}), "(max_2_x_time_side, max_2_x, 'k-')\n", (34227, 34261), True, 'import matplotlib.pyplot as plt\n'), ((34262, 34299), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_x_time', 'min_2_x', '"""k-"""'], {}), "(min_2_x_time, min_2_x, 'k-')\n", (34270, 34299), True, 'import matplotlib.pyplot as plt\n'), ((34300, 34342), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_x_time_side', 'min_2_x', '"""k-"""'], {}), "(min_2_x_time_side, min_2_x, 'k-')\n", (34308, 34342), True, 'import matplotlib.pyplot as plt\n'), ((34343, 34399), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_2_x_time', 'dash_max_min_2_x', '"""k--"""'], {}), "(dash_max_min_2_x_time, dash_max_min_2_x, 'k--')\n", (34351, 34399), True, 'import matplotlib.pyplot as plt\n'), ((34400, 34438), 'matplotlib.pyplot.text', 'plt.text', (['(5.16 * np.pi)', '(0.85)', '"""$2a_2$"""'], {}), "(5.16 * np.pi, 0.85, '$2a_2$')\n", (34408, 34438), True, 'import matplotlib.pyplot as plt\n'), ((34441, 34478), 'matplotlib.pyplot.plot', 'plt.plot', (['max_2_y_time', 'max_2_y', '"""k-"""'], {}), "(max_2_y_time, max_2_y, 'k-')\n", (34449, 34478), True, 'import matplotlib.pyplot as plt\n'), ((34479, 34521), 'matplotlib.pyplot.plot', 'plt.plot', (['max_2_y_time', 'max_2_y_side', '"""k-"""'], {}), "(max_2_y_time, max_2_y_side, 'k-')\n", (34487, 34521), True, 'import matplotlib.pyplot as plt\n'), ((34522, 34559), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_y_time', 'min_2_y', '"""k-"""'], {}), "(min_2_y_time, min_2_y, 'k-')\n", (34530, 34559), True, 'import matplotlib.pyplot as plt\n'), ((34560, 34602), 'matplotlib.pyplot.plot', 'plt.plot', (['min_2_y_time', 'min_2_y_side', '"""k-"""'], {}), "(min_2_y_time, min_2_y_side, 'k-')\n", (34568, 34602), True, 'import matplotlib.pyplot as plt\n'), ((34603, 34659), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_2_y_time', 'dash_max_min_2_y', '"""k--"""'], {}), "(dash_max_min_2_y_time, dash_max_min_2_y, 'k--')\n", (34611, 34659), True, 'import matplotlib.pyplot as plt\n'), ((34660, 34708), 'matplotlib.pyplot.text', 'plt.text', (['(4.08 * np.pi)', '(-2.2)', '"""$\\\\frac{p_2}{2}$"""'], {}), "(4.08 * np.pi, -2.2, '$\\\\frac{p_2}{2}$')\n", (34668, 34708), True, 'import matplotlib.pyplot as plt\n'), ((34710, 34747), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_x_time', 'max_1_x', '"""k-"""'], {}), "(max_1_x_time, max_1_x, 'k-')\n", (34718, 34747), True, 'import matplotlib.pyplot as plt\n'), ((34748, 34790), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_x_time_side', 'max_1_x', '"""k-"""'], {}), "(max_1_x_time_side, max_1_x, 'k-')\n", (34756, 34790), True, 'import matplotlib.pyplot as plt\n'), ((34791, 34828), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_x_time', 'min_1_x', '"""k-"""'], {}), "(min_1_x_time, min_1_x, 'k-')\n", (34799, 34828), True, 'import matplotlib.pyplot as plt\n'), ((34829, 34871), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_x_time_side', 'min_1_x', '"""k-"""'], {}), "(min_1_x_time_side, min_1_x, 'k-')\n", (34837, 34871), True, 'import matplotlib.pyplot as plt\n'), ((34872, 34928), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_1_x_time', 'dash_max_min_1_x', '"""k--"""'], {}), "(dash_max_min_1_x_time, dash_max_min_1_x, 'k--')\n", (34880, 34928), True, 'import matplotlib.pyplot as plt\n'), ((34929, 34967), 'matplotlib.pyplot.text', 
'plt.text', (['(5.42 * np.pi)', '(-0.1)', '"""$2a_1$"""'], {}), "(5.42 * np.pi, -0.1, '$2a_1$')\n", (34937, 34967), True, 'import matplotlib.pyplot as plt\n'), ((34970, 35007), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_y_time', 'max_1_y', '"""k-"""'], {}), "(max_1_y_time, max_1_y, 'k-')\n", (34978, 35007), True, 'import matplotlib.pyplot as plt\n'), ((35008, 35050), 'matplotlib.pyplot.plot', 'plt.plot', (['max_1_y_time', 'max_1_y_side', '"""k-"""'], {}), "(max_1_y_time, max_1_y_side, 'k-')\n", (35016, 35050), True, 'import matplotlib.pyplot as plt\n'), ((35051, 35088), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_y_time', 'min_1_y', '"""k-"""'], {}), "(min_1_y_time, min_1_y, 'k-')\n", (35059, 35088), True, 'import matplotlib.pyplot as plt\n'), ((35089, 35131), 'matplotlib.pyplot.plot', 'plt.plot', (['min_1_y_time', 'min_1_y_side', '"""k-"""'], {}), "(min_1_y_time, min_1_y_side, 'k-')\n", (35097, 35131), True, 'import matplotlib.pyplot as plt\n'), ((35132, 35188), 'matplotlib.pyplot.plot', 'plt.plot', (['dash_max_min_1_y_time', 'dash_max_min_1_y', '"""k--"""'], {}), "(dash_max_min_1_y_time, dash_max_min_1_y, 'k--')\n", (35140, 35188), True, 'import matplotlib.pyplot as plt\n'), ((35189, 35237), 'matplotlib.pyplot.text', 'plt.text', (['(4.48 * np.pi)', '(-2.5)', '"""$\\\\frac{p_1}{2}$"""'], {}), "(4.48 * np.pi, -2.5, '$\\\\frac{p_1}{2}$')\n", (35197, 35237), True, 'import matplotlib.pyplot as plt\n'), ((35239, 35273), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(3.9 * np.pi)', '(5.6 * np.pi)'], {}), '(3.9 * np.pi, 5.6 * np.pi)\n', (35247, 35273), True, 'import matplotlib.pyplot as plt\n'), ((35274, 35332), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(4 * np.pi, 5 * np.pi)', "('4$\\\\pi$', '5$\\\\pi$')"], {}), "((4 * np.pi, 5 * np.pi), ('4$\\\\pi$', '5$\\\\pi$'))\n", (35284, 35332), True, 'import matplotlib.pyplot as plt\n'), ((35333, 35391), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (35343, 35391), True, 'import matplotlib.pyplot as plt\n'), ((35552, 35615), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/edge_effects_characteristic_wave.png"""'], {}), "('jss_figures/edge_effects_characteristic_wave.png')\n", (35563, 35615), True, 'import matplotlib.pyplot as plt\n'), ((35616, 35626), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (35624, 35626), True, 'import matplotlib.pyplot as plt\n'), ((35641, 35664), 'numpy.linspace', 'np.linspace', (['(5)', '(95)', '(100)'], {}), '(5, 95, 100)\n', (35652, 35664), True, 'import numpy as np\n'), ((35788, 35838), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 't', 'time_series': 'signal_orig'}), '(time=t, time_series=signal_orig)\n', (35805, 35838), False, 'import emd_utils\n'), ((36106, 36137), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(1001)'], {}), '(0, 5 * np.pi, 1001)\n', (36117, 36137), True, 'import numpy as np\n'), ((36191, 36221), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(101)'], {}), '(0, 5 * np.pi, 101)\n', (36202, 36221), True, 'import numpy as np\n'), ((36239, 36259), 'emd_utils.time_extension', 'time_extension', (['time'], {}), '(time)\n', (36253, 36259), False, 'from emd_utils import time_extension, Utility\n'), ((37406, 37429), 'numpy.hstack', 'np.hstack', (['(weights, 0)'], {}), '((weights, 0))\n', (37415, 37429), True, 'import numpy as np\n'), ((39418, 39440), 'cvxpy.Problem', 'cvx.Problem', (['objective'], {}), '(objective)\n', (39429, 39440), True, 
'import cvxpy as cvx\n'), ((39507, 39525), 'numpy.array', 'np.array', (['vx.value'], {}), '(vx.value)\n', (39515, 39525), True, 'import numpy as np\n'), ((40751, 40803), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time', 'time_series': 'lsq_signal'}), '(time=time, time_series=lsq_signal)\n', (40768, 40803), False, 'import emd_utils\n'), ((40821, 40892), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time_extended', 'time_series': 'time_series_extended'}), '(time=time_extended, time_series=time_series_extended)\n', (40838, 40892), False, 'import emd_utils\n'), ((41503, 41519), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (41514, 41519), True, 'import matplotlib.pyplot as plt\n'), ((41559, 41608), 'matplotlib.pyplot.title', 'plt.title', (['"""Single Neuron Neural Network Example"""'], {}), "('Single Neuron Neural Network Example')\n", (41568, 41608), True, 'import matplotlib.pyplot as plt\n'), ((41609, 41661), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'lsq_signal'], {'zorder': '(2)', 'label': '"""Signal"""'}), "(time, lsq_signal, zorder=2, label='Signal')\n", (41617, 41661), True, 'import matplotlib.pyplot as plt\n'), ((41773, 41838), 'matplotlib.pyplot.scatter', 'plt.scatter', (['maxima_time', 'maxima'], {'c': '"""r"""', 'zorder': '(3)', 'label': '"""Maxima"""'}), "(maxima_time, maxima, c='r', zorder=3, label='Maxima')\n", (41784, 41838), True, 'import matplotlib.pyplot as plt\n'), ((41839, 41904), 'matplotlib.pyplot.scatter', 'plt.scatter', (['minima_time', 'minima'], {'c': '"""b"""', 'zorder': '(3)', 'label': '"""Minima"""'}), "(minima_time, minima, c='b', zorder=3, label='Minima')\n", (41850, 41904), True, 'import matplotlib.pyplot as plt\n'), ((44091, 44125), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(3.4 * np.pi)', '(5.6 * np.pi)'], {}), '(3.4 * np.pi, 5.6 * np.pi)\n', (44099, 44125), True, 'import matplotlib.pyplot as plt\n'), ((44126, 44184), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(4 * np.pi, 5 * np.pi)', "('4$\\\\pi$', '5$\\\\pi$')"], {}), "((4 * np.pi, 5 * np.pi), ('4$\\\\pi$', '5$\\\\pi$'))\n", (44136, 44184), True, 'import matplotlib.pyplot as plt\n'), ((44185, 44243), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (44195, 44243), True, 'import matplotlib.pyplot as plt\n'), ((44404, 44449), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/neural_network.png"""'], {}), "('jss_figures/neural_network.png')\n", (44415, 44449), True, 'import matplotlib.pyplot as plt\n'), ((44450, 44460), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (44458, 44460), True, 'import matplotlib.pyplot as plt\n'), ((44472, 44489), 'numpy.random.seed', 'np.random.seed', (['(0)'], {}), '(0)\n', (44486, 44489), True, 'import numpy as np\n'), ((44498, 44529), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(1001)'], {}), '(0, 5 * np.pi, 1001)\n', (44509, 44529), True, 'import numpy as np\n'), ((44541, 44570), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(51)'], {}), '(0, 5 * np.pi, 51)\n', (44552, 44570), True, 'import numpy as np\n'), ((44722, 44761), 'AdvEMDpy.EMD', 'EMD', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (44725, 44761), False, 'from AdvEMDpy import EMD\n'), ((44980, 45009), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(31)'], {}), '(0, 5 * np.pi, 31)\n', (44991, 45009), True, 'import numpy as np\n'), ((45228, 45257), 
'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(11)'], {}), '(0, 5 * np.pi, 11)\n', (45239, 45257), True, 'import numpy as np\n'), ((45477, 45495), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {}), '(3, 1)\n', (45489, 45495), True, 'import matplotlib.pyplot as plt\n'), ((45592, 45623), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.1)'}), '(hspace=0.1)\n', (45611, 45623), True, 'import matplotlib.pyplot as plt\n'), ((49286, 49337), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/DFA_different_trends.png"""'], {}), "('jss_figures/DFA_different_trends.png')\n", (49297, 49337), True, 'import matplotlib.pyplot as plt\n'), ((49338, 49348), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (49346, 49348), True, 'import matplotlib.pyplot as plt\n'), ((49371, 49389), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(3)', '(1)'], {}), '(3, 1)\n', (49383, 49389), True, 'import matplotlib.pyplot as plt\n'), ((49500, 49531), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.1)'}), '(hspace=0.1)\n', (49519, 49531), True, 'import matplotlib.pyplot as plt\n'), ((51975, 52033), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/DFA_different_trends_zoomed.png"""'], {}), "('jss_figures/DFA_different_trends_zoomed.png')\n", (51986, 52033), True, 'import matplotlib.pyplot as plt\n'), ((52034, 52044), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (52042, 52044), True, 'import matplotlib.pyplot as plt\n'), ((52058, 52135), 'emd_hilbert.hilbert_spectrum', 'hilbert_spectrum', (['time', 'imfs_51', 'hts_51', 'ifs_51'], {'max_frequency': '(12)', 'plot': '(False)'}), '(time, imfs_51, hts_51, ifs_51, max_frequency=12, plot=False)\n', (52074, 52135), False, 'from emd_hilbert import Hilbert, hilbert_spectrum\n'), ((52152, 52168), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (52163, 52168), True, 'import matplotlib.pyplot as plt\n'), ((52951, 52989), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (rad.s$^{-1}$)"""'], {}), "('Frequency (rad.s$^{-1}$)')\n", (52961, 52989), True, 'import matplotlib.pyplot as plt\n'), ((52991, 53013), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (53001, 53013), True, 'import matplotlib.pyplot as plt\n'), ((53180, 53231), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/DFA_hilbert_spectrum.png"""'], {}), "('jss_figures/DFA_hilbert_spectrum.png')\n", (53191, 53231), True, 'import matplotlib.pyplot as plt\n'), ((53232, 53242), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (53240, 53242), True, 'import matplotlib.pyplot as plt\n'), ((53261, 53292), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(1001)'], {}), '(0, 5 * np.pi, 1001)\n', (53272, 53292), True, 'import numpy as np\n'), ((53347, 53376), 'numpy.linspace', 'np.linspace', (['(0)', '(5 * np.pi)', '(51)'], {}), '(0, 5 * np.pi, 51)\n', (53358, 53376), True, 'import numpy as np\n'), ((53385, 53432), 'emd_mean.Fluctuation', 'Fluctuation', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (53396, 53432), False, 'from emd_mean import Fluctuation\n'), ((53926, 53969), 'emd_utils.Utility', 'Utility', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (53933, 53969), False, 'from emd_utils import time_extension, Utility\n'), ((54062, 54078), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', 
(54073, 54078), True, 'import matplotlib.pyplot as plt\n'), ((54249, 54320), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'time_series'], {'label': '"""Time series"""', 'zorder': '(2)', 'LineWidth': '(2)'}), "(time, time_series, label='Time series', zorder=2, LineWidth=2)\n", (54257, 54320), True, 'import matplotlib.pyplot as plt\n'), ((54321, 54406), 'matplotlib.pyplot.scatter', 'plt.scatter', (['time[maxima]', 'time_series[maxima]'], {'c': '"""r"""', 'label': '"""Maxima"""', 'zorder': '(10)'}), "(time[maxima], time_series[maxima], c='r', label='Maxima', zorder=10\n )\n", (54332, 54406), True, 'import matplotlib.pyplot as plt\n'), ((54402, 54487), 'matplotlib.pyplot.scatter', 'plt.scatter', (['time[minima]', 'time_series[minima]'], {'c': '"""b"""', 'label': '"""Minima"""', 'zorder': '(10)'}), "(time[minima], time_series[minima], c='b', label='Minima', zorder=10\n )\n", (54413, 54487), True, 'import matplotlib.pyplot as plt\n'), ((55096, 55234), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi)', "('$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$')"], {}), "((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (\n '$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$'))\n", (55106, 55234), True, 'import matplotlib.pyplot as plt\n'), ((55242, 55300), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (55252, 55300), True, 'import matplotlib.pyplot as plt\n'), ((55301, 55338), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.25 * np.pi)', '(5.25 * np.pi)'], {}), '(-0.25 * np.pi, 5.25 * np.pi)\n', (55309, 55338), True, 'import matplotlib.pyplot as plt\n'), ((55499, 55559), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Schoenberg_Whitney_Conditions.png"""'], {}), "('jss_figures/Schoenberg_Whitney_Conditions.png')\n", (55510, 55559), True, 'import matplotlib.pyplot as plt\n'), ((55560, 55570), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (55568, 55570), True, 'import matplotlib.pyplot as plt\n'), ((55609, 55660), 'numpy.linspace', 'np.linspace', (['((0 + a) * np.pi)', '((5 - a) * np.pi)', '(1001)'], {}), '((0 + a) * np.pi, (5 - a) * np.pi, 1001)\n', (55620, 55660), True, 'import numpy as np\n'), ((55669, 55718), 'numpy.linspace', 'np.linspace', (['((0 + a) * np.pi)', '((5 - a) * np.pi)', '(11)'], {}), '((0 + a) * np.pi, (5 - a) * np.pi, 11)\n', (55680, 55718), True, 'import numpy as np\n'), ((55773, 55826), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (55790, 55826), False, 'import emd_utils\n'), ((56176, 56232), 'emd_mean.Fluctuation', 'emd_mean.Fluctuation', ([], {'time': 'time', 'time_series': 'time_series'}), '(time=time, time_series=time_series)\n', (56196, 56232), False, 'import emd_mean\n'), ((58501, 58537), 'numpy.linspace', 'np.linspace', (['knots[0]', 'knots[-1]', '(31)'], {}), '(knots[0], knots[-1], 31)\n', (58512, 58537), True, 'import numpy as np\n'), ((58666, 58731), 'AdvEMDpy.EMD', 'AdvEMDpy.EMD', ([], {'time': 'derivative_time', 'time_series': 'derivative_of_lsq'}), '(time=derivative_time, time_series=derivative_of_lsq)\n', (58678, 58731), False, 'import AdvEMDpy\n'), ((58938, 59004), 'emd_utils.Utility', 'emd_utils.Utility', ([], {'time': 'time[:-1]', 'time_series': 'imf_1_of_derivative'}), '(time=time[:-1], time_series=imf_1_of_derivative)\n', (58955, 59004), 
False, 'import emd_utils\n'), ((60536, 60552), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (60547, 60552), True, 'import matplotlib.pyplot as plt\n'), ((60592, 60644), 'matplotlib.pyplot.title', 'plt.title', (['"""Detrended Fluctuation Analysis Examples"""'], {}), "('Detrended Fluctuation Analysis Examples')\n", (60601, 60644), True, 'import matplotlib.pyplot as plt\n'), ((60645, 60706), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'time_series'], {'LineWidth': '(2)', 'label': '"""Time series"""'}), "(time, time_series, LineWidth=2, label='Time series')\n", (60653, 60706), True, 'import matplotlib.pyplot as plt\n'), ((60707, 60771), 'matplotlib.pyplot.scatter', 'plt.scatter', (['maxima_x', 'maxima_y'], {'c': '"""r"""', 'zorder': '(4)', 'label': '"""Maxima"""'}), "(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')\n", (60718, 60771), True, 'import matplotlib.pyplot as plt\n'), ((60772, 60836), 'matplotlib.pyplot.scatter', 'plt.scatter', (['minima_x', 'minima_y'], {'c': '"""b"""', 'zorder': '(4)', 'label': '"""Minima"""'}), "(minima_x, minima_y, c='b', zorder=4, label='Minima')\n", (60783, 60836), True, 'import matplotlib.pyplot as plt\n'), ((61316, 61361), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'minima_envelope'], {'c': '"""darkblue"""'}), "(time, minima_envelope, c='darkblue')\n", (61324, 61361), True, 'import matplotlib.pyplot as plt\n'), ((61362, 61431), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '((maxima_envelope + minima_envelope) / 2)'], {'c': '"""darkblue"""'}), "(time, (maxima_envelope + minima_envelope) / 2, c='darkblue')\n", (61370, 61431), True, 'import matplotlib.pyplot as plt\n'), ((61526, 61577), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'minima_envelope_smooth'], {'c': '"""darkred"""'}), "(time, minima_envelope_smooth, c='darkred')\n", (61534, 61577), True, 'import matplotlib.pyplot as plt\n'), ((61578, 61665), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '((maxima_envelope_smooth + minima_envelope_smooth) / 2)'], {'c': '"""darkred"""'}), "(time, (maxima_envelope_smooth + minima_envelope_smooth) / 2, c=\n 'darkred')\n", (61586, 61665), True, 'import matplotlib.pyplot as plt\n'), ((61755, 61806), 'matplotlib.pyplot.plot', 'plt.plot', (['time', 'EEMD_minima_envelope'], {'c': '"""darkgreen"""'}), "(time, EEMD_minima_envelope, c='darkgreen')\n", (61763, 61806), True, 'import matplotlib.pyplot as plt\n'), ((61807, 61892), 'matplotlib.pyplot.plot', 'plt.plot', (['time', '((EEMD_maxima_envelope + EEMD_minima_envelope) / 2)'], {'c': '"""darkgreen"""'}), "(time, (EEMD_maxima_envelope + EEMD_minima_envelope) / 2, c='darkgreen'\n )\n", (61815, 61892), True, 'import matplotlib.pyplot as plt\n'), ((62170, 62308), 'matplotlib.pyplot.xticks', 'plt.xticks', (['(0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi)', "('$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$')"], {}), "((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (\n '$0$', '$\\\\pi$', '2$\\\\pi$', '3$\\\\pi$', '4$\\\\pi$', '5$\\\\pi$'))\n", (62180, 62308), True, 'import matplotlib.pyplot as plt\n'), ((62377, 62435), 'matplotlib.pyplot.yticks', 'plt.yticks', (['(-2, -1, 0, 1, 2)', "('-2', '-1', '0', '1', '2')"], {}), "((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))\n", (62387, 62435), True, 'import matplotlib.pyplot as plt\n'), ((62436, 62473), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-0.25 * np.pi)', '(5.25 * np.pi)'], {}), '(-0.25 * np.pi, 5.25 * np.pi)\n', (62444, 62473), True, 'import matplotlib.pyplot as plt\n'), ((62634, 62695), 
'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/detrended_fluctuation_analysis.png"""'], {}), "('jss_figures/detrended_fluctuation_analysis.png')\n", (62645, 62695), True, 'import matplotlib.pyplot as plt\n'), ((62696, 62706), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (62704, 62706), True, 'import matplotlib.pyplot as plt\n'), ((62914, 62939), 'numpy.linspace', 'np.linspace', (['(0)', '(150)', '(1501)'], {}), '(0, 150, 1501)\n', (62925, 62939), True, 'import numpy as np\n'), ((62964, 62996), 'scipy.integrate.odeint', 'odeint', (['duffing_equation', 'XY0', 't'], {}), '(duffing_equation, XY0, t)\n', (62970, 62996), False, 'from scipy.integrate import odeint\n'), ((63156, 63174), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {}), '(2, 1)\n', (63168, 63174), True, 'import matplotlib.pyplot as plt\n'), ((63175, 63206), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.2)'}), '(hspace=0.2)\n', (63194, 63206), True, 'import matplotlib.pyplot as plt\n'), ((63783, 63830), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation.png"""'], {}), "('jss_figures/Duffing_equation.png')\n", (63794, 63830), True, 'import matplotlib.pyplot as plt\n'), ((63831, 63841), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (63839, 63841), True, 'import matplotlib.pyplot as plt\n'), ((63891, 63902), 'PyEMD.EMD', 'pyemd0215', ([], {}), '()\n', (63900, 63902), True, 'from PyEMD import EMD as pyemd0215\n'), ((63934, 63993), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['py_emd.T', '(10)', '"""hilbert"""'], {}), "(py_emd.T, 10, 'hilbert')\n", (63968, 63993), True, 'import emd as emd040\n'), ((64018, 64062), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(0.2)', '(100)'], {}), '(0, 0.2, 100)\n', (64049, 64062), True, 'import emd as emd040\n'), ((64069, 64116), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (64096, 64116), True, 'import emd as emd040\n'), ((64123, 64152), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (64138, 64152), False, 'from scipy.ndimage import gaussian_filter\n'), ((64158, 64174), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (64169, 64174), True, 'import matplotlib.pyplot as plt\n'), ((64727, 64756), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 50, 100, 150]'], {}), '([0, 50, 100, 150])\n', (64737, 64756), True, 'import matplotlib.pyplot as plt\n'), ((64757, 64782), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (64767, 64782), True, 'import matplotlib.pyplot as plt\n'), ((64783, 64811), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (Hz)"""'], {}), "('Frequency (Hz)')\n", (64793, 64811), True, 'import matplotlib.pyplot as plt\n'), ((64812, 64834), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (64822, 64834), True, 'import matplotlib.pyplot as plt\n'), ((65001, 65057), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_ht_pyemd.png"""'], {}), "('jss_figures/Duffing_equation_ht_pyemd.png')\n", (65012, 65057), True, 'import matplotlib.pyplot as plt\n'), ((65058, 65068), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (65066, 65068), True, 'import matplotlib.pyplot as plt\n'), ((65070, 65080), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (65078, 
65080), True, 'import matplotlib.pyplot as plt\n'), ((65093, 65112), 'emd.sift.sift', 'emd040.sift.sift', (['x'], {}), '(x)\n', (65109, 65112), True, 'import emd as emd040\n'), ((65126, 65185), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['emd_sift', '(10)', '"""hilbert"""'], {}), "(emd_sift, 10, 'hilbert')\n", (65160, 65185), True, 'import emd as emd040\n'), ((65210, 65254), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(0.2)', '(100)'], {}), '(0, 0.2, 100)\n', (65241, 65254), True, 'import emd as emd040\n'), ((65261, 65308), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (65288, 65308), True, 'import emd as emd040\n'), ((65315, 65344), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (65330, 65344), False, 'from scipy.ndimage import gaussian_filter\n'), ((65350, 65366), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (65361, 65366), True, 'import matplotlib.pyplot as plt\n'), ((65916, 65945), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 50, 100, 150]'], {}), '([0, 50, 100, 150])\n', (65926, 65945), True, 'import matplotlib.pyplot as plt\n'), ((65946, 65971), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (65956, 65971), True, 'import matplotlib.pyplot as plt\n'), ((65972, 66000), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (Hz)"""'], {}), "('Frequency (Hz)')\n", (65982, 66000), True, 'import matplotlib.pyplot as plt\n'), ((66001, 66023), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (66011, 66023), True, 'import matplotlib.pyplot as plt\n'), ((66190, 66244), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_ht_emd.png"""'], {}), "('jss_figures/Duffing_equation_ht_emd.png')\n", (66201, 66244), True, 'import matplotlib.pyplot as plt\n'), ((66245, 66255), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (66253, 66255), True, 'import matplotlib.pyplot as plt\n'), ((66314, 66349), 'AdvEMDpy.EMD', 'AdvEMDpy.EMD', ([], {'time': 't', 'time_series': 'x'}), '(time=t, time_series=x)\n', (66326, 66349), False, 'import AdvEMDpy\n'), ((66467, 66485), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(1)'], {}), '(2, 1)\n', (66479, 66485), True, 'import matplotlib.pyplot as plt\n'), ((66486, 66517), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.3)'}), '(hspace=0.3)\n', (66505, 66517), True, 'import matplotlib.pyplot as plt\n'), ((68054, 68106), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_imfs.png"""'], {}), "('jss_figures/Duffing_equation_imfs.png')\n", (68065, 68106), True, 'import matplotlib.pyplot as plt\n'), ((68107, 68117), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (68115, 68117), True, 'import matplotlib.pyplot as plt\n'), ((68131, 68221), 'emd_hilbert.hilbert_spectrum', 'hilbert_spectrum', (['t', 'emd_duff', 'emd_ht_duff', 'emd_if_duff'], {'max_frequency': '(1.3)', 'plot': '(False)'}), '(t, emd_duff, emd_ht_duff, emd_if_duff, max_frequency=1.3,\n plot=False)\n', (68147, 68221), False, 'from emd_hilbert import Hilbert, hilbert_spectrum\n'), ((68224, 68240), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(111)'], {}), '(111)\n', (68235, 68240), True, 'import matplotlib.pyplot as plt\n'), ((68842, 68871), 'matplotlib.pyplot.xticks', 'plt.xticks', (['[0, 50, 100, 150]'], 
{}), '([0, 50, 100, 150])\n', (68852, 68871), True, 'import matplotlib.pyplot as plt\n'), ((68872, 68897), 'matplotlib.pyplot.yticks', 'plt.yticks', (['[0, 0.1, 0.2]'], {}), '([0, 0.1, 0.2])\n', (68882, 68897), True, 'import matplotlib.pyplot as plt\n'), ((68898, 68926), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (Hz)"""'], {}), "('Frequency (Hz)')\n", (68908, 68926), True, 'import matplotlib.pyplot as plt\n'), ((68927, 68949), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (s)"""'], {}), "('Time (s)')\n", (68937, 68949), True, 'import matplotlib.pyplot as plt\n'), ((69116, 69166), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/Duffing_equation_ht.png"""'], {}), "('jss_figures/Duffing_equation_ht.png')\n", (69127, 69166), True, 'import matplotlib.pyplot as plt\n'), ((69167, 69177), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (69175, 69177), True, 'import matplotlib.pyplot as plt\n'), ((69230, 69275), 'pandas.read_csv', 'pd.read_csv', (['"""Data/co2_mm_mlo.csv"""'], {'header': '(51)'}), "('Data/co2_mm_mlo.csv', header=51)\n", (69241, 69275), True, 'import pandas as pd\n'), ((69277, 69330), 'matplotlib.pyplot.plot', 'plt.plot', (["CO2_data['month']", "CO2_data['decimal date']"], {}), "(CO2_data['month'], CO2_data['decimal date'])\n", (69285, 69330), True, 'import matplotlib.pyplot as plt\n'), ((69426, 69457), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Parts per million"""'], {}), "('Parts per million')\n", (69436, 69457), True, 'import matplotlib.pyplot as plt\n'), ((69458, 69484), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (69468, 69484), True, 'import matplotlib.pyplot as plt\n'), ((69485, 69533), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_concentration.png"""'], {}), "('jss_figures/CO2_concentration.png')\n", (69496, 69533), True, 'import matplotlib.pyplot as plt\n'), ((69534, 69544), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (69542, 69544), True, 'import matplotlib.pyplot as plt\n'), ((69589, 69607), 'numpy.asarray', 'np.asarray', (['signal'], {}), '(signal)\n', (69599, 69607), True, 'import numpy as np\n'), ((69640, 69656), 'numpy.asarray', 'np.asarray', (['time'], {}), '(time)\n', (69650, 69656), True, 'import numpy as np\n'), ((69713, 69724), 'PyEMD.EMD', 'pyemd0215', ([], {}), '()\n', (69722, 69724), True, 'from PyEMD import EMD as pyemd0215\n'), ((69761, 69827), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['py_emd[:2, :].T', '(12)', '"""hilbert"""'], {}), "(py_emd[:2, :].T, 12, 'hilbert')\n", (69795, 69827), True, 'import emd as emd040\n'), ((69954, 69996), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(2)', '(100)'], {}), '(0, 2, 100)\n', (69985, 69996), True, 'import emd as emd040\n'), ((70003, 70050), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (70030, 70050), True, 'import emd as emd040\n'), ((70057, 70086), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (70072, 70086), False, 'from scipy.ndimage import gaussian_filter\n'), ((70097, 70111), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (70109, 70111), True, 'import matplotlib.pyplot as plt\n'), ((70348, 70385), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (year$^{-1}$)"""'], {}), "('Frequency (year$^{-1}$)')\n", (70358, 70385), True, 'import matplotlib.pyplot as plt\n'), 
((70386, 70412), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (70396, 70412), True, 'import matplotlib.pyplot as plt\n'), ((70770, 70818), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_Hilbert_pyemd.png"""'], {}), "('jss_figures/CO2_Hilbert_pyemd.png')\n", (70781, 70818), True, 'import matplotlib.pyplot as plt\n'), ((70819, 70829), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (70827, 70829), True, 'import matplotlib.pyplot as plt\n'), ((70842, 70866), 'emd.sift.sift', 'emd040.sift.sift', (['signal'], {}), '(signal)\n', (70858, 70866), True, 'import emd as emd040\n'), ((70880, 70946), 'emd.spectra.frequency_transform', 'emd040.spectra.frequency_transform', (['emd_sift[:, :1]', '(12)', '"""hilbert"""'], {}), "(emd_sift[:, :1], 12, 'hilbert')\n", (70914, 70946), True, 'import emd as emd040\n'), ((71062, 71104), 'emd.spectra.define_hist_bins', 'emd040.spectra.define_hist_bins', (['(0)', '(2)', '(100)'], {}), '(0, 2, 100)\n', (71093, 71104), True, 'import emd as emd040\n'), ((71111, 71158), 'emd.spectra.hilberthuang', 'emd040.spectra.hilberthuang', (['IF', 'IA', 'freq_edges'], {}), '(IF, IA, freq_edges)\n', (71138, 71158), True, 'import emd as emd040\n'), ((71165, 71194), 'scipy.ndimage.gaussian_filter', 'gaussian_filter', (['hht'], {'sigma': '(1)'}), '(hht, sigma=1)\n', (71180, 71194), False, 'from scipy.ndimage import gaussian_filter\n'), ((71205, 71219), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (71217, 71219), True, 'import matplotlib.pyplot as plt\n'), ((71453, 71490), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (year$^{-1}$)"""'], {}), "('Frequency (year$^{-1}$)')\n", (71463, 71490), True, 'import matplotlib.pyplot as plt\n'), ((71491, 71517), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (71501, 71517), True, 'import matplotlib.pyplot as plt\n'), ((71875, 71921), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_Hilbert_emd.png"""'], {}), "('jss_figures/CO2_Hilbert_emd.png')\n", (71886, 71921), True, 'import matplotlib.pyplot as plt\n'), ((71922, 71932), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (71930, 71932), True, 'import matplotlib.pyplot as plt\n'), ((71992, 72027), 'numpy.linspace', 'np.linspace', (['time[0]', 'time[-1]', '(200)'], {}), '(time[0], time[-1], 200)\n', (72003, 72027), True, 'import numpy as np\n'), ((72043, 72086), 'AdvEMDpy.EMD', 'AdvEMDpy.EMD', ([], {'time': 'time', 'time_series': 'signal'}), '(time=time, time_series=signal)\n', (72055, 72086), False, 'import AdvEMDpy\n'), ((72341, 72359), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(2)', '(2)'], {}), '(2, 2)\n', (72353, 72359), True, 'import matplotlib.pyplot as plt\n'), ((72360, 72391), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'hspace': '(0.5)'}), '(hspace=0.5)\n', (72379, 72391), True, 'import matplotlib.pyplot as plt\n'), ((73151, 73189), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_EMD.png"""'], {}), "('jss_figures/CO2_EMD.png')\n", (73162, 73189), True, 'import matplotlib.pyplot as plt\n'), ((73190, 73200), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (73198, 73200), True, 'import matplotlib.pyplot as plt\n'), ((73214, 73302), 'emd_hilbert.hilbert_spectrum', 'hilbert_spectrum', (['time', 'imfs', 'hts', 'ifs'], {'max_frequency': '(10)', 'which_imfs': '[1]', 'plot': '(False)'}), '(time, imfs, hts, ifs, max_frequency=10, which_imfs=[1],\n plot=False)\n', (73230, 
73302), False, 'from emd_hilbert import Hilbert, hilbert_spectrum\n'), ((73388, 73402), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {}), '()\n', (73400, 73402), True, 'import matplotlib.pyplot as plt\n'), ((73718, 73755), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Frequency (year$^{-1}$)"""'], {}), "('Frequency (year$^{-1}$)')\n", (73728, 73755), True, 'import matplotlib.pyplot as plt\n'), ((73756, 73782), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time (years)"""'], {}), "('Time (years)')\n", (73766, 73782), True, 'import matplotlib.pyplot as plt\n'), ((74105, 74147), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""jss_figures/CO2_Hilbert.png"""'], {}), "('jss_figures/CO2_Hilbert.png')\n", (74116, 74147), True, 'import matplotlib.pyplot as plt\n'), ((74148, 74158), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (74156, 74158), True, 'import matplotlib.pyplot as plt\n'), ((772, 795), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (778, 795), True, 'import numpy as np\n'), ((798, 825), 'numpy.sin', 'np.sin', (['(5 * pseudo_alg_time)'], {}), '(5 * pseudo_alg_time)\n', (804, 825), True, 'import numpy as np\n'), ((1807, 1830), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (1813, 1830), True, 'import numpy as np\n'), ((2994, 3021), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (3005, 3021), True, 'import numpy as np\n'), ((3056, 3083), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (3067, 3083), True, 'import numpy as np\n'), ((3286, 3315), 'numpy.sin', 'np.sin', (['knot_demonstrate_time'], {}), '(knot_demonstrate_time)\n', (3292, 3315), True, 'import numpy as np\n'), ((3318, 3351), 'numpy.sin', 'np.sin', (['(5 * knot_demonstrate_time)'], {}), '(5 * knot_demonstrate_time)\n', (3324, 3351), True, 'import numpy as np\n'), ((4529, 4552), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4540, 4552), True, 'import numpy as np\n'), ((4662, 4685), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4673, 4685), True, 'import numpy as np\n'), ((4763, 4786), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4774, 4786), True, 'import numpy as np\n'), ((5139, 5168), 'numpy.sin', 'np.sin', (['knot_demonstrate_time'], {}), '(knot_demonstrate_time)\n', (5145, 5168), True, 'import numpy as np\n'), ((5171, 5204), 'numpy.sin', 'np.sin', (['(5 * knot_demonstrate_time)'], {}), '(5 * knot_demonstrate_time)\n', (5177, 5204), True, 'import numpy as np\n'), ((6445, 6468), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6456, 6468), True, 'import numpy as np\n'), ((6570, 6593), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6581, 6593), True, 'import numpy as np\n'), ((6663, 6686), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6674, 6686), True, 'import numpy as np\n'), ((7017, 7046), 'numpy.sin', 'np.sin', (['knot_demonstrate_time'], {}), '(knot_demonstrate_time)\n', (7023, 7046), True, 'import numpy as np\n'), ((7049, 7082), 'numpy.sin', 'np.sin', (['(5 * knot_demonstrate_time)'], {}), '(5 * knot_demonstrate_time)\n', (7055, 7082), True, 'import numpy as np\n'), ((8309, 8332), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8320, 8332), True, 'import numpy as np\n'), ((8437, 8460), 'numpy.linspace', 
'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8448, 8460), True, 'import numpy as np\n'), ((8533, 8556), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8544, 8556), True, 'import numpy as np\n'), ((9378, 9400), 'numpy.random.normal', 'np.random.normal', (['(0)', '(1)'], {}), '(0, 1)\n', (9394, 9400), True, 'import numpy as np\n'), ((10482, 10526), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (10493, 10526), True, 'import numpy as np\n'), ((10630, 10674), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (10641, 10674), True, 'import numpy as np\n'), ((10752, 10775), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (10763, 10775), True, 'import numpy as np\n'), ((10835, 10858), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (10846, 10858), True, 'import numpy as np\n'), ((13644, 13688), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (13655, 13688), True, 'import numpy as np\n'), ((13792, 13836), 'numpy.linspace', 'np.linspace', (['(0.85 * np.pi)', '(1.15 * np.pi)', '(101)'], {}), '(0.85 * np.pi, 1.15 * np.pi, 101)\n', (13803, 13836), True, 'import numpy as np\n'), ((13914, 13937), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (13925, 13937), True, 'import numpy as np\n'), ((13997, 14020), 'numpy.linspace', 'np.linspace', (['(-3)', '(3)', '(101)'], {}), '(-3, 3, 101)\n', (14008, 14020), True, 'import numpy as np\n'), ((15795, 15822), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(0.8)', '(100)'], {}), '(-0.2, 0.8, 100)\n', (15806, 15822), True, 'import numpy as np\n'), ((15860, 15887), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(0.8)', '(100)'], {}), '(-0.2, 0.8, 100)\n', (15871, 15887), True, 'import numpy as np\n'), ((16263, 16290), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (16274, 16290), True, 'import numpy as np\n'), ((16328, 16355), 'numpy.linspace', 'np.linspace', (['(-0.2)', '(1.2)', '(100)'], {}), '(-0.2, 1.2, 100)\n', (16339, 16355), True, 'import numpy as np\n'), ((16622, 16634), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (16628, 16634), True, 'import numpy as np\n'), ((16637, 16653), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (16643, 16653), True, 'import numpy as np\n'), ((17029, 17056), 'numpy.ones_like', 'np.ones_like', (['max_dash_time'], {}), '(max_dash_time)\n', (17041, 17056), True, 'import numpy as np\n'), ((17160, 17187), 'numpy.ones_like', 'np.ones_like', (['min_dash_time'], {}), '(min_dash_time)\n', (17172, 17187), True, 'import numpy as np\n'), ((17517, 17552), 'numpy.ones_like', 'np.ones_like', (['max_discard_dash_time'], {}), '(max_discard_dash_time)\n', (17529, 17552), True, 'import numpy as np\n'), ((18741, 18770), 'numpy.ones_like', 'np.ones_like', (['length_distance'], {}), '(length_distance)\n', (18753, 18770), True, 'import numpy as np\n'), ((18880, 18905), 'numpy.ones_like', 'np.ones_like', (['length_time'], {}), '(length_time)\n', (18892, 18905), True, 'import numpy as np\n'), ((18937, 18962), 'numpy.ones_like', 'np.ones_like', (['length_time'], {}), '(length_time)\n', (18949, 18962), True, 'import numpy as np\n'), ((19089, 19120), 'numpy.ones_like', 'np.ones_like', (['length_distance_2'], 
{}), '(length_distance_2)\n', (19101, 19120), True, 'import numpy as np\n'), ((19237, 19264), 'numpy.ones_like', 'np.ones_like', (['length_time_2'], {}), '(length_time_2)\n', (19249, 19264), True, 'import numpy as np\n'), ((19298, 19325), 'numpy.ones_like', 'np.ones_like', (['length_time_2'], {}), '(length_time_2)\n', (19310, 19325), True, 'import numpy as np\n'), ((19365, 19377), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (19372, 19377), True, 'import numpy as np\n'), ((19412, 19424), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (19419, 19424), True, 'import numpy as np\n'), ((19561, 19583), 'numpy.ones_like', 'np.ones_like', (['end_time'], {}), '(end_time)\n', (19573, 19583), True, 'import numpy as np\n'), ((19698, 19731), 'numpy.ones_like', 'np.ones_like', (['anti_symmetric_time'], {}), '(anti_symmetric_time)\n', (19710, 19731), True, 'import numpy as np\n'), ((22012, 22024), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (22018, 22024), True, 'import numpy as np\n'), ((22027, 22043), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (22033, 22043), True, 'import numpy as np\n'), ((22497, 22521), 'numpy.ones_like', 'np.ones_like', (['max_dash_1'], {}), '(max_dash_1)\n', (22509, 22521), True, 'import numpy as np\n'), ((22555, 22579), 'numpy.ones_like', 'np.ones_like', (['max_dash_1'], {}), '(max_dash_1)\n', (22567, 22579), True, 'import numpy as np\n'), ((22761, 22785), 'numpy.ones_like', 'np.ones_like', (['min_dash_1'], {}), '(min_dash_1)\n', (22773, 22785), True, 'import numpy as np\n'), ((22819, 22843), 'numpy.ones_like', 'np.ones_like', (['min_dash_1'], {}), '(min_dash_1)\n', (22831, 22843), True, 'import numpy as np\n'), ((23342, 23366), 'numpy.ones_like', 'np.ones_like', (['max_dash_1'], {}), '(max_dash_1)\n', (23354, 23366), True, 'import numpy as np\n'), ((23878, 23902), 'numpy.ones_like', 'np.ones_like', (['min_dash_1'], {}), '(min_dash_1)\n', (23890, 23902), True, 'import numpy as np\n'), ((24227, 24252), 'numpy.ones_like', 'np.ones_like', (['maxima_dash'], {}), '(maxima_dash)\n', (24239, 24252), True, 'import numpy as np\n'), ((24289, 24314), 'numpy.ones_like', 'np.ones_like', (['maxima_dash'], {}), '(maxima_dash)\n', (24301, 24314), True, 'import numpy as np\n'), ((24363, 24388), 'numpy.ones_like', 'np.ones_like', (['maxima_dash'], {}), '(maxima_dash)\n', (24375, 24388), True, 'import numpy as np\n'), ((24495, 24530), 'numpy.ones_like', 'np.ones_like', (['maxima_line_dash_time'], {}), '(maxima_line_dash_time)\n', (24507, 24530), True, 'import numpy as np\n'), ((24627, 24652), 'numpy.ones_like', 'np.ones_like', (['minima_dash'], {}), '(minima_dash)\n', (24639, 24652), True, 'import numpy as np\n'), ((24689, 24714), 'numpy.ones_like', 'np.ones_like', (['minima_dash'], {}), '(minima_dash)\n', (24701, 24714), True, 'import numpy as np\n'), ((24763, 24788), 'numpy.ones_like', 'np.ones_like', (['minima_dash'], {}), '(minima_dash)\n', (24775, 24788), True, 'import numpy as np\n'), ((24896, 24931), 'numpy.ones_like', 'np.ones_like', (['minima_line_dash_time'], {}), '(minima_line_dash_time)\n', (24908, 24931), True, 'import numpy as np\n'), ((25757, 25781), 'numpy.ones_like', 'np.ones_like', (['min_dash_4'], {}), '(min_dash_4)\n', (25769, 25781), True, 'import numpy as np\n'), ((29062, 29074), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (29068, 29074), True, 'import numpy as np\n'), ((29077, 29093), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (29083, 29093), True, 'import numpy as np\n'), ((29372, 29407), 'numpy.abs', 'np.abs', 
(['(maxima_y[-2] - minima_y[-2])'], {}), '(maxima_y[-2] - minima_y[-2])\n', (29378, 29407), True, 'import numpy as np\n'), ((29417, 29452), 'numpy.abs', 'np.abs', (['(maxima_y[-1] - minima_y[-1])'], {}), '(maxima_y[-1] - minima_y[-1])\n', (29423, 29452), True, 'import numpy as np\n'), ((29466, 29501), 'numpy.abs', 'np.abs', (['(maxima_x[-2] - minima_x[-2])'], {}), '(maxima_x[-2] - minima_x[-2])\n', (29472, 29501), True, 'import numpy as np\n'), ((29511, 29546), 'numpy.abs', 'np.abs', (['(maxima_x[-1] - minima_x[-1])'], {}), '(maxima_x[-1] - minima_x[-1])\n', (29517, 29546), True, 'import numpy as np\n'), ((29806, 29871), 'numpy.cos', 'np.cos', (['(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))'], {}), '(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))\n', (29812, 29871), True, 'import numpy as np\n'), ((31033, 31059), 'numpy.ones_like', 'np.ones_like', (['max_2_x_time'], {}), '(max_2_x_time)\n', (31045, 31059), True, 'import numpy as np\n'), ((31241, 31267), 'numpy.ones_like', 'np.ones_like', (['min_2_x_time'], {}), '(min_2_x_time)\n', (31253, 31267), True, 'import numpy as np\n'), ((31371, 31401), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_2_x'], {}), '(dash_max_min_2_x)\n', (31383, 31401), True, 'import numpy as np\n'), ((31564, 31585), 'numpy.ones_like', 'np.ones_like', (['max_2_y'], {}), '(max_2_y)\n', (31576, 31585), True, 'import numpy as np\n'), ((31748, 31769), 'numpy.ones_like', 'np.ones_like', (['min_2_y'], {}), '(min_2_y)\n', (31760, 31769), True, 'import numpy as np\n'), ((31866, 31901), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_2_y_time'], {}), '(dash_max_min_2_y_time)\n', (31878, 31901), True, 'import numpy as np\n'), ((32083, 32109), 'numpy.ones_like', 'np.ones_like', (['max_1_x_time'], {}), '(max_1_x_time)\n', (32095, 32109), True, 'import numpy as np\n'), ((32291, 32317), 'numpy.ones_like', 'np.ones_like', (['min_1_x_time'], {}), '(min_1_x_time)\n', (32303, 32317), True, 'import numpy as np\n'), ((32421, 32451), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_1_x'], {}), '(dash_max_min_1_x)\n', (32433, 32451), True, 'import numpy as np\n'), ((32614, 32635), 'numpy.ones_like', 'np.ones_like', (['max_1_y'], {}), '(max_1_y)\n', (32626, 32635), True, 'import numpy as np\n'), ((32798, 32819), 'numpy.ones_like', 'np.ones_like', (['min_1_y'], {}), '(min_1_y)\n', (32810, 32819), True, 'import numpy as np\n'), ((32916, 32951), 'numpy.ones_like', 'np.ones_like', (['dash_max_min_1_y_time'], {}), '(dash_max_min_1_y_time)\n', (32928, 32951), True, 'import numpy as np\n'), ((36151, 36163), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (36157, 36163), True, 'import numpy as np\n'), ((36166, 36182), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (36172, 36182), True, 'import numpy as np\n'), ((36283, 36311), 'numpy.zeros_like', 'np.zeros_like', (['time_extended'], {}), '(time_extended)\n', (36296, 36311), True, 'import numpy as np\n'), ((36783, 36808), 'numpy.ones', 'np.ones', (['neural_network_k'], {}), '(neural_network_k)\n', (36790, 36808), True, 'import numpy as np\n'), ((36943, 36974), 'numpy.matmul', 'np.matmul', (['weights', 'train_input'], {}), '(weights, train_input)\n', (36952, 36974), True, 'import numpy as np\n'), ((37097, 37123), 'numpy.mean', 'np.mean', (['gradients'], {'axis': '(1)'}), '(gradients, axis=1)\n', (37104, 37123), True, 'import numpy as np\n'), ((39331, 39364), 'cvxpy.norm', 'cvx.norm', (['(2 * (vx * P) + 1 - t)', '(2)'], {}), '(2 * (vx * P) + 1 - t, 2)\n', (39339, 39364), True, 'import cvxpy as 
cvx\n'), ((42240, 42269), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (42251, 42269), True, 'import numpy as np\n'), ((42346, 42435), 'numpy.linspace', 'np.linspace', (['((time[-302] + time[-301]) / 2)', '((time[-302] + time[-301]) / 2 + 0.1)', '(100)'], {}), '((time[-302] + time[-301]) / 2, (time[-302] + time[-301]) / 2 + \n 0.1, 100)\n', (42357, 42435), True, 'import numpy as np\n'), ((42483, 42572), 'numpy.linspace', 'np.linspace', (['((time[-302] + time[-301]) / 2)', '((time[-302] + time[-301]) / 2 + 0.1)', '(100)'], {}), '((time[-302] + time[-301]) / 2, (time[-302] + time[-301]) / 2 + \n 0.1, 100)\n', (42494, 42572), True, 'import numpy as np\n'), ((42619, 42748), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1002]) / 2)', '((time_extended[-1001] + time_extended[-1002]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1002]) / 2, (\n time_extended[-1001] + time_extended[-1002]) / 2 - 0.1, 100)\n', (42630, 42748), True, 'import numpy as np\n'), ((42808, 42937), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1002]) / 2)', '((time_extended[-1001] + time_extended[-1002]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1002]) / 2, (\n time_extended[-1001] + time_extended[-1002]) / 2 - 0.1, 100)\n', (42819, 42937), True, 'import numpy as np\n'), ((43064, 43093), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (43075, 43093), True, 'import numpy as np\n'), ((43160, 43189), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (43171, 43189), True, 'import numpy as np\n'), ((43290, 43379), 'numpy.linspace', 'np.linspace', (['((time[-202] + time[-201]) / 2)', '((time[-202] + time[-201]) / 2 + 0.1)', '(100)'], {}), '((time[-202] + time[-201]) / 2, (time[-202] + time[-201]) / 2 + \n 0.1, 100)\n', (43301, 43379), True, 'import numpy as np\n'), ((43430, 43519), 'numpy.linspace', 'np.linspace', (['((time[-202] + time[-201]) / 2)', '((time[-202] + time[-201]) / 2 + 0.1)', '(100)'], {}), '((time[-202] + time[-201]) / 2, (time[-202] + time[-201]) / 2 + \n 0.1, 100)\n', (43441, 43519), True, 'import numpy as np\n'), ((43569, 43698), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1000]) / 2)', '((time_extended[-1001] + time_extended[-1000]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1000]) / 2, (\n time_extended[-1001] + time_extended[-1000]) / 2 - 0.1, 100)\n', (43580, 43698), True, 'import numpy as np\n'), ((43761, 43890), 'numpy.linspace', 'np.linspace', (['((time_extended[-1001] + time_extended[-1000]) / 2)', '((time_extended[-1001] + time_extended[-1000]) / 2 - 0.1)', '(100)'], {}), '((time_extended[-1001] + time_extended[-1000]) / 2, (\n time_extended[-1001] + time_extended[-1000]) / 2 - 0.1, 100)\n', (43772, 43890), True, 'import numpy as np\n'), ((44020, 44049), 'numpy.linspace', 'np.linspace', (['(-2.75)', '(2.75)', '(100)'], {}), '(-2.75, 2.75, 100)\n', (44031, 44049), True, 'import numpy as np\n'), ((44623, 44639), 'numpy.cos', 'np.cos', (['(8 * time)'], {}), '(8 * time)\n', (44629, 44639), True, 'import numpy as np\n'), ((45509, 45594), 'textwrap.fill', 'textwrap.fill', (['"""Comparison of Trends Extracted with Different Knot Sequences"""', '(40)'], {}), "('Comparison of Trends Extracted with Different Knot Sequences',\n 40)\n", (45522, 45594), False, 'import textwrap\n'), ((46081, 46104), 
'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (46092, 46104), True, 'import numpy as np\n'), ((46282, 46326), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (46293, 46326), True, 'import numpy as np\n'), ((46367, 46411), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (46378, 46411), True, 'import numpy as np\n'), ((46482, 46509), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (46493, 46509), True, 'import numpy as np\n'), ((46559, 46586), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (46570, 46586), True, 'import numpy as np\n'), ((47335, 47358), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (47346, 47358), True, 'import numpy as np\n'), ((47719, 47763), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (47730, 47763), True, 'import numpy as np\n'), ((47804, 47848), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (47815, 47848), True, 'import numpy as np\n'), ((47919, 47946), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (47930, 47946), True, 'import numpy as np\n'), ((47996, 48023), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (48007, 48023), True, 'import numpy as np\n'), ((48529, 48552), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (48540, 48552), True, 'import numpy as np\n'), ((48950, 48994), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (48961, 48994), True, 'import numpy as np\n'), ((49035, 49079), 'numpy.linspace', 'np.linspace', (['(0.95 * np.pi)', '(1.55 * np.pi)', '(101)'], {}), '(0.95 * np.pi, 1.55 * np.pi, 101)\n', (49046, 49079), True, 'import numpy as np\n'), ((49150, 49177), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (49161, 49177), True, 'import numpy as np\n'), ((49227, 49254), 'numpy.linspace', 'np.linspace', (['(-5.5)', '(5.5)', '(101)'], {}), '(-5.5, 5.5, 101)\n', (49238, 49254), True, 'import numpy as np\n'), ((49403, 49508), 'textwrap.fill', 'textwrap.fill', (['"""Comparison of Trends Extracted with Different Knot Sequences Zoomed Region"""', '(40)'], {}), "(\n 'Comparison of Trends Extracted with Different Knot Sequences Zoomed Region'\n , 40)\n", (49416, 49508), False, 'import textwrap\n'), ((49863, 49886), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (49874, 49886), True, 'import numpy as np\n'), ((50730, 50753), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (50741, 50753), True, 'import numpy as np\n'), ((51555, 51578), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (51566, 51578), True, 'import numpy as np\n'), ((52303, 52417), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Seres with Added Noise"""', '(50)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Seres with Added Noise'\n , 50)\n", (52316, 52417), False, 'import textwrap\n'), ((52489, 52498), 
'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (52495, 52498), True, 'import numpy as np\n'), ((53307, 53319), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (53313, 53319), True, 'import numpy as np\n'), ((53322, 53338), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (53328, 53338), True, 'import numpy as np\n'), ((54128, 54257), 'textwrap.fill', 'textwrap.fill', (['"""Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied"""', '(50)'], {}), "(\n 'Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied'\n , 50)\n", (54141, 54257), False, 'import textwrap\n'), ((55025, 55053), 'numpy.linspace', 'np.linspace', (['(-3.0)', '(-2.0)', '(101)'], {}), '(-3.0, -2.0, 101)\n', (55036, 55053), True, 'import numpy as np\n'), ((55733, 55745), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (55739, 55745), True, 'import numpy as np\n'), ((55748, 55764), 'numpy.cos', 'np.cos', (['(5 * time)'], {}), '(5 * time)\n', (55754, 55764), True, 'import numpy as np\n'), ((62125, 62137), 'numpy.cos', 'np.cos', (['time'], {}), '(time)\n', (62131, 62137), True, 'import numpy as np\n'), ((64309, 64413), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10'\n , 40)\n", (64322, 64413), False, 'import textwrap\n'), ((65501, 65601), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3',\n 40)\n", (65514, 65601), False, 'import textwrap\n'), ((68251, 68351), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy', 40\n )\n", (68264, 68351), False, 'import textwrap\n'), ((68561, 68570), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (68567, 68570), True, 'import numpy as np\n'), ((69341, 69428), 'textwrap.fill', 'textwrap.fill', (['"""Mean Monthly Concentration of Carbon Dioxide in the Atmosphere"""', '(35)'], {}), "('Mean Monthly Concentration of Carbon Dioxide in the Atmosphere',\n 35)\n", (69354, 69428), False, 'import textwrap\n'), ((70246, 70356), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10"""', '(45)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10'\n , 45)\n", (70259, 70356), False, 'import textwrap\n'), ((70528, 70546), 'numpy.ones_like', 'np.ones_like', (['time'], {}), '(time)\n', (70540, 70546), True, 'import numpy as np\n'), ((71354, 71461), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3"""', '(45)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3'\n , 45)\n", (71367, 71461), False, 'import textwrap\n'), ((71633, 71651), 'numpy.ones_like', 'np.ones_like', (['time'], {}), '(time)\n', (71645, 71651), True, 'import numpy as np\n'), ((73550, 73559), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (73556, 73559), True, 'import numpy as np\n'), ((73619, 73725), 'textwrap.fill', 'textwrap.fill', (['"""Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using AdvEMDpy"""', '(40)'], {}), "(\n 'Gaussian Filtered Hilbert Spectrum of 
CO$_{2}$ Concentration using AdvEMDpy'\n , 40)\n", (73632, 73725), False, 'import textwrap\n'), ((73804, 73828), 'numpy.ones_like', 'np.ones_like', (['x_hs[0, :]'], {}), '(x_hs[0, :])\n', (73816, 73828), True, 'import numpy as np\n'), ((983, 992), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (990, 992), True, 'import matplotlib.pyplot as plt\n'), ((1382, 1405), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (1388, 1405), True, 'import numpy as np\n'), ((1694, 1717), 'numpy.sin', 'np.sin', (['pseudo_alg_time'], {}), '(pseudo_alg_time)\n', (1700, 1717), True, 'import numpy as np\n'), ((2980, 2992), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (2987, 2992), True, 'import numpy as np\n'), ((3042, 3054), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (3049, 3054), True, 'import numpy as np\n'), ((3650, 3659), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (3657, 3659), True, 'import matplotlib.pyplot as plt\n'), ((4515, 4527), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4522, 4527), True, 'import numpy as np\n'), ((4648, 4660), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4655, 4660), True, 'import numpy as np\n'), ((4749, 4761), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4756, 4761), True, 'import numpy as np\n'), ((5535, 5544), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (5542, 5544), True, 'import matplotlib.pyplot as plt\n'), ((6431, 6443), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6438, 6443), True, 'import numpy as np\n'), ((6556, 6568), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6563, 6568), True, 'import numpy as np\n'), ((6649, 6661), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6656, 6661), True, 'import numpy as np\n'), ((7413, 7422), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (7420, 7422), True, 'import matplotlib.pyplot as plt\n'), ((8295, 8307), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8302, 8307), True, 'import numpy as np\n'), ((8423, 8435), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8430, 8435), True, 'import numpy as np\n'), ((8519, 8531), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8526, 8531), True, 'import numpy as np\n'), ((8894, 8903), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (8901, 8903), True, 'import matplotlib.pyplot as plt\n'), ((8935, 8944), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (8942, 8944), True, 'import matplotlib.pyplot as plt\n'), ((9004, 9013), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (9011, 9013), True, 'import matplotlib.pyplot as plt\n'), ((9639, 9681), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (9652, 9681), False, 'import textwrap\n'), ((9766, 9798), 'textwrap.fill', 'textwrap.fill', (['"""Mean filter"""', '(12)'], {}), "('Mean filter', 12)\n", (9779, 9798), False, 'import textwrap\n'), ((9885, 9919), 'textwrap.fill', 'textwrap.fill', (['"""Median filter"""', '(13)'], {}), "('Median filter', 13)\n", (9898, 9919), False, 'import textwrap\n'), ((10009, 10047), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize filter"""', '(12)'], {}), "('Windsorize filter', 12)\n", (10022, 10047), False, 'import textwrap\n'), ((10161, 10213), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize interpolation filter"""', '(14)'], {}), "('Windsorize interpolation filter', 14)\n", (10174, 10213), False, 'import textwrap\n'), ((10332, 10368), 'textwrap.fill', 'textwrap.fill', (['"""Quantile 
window"""', '(12)'], {}), "('Quantile window', 12)\n", (10345, 10368), False, 'import textwrap\n'), ((10533, 10545), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10540, 10545), True, 'import numpy as np\n'), ((10582, 10616), 'textwrap.fill', 'textwrap.fill', (['"""Zoomed region"""', '(10)'], {}), "('Zoomed region', 10)\n", (10595, 10616), False, 'import textwrap\n'), ((10680, 10692), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10687, 10692), True, 'import numpy as np\n'), ((10738, 10750), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10745, 10750), True, 'import numpy as np\n'), ((10821, 10833), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (10828, 10833), True, 'import numpy as np\n'), ((11163, 11205), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (11176, 11205), False, 'import textwrap\n'), ((11290, 11322), 'textwrap.fill', 'textwrap.fill', (['"""Mean filter"""', '(12)'], {}), "('Mean filter', 12)\n", (11303, 11322), False, 'import textwrap\n'), ((11409, 11443), 'textwrap.fill', 'textwrap.fill', (['"""Median filter"""', '(13)'], {}), "('Median filter', 13)\n", (11422, 11443), False, 'import textwrap\n'), ((11533, 11571), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize filter"""', '(12)'], {}), "('Windsorize filter', 12)\n", (11546, 11571), False, 'import textwrap\n'), ((11685, 11737), 'textwrap.fill', 'textwrap.fill', (['"""Windsorize interpolation filter"""', '(14)'], {}), "('Windsorize interpolation filter', 14)\n", (11698, 11737), False, 'import textwrap\n'), ((11856, 11892), 'textwrap.fill', 'textwrap.fill', (['"""Quantile window"""', '(12)'], {}), "('Quantile window', 12)\n", (11869, 11892), False, 'import textwrap\n'), ((12615, 12624), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (12622, 12624), True, 'import matplotlib.pyplot as plt\n'), ((12656, 12665), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (12663, 12665), True, 'import matplotlib.pyplot as plt\n'), ((12725, 12734), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (12732, 12734), True, 'import matplotlib.pyplot as plt\n'), ((13009, 13051), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (13022, 13051), False, 'import textwrap\n'), ((13120, 13167), 'textwrap.fill', 'textwrap.fill', (['"""Hodrick-Prescott smoothing"""', '(12)'], {}), "('Hodrick-Prescott smoothing', 12)\n", (13133, 13167), False, 'import textwrap\n'), ((13244, 13294), 'textwrap.fill', 'textwrap.fill', (['"""Henderson-Whittaker smoothing"""', '(13)'], {}), "('Henderson-Whittaker smoothing', 13)\n", (13257, 13294), False, 'import textwrap\n'), ((13438, 13482), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled & decimated"""', '(11)'], {}), "('Downsampled & decimated', 11)\n", (13451, 13482), False, 'import textwrap\n'), ((13598, 13630), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled"""', '(13)'], {}), "('Downsampled', 13)\n", (13611, 13630), False, 'import textwrap\n'), ((13695, 13707), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13702, 13707), True, 'import numpy as np\n'), ((13744, 13778), 'textwrap.fill', 'textwrap.fill', (['"""Zoomed region"""', '(10)'], {}), "('Zoomed region', 10)\n", (13757, 13778), False, 'import textwrap\n'), ((13842, 13854), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13849, 13854), True, 'import numpy as np\n'), ((13900, 13912), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13907, 13912), True, 'import 
numpy as np\n'), ((13983, 13995), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (13990, 13995), True, 'import numpy as np\n'), ((14336, 14378), 'textwrap.fill', 'textwrap.fill', (['"""Noiseless time series"""', '(12)'], {}), "('Noiseless time series', 12)\n", (14349, 14378), False, 'import textwrap\n'), ((14447, 14494), 'textwrap.fill', 'textwrap.fill', (['"""Hodrick-Prescott smoothing"""', '(12)'], {}), "('Hodrick-Prescott smoothing', 12)\n", (14460, 14494), False, 'import textwrap\n'), ((14571, 14621), 'textwrap.fill', 'textwrap.fill', (['"""Henderson-Whittaker smoothing"""', '(13)'], {}), "('Henderson-Whittaker smoothing', 13)\n", (14584, 14621), False, 'import textwrap\n'), ((14713, 14757), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled & decimated"""', '(13)'], {}), "('Downsampled & decimated', 13)\n", (14726, 14757), False, 'import textwrap\n'), ((14821, 14853), 'textwrap.fill', 'textwrap.fill', (['"""Downsampled"""', '(13)'], {}), "('Downsampled', 13)\n", (14834, 14853), False, 'import textwrap\n'), ((15781, 15793), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (15788, 15793), True, 'import numpy as np\n'), ((15846, 15858), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (15853, 15858), True, 'import numpy as np\n'), ((16249, 16261), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (16256, 16261), True, 'import numpy as np\n'), ((16314, 16326), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (16321, 16326), True, 'import numpy as np\n'), ((19755, 19764), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (19762, 19764), True, 'import matplotlib.pyplot as plt\n'), ((19964, 20001), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric signal"""', '(10)'], {}), "('Symmetric signal', 10)\n", (19977, 20001), False, 'import textwrap\n'), ((20108, 20150), 'textwrap.fill', 'textwrap.fill', (['"""Anti-symmetric signal"""', '(10)'], {}), "('Anti-symmetric signal', 10)\n", (20121, 20150), False, 'import textwrap\n'), ((20823, 20860), 'textwrap.fill', 'textwrap.fill', (['"""Axes of symmetry"""', '(10)'], {}), "('Axes of symmetry', 10)\n", (20836, 20860), False, 'import textwrap\n'), ((21149, 21194), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric Discard maxima"""', '(10)'], {}), "('Symmetric Discard maxima', 10)\n", (21162, 21194), False, 'import textwrap\n'), ((21263, 21307), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric Anchor maxima"""', '(10)'], {}), "('Symmetric Anchor maxima', 10)\n", (21276, 21307), False, 'import textwrap\n'), ((21385, 21427), 'textwrap.fill', 'textwrap.fill', (['"""Anti-Symmetric maxima"""', '(10)'], {}), "('Anti-Symmetric maxima', 10)\n", (21398, 21427), False, 'import textwrap\n'), ((21502, 21539), 'textwrap.fill', 'textwrap.fill', (['"""Symmetric maxima"""', '(10)'], {}), "('Symmetric maxima', 10)\n", (21515, 21539), False, 'import textwrap\n'), ((26015, 26024), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (26022, 26024), True, 'import matplotlib.pyplot as plt\n'), ((26056, 26065), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (26063, 26065), True, 'import matplotlib.pyplot as plt\n'), ((26125, 26134), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (26132, 26134), True, 'import matplotlib.pyplot as plt\n'), ((28058, 28098), 'textwrap.fill', 'textwrap.fill', (['"""Slope-based maximum"""', '(11)'], {}), "('Slope-based maximum', 11)\n", (28071, 28098), False, 'import textwrap\n'), ((28199, 28239), 'textwrap.fill', 'textwrap.fill', (['"""Slope-based minimum"""', '(11)'], {}), "('Slope-based minimum', 11)\n", 
(28212, 28239), False, 'import textwrap\n'), ((28360, 28409), 'textwrap.fill', 'textwrap.fill', (['"""Improved slope-based maximum"""', '(11)'], {}), "('Improved slope-based maximum', 11)\n", (28373, 28409), False, 'import textwrap\n'), ((28532, 28581), 'textwrap.fill', 'textwrap.fill', (['"""Improved slope-based minimum"""', '(11)'], {}), "('Improved slope-based minimum', 11)\n", (28545, 28581), False, 'import textwrap\n'), ((32975, 32984), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (32982, 32984), True, 'import matplotlib.pyplot as plt\n'), ((33188, 33222), 'textwrap.fill', 'textwrap.fill', (['"""Huang maximum"""', '(10)'], {}), "('Huang maximum', 10)\n", (33201, 33222), False, 'import textwrap\n'), ((33289, 33323), 'textwrap.fill', 'textwrap.fill', (['"""Huang minimum"""', '(10)'], {}), "('Huang minimum', 10)\n", (33302, 33323), False, 'import textwrap\n'), ((33414, 33451), 'textwrap.fill', 'textwrap.fill', (['"""Coughlin maximum"""', '(14)'], {}), "('Coughlin maximum', 14)\n", (33427, 33451), False, 'import textwrap\n'), ((33542, 33579), 'textwrap.fill', 'textwrap.fill', (['"""Coughlin minimum"""', '(14)'], {}), "('Coughlin minimum', 14)\n", (33555, 33579), False, 'import textwrap\n'), ((33667, 33703), 'textwrap.fill', 'textwrap.fill', (['"""Average maximum"""', '(14)'], {}), "('Average maximum', 14)\n", (33680, 33703), False, 'import textwrap\n'), ((33786, 33822), 'textwrap.fill', 'textwrap.fill', (['"""Average minimum"""', '(14)'], {}), "('Average minimum', 14)\n", (33799, 33822), False, 'import textwrap\n'), ((34015, 34061), 'textwrap.fill', 'textwrap.fill', (['"""Huang Characteristic Wave"""', '(14)'], {}), "('Huang Characteristic Wave', 14)\n", (34028, 34061), False, 'import textwrap\n'), ((34129, 34178), 'textwrap.fill', 'textwrap.fill', (['"""Coughlin Characteristic Wave"""', '(14)'], {}), "('Coughlin Characteristic Wave', 14)\n", (34142, 34178), False, 'import textwrap\n'), ((35679, 35705), 'numpy.cos', 'np.cos', (['(2 * np.pi * t / 50)'], {}), '(2 * np.pi * t / 50)\n', (35685, 35705), True, 'import numpy as np\n'), ((35749, 35776), 'numpy.sin', 'np.sin', (['(2 * np.pi * t / 200)'], {}), '(2 * np.pi * t / 200)\n', (35755, 35776), True, 'import numpy as np\n'), ((41520, 41529), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (41527, 41529), True, 'import matplotlib.pyplot as plt\n'), ((41731, 41771), 'textwrap.fill', 'textwrap.fill', (['"""Extrapolated signal"""', '(12)'], {}), "('Extrapolated signal', 12)\n", (41744, 41771), False, 'import textwrap\n'), ((42003, 42043), 'textwrap.fill', 'textwrap.fill', (['"""Extrapolated maxima"""', '(12)'], {}), "('Extrapolated maxima', 12)\n", (42016, 42043), False, 'import textwrap\n'), ((42140, 42180), 'textwrap.fill', 'textwrap.fill', (['"""Extrapolated minima"""', '(12)'], {}), "('Extrapolated minima', 12)\n", (42153, 42180), False, 'import textwrap\n'), ((42226, 42238), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42233, 42238), True, 'import numpy as np\n'), ((42293, 42335), 'textwrap.fill', 'textwrap.fill', (['"""Neural network inputs"""', '(13)'], {}), "('Neural network inputs', 13)\n", (42306, 42335), False, 'import textwrap\n'), ((42453, 42465), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42460, 42465), True, 'import numpy as np\n'), ((42589, 42601), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42596, 42601), True, 'import numpy as np\n'), ((42778, 42790), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (42785, 42790), True, 'import numpy as np\n'), ((42966, 42978), 'numpy.ones', 
'np.ones', (['(100)'], {}), '(100)\n', (42973, 42978), True, 'import numpy as np\n'), ((43050, 43062), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43057, 43062), True, 'import numpy as np\n'), ((43146, 43158), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43153, 43158), True, 'import numpy as np\n'), ((43236, 43279), 'textwrap.fill', 'textwrap.fill', (['"""Neural network targets"""', '(13)'], {}), "('Neural network targets', 13)\n", (43249, 43279), False, 'import textwrap\n'), ((43397, 43409), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43404, 43409), True, 'import numpy as np\n'), ((43536, 43548), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43543, 43548), True, 'import numpy as np\n'), ((43728, 43740), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43735, 43740), True, 'import numpy as np\n'), ((43919, 43931), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (43926, 43931), True, 'import numpy as np\n'), ((44006, 44018), 'numpy.ones', 'np.ones', (['(100)'], {}), '(100)\n', (44013, 44018), True, 'import numpy as np\n'), ((44585, 44601), 'numpy.cos', 'np.cos', (['(2 * time)'], {}), '(2 * time)\n', (44591, 44601), True, 'import numpy as np\n'), ((44604, 44620), 'numpy.cos', 'np.cos', (['(4 * time)'], {}), '(4 * time)\n', (44610, 44620), True, 'import numpy as np\n'), ((45747, 45810), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1, IMF 2, & IMF 3 with 51 knots"""', '(21)'], {}), "('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21)\n", (45760, 45810), False, 'import textwrap\n'), ((45997, 46020), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (46008, 46020), True, 'import numpy as np\n'), ((46067, 46079), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46074, 46079), True, 'import numpy as np\n'), ((46334, 46346), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46341, 46346), True, 'import numpy as np\n'), ((46420, 46432), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46427, 46432), True, 'import numpy as np\n'), ((46468, 46480), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46475, 46480), True, 'import numpy as np\n'), ((46545, 46557), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (46552, 46557), True, 'import numpy as np\n'), ((46909, 46966), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1 and IMF 2 with 31 knots"""', '(19)'], {}), "('Sum of IMF 1 and IMF 2 with 31 knots', 19)\n", (46922, 46966), False, 'import textwrap\n'), ((47023, 47080), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 2 and IMF 3 with 51 knots"""', '(19)'], {}), "('Sum of IMF 2 and IMF 3 with 51 knots', 19)\n", (47036, 47080), False, 'import textwrap\n'), ((47251, 47274), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (47262, 47274), True, 'import numpy as np\n'), ((47321, 47333), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47328, 47333), True, 'import numpy as np\n'), ((47771, 47783), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47778, 47783), True, 'import numpy as np\n'), ((47857, 47869), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47864, 47869), True, 'import numpy as np\n'), ((47905, 47917), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47912, 47917), True, 'import numpy as np\n'), ((47982, 47994), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47989, 47994), True, 'import numpy as np\n'), ((48445, 48468), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (48456, 48468), 
True, 'import numpy as np\n'), ((48515, 48527), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (48522, 48527), True, 'import numpy as np\n'), ((49002, 49014), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49009, 49014), True, 'import numpy as np\n'), ((49088, 49100), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49095, 49100), True, 'import numpy as np\n'), ((49136, 49148), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49143, 49148), True, 'import numpy as np\n'), ((49213, 49225), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49220, 49225), True, 'import numpy as np\n'), ((49655, 49718), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1, IMF 2, & IMF 3 with 51 knots"""', '(21)'], {}), "('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21)\n", (49668, 49718), False, 'import textwrap\n'), ((49779, 49802), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (49790, 49802), True, 'import numpy as np\n'), ((49849, 49861), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49856, 49861), True, 'import numpy as np\n'), ((50414, 50471), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 1 and IMF 2 with 31 knots"""', '(19)'], {}), "('Sum of IMF 1 and IMF 2 with 31 knots', 19)\n", (50427, 50471), False, 'import textwrap\n'), ((50528, 50585), 'textwrap.fill', 'textwrap.fill', (['"""Sum of IMF 2 and IMF 3 with 51 knots"""', '(19)'], {}), "('Sum of IMF 2 and IMF 3 with 51 knots', 19)\n", (50541, 50585), False, 'import textwrap\n'), ((50646, 50669), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (50657, 50669), True, 'import numpy as np\n'), ((50716, 50728), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (50723, 50728), True, 'import numpy as np\n'), ((51471, 51494), 'numpy.linspace', 'np.linspace', (['(-5)', '(5)', '(101)'], {}), '(-5, 5, 101)\n', (51482, 51494), True, 'import numpy as np\n'), ((51541, 51553), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (51548, 51553), True, 'import numpy as np\n'), ((52183, 52192), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (52190, 52192), True, 'import matplotlib.pyplot as plt\n'), ((52224, 52233), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (52231, 52233), True, 'import matplotlib.pyplot as plt\n'), ((52569, 52593), 'numpy.ones_like', 'np.ones_like', (['x_hs[0, :]'], {}), '(x_hs[0, :])\n', (52581, 52593), True, 'import numpy as np\n'), ((52661, 52685), 'numpy.ones_like', 'np.ones_like', (['x_hs[0, :]'], {}), '(x_hs[0, :])\n', (52673, 52685), True, 'import numpy as np\n'), ((52753, 52777), 'numpy.ones_like', 'np.ones_like', (['x_hs[0, :]'], {}), '(x_hs[0, :])\n', (52765, 52777), True, 'import numpy as np\n'), ((54079, 54088), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (54086, 54088), True, 'import matplotlib.pyplot as plt\n'), ((54523, 54570), 'textwrap.fill', 'textwrap.fill', (['"""Unsmoothed maxima envelope"""', '(10)'], {}), "('Unsmoothed maxima envelope', 10)\n", (54536, 54570), False, 'import textwrap\n'), ((54626, 54671), 'textwrap.fill', 'textwrap.fill', (['"""Smoothed maxima envelope"""', '(10)'], {}), "('Smoothed maxima envelope', 10)\n", (54639, 54671), False, 'import textwrap\n'), ((54722, 54769), 'textwrap.fill', 'textwrap.fill', (['"""Unsmoothed minima envelope"""', '(10)'], {}), "('Unsmoothed minima envelope', 10)\n", (54735, 54769), False, 'import textwrap\n'), ((54819, 54864), 'textwrap.fill', 'textwrap.fill', (['"""Smoothed minima envelope"""', '(10)'], {}), "('Smoothed minima envelope', 10)\n", 
(54832, 54864), False, 'import textwrap\n'), ((54934, 54962), 'numpy.linspace', 'np.linspace', (['(-3.0)', '(-2.0)', '(101)'], {}), '(-3.0, -2.0, 101)\n', (54945, 54962), True, 'import numpy as np\n'), ((55011, 55023), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (55018, 55023), True, 'import numpy as np\n'), ((60553, 60562), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (60560, 60562), True, 'import matplotlib.pyplot as plt\n'), ((60941, 60976), 'textwrap.fill', 'textwrap.fill', (['"""Optimal maxima"""', '(10)'], {}), "('Optimal maxima', 10)\n", (60954, 60976), False, 'import textwrap\n'), ((61083, 61118), 'textwrap.fill', 'textwrap.fill', (['"""Optimal minima"""', '(10)'], {}), "('Optimal minima', 10)\n", (61096, 61118), False, 'import textwrap\n'), ((61189, 61227), 'textwrap.fill', 'textwrap.fill', (['"""Inflection points"""', '(10)'], {}), "('Inflection points', 10)\n", (61202, 61227), False, 'import textwrap\n'), ((61281, 61314), 'textwrap.fill', 'textwrap.fill', (['"""EMD envelope"""', '(10)'], {}), "('EMD envelope', 10)\n", (61294, 61314), False, 'import textwrap\n'), ((61490, 61524), 'textwrap.fill', 'textwrap.fill', (['"""SEMD envelope"""', '(10)'], {}), "('SEMD envelope', 10)\n", (61503, 61524), False, 'import textwrap\n'), ((61719, 61753), 'textwrap.fill', 'textwrap.fill', (['"""EEMD envelope"""', '(10)'], {}), "('EEMD envelope', 10)\n", (61732, 61753), False, 'import textwrap\n'), ((61953, 61999), 'textwrap.fill', 'textwrap.fill', (['"""Inflection point envelope"""', '(10)'], {}), "('Inflection point envelope', 10)\n", (61966, 61999), False, 'import textwrap\n'), ((62062, 62108), 'textwrap.fill', 'textwrap.fill', (['"""Binomial average envelope"""', '(10)'], {}), "('Binomial average envelope', 10)\n", (62075, 62108), False, 'import textwrap\n'), ((64189, 64198), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (64196, 64198), True, 'import matplotlib.pyplot as plt\n'), ((64230, 64239), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (64237, 64239), True, 'import matplotlib.pyplot as plt\n'), ((64527, 64547), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (64539, 64547), True, 'import numpy as np\n'), ((64561, 64617), 'textwrap.fill', 'textwrap.fill', (['"""Hamiltonian frequency approximation"""', '(15)'], {}), "('Hamiltonian frequency approximation', 15)\n", (64574, 64617), False, 'import textwrap\n'), ((64643, 64663), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (64655, 64663), True, 'import numpy as np\n'), ((64678, 64725), 'textwrap.fill', 'textwrap.fill', (['"""Driving function frequency"""', '(15)'], {}), "('Driving function frequency', 15)\n", (64691, 64725), False, 'import textwrap\n'), ((65381, 65390), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (65388, 65390), True, 'import matplotlib.pyplot as plt\n'), ((65422, 65431), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (65429, 65431), True, 'import matplotlib.pyplot as plt\n'), ((65716, 65736), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (65728, 65736), True, 'import numpy as np\n'), ((65750, 65806), 'textwrap.fill', 'textwrap.fill', (['"""Hamiltonian frequency approximation"""', '(15)'], {}), "('Hamiltonian frequency approximation', 15)\n", (65763, 65806), False, 'import textwrap\n'), ((65832, 65852), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (65844, 65852), True, 'import numpy as np\n'), ((65867, 65914), 'textwrap.fill', 'textwrap.fill', (['"""Driving function frequency"""', '(15)'], {}), 
"('Driving function frequency', 15)\n", (65880, 65914), False, 'import textwrap\n'), ((66532, 66541), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (66539, 66541), True, 'import matplotlib.pyplot as plt\n'), ((66573, 66582), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (66580, 66582), True, 'import matplotlib.pyplot as plt\n'), ((67415, 67443), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67421, 67443), True, 'import numpy as np\n'), ((68431, 68440), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (68438, 68440), True, 'import matplotlib.pyplot as plt\n'), ((68472, 68481), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (68479, 68481), True, 'import matplotlib.pyplot as plt\n'), ((68642, 68662), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (68654, 68662), True, 'import numpy as np\n'), ((68676, 68732), 'textwrap.fill', 'textwrap.fill', (['"""Hamiltonian frequency approximation"""', '(15)'], {}), "('Hamiltonian frequency approximation', 15)\n", (68689, 68732), False, 'import textwrap\n'), ((68758, 68778), 'numpy.ones_like', 'np.ones_like', (['t[:-1]'], {}), '(t[:-1])\n', (68770, 68778), True, 'import numpy as np\n'), ((68793, 68840), 'textwrap.fill', 'textwrap.fill', (['"""Driving function frequency"""', '(15)'], {}), "('Driving function frequency', 15)\n", (68806, 68840), False, 'import textwrap\n'), ((70126, 70135), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (70133, 70135), True, 'import matplotlib.pyplot as plt\n'), ((70167, 70176), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (70174, 70176), True, 'import matplotlib.pyplot as plt\n'), ((70561, 70594), 'textwrap.fill', 'textwrap.fill', (['"""Annual cycle"""', '(10)'], {}), "('Annual cycle', 10)\n", (70574, 70594), False, 'import textwrap\n'), ((71234, 71243), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (71241, 71243), True, 'import matplotlib.pyplot as plt\n'), ((71275, 71284), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (71282, 71284), True, 'import matplotlib.pyplot as plt\n'), ((71666, 71699), 'textwrap.fill', 'textwrap.fill', (['"""Annual cycle"""', '(10)'], {}), "('Annual cycle', 10)\n", (71679, 71699), False, 'import textwrap\n'), ((72903, 72912), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (72910, 72912), True, 'import matplotlib.pyplot as plt\n'), ((73112, 73121), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (73119, 73121), True, 'import matplotlib.pyplot as plt\n'), ((73417, 73426), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (73424, 73426), True, 'import matplotlib.pyplot as plt\n'), ((73458, 73467), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (73465, 73467), True, 'import matplotlib.pyplot as plt\n'), ((73843, 73876), 'textwrap.fill', 'textwrap.fill', (['"""Annual cycle"""', '(10)'], {}), "('Annual cycle', 10)\n", (73856, 73876), False, 'import textwrap\n'), ((4934, 4957), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (4945, 4957), True, 'import numpy as np\n'), ((6818, 6841), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (6829, 6841), True, 'import numpy as np\n'), ((8694, 8717), 'numpy.linspace', 'np.linspace', (['(-2)', '(2)', '(101)'], {}), '(-2, 2, 101)\n', (8705, 8717), True, 'import numpy as np\n'), ((17828, 17884), 'numpy.linspace', 'np.linspace', (['((5 - 2.6 * a) * np.pi)', '((5 - a) * np.pi)', '(101)'], {}), '((5 - 2.6 * a) * np.pi, (5 - a) * np.pi, 101)\n', (17839, 17884), 
True, 'import numpy as np\n'), ((35714, 35740), 'numpy.cos', 'np.cos', (['(2 * np.pi * t / 25)'], {}), '(2 * np.pi * t / 25)\n', (35720, 35740), True, 'import numpy as np\n'), ((37195, 37220), 'numpy.abs', 'np.abs', (['average_gradients'], {}), '(average_gradients)\n', (37201, 37220), True, 'import numpy as np\n'), ((45983, 45995), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (45990, 45995), True, 'import numpy as np\n'), ((47237, 47249), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (47244, 47249), True, 'import numpy as np\n'), ((48431, 48443), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (48438, 48443), True, 'import numpy as np\n'), ((49765, 49777), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (49772, 49777), True, 'import numpy as np\n'), ((50632, 50644), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (50639, 50644), True, 'import numpy as np\n'), ((51457, 51469), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (51464, 51469), True, 'import numpy as np\n'), ((52450, 52459), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (52456, 52459), True, 'import numpy as np\n'), ((54920, 54932), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (54927, 54932), True, 'import numpy as np\n'), ((68401, 68410), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (68407, 68410), True, 'import numpy as np\n'), ((73362, 73371), 'numpy.abs', 'np.abs', (['z'], {}), '(z)\n', (73368, 73371), True, 'import numpy as np\n'), ((4920, 4932), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (4927, 4932), True, 'import numpy as np\n'), ((6804, 6816), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (6811, 6816), True, 'import numpy as np\n'), ((8680, 8692), 'numpy.ones', 'np.ones', (['(101)'], {}), '(101)\n', (8687, 8692), True, 'import numpy as np\n'), ((17948, 18004), 'numpy.linspace', 'np.linspace', (['((5 - 2.6 * a) * np.pi)', '((5 - a) * np.pi)', '(101)'], {}), '((5 - 2.6 * a) * np.pi, (5 - a) * np.pi, 101)\n', (17959, 18004), True, 'import numpy as np\n'), ((37228, 37253), 'numpy.abs', 'np.abs', (['average_gradients'], {}), '(average_gradients)\n', (37234, 37253), True, 'import numpy as np\n'), ((45861, 45930), 'numpy.var', 'np.var', (['(time_series - (imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :]))'], {}), '(time_series - (imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :]))\n', (45867, 45930), True, 'import numpy as np\n'), ((47131, 47184), 'numpy.var', 'np.var', (['(time_series - (imfs_31[1, :] + imfs_31[2, :]))'], {}), '(time_series - (imfs_31[1, :] + imfs_31[2, :]))\n', (47137, 47184), True, 'import numpy as np\n'), ((48343, 48378), 'numpy.var', 'np.var', (['(time_series - imfs_51[3, :])'], {}), '(time_series - imfs_51[3, :])\n', (48349, 48378), True, 'import numpy as np\n'), ((62888, 62906), 'numpy.cos', 'np.cos', (['(omega * ts)'], {}), '(omega * ts)\n', (62894, 62906), True, 'import numpy as np\n'), ((64487, 64498), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (64493, 64498), True, 'import numpy as np\n'), ((65676, 65687), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (65682, 65687), True, 'import numpy as np\n'), ((70498, 70509), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (70504, 70509), True, 'import numpy as np\n'), ((71603, 71614), 'numpy.abs', 'np.abs', (['hht'], {}), '(hht)\n', (71609, 71614), True, 'import numpy as np\n'), ((69898, 69920), 'numpy.ones_like', 'np.ones_like', (['IF[:, 0]'], {}), '(IF[:, 0])\n', (69910, 69920), True, 'import numpy as np\n'), ((72296, 72319), 'numpy.ones_like', 'np.ones_like', (['ifs[1, :]'], {}), '(ifs[1, :])\n', 
(72308, 72319), True, 'import numpy as np\n'), ((66996, 67024), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67002, 67024), True, 'import numpy as np\n'), ((67170, 67198), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67176, 67198), True, 'import numpy as np\n'), ((67339, 67367), 'numpy.cos', 'np.cos', (['(0.04 * 2 * np.pi * t)'], {}), '(0.04 * 2 * np.pi * t)\n', (67345, 67367), True, 'import numpy as np\n'), ((71009, 71025), 'numpy.ones_like', 'np.ones_like', (['IF'], {}), '(IF)\n', (71021, 71025), True, 'import numpy as np\n')] |
"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
import sqlalchemy as sa
from alembic import op
revision = "0158_remove_rate_limit_default"
down_revision = "0157_add_rate_limit_to_service"
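# Dropping the server-side default means new rows must supply rate_limit explicitly;
# the downgrade below restores the previous default of '3000'.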
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
| [
"alembic.op.execute"
] | [((296, 360), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services ALTER rate_limit DROP DEFAULT"""'], {}), "('ALTER TABLE services ALTER rate_limit DROP DEFAULT')\n", (306, 360), False, 'from alembic import op\n'), ((365, 437), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services_history ALTER rate_limit DROP DEFAULT"""'], {}), "('ALTER TABLE services_history ALTER rate_limit DROP DEFAULT')\n", (375, 437), False, 'from alembic import op\n'), ((461, 531), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services ALTER rate_limit SET DEFAULT \'3000\'"""'], {}), '("ALTER TABLE services ALTER rate_limit SET DEFAULT \'3000\'")\n', (471, 531), False, 'from alembic import op\n'), ((536, 614), 'alembic.op.execute', 'op.execute', (['"""ALTER TABLE services_history ALTER rate_limit SET DEFAULT \'3000\'"""'], {}), '("ALTER TABLE services_history ALTER rate_limit SET DEFAULT \'3000\'")\n', (546, 614), False, 'from alembic import op\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
This module contains unit tests of the rmgpy.reaction module.
"""
import numpy
import unittest
from external.wip import work_in_progress
from rmgpy.species import Species, TransitionState
from rmgpy.reaction import Reaction
from rmgpy.statmech.translation import Translation, IdealGasTranslation
from rmgpy.statmech.rotation import Rotation, LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor
from rmgpy.statmech.vibration import Vibration, HarmonicOscillator
from rmgpy.statmech.torsion import Torsion, HinderedRotor
from rmgpy.statmech.conformer import Conformer
from rmgpy.kinetics import Arrhenius
from rmgpy.thermo import Wilhoit
import rmgpy.constants as constants
################################################################################
class PseudoSpecies:
"""
    Can be used in place of a :class:`rmgpy.species.Species` for isomorphism checks.
PseudoSpecies('a') is isomorphic with PseudoSpecies('A')
but nothing else.
"""
def __init__(self, label):
self.label = label
def __repr__(self):
return "PseudoSpecies('{0}')".format(self.label)
def __str__(self):
return self.label
def isIsomorphic(self, other):
return self.label.lower() == other.label.lower()
class TestReactionIsomorphism(unittest.TestCase):
"""
Contains unit tests of the isomorphism testing of the Reaction class.
"""
    def makeReaction(self, reaction_string):
        """
        Make a Reaction (containing PseudoSpecies) from a string like 'Ab=CD'
"""
reactants, products = reaction_string.split('=')
reactants = [PseudoSpecies(i) for i in reactants]
products = [PseudoSpecies(i) for i in products]
return Reaction(reactants=reactants, products=products)
def test1to1(self):
r1 = self.makeReaction('A=B')
self.assertTrue(r1.isIsomorphic(self.makeReaction('a=B')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('b=A')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('B=a'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('A=C')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('A=BB')))
def test1to2(self):
r1 = self.makeReaction('A=BC')
self.assertTrue(r1.isIsomorphic(self.makeReaction('a=Bc')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('cb=a')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('a=cb'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('bc=a'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('a=c')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=c')))
def test2to2(self):
r1 = self.makeReaction('AB=CD')
self.assertTrue(r1.isIsomorphic(self.makeReaction('ab=cd')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('ab=dc'),eitherDirection=False))
self.assertTrue(r1.isIsomorphic(self.makeReaction('dc=ba')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('cd=ab'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=ab')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=cde')))
def test2to3(self):
r1 = self.makeReaction('AB=CDE')
self.assertTrue(r1.isIsomorphic(self.makeReaction('ab=cde')))
self.assertTrue(r1.isIsomorphic(self.makeReaction('ba=edc'),eitherDirection=False))
self.assertTrue(r1.isIsomorphic(self.makeReaction('dec=ba')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('cde=ab'),eitherDirection=False))
self.assertFalse(r1.isIsomorphic(self.makeReaction('ab=abc')))
self.assertFalse(r1.isIsomorphic(self.makeReaction('abe=cde')))
class TestReaction(unittest.TestCase):
"""
Contains unit tests of the Reaction class.
"""
def setUp(self):
"""
A method that is called prior to each unit test in this class.
"""
ethylene = Species(
label = 'C2H4',
conformer = Conformer(
E0 = (44.7127, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (28.0313, 'amu'),
),
NonlinearRotor(
inertia = (
[3.41526, 16.6498, 20.065],
'amu*angstrom^2',
),
symmetry = 4,
),
HarmonicOscillator(
frequencies = (
[828.397, 970.652, 977.223, 1052.93, 1233.55, 1367.56, 1465.09, 1672.25, 3098.46, 3111.7, 3165.79, 3193.54],
'cm^-1',
),
),
],
spinMultiplicity = 1,
opticalIsomers = 1,
),
)
hydrogen = Species(
label = 'H',
conformer = Conformer(
E0 = (211.794, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (1.00783, 'amu'),
),
],
spinMultiplicity = 2,
opticalIsomers = 1,
),
)
ethyl = Species(
label = 'C2H5',
conformer = Conformer(
E0 = (111.603, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (29.0391, 'amu'),
),
NonlinearRotor(
inertia = (
[4.8709, 22.2353, 23.9925],
'amu*angstrom^2',
),
symmetry = 1,
),
HarmonicOscillator(
frequencies = (
[482.224, 791.876, 974.355, 1051.48, 1183.21, 1361.36, 1448.65, 1455.07, 1465.48, 2688.22, 2954.51, 3033.39, 3101.54, 3204.73],
'cm^-1',
),
),
HinderedRotor(
inertia = (1.11481, 'amu*angstrom^2'),
symmetry = 6,
barrier = (0.244029, 'kJ/mol'),
semiclassical = None,
),
],
spinMultiplicity = 2,
opticalIsomers = 1,
),
)
TS = TransitionState(
label = 'TS',
conformer = Conformer(
E0 = (266.694, 'kJ/mol'),
modes = [
IdealGasTranslation(
mass = (29.0391, 'amu'),
),
NonlinearRotor(
inertia = (
[6.78512, 22.1437, 22.2114],
'amu*angstrom^2',
),
symmetry = 1,
),
HarmonicOscillator(
frequencies = (
[412.75, 415.206, 821.495, 924.44, 982.714, 1024.16, 1224.21, 1326.36, 1455.06, 1600.35, 3101.46, 3110.55, 3175.34, 3201.88],
'cm^-1',
),
),
],
spinMultiplicity = 2,
opticalIsomers = 1,
),
frequency = (-750.232, 'cm^-1'),
)
self.reaction = Reaction(
reactants = [hydrogen, ethylene],
products = [ethyl],
kinetics = Arrhenius(
A = (501366000.0, 'cm^3/(mol*s)'),
n = 1.637,
Ea = (4.32508, 'kJ/mol'),
T0 = (1, 'K'),
Tmin = (300, 'K'),
Tmax = (2500, 'K'),
),
transitionState = TS,
)
# CC(=O)O[O]
acetylperoxy = Species(
label='acetylperoxy',
thermo=Wilhoit(Cp0=(4.0*constants.R,"J/(mol*K)"), CpInf=(21.0*constants.R,"J/(mol*K)"), a0=-3.95, a1=9.26, a2=-15.6, a3=8.55, B=(500.0,"K"), H0=(-6.151e+04,"J/mol"), S0=(-790.2,"J/(mol*K)")),
)
# C[C]=O
acetyl = Species(
label='acetyl',
thermo=Wilhoit(Cp0=(4.0*constants.R,"J/(mol*K)"), CpInf=(15.5*constants.R,"J/(mol*K)"), a0=0.2541, a1=-0.4712, a2=-4.434, a3=2.25, B=(500.0,"K"), H0=(-1.439e+05,"J/mol"), S0=(-524.6,"J/(mol*K)")),
)
# [O][O]
oxygen = Species(
label='oxygen',
thermo=Wilhoit(Cp0=(3.5*constants.R,"J/(mol*K)"), CpInf=(4.5*constants.R,"J/(mol*K)"), a0=-0.9324, a1=26.18, a2=-70.47, a3=44.12, B=(500.0,"K"), H0=(1.453e+04,"J/mol"), S0=(-12.19,"J/(mol*K)")),
)
self.reaction2 = Reaction(
reactants=[acetyl, oxygen],
products=[acetylperoxy],
kinetics = Arrhenius(
A = (2.65e12, 'cm^3/(mol*s)'),
n = 0.0,
Ea = (0.0, 'kJ/mol'),
T0 = (1, 'K'),
Tmin = (300, 'K'),
Tmax = (2000, 'K'),
),
)
def testIsIsomerization(self):
"""
Test the Reaction.isIsomerization() method.
"""
isomerization = Reaction(reactants=[Species()], products=[Species()])
association = Reaction(reactants=[Species(),Species()], products=[Species()])
dissociation = Reaction(reactants=[Species()], products=[Species(),Species()])
bimolecular = Reaction(reactants=[Species(),Species()], products=[Species(),Species()])
self.assertTrue(isomerization.isIsomerization())
self.assertFalse(association.isIsomerization())
self.assertFalse(dissociation.isIsomerization())
self.assertFalse(bimolecular.isIsomerization())
def testIsAssociation(self):
"""
Test the Reaction.isAssociation() method.
"""
isomerization = Reaction(reactants=[Species()], products=[Species()])
association = Reaction(reactants=[Species(),Species()], products=[Species()])
dissociation = Reaction(reactants=[Species()], products=[Species(),Species()])
bimolecular = Reaction(reactants=[Species(),Species()], products=[Species(),Species()])
self.assertFalse(isomerization.isAssociation())
self.assertTrue(association.isAssociation())
self.assertFalse(dissociation.isAssociation())
self.assertFalse(bimolecular.isAssociation())
def testIsDissociation(self):
"""
Test the Reaction.isDissociation() method.
"""
isomerization = Reaction(reactants=[Species()], products=[Species()])
association = Reaction(reactants=[Species(),Species()], products=[Species()])
dissociation = Reaction(reactants=[Species()], products=[Species(),Species()])
bimolecular = Reaction(reactants=[Species(),Species()], products=[Species(),Species()])
self.assertFalse(isomerization.isDissociation())
self.assertFalse(association.isDissociation())
self.assertTrue(dissociation.isDissociation())
self.assertFalse(bimolecular.isDissociation())
def testHasTemplate(self):
"""
Test the Reaction.hasTemplate() method.
"""
reactants = self.reaction.reactants[:]
products = self.reaction.products[:]
self.assertTrue(self.reaction.hasTemplate(reactants, products))
self.assertTrue(self.reaction.hasTemplate(products, reactants))
self.assertFalse(self.reaction2.hasTemplate(reactants, products))
self.assertFalse(self.reaction2.hasTemplate(products, reactants))
reactants.reverse()
products.reverse()
self.assertTrue(self.reaction.hasTemplate(reactants, products))
self.assertTrue(self.reaction.hasTemplate(products, reactants))
self.assertFalse(self.reaction2.hasTemplate(reactants, products))
self.assertFalse(self.reaction2.hasTemplate(products, reactants))
reactants = self.reaction2.reactants[:]
products = self.reaction2.products[:]
self.assertFalse(self.reaction.hasTemplate(reactants, products))
self.assertFalse(self.reaction.hasTemplate(products, reactants))
self.assertTrue(self.reaction2.hasTemplate(reactants, products))
self.assertTrue(self.reaction2.hasTemplate(products, reactants))
reactants.reverse()
products.reverse()
self.assertFalse(self.reaction.hasTemplate(reactants, products))
self.assertFalse(self.reaction.hasTemplate(products, reactants))
self.assertTrue(self.reaction2.hasTemplate(reactants, products))
self.assertTrue(self.reaction2.hasTemplate(products, reactants))
def testEnthalpyOfReaction(self):
"""
Test the Reaction.getEnthalpyOfReaction() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Hlist0 = [float(v) for v in ['-146007', '-145886', '-144195', '-141973', '-139633', '-137341', '-135155', '-133093', '-131150', '-129316']]
Hlist = self.reaction2.getEnthalpiesOfReaction(Tlist)
for i in range(len(Tlist)):
self.assertAlmostEqual(Hlist[i] / 1000., Hlist0[i] / 1000., 2)
def testEntropyOfReaction(self):
"""
Test the Reaction.getEntropyOfReaction() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Slist0 = [float(v) for v in ['-156.793', '-156.872', '-153.504', '-150.317', '-147.707', '-145.616', '-143.93', '-142.552', '-141.407', '-140.441']]
Slist = self.reaction2.getEntropiesOfReaction(Tlist)
for i in range(len(Tlist)):
self.assertAlmostEqual(Slist[i], Slist0[i], 2)
def testFreeEnergyOfReaction(self):
"""
Test the Reaction.getFreeEnergyOfReaction() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Glist0 = [float(v) for v in ['-114648', '-83137.2', '-52092.4', '-21719.3', '8073.53', '37398.1', '66346.8', '94990.6', '123383', '151565']]
Glist = self.reaction2.getFreeEnergiesOfReaction(Tlist)
for i in range(len(Tlist)):
self.assertAlmostEqual(Glist[i] / 1000., Glist0[i] / 1000., 2)
def testEquilibriumConstantKa(self):
"""
Test the Reaction.getEquilibriumConstant() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Kalist0 = [float(v) for v in ['8.75951e+29', '7.1843e+10', '34272.7', '26.1877', '0.378696', '0.0235579', '0.00334673', '0.000792389', '0.000262777', '0.000110053']]
Kalist = self.reaction2.getEquilibriumConstants(Tlist, type='Ka')
for i in range(len(Tlist)):
self.assertAlmostEqual(Kalist[i] / Kalist0[i], 1.0, 4)
def testEquilibriumConstantKc(self):
"""
Test the Reaction.getEquilibriumConstant() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Kclist0 = [float(v) for v in ['1.45661e+28', '2.38935e+09', '1709.76', '1.74189', '0.0314866', '0.00235045', '0.000389568', '0.000105413', '3.93273e-05', '1.83006e-05']]
Kclist = self.reaction2.getEquilibriumConstants(Tlist, type='Kc')
for i in range(len(Tlist)):
self.assertAlmostEqual(Kclist[i] / Kclist0[i], 1.0, 4)
def testEquilibriumConstantKp(self):
"""
Test the Reaction.getEquilibriumConstant() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
Kplist0 = [float(v) for v in ['8.75951e+24', '718430', '0.342727', '0.000261877', '3.78696e-06', '2.35579e-07', '3.34673e-08', '7.92389e-09', '2.62777e-09', '1.10053e-09']]
Kplist = self.reaction2.getEquilibriumConstants(Tlist, type='Kp')
for i in range(len(Tlist)):
self.assertAlmostEqual(Kplist[i] / Kplist0[i], 1.0, 4)
def testStoichiometricCoefficient(self):
"""
Test the Reaction.getStoichiometricCoefficient() method.
"""
for reactant in self.reaction.reactants:
self.assertEqual(self.reaction.getStoichiometricCoefficient(reactant), -1)
for product in self.reaction.products:
self.assertEqual(self.reaction.getStoichiometricCoefficient(product), 1)
for reactant in self.reaction2.reactants:
self.assertEqual(self.reaction.getStoichiometricCoefficient(reactant), 0)
for product in self.reaction2.products:
self.assertEqual(self.reaction.getStoichiometricCoefficient(product), 0)
def testRateCoefficient(self):
"""
Test the Reaction.getRateCoefficient() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
self.assertAlmostEqual(self.reaction.getRateCoefficient(T, P) / self.reaction.kinetics.getRateCoefficient(T), 1.0, 6)
def testGenerateReverseRateCoefficient(self):
"""
Test the Reaction.generateReverseRateCoefficient() method.
"""
Tlist = numpy.arange(200.0, 2001.0, 200.0, numpy.float64)
P = 1e5
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
for T in Tlist:
kr0 = self.reaction2.getRateCoefficient(T, P) / self.reaction2.getEquilibriumConstant(T)
kr = reverseKinetics.getRateCoefficient(T)
self.assertAlmostEqual(kr0 / kr, 1.0, 0)
def testGenerateReverseRateCoefficientArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the Arrhenius format.
"""
original_kinetics = Arrhenius(
A = (2.65e12, 'cm^3/(mol*s)'),
n = 0.0,
Ea = (0.0, 'kJ/mol'),
T0 = (1, 'K'),
Tmin = (300, 'K'),
Tmax = (2000, 'K'),
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(original_kinetics.Tmin.value_si, original_kinetics.Tmax.value_si, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
@work_in_progress
def testGenerateReverseRateCoefficientArrheniusEP(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the ArrheniusEP format.
"""
from rmgpy.kinetics import ArrheniusEP
original_kinetics = ArrheniusEP(
A = (2.65e12, 'cm^3/(mol*s)'),
n = 0.0,
alpha = 0.5,
E0 = (41.84, 'kJ/mol'),
Tmin = (300, 'K'),
Tmax = (2000, 'K'),
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(original_kinetics.Tmin, original_kinetics.Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientPDepArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the PDepArrhenius format.
"""
from rmgpy.kinetics import PDepArrhenius
arrhenius0 = Arrhenius(
A = (1.0e6,"s^-1"),
n = 1.0,
Ea = (10.0,"kJ/mol"),
T0 = (300.0,"K"),
Tmin = (300.0,"K"),
Tmax = (2000.0,"K"),
comment = """This data is completely made up""",
)
arrhenius1 = Arrhenius(
A = (1.0e12,"s^-1"),
n = 1.0,
Ea = (20.0,"kJ/mol"),
T0 = (300.0,"K"),
Tmin = (300.0,"K"),
Tmax = (2000.0,"K"),
comment = """This data is completely made up""",
)
pressures = numpy.array([0.1, 10.0])
arrhenius = [arrhenius0, arrhenius1]
Tmin = 300.0
Tmax = 2000.0
Pmin = 0.1
Pmax = 10.0
comment = """This data is completely made up"""
original_kinetics = PDepArrhenius(
pressures = (pressures,"bar"),
arrhenius = arrhenius,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientMultiArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the MultiArrhenius format.
"""
from rmgpy.kinetics import MultiArrhenius
pressures = numpy.array([0.1, 10.0])
Tmin = 300.0
Tmax = 2000.0
Pmin = 0.1
Pmax = 10.0
comment = """This data is completely made up"""
arrhenius = [
Arrhenius(
A = (9.3e-14,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (4740*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
Arrhenius(
A = (1.4e-9,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (11200*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
]
original_kinetics = MultiArrhenius(
arrhenius = arrhenius,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientMultiPDepArrhenius(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the MultiPDepArrhenius format.
"""
from rmgpy.kinetics import PDepArrhenius, MultiPDepArrhenius
Tmin = 350.
Tmax = 1500.
Pmin = 1e-1
Pmax = 1e1
pressures = numpy.array([1e-1,1e1])
comment = 'CH3 + C2H6 <=> CH4 + C2H5 (Baulch 2005)'
arrhenius = [
PDepArrhenius(
pressures = (pressures,"bar"),
arrhenius = [
Arrhenius(
A = (9.3e-16,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (4740*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
Arrhenius(
A = (9.3e-14,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (4740*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
],
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
),
PDepArrhenius(
pressures = (pressures,"bar"),
arrhenius = [
Arrhenius(
A = (1.4e-11,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (11200*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
Arrhenius(
A = (1.4e-9,"cm^3/(molecule*s)"),
n = 0.0,
Ea = (11200*constants.R*0.001,"kJ/mol"),
T0 = (1,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
comment = comment,
),
],
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
),
]
original_kinetics = MultiPDepArrhenius(
arrhenius = arrhenius,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
comment = comment,
)
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientThirdBody(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the ThirdBody format.
"""
from rmgpy.kinetics import ThirdBody
arrheniusLow = Arrhenius(
A = (2.62e+33,"cm^6/(mol^2*s)"),
n = -4.76,
Ea = (10.21,"kJ/mol"),
T0 = (1,"K"),
)
efficiencies = {"C": 3, "C(=O)=O": 2, "CC": 3, "O": 6, "[Ar]": 0.7, "[C]=O": 1.5, "[H][H]": 2}
Tmin = 300.
Tmax = 2000.
Pmin = 0.01
Pmax = 100.
comment = """H + CH3 -> CH4"""
thirdBody = ThirdBody(
arrheniusLow = arrheniusLow,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
efficiencies = efficiencies,
comment = comment,
)
original_kinetics = thirdBody
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientLindemann(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the Lindemann format.
"""
from rmgpy.kinetics import Lindemann
arrheniusHigh = Arrhenius(
A = (1.39e+16,"cm^3/(mol*s)"),
n = -0.534,
Ea = (2.243,"kJ/mol"),
T0 = (1,"K"),
)
arrheniusLow = Arrhenius(
A = (2.62e+33,"cm^6/(mol^2*s)"),
n = -4.76,
Ea = (10.21,"kJ/mol"),
T0 = (1,"K"),
)
efficiencies = {"C": 3, "C(=O)=O": 2, "CC": 3, "O": 6, "[Ar]": 0.7, "[C]=O": 1.5, "[H][H]": 2}
Tmin = 300.
Tmax = 2000.
Pmin = 0.01
Pmax = 100.
comment = """H + CH3 -> CH4"""
lindemann = Lindemann(
arrheniusHigh = arrheniusHigh,
arrheniusLow = arrheniusLow,
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
efficiencies = efficiencies,
comment = comment,
)
original_kinetics = lindemann
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testGenerateReverseRateCoefficientTroe(self):
"""
Test the Reaction.generateReverseRateCoefficient() method works for the Troe format.
"""
from rmgpy.kinetics import Troe
arrheniusHigh = Arrhenius(
A = (1.39e+16,"cm^3/(mol*s)"),
n = -0.534,
Ea = (2.243,"kJ/mol"),
T0 = (1,"K"),
)
arrheniusLow = Arrhenius(
A = (2.62e+33,"cm^6/(mol^2*s)"),
n = -4.76,
Ea = (10.21,"kJ/mol"),
T0 = (1,"K"),
)
alpha = 0.783
T3 = 74
T1 = 2941
T2 = 6964
efficiencies = {"C": 3, "C(=O)=O": 2, "CC": 3, "O": 6, "[Ar]": 0.7, "[C]=O": 1.5, "[H][H]": 2}
Tmin = 300.
Tmax = 2000.
Pmin = 0.01
Pmax = 100.
comment = """H + CH3 -> CH4"""
troe = Troe(
arrheniusHigh = arrheniusHigh,
arrheniusLow = arrheniusLow,
alpha = alpha,
T3 = (T3,"K"),
T1 = (T1,"K"),
T2 = (T2,"K"),
Tmin = (Tmin,"K"),
Tmax = (Tmax,"K"),
Pmin = (Pmin,"bar"),
Pmax = (Pmax,"bar"),
efficiencies = efficiencies,
comment = comment,
)
original_kinetics = troe
self.reaction2.kinetics = original_kinetics
reverseKinetics = self.reaction2.generateReverseRateCoefficient()
self.reaction2.kinetics = reverseKinetics
# reverse reactants, products to ensure Keq is correctly computed
self.reaction2.reactants, self.reaction2.products = self.reaction2.products, self.reaction2.reactants
reversereverseKinetics = self.reaction2.generateReverseRateCoefficient()
# check that reverting the reverse yields the original
Tlist = numpy.arange(Tmin, Tmax, 200.0, numpy.float64)
P = 1e5
for T in Tlist:
korig = original_kinetics.getRateCoefficient(T, P)
krevrev = reversereverseKinetics.getRateCoefficient(T, P)
self.assertAlmostEqual(korig / krevrev, 1.0, 0)
def testTSTCalculation(self):
"""
A test of the transition state theory k(T) calculation function,
using the reaction H + C2H4 -> C2H5.
"""
Tlist = 1000.0/numpy.arange(0.4, 3.35, 0.01)
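        # temperatures evenly spaced in 1000/T, spanning roughly 2500 K down to 300 K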
klist = numpy.array([self.reaction.calculateTSTRateCoefficient(T) for T in Tlist])
arrhenius = Arrhenius().fitToData(Tlist, klist, kunits='m^3/(mol*s)')
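        # refit the TST rate coefficients to a modified Arrhenius form so the fitted
        # A, n and Ea can be compared against the reference values below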
klist2 = numpy.array([arrhenius.getRateCoefficient(T) for T in Tlist])
# Check that the correct Arrhenius parameters are returned
self.assertAlmostEqual(arrhenius.A.value_si, 2265.2488, delta=1e-2)
self.assertAlmostEqual(arrhenius.n.value_si, 1.45419, delta=1e-4)
self.assertAlmostEqual(arrhenius.Ea.value_si, 6645.24, delta=1e-2)
# Check that the fit is satisfactory (defined here as always within 5%)
for i in range(len(Tlist)):
self.assertAlmostEqual(klist[i], klist2[i], delta=5e-2 * klist[i])
def testPickle(self):
"""
Test that a Reaction object can be successfully pickled and
unpickled with no loss of information.
"""
import cPickle
reaction = cPickle.loads(cPickle.dumps(self.reaction,-1))
self.assertEqual(len(self.reaction.reactants), len(reaction.reactants))
self.assertEqual(len(self.reaction.products), len(reaction.products))
for reactant0, reactant in zip(self.reaction.reactants, reaction.reactants):
self.assertAlmostEqual(reactant0.conformer.E0.value_si / 1e6, reactant.conformer.E0.value_si / 1e6, 2)
self.assertEqual(reactant0.conformer.E0.units, reactant.conformer.E0.units)
for product0, product in zip(self.reaction.products, reaction.products):
self.assertAlmostEqual(product0.conformer.E0.value_si / 1e6, product.conformer.E0.value_si / 1e6, 2)
self.assertEqual(product0.conformer.E0.units, product.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.conformer.E0.value_si / 1e6, reaction.transitionState.conformer.E0.value_si / 1e6, 2)
self.assertEqual(self.reaction.transitionState.conformer.E0.units, reaction.transitionState.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.frequency.value_si, reaction.transitionState.frequency.value_si, 2)
self.assertEqual(self.reaction.transitionState.frequency.units, reaction.transitionState.frequency.units)
self.assertAlmostEqual(self.reaction.kinetics.A.value_si, reaction.kinetics.A.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.n.value_si, reaction.kinetics.n.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.T0.value_si, reaction.kinetics.T0.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.Ea.value_si, reaction.kinetics.Ea.value_si, delta=1e-6)
self.assertEqual(self.reaction.kinetics.comment, reaction.kinetics.comment)
self.assertEqual(self.reaction.duplicate, reaction.duplicate)
self.assertEqual(self.reaction.degeneracy, reaction.degeneracy)
def testOutput(self):
"""
Test that a Reaction object can be successfully reconstructed
from its repr() output with no loss of information.
"""
exec('reaction = %r' % (self.reaction))
self.assertEqual(len(self.reaction.reactants), len(reaction.reactants))
self.assertEqual(len(self.reaction.products), len(reaction.products))
for reactant0, reactant in zip(self.reaction.reactants, reaction.reactants):
self.assertAlmostEqual(reactant0.conformer.E0.value_si / 1e6, reactant.conformer.E0.value_si / 1e6, 2)
self.assertEqual(reactant0.conformer.E0.units, reactant.conformer.E0.units)
for product0, product in zip(self.reaction.products, reaction.products):
self.assertAlmostEqual(product0.conformer.E0.value_si / 1e6, product.conformer.E0.value_si / 1e6, 2)
self.assertEqual(product0.conformer.E0.units, product.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.conformer.E0.value_si / 1e6, reaction.transitionState.conformer.E0.value_si / 1e6, 2)
self.assertEqual(self.reaction.transitionState.conformer.E0.units, reaction.transitionState.conformer.E0.units)
self.assertAlmostEqual(self.reaction.transitionState.frequency.value_si, reaction.transitionState.frequency.value_si, 2)
self.assertEqual(self.reaction.transitionState.frequency.units, reaction.transitionState.frequency.units)
self.assertAlmostEqual(self.reaction.kinetics.A.value_si, reaction.kinetics.A.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.n.value_si, reaction.kinetics.n.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.T0.value_si, reaction.kinetics.T0.value_si, delta=1e-6)
self.assertAlmostEqual(self.reaction.kinetics.Ea.value_si, reaction.kinetics.Ea.value_si, delta=1e-6)
self.assertEqual(self.reaction.kinetics.comment, reaction.kinetics.comment)
self.assertEqual(self.reaction.duplicate, reaction.duplicate)
self.assertEqual(self.reaction.degeneracy, reaction.degeneracy)
################################################################################
if __name__ == '__main__':
unittest.main(testRunner=unittest.TextTestRunner(verbosity=2))
| [
"rmgpy.statmech.torsion.HinderedRotor",
"rmgpy.kinetics.Arrhenius",
"numpy.array",
"rmgpy.thermo.Wilhoit",
"rmgpy.reaction.Reaction",
"rmgpy.kinetics.Troe",
"rmgpy.kinetics.ThirdBody",
"unittest.TextTestRunner",
"rmgpy.species.Species",
"numpy.arange",
"rmgpy.statmech.translation.IdealGasTranslation",
"cPickle.dumps",
"rmgpy.kinetics.MultiPDepArrhenius",
"rmgpy.statmech.rotation.NonlinearRotor",
"rmgpy.kinetics.MultiArrhenius",
"rmgpy.kinetics.PDepArrhenius",
"rmgpy.kinetics.ArrheniusEP",
"rmgpy.statmech.vibration.HarmonicOscillator",
"rmgpy.kinetics.Lindemann"
] | [((1776, 1824), 'rmgpy.reaction.Reaction', 'Reaction', ([], {'reactants': 'reactants', 'products': 'products'}), '(reactants=reactants, products=products)\n', (1784, 1824), False, 'from rmgpy.reaction import Reaction\n'), ((13183, 13232), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (13195, 13232), False, 'import numpy\n'), ((13689, 13738), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (13701, 13738), False, 'import numpy\n'), ((14193, 14242), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (14205, 14242), False, 'import numpy\n'), ((14708, 14757), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (14720, 14757), False, 'import numpy\n'), ((15250, 15299), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (15262, 15299), False, 'import numpy\n'), ((15796, 15845), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (15808, 15845), False, 'import numpy\n'), ((17007, 17056), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (17019, 17056), False, 'import numpy\n'), ((17389, 17438), 'numpy.arange', 'numpy.arange', (['(200.0)', '(2001.0)', '(200.0)', 'numpy.float64'], {}), '(200.0, 2001.0, 200.0, numpy.float64)\n', (17401, 17438), False, 'import numpy\n'), ((17972, 18097), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2650000000000.0, 'cm^3/(mol*s)')", 'n': '(0.0)', 'Ea': "(0.0, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(300, 'K')", 'Tmax': "(2000, 'K')"}), "(A=(2650000000000.0, 'cm^3/(mol*s)'), n=0.0, Ea=(0.0, 'kJ/mol'),\n T0=(1, 'K'), Tmin=(300, 'K'), Tmax=(2000, 'K'))\n", (17981, 18097), False, 'from rmgpy.kinetics import Arrhenius\n'), ((18760, 18865), 'numpy.arange', 'numpy.arange', (['original_kinetics.Tmin.value_si', 'original_kinetics.Tmax.value_si', '(200.0)', 'numpy.float64'], {}), '(original_kinetics.Tmin.value_si, original_kinetics.Tmax.\n value_si, 200.0, numpy.float64)\n', (18772, 18865), False, 'import numpy\n'), ((19378, 19506), 'rmgpy.kinetics.ArrheniusEP', 'ArrheniusEP', ([], {'A': "(2650000000000.0, 'cm^3/(mol*s)')", 'n': '(0.0)', 'alpha': '(0.5)', 'E0': "(41.84, 'kJ/mol')", 'Tmin': "(300, 'K')", 'Tmax': "(2000, 'K')"}), "(A=(2650000000000.0, 'cm^3/(mol*s)'), n=0.0, alpha=0.5, E0=(\n 41.84, 'kJ/mol'), Tmin=(300, 'K'), Tmax=(2000, 'K'))\n", (19389, 19506), False, 'from rmgpy.kinetics import ArrheniusEP\n'), ((20168, 20255), 'numpy.arange', 'numpy.arange', (['original_kinetics.Tmin', 'original_kinetics.Tmax', '(200.0)', 'numpy.float64'], {}), '(original_kinetics.Tmin, original_kinetics.Tmax, 200.0, numpy.\n float64)\n', (20180, 20255), False, 'import numpy\n'), ((20745, 20914), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1000000.0, 's^-1')", 'n': '(1.0)', 'Ea': "(10.0, 'kJ/mol')", 'T0': "(300.0, 'K')", 'Tmin': "(300.0, 'K')", 'Tmax': "(2000.0, 'K')", 'comment': '"""This data is completely made up"""'}), "(A=(1000000.0, 's^-1'), n=1.0, Ea=(10.0, 'kJ/mol'), T0=(300.0, 'K'\n ), Tmin=(300.0, 'K'), Tmax=(2000.0, 'K'), comment=\n 'This data is completely made up')\n", (20754, 
20914), False, 'from rmgpy.kinetics import Arrhenius\n'), ((21036, 21211), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1000000000000.0, 's^-1')", 'n': '(1.0)', 'Ea': "(20.0, 'kJ/mol')", 'T0': "(300.0, 'K')", 'Tmin': "(300.0, 'K')", 'Tmax': "(2000.0, 'K')", 'comment': '"""This data is completely made up"""'}), "(A=(1000000000000.0, 's^-1'), n=1.0, Ea=(20.0, 'kJ/mol'), T0=(\n 300.0, 'K'), Tmin=(300.0, 'K'), Tmax=(2000.0, 'K'), comment=\n 'This data is completely made up')\n", (21045, 21211), False, 'from rmgpy.kinetics import Arrhenius\n'), ((21328, 21352), 'numpy.array', 'numpy.array', (['[0.1, 10.0]'], {}), '([0.1, 10.0])\n', (21339, 21352), False, 'import numpy\n'), ((21565, 21731), 'rmgpy.kinetics.PDepArrhenius', 'PDepArrhenius', ([], {'pressures': "(pressures, 'bar')", 'arrhenius': 'arrhenius', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'comment': 'comment'}), "(pressures=(pressures, 'bar'), arrhenius=arrhenius, Tmin=(Tmin,\n 'K'), Tmax=(Tmax, 'K'), Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), comment\n =comment)\n", (21578, 21731), False, 'from rmgpy.kinetics import PDepArrhenius, MultiPDepArrhenius\n'), ((22354, 22400), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (22366, 22400), False, 'import numpy\n'), ((22898, 22922), 'numpy.array', 'numpy.array', (['[0.1, 10.0]'], {}), '([0.1, 10.0])\n', (22909, 22922), False, 'import numpy\n'), ((23733, 23825), 'rmgpy.kinetics.MultiArrhenius', 'MultiArrhenius', ([], {'arrhenius': 'arrhenius', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(arrhenius=arrhenius, Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (23747, 23825), False, 'from rmgpy.kinetics import MultiArrhenius\n'), ((24411, 24457), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (24423, 24457), False, 'import numpy\n'), ((25061, 25085), 'numpy.array', 'numpy.array', (['[0.1, 10.0]'], {}), '([0.1, 10.0])\n', (25072, 25085), False, 'import numpy\n'), ((27357, 27493), 'rmgpy.kinetics.MultiPDepArrhenius', 'MultiPDepArrhenius', ([], {'arrhenius': 'arrhenius', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'comment': 'comment'}), "(arrhenius=arrhenius, Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), comment=comment)\n", (27375, 27493), False, 'from rmgpy.kinetics import PDepArrhenius, MultiPDepArrhenius\n'), ((28105, 28151), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (28117, 28151), False, 'import numpy\n'), ((28637, 28727), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2.62e+33, 'cm^6/(mol^2*s)')", 'n': '(-4.76)', 'Ea': "(10.21, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(2.62e+33, 'cm^6/(mol^2*s)'), n=-4.76, Ea=(10.21, 'kJ/mol'), T0\n =(1, 'K'))\n", (28646, 28727), False, 'from rmgpy.kinetics import Arrhenius\n'), ((29033, 29197), 'rmgpy.kinetics.ThirdBody', 'ThirdBody', ([], {'arrheniusLow': 'arrheniusLow', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'efficiencies': 'efficiencies', 'comment': 'comment'}), "(arrheniusLow=arrheniusLow, Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), efficiencies=efficiencies,\n comment=comment)\n", (29042, 29197), False, 'from rmgpy.kinetics import ThirdBody\n'), ((29867, 
29913), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (29879, 29913), False, 'import numpy\n'), ((30400, 30489), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.39e+16, 'cm^3/(mol*s)')", 'n': '(-0.534)', 'Ea': "(2.243, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(1.39e+16, 'cm^3/(mol*s)'), n=-0.534, Ea=(2.243, 'kJ/mol'), T0=\n (1, 'K'))\n", (30409, 30489), False, 'from rmgpy.kinetics import Arrhenius\n'), ((30575, 30665), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2.62e+33, 'cm^6/(mol^2*s)')", 'n': '(-4.76)', 'Ea': "(10.21, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(2.62e+33, 'cm^6/(mol^2*s)'), n=-4.76, Ea=(10.21, 'kJ/mol'), T0\n =(1, 'K'))\n", (30584, 30665), False, 'from rmgpy.kinetics import Arrhenius\n'), ((30971, 31165), 'rmgpy.kinetics.Lindemann', 'Lindemann', ([], {'arrheniusHigh': 'arrheniusHigh', 'arrheniusLow': 'arrheniusLow', 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'efficiencies': 'efficiencies', 'comment': 'comment'}), "(arrheniusHigh=arrheniusHigh, arrheniusLow=arrheniusLow, Tmin=(\n Tmin, 'K'), Tmax=(Tmax, 'K'), Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'),\n efficiencies=efficiencies, comment=comment)\n", (30980, 31165), False, 'from rmgpy.kinetics import Lindemann\n'), ((31856, 31902), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (31868, 31902), False, 'import numpy\n'), ((32375, 32464), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.39e+16, 'cm^3/(mol*s)')", 'n': '(-0.534)', 'Ea': "(2.243, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(1.39e+16, 'cm^3/(mol*s)'), n=-0.534, Ea=(2.243, 'kJ/mol'), T0=\n (1, 'K'))\n", (32384, 32464), False, 'from rmgpy.kinetics import Arrhenius\n'), ((32550, 32640), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2.62e+33, 'cm^6/(mol^2*s)')", 'n': '(-4.76)', 'Ea': "(10.21, 'kJ/mol')", 'T0': "(1, 'K')"}), "(A=(2.62e+33, 'cm^6/(mol^2*s)'), n=-4.76, Ea=(10.21, 'kJ/mol'), T0\n =(1, 'K'))\n", (32559, 32640), False, 'from rmgpy.kinetics import Arrhenius\n'), ((33015, 33262), 'rmgpy.kinetics.Troe', 'Troe', ([], {'arrheniusHigh': 'arrheniusHigh', 'arrheniusLow': 'arrheniusLow', 'alpha': 'alpha', 'T3': "(T3, 'K')", 'T1': "(T1, 'K')", 'T2': "(T2, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'Pmin': "(Pmin, 'bar')", 'Pmax': "(Pmax, 'bar')", 'efficiencies': 'efficiencies', 'comment': 'comment'}), "(arrheniusHigh=arrheniusHigh, arrheniusLow=arrheniusLow, alpha=alpha,\n T3=(T3, 'K'), T1=(T1, 'K'), T2=(T2, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax,\n 'K'), Pmin=(Pmin, 'bar'), Pmax=(Pmax, 'bar'), efficiencies=efficiencies,\n comment=comment)\n", (33019, 33262), False, 'from rmgpy.kinetics import Troe\n'), ((33998, 34044), 'numpy.arange', 'numpy.arange', (['Tmin', 'Tmax', '(200.0)', 'numpy.float64'], {}), '(Tmin, Tmax, 200.0, numpy.float64)\n', (34010, 34044), False, 'import numpy\n'), ((23096, 23263), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(9.3e-14, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(4740 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(9.3e-14, 'cm^3/(molecule*s)'), n=0.0, Ea=(4740 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (23105, 23263), False, 'from rmgpy.kinetics import Arrhenius\n'), ((23401, 23569), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.4e-09, 
'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(11200 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(1.4e-09, 'cm^3/(molecule*s)'), n=0.0, Ea=(11200 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (23410, 23569), False, 'from rmgpy.kinetics import Arrhenius\n'), ((34478, 34507), 'numpy.arange', 'numpy.arange', (['(0.4)', '(3.35)', '(0.01)'], {}), '(0.4, 3.35, 0.01)\n', (34490, 34507), False, 'import numpy\n'), ((35482, 35514), 'cPickle.dumps', 'cPickle.dumps', (['self.reaction', '(-1)'], {}), '(self.reaction, -1)\n', (35495, 35514), False, 'import cPickle\n'), ((39720, 39756), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (39743, 39756), False, 'import unittest\n'), ((7826, 7953), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(501366000.0, 'cm^3/(mol*s)')", 'n': '(1.637)', 'Ea': "(4.32508, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(300, 'K')", 'Tmax': "(2500, 'K')"}), "(A=(501366000.0, 'cm^3/(mol*s)'), n=1.637, Ea=(4.32508, 'kJ/mol'),\n T0=(1, 'K'), Tmin=(300, 'K'), Tmax=(2500, 'K'))\n", (7835, 7953), False, 'from rmgpy.kinetics import Arrhenius\n'), ((8229, 8428), 'rmgpy.thermo.Wilhoit', 'Wilhoit', ([], {'Cp0': "(4.0 * constants.R, 'J/(mol*K)')", 'CpInf': "(21.0 * constants.R, 'J/(mol*K)')", 'a0': '(-3.95)', 'a1': '(9.26)', 'a2': '(-15.6)', 'a3': '(8.55)', 'B': "(500.0, 'K')", 'H0': "(-61510.0, 'J/mol')", 'S0': "(-790.2, 'J/(mol*K)')"}), "(Cp0=(4.0 * constants.R, 'J/(mol*K)'), CpInf=(21.0 * constants.R,\n 'J/(mol*K)'), a0=-3.95, a1=9.26, a2=-15.6, a3=8.55, B=(500.0, 'K'), H0=\n (-61510.0, 'J/mol'), S0=(-790.2, 'J/(mol*K)'))\n", (8236, 8428), False, 'from rmgpy.thermo import Wilhoit\n'), ((8515, 8719), 'rmgpy.thermo.Wilhoit', 'Wilhoit', ([], {'Cp0': "(4.0 * constants.R, 'J/(mol*K)')", 'CpInf': "(15.5 * constants.R, 'J/(mol*K)')", 'a0': '(0.2541)', 'a1': '(-0.4712)', 'a2': '(-4.434)', 'a3': '(2.25)', 'B': "(500.0, 'K')", 'H0': "(-143900.0, 'J/mol')", 'S0': "(-524.6, 'J/(mol*K)')"}), "(Cp0=(4.0 * constants.R, 'J/(mol*K)'), CpInf=(15.5 * constants.R,\n 'J/(mol*K)'), a0=0.2541, a1=-0.4712, a2=-4.434, a3=2.25, B=(500.0, 'K'),\n H0=(-143900.0, 'J/mol'), S0=(-524.6, 'J/(mol*K)'))\n", (8522, 8719), False, 'from rmgpy.thermo import Wilhoit\n'), ((8806, 9007), 'rmgpy.thermo.Wilhoit', 'Wilhoit', ([], {'Cp0': "(3.5 * constants.R, 'J/(mol*K)')", 'CpInf': "(4.5 * constants.R, 'J/(mol*K)')", 'a0': '(-0.9324)', 'a1': '(26.18)', 'a2': '(-70.47)', 'a3': '(44.12)', 'B': "(500.0, 'K')", 'H0': "(14530.0, 'J/mol')", 'S0': "(-12.19, 'J/(mol*K)')"}), "(Cp0=(3.5 * constants.R, 'J/(mol*K)'), CpInf=(4.5 * constants.R,\n 'J/(mol*K)'), a0=-0.9324, a1=26.18, a2=-70.47, a3=44.12, B=(500.0, 'K'),\n H0=(14530.0, 'J/mol'), S0=(-12.19, 'J/(mol*K)'))\n", (8813, 9007), False, 'from rmgpy.thermo import Wilhoit\n'), ((9150, 9275), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(2650000000000.0, 'cm^3/(mol*s)')", 'n': '(0.0)', 'Ea': "(0.0, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(300, 'K')", 'Tmax': "(2000, 'K')"}), "(A=(2650000000000.0, 'cm^3/(mol*s)'), n=0.0, Ea=(0.0, 'kJ/mol'),\n T0=(1, 'K'), Tmin=(300, 'K'), Tmax=(2000, 'K'))\n", (9159, 9275), False, 'from rmgpy.kinetics import Arrhenius\n'), ((34619, 34630), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {}), '()\n', (34628, 34630), False, 'from rmgpy.kinetics import Arrhenius\n'), ((9562, 9571), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9569, 9571), False, 'from 
rmgpy.species import Species, TransitionState\n'), ((9584, 9593), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9591, 9593), False, 'from rmgpy.species import Species, TransitionState\n'), ((9638, 9647), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9645, 9647), False, 'from rmgpy.species import Species, TransitionState\n'), ((9648, 9657), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9655, 9657), False, 'from rmgpy.species import Species, TransitionState\n'), ((9670, 9679), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9677, 9679), False, 'from rmgpy.species import Species, TransitionState\n'), ((9725, 9734), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9732, 9734), False, 'from rmgpy.species import Species, TransitionState\n'), ((9747, 9756), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9754, 9756), False, 'from rmgpy.species import Species, TransitionState\n'), ((9757, 9766), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9764, 9766), False, 'from rmgpy.species import Species, TransitionState\n'), ((9811, 9820), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9818, 9820), False, 'from rmgpy.species import Species, TransitionState\n'), ((9821, 9830), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9828, 9830), False, 'from rmgpy.species import Species, TransitionState\n'), ((9843, 9852), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9850, 9852), False, 'from rmgpy.species import Species, TransitionState\n'), ((9853, 9862), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (9860, 9862), False, 'from rmgpy.species import Species, TransitionState\n'), ((10251, 10260), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10258, 10260), False, 'from rmgpy.species import Species, TransitionState\n'), ((10273, 10282), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10280, 10282), False, 'from rmgpy.species import Species, TransitionState\n'), ((10327, 10336), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10334, 10336), False, 'from rmgpy.species import Species, TransitionState\n'), ((10337, 10346), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10344, 10346), False, 'from rmgpy.species import Species, TransitionState\n'), ((10359, 10368), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10366, 10368), False, 'from rmgpy.species import Species, TransitionState\n'), ((10414, 10423), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10421, 10423), False, 'from rmgpy.species import Species, TransitionState\n'), ((10436, 10445), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10443, 10445), False, 'from rmgpy.species import Species, TransitionState\n'), ((10446, 10455), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10453, 10455), False, 'from rmgpy.species import Species, TransitionState\n'), ((10500, 10509), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10507, 10509), False, 'from rmgpy.species import Species, TransitionState\n'), ((10510, 10519), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10517, 10519), False, 'from rmgpy.species import Species, TransitionState\n'), ((10532, 10541), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10539, 10541), False, 'from rmgpy.species import Species, TransitionState\n'), ((10542, 10551), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10549, 10551), False, 'from rmgpy.species import Species, TransitionState\n'), ((10926, 10935), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', 
(10933, 10935), False, 'from rmgpy.species import Species, TransitionState\n'), ((10948, 10957), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (10955, 10957), False, 'from rmgpy.species import Species, TransitionState\n'), ((11002, 11011), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11009, 11011), False, 'from rmgpy.species import Species, TransitionState\n'), ((11012, 11021), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11019, 11021), False, 'from rmgpy.species import Species, TransitionState\n'), ((11034, 11043), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11041, 11043), False, 'from rmgpy.species import Species, TransitionState\n'), ((11089, 11098), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11096, 11098), False, 'from rmgpy.species import Species, TransitionState\n'), ((11111, 11120), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11118, 11120), False, 'from rmgpy.species import Species, TransitionState\n'), ((11121, 11130), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11128, 11130), False, 'from rmgpy.species import Species, TransitionState\n'), ((11175, 11184), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11182, 11184), False, 'from rmgpy.species import Species, TransitionState\n'), ((11185, 11194), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11192, 11194), False, 'from rmgpy.species import Species, TransitionState\n'), ((11207, 11216), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11214, 11216), False, 'from rmgpy.species import Species, TransitionState\n'), ((11217, 11226), 'rmgpy.species.Species', 'Species', ([], {}), '()\n', (11224, 11226), False, 'from rmgpy.species import Species, TransitionState\n'), ((25291, 25458), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(9.3e-16, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(4740 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(9.3e-16, 'cm^3/(molecule*s)'), n=0.0, Ea=(4740 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (25300, 25458), False, 'from rmgpy.kinetics import Arrhenius\n'), ((25668, 25835), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(9.3e-14, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(4740 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(9.3e-14, 'cm^3/(molecule*s)'), n=0.0, Ea=(4740 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (25677, 25835), False, 'from rmgpy.kinetics import Arrhenius\n'), ((26365, 26533), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.4e-11, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(11200 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(1.4e-11, 'cm^3/(molecule*s)'), n=0.0, Ea=(11200 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), Tmax=(Tmax, 'K'),\n comment=comment)\n", (26374, 26533), False, 'from rmgpy.kinetics import Arrhenius\n'), ((26743, 26911), 'rmgpy.kinetics.Arrhenius', 'Arrhenius', ([], {'A': "(1.4e-09, 'cm^3/(molecule*s)')", 'n': '(0.0)', 'Ea': "(11200 * constants.R * 0.001, 'kJ/mol')", 'T0': "(1, 'K')", 'Tmin': "(Tmin, 'K')", 'Tmax': "(Tmax, 'K')", 'comment': 'comment'}), "(A=(1.4e-09, 'cm^3/(molecule*s)'), n=0.0, Ea=(11200 * constants.R *\n 0.001, 'kJ/mol'), T0=(1, 'K'), Tmin=(Tmin, 'K'), 
Tmax=(Tmax, 'K'),\n comment=comment)\n", (26752, 26911), False, 'from rmgpy.kinetics import Arrhenius\n'), ((4228, 4270), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(28.0313, 'amu')"}), "(mass=(28.0313, 'amu'))\n", (4247, 4270), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((4341, 4427), 'rmgpy.statmech.rotation.NonlinearRotor', 'NonlinearRotor', ([], {'inertia': "([3.41526, 16.6498, 20.065], 'amu*angstrom^2')", 'symmetry': '(4)'}), "(inertia=([3.41526, 16.6498, 20.065], 'amu*angstrom^2'),\n symmetry=4)\n", (4355, 4427), False, 'from rmgpy.statmech.rotation import Rotation, LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor\n'), ((4603, 4762), 'rmgpy.statmech.vibration.HarmonicOscillator', 'HarmonicOscillator', ([], {'frequencies': "([828.397, 970.652, 977.223, 1052.93, 1233.55, 1367.56, 1465.09, 1672.25, \n 3098.46, 3111.7, 3165.79, 3193.54], 'cm^-1')"}), "(frequencies=([828.397, 970.652, 977.223, 1052.93, \n 1233.55, 1367.56, 1465.09, 1672.25, 3098.46, 3111.7, 3165.79, 3193.54],\n 'cm^-1'))\n", (4621, 4762), False, 'from rmgpy.statmech.vibration import Vibration, HarmonicOscillator\n'), ((5200, 5242), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(1.00783, 'amu')"}), "(mass=(1.00783, 'amu'))\n", (5219, 5242), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((5596, 5638), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(29.0391, 'amu')"}), "(mass=(29.0391, 'amu'))\n", (5615, 5638), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((5709, 5795), 'rmgpy.statmech.rotation.NonlinearRotor', 'NonlinearRotor', ([], {'inertia': "([4.8709, 22.2353, 23.9925], 'amu*angstrom^2')", 'symmetry': '(1)'}), "(inertia=([4.8709, 22.2353, 23.9925], 'amu*angstrom^2'),\n symmetry=1)\n", (5723, 5795), False, 'from rmgpy.statmech.rotation import Rotation, LinearRotor, NonlinearRotor, KRotor, SphericalTopRotor\n'), ((5971, 6149), 'rmgpy.statmech.vibration.HarmonicOscillator', 'HarmonicOscillator', ([], {'frequencies': "([482.224, 791.876, 974.355, 1051.48, 1183.21, 1361.36, 1448.65, 1455.07, \n 1465.48, 2688.22, 2954.51, 3033.39, 3101.54, 3204.73], 'cm^-1')"}), "(frequencies=([482.224, 791.876, 974.355, 1051.48, \n 1183.21, 1361.36, 1448.65, 1455.07, 1465.48, 2688.22, 2954.51, 3033.39,\n 3101.54, 3204.73], 'cm^-1'))\n", (5989, 6149), False, 'from rmgpy.statmech.vibration import Vibration, HarmonicOscillator\n'), ((6294, 6411), 'rmgpy.statmech.torsion.HinderedRotor', 'HinderedRotor', ([], {'inertia': "(1.11481, 'amu*angstrom^2')", 'symmetry': '(6)', 'barrier': "(0.244029, 'kJ/mol')", 'semiclassical': 'None'}), "(inertia=(1.11481, 'amu*angstrom^2'), symmetry=6, barrier=(\n 0.244029, 'kJ/mol'), semiclassical=None)\n", (6307, 6411), False, 'from rmgpy.statmech.torsion import Torsion, HinderedRotor\n'), ((6841, 6883), 'rmgpy.statmech.translation.IdealGasTranslation', 'IdealGasTranslation', ([], {'mass': "(29.0391, 'amu')"}), "(mass=(29.0391, 'amu'))\n", (6860, 6883), False, 'from rmgpy.statmech.translation import Translation, IdealGasTranslation\n'), ((6954, 7041), 'rmgpy.statmech.rotation.NonlinearRotor', 'NonlinearRotor', ([], {'inertia': "([6.78512, 22.1437, 22.2114], 'amu*angstrom^2')", 'symmetry': '(1)'}), "(inertia=([6.78512, 22.1437, 22.2114], 'amu*angstrom^2'),\n symmetry=1)\n", (6968, 7041), False, 'from rmgpy.statmech.rotation import Rotation, LinearRotor, 
NonlinearRotor, KRotor, SphericalTopRotor\n'), ((7217, 7392), 'rmgpy.statmech.vibration.HarmonicOscillator', 'HarmonicOscillator', ([], {'frequencies': "([412.75, 415.206, 821.495, 924.44, 982.714, 1024.16, 1224.21, 1326.36, \n 1455.06, 1600.35, 3101.46, 3110.55, 3175.34, 3201.88], 'cm^-1')"}), "(frequencies=([412.75, 415.206, 821.495, 924.44, 982.714,\n 1024.16, 1224.21, 1326.36, 1455.06, 1600.35, 3101.46, 3110.55, 3175.34,\n 3201.88], 'cm^-1'))\n", (7235, 7392), False, 'from rmgpy.statmech.vibration import Vibration, HarmonicOscillator\n')] |
"""
closed-loop MILP solved to determine optimal ordering defined by ADG
"""
import sys
import yaml
import time
import matplotlib.colors as mcolors
import matplotlib
import matplotlib.pyplot as plt
import random
import logging
import networkx as nx
import csv
import statistics as stat
import os
from mip import Model, ProgressLog, xsum, maximize, minimize, BINARY, CONTINUOUS, Constr, ConstrList
sys.path.insert(1, "functions/")
from planners import *
from visualizers import *
from milp_formulation import *
from robot import *
from adg import *
from adg_node import *
from process_results import *
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(name)s - %(levelname)s :: %(message)s', level=logging.INFO)
def main():
""" --------------------------- INPUTS --------------------------------- """
show_visual = False
show_ADG = True #not show_visual
run_MILP = True #False #True
save_file = False
sim_timeout = 500
# define prediction and control horizons: H_prediction >= H_control
H_prediction = np.NaN # integer value for forward node lookup
H_control = 5
random_seed = 0
mu = 0.5
robust_param = 0.0
delay_amount = 5
delayed_robot_cnt = 2
w = 1.4 # sub-optimality bound: w = 1.0 -> CBS, else ECBS!
fldr = "nuernberg_small" # auto_gen_01_nuernberg | auto_gen_00_large | auto_gen_02_simple | manual_03_maxplus
random.seed(random_seed)
np.random.seed(random_seed)
""" -------------------------------------------------------------------- """
# start initial
pwd = os.path.dirname(os.path.abspath(__file__))
logger.info(pwd)
map_file = pwd + "/data/" + fldr + "/csv_map_yaml.yaml"
robot_file = pwd + "/data/" + fldr + "/csv_robots_yaml.yaml"
robot_file_tmp = pwd + "/data/tmp/robots.yaml"
start_time = time.time()
plans = run_CBS(map_file, robot_file, w=w) # if w > 1.0, run_CBS uses ECBS!
logger.info(" with sub-optimality w={}".format(w))
logger.info(" plan statistics: {} \n".format(plans["statistics"]))
logger.debug(plans["schedule"])
# show factory map
# show_factory_map(map_file, robot_file, True)
# plt.show()
map_gen_robot_count = 10
map_gen_seedval = "NaN"
try:
map_gen_robot_count = int(sys.argv[1])
map_gen_seedval = int(sys.argv[2])
H_control = int(sys.argv[3])
robust_param = int(sys.argv[4])
random.seed(map_gen_seedval) # map_gen_seedval
np.random.seed(map_gen_seedval) # map_gen_seedval
except:
print(" no valid inputs given, ignoring ...")
# determine ADG, reverse ADG and dependency groups
ADG, robot_plan, goal_positions = determine_ADG(plans, show_graph=False)
nodes_all, edges_type_1, dependency_groups = analyze_ADG(ADG, plans, show_graph=False)
ADG_reverse = ADG.reverse(copy=False)
# initialize simulation
robots = []
solve_time = []
robots_done = []
time_to_goal = {}
colors = plt.cm.rainbow( np.arange(len(robot_plan))/len(robot_plan) )
for robot_id in robot_plan:
plan = robot_plan[robot_id]
logger.debug("Robot {} - plan: {} \t \t positions: {}".format(robot_id, plan["nodes"], plan["positions"]))
new_robot = Robot(robot_id, plan, colors[robot_id], goal_positions[robot_id])
robots.append(new_robot)
robots_done.append(False)
time_to_goal[robot_id] = 0
if show_visual:
visualizer = Visualizer(map_file, robots)
# initialize optimization MIP object m_opt
m_opt = Model('MILP_sequence', solver='CBC')
# print(m_opt.max_nodes)
pl_opt = ProgressLog()
# pl_opt.settings = "objective_value"
# print("pl_opt.settings: {}".format(pl_opt.settings))
# print("pl_opt.log: {}".format(pl_opt.log))
# pl_opt.instance = m_opt.name
# print("pl_opt.instance: {}".format(pl_opt.instance))
ADG_fig = plt.figure(figsize=(12,8))
plt.subplots_adjust(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)
metadata = dict(title='Movie Test', artist='Matplotlib',
comment='Movie support!')
writer = FFMpegWriter(fps=2, metadata=metadata)
with writer.saving(ADG_fig, "ADG_video.mp4", 500):
# run a simulation in time
k = 0
robot_IDs_to_delay = []
while (not all(robots_done)) and (k < sim_timeout):
print("pl_opt.log: {}".format(pl_opt.log))
m_opt.clear()
# show current robot status
logger.info("-------------------- @ time step k = {} --------------------".format(k))
for robot in robots:
node_info = ADG.node[robot.current_node]["data"]
logger.debug(" - Robot {} # {} @ {} => status: {}".format(robot.robot_ID, node_info.ID, node_info.s_loc, robot.status))
# solve MILP for the advanced ADG to potentially adjust ordering
res, solve_t = solve_MILP(robots, dependency_groups, ADG, ADG_reverse, H_control, H_prediction, m_opt, pl_opt, run=run_MILP, uncertainty_bound=robust_param)
solve_time.append(solve_t)
if not (res is None or res == "OptimizationStatus.OPTIMAL"):
ValueError("Optimization NOT optimal")
# ADG after MILP
if show_ADG:
#
draw_ADG(ADG, robots, "ADG after MILP ADG | k = {}".format(k), writer=writer)
# plt.show()
# check for cycles
try:
nx.find_cycle(ADG, orientation="original")
logger.warning("Cycle detected!!")
raise Exception("ADG has a cycle => deadlock! something is wrong with optimization")
except nx.NetworkXNoCycle:
logger.debug("no cycle detected in ADG => no deadlock. good!")
pass
if (k % delay_amount) == 0:
robot_IDs = np.arange(map_gen_robot_count)
robot_IDs_to_delay = np.random.choice(map_gen_robot_count, size=delayed_robot_cnt, replace=False)
logger.info("delaying robots (ID): {}".format(robot_IDs_to_delay))
# Advance robots if possible (dependencies have been met)
for robot in robots:
# check if all dependencies have been met, to advance to next node
node_info = ADG.node[robot.current_node]["data"]
node_dependencies_list = list(ADG_reverse.neighbors(robot.current_node))
all_dependencies_completed = True
for dependency in node_dependencies_list:
if (ADG.node[dependency]["data"].status != Status.FINISHED):
all_dependencies_completed = False
# if all dependencies are completed, the robot can advance!
# delay_amount = np.random.poisson(mu) # same sample every time
if all_dependencies_completed and k > 0: # (robot.robot_ID == 2 or k > 5)
if (not (robot.robot_ID in robot_IDs_to_delay)): # or (k < 10 or k > 20)): # or (robot.robot_ID == 3 or k > 8):
ADG.node[robot.current_node]["data"].status = Status.FINISHED
robot.advance()
if not robot.is_done():
time_to_goal[robot.robot_ID] += 1
else:
robots_done[robot.robot_ID] = True
if show_visual:
visualizer.redraw(robots, pause_length=0.1)
# return 0
k += 1
# end of while loop
total_time = 0
for idx, t in time_to_goal.items():
total_time += t
logger.info("Total time to complete missions: {}".format(total_time))
logger.info("horizon = {}".format(H_control))
logger.info("")
logger.info("Computation time:")
logger.info(" - max: {}".format(max(solve_time)))
logger.info(" - avg: {}".format(stat.mean(solve_time)))
# create data to save to YAML file
simulation_results = {}
simulation_results["parameters"] = {}
simulation_results["parameters"]["H_control"] = H_control
simulation_results["parameters"]["random seed"] = random_seed
simulation_results["parameters"]["ECBS w"] = w
simulation_results["parameters"]["mu"] = mu
simulation_results["parameters"]["robust param"] = robust_param
simulation_results["parameters"]["delay amount"] = delay_amount
simulation_results["map details"] = {}
simulation_results["map details"]["robot_count"] = map_gen_robot_count
simulation_results["map details"]["seed val"] = map_gen_seedval
simulation_results["results"] = {}
simulation_results["results"]["comp time"] = {}
simulation_results["results"]["comp time"]["solve_time"] = [solve_time]
simulation_results["results"]["comp time"]["max"] = max(solve_time)
simulation_results["results"]["comp time"]["avg"] = stat.mean(solve_time)
simulation_results["results"]["total time"] = total_time
logger.info(simulation_results)
file_name = pwd + "/results/robust_" +str(delayed_robot_cnt) + "x" + str(delay_amount) + "/res_robots_" + str(map_gen_robot_count) + "_horizon_" + str(H_control) + "_mapseed_" + str(map_gen_seedval) + "_robustparam_" + str(robust_param) + ".yaml"
if save_file:
save_to_yaml(simulation_results, file_name)
if __name__ == "__main__":
main()
| [
"logging.getLogger",
"logging.basicConfig",
"matplotlib.pyplot.subplots_adjust",
"sys.path.insert",
"statistics.mean",
"random.seed",
"matplotlib.pyplot.figure",
"os.path.abspath",
"mip.ProgressLog",
"time.time",
"mip.Model",
"networkx.find_cycle"
] | [((426, 458), 'sys.path.insert', 'sys.path.insert', (['(1)', '"""functions/"""'], {}), "(1, 'functions/')\n", (441, 458), False, 'import sys\n'), ((640, 667), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (657, 667), False, 'import logging\n'), ((668, 762), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(name)s - %(levelname)s :: %(message)s"""', 'level': 'logging.INFO'}), "(format='%(name)s - %(levelname)s :: %(message)s', level\n =logging.INFO)\n", (687, 762), False, 'import logging\n'), ((1450, 1474), 'random.seed', 'random.seed', (['random_seed'], {}), '(random_seed)\n', (1461, 1474), False, 'import random\n'), ((1876, 1887), 'time.time', 'time.time', ([], {}), '()\n', (1885, 1887), False, 'import time\n'), ((3585, 3621), 'mip.Model', 'Model', (['"""MILP_sequence"""'], {'solver': '"""CBC"""'}), "('MILP_sequence', solver='CBC')\n", (3590, 3621), False, 'from mip import Model, ProgressLog, xsum, maximize, minimize, BINARY, CONTINUOUS, Constr, ConstrList\n'), ((3664, 3677), 'mip.ProgressLog', 'ProgressLog', ([], {}), '()\n', (3675, 3677), False, 'from mip import Model, ProgressLog, xsum, maximize, minimize, BINARY, CONTINUOUS, Constr, ConstrList\n'), ((3937, 3964), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(12, 8)'}), '(figsize=(12, 8))\n', (3947, 3964), True, 'import matplotlib.pyplot as plt\n'), ((3968, 4041), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'left': '(0)', 'bottom': '(0)', 'right': '(1)', 'top': '(1)', 'wspace': '(0)', 'hspace': '(0)'}), '(left=0, bottom=0, right=1, top=1, wspace=0, hspace=0)\n', (3987, 4041), True, 'import matplotlib.pyplot as plt\n'), ((8921, 8942), 'statistics.mean', 'stat.mean', (['solve_time'], {}), '(solve_time)\n', (8930, 8942), True, 'import statistics as stat\n'), ((1635, 1660), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (1650, 1660), False, 'import os\n'), ((2464, 2492), 'random.seed', 'random.seed', (['map_gen_seedval'], {}), '(map_gen_seedval)\n', (2475, 2492), False, 'import random\n'), ((7943, 7964), 'statistics.mean', 'stat.mean', (['solve_time'], {}), '(solve_time)\n', (7952, 7964), True, 'import statistics as stat\n'), ((5522, 5564), 'networkx.find_cycle', 'nx.find_cycle', (['ADG'], {'orientation': '"""original"""'}), "(ADG, orientation='original')\n", (5535, 5564), True, 'import networkx as nx\n')] |
import rinobot_plugin as bot
import numpy as np
def main():
# lets get our parameters and data
filepath = bot.filepath()
data = bot.loadfile(filepath)
# now comes the custom plugin logic
shift = bot.get_arg('shift', type=float, required=True)
index = bot.index_from_args(data)
data[index] = data[index] + shift
outname = bot.no_extension() + '-shift-%s.txt' % shift
# then we set up the output
outpath = bot.output_filepath(outname)
np.savetxt(outpath, data)
if __name__ == "__main__":
main()
| [
"rinobot_plugin.loadfile",
"rinobot_plugin.filepath",
"rinobot_plugin.no_extension",
"rinobot_plugin.index_from_args",
"numpy.savetxt",
"rinobot_plugin.get_arg",
"rinobot_plugin.output_filepath"
] | [((115, 129), 'rinobot_plugin.filepath', 'bot.filepath', ([], {}), '()\n', (127, 129), True, 'import rinobot_plugin as bot\n'), ((141, 163), 'rinobot_plugin.loadfile', 'bot.loadfile', (['filepath'], {}), '(filepath)\n', (153, 163), True, 'import rinobot_plugin as bot\n'), ((217, 264), 'rinobot_plugin.get_arg', 'bot.get_arg', (['"""shift"""'], {'type': 'float', 'required': '(True)'}), "('shift', type=float, required=True)\n", (228, 264), True, 'import rinobot_plugin as bot\n'), ((277, 302), 'rinobot_plugin.index_from_args', 'bot.index_from_args', (['data'], {}), '(data)\n', (296, 302), True, 'import rinobot_plugin as bot\n'), ((447, 475), 'rinobot_plugin.output_filepath', 'bot.output_filepath', (['outname'], {}), '(outname)\n', (466, 475), True, 'import rinobot_plugin as bot\n'), ((480, 505), 'numpy.savetxt', 'np.savetxt', (['outpath', 'data'], {}), '(outpath, data)\n', (490, 505), True, 'import numpy as np\n'), ((356, 374), 'rinobot_plugin.no_extension', 'bot.no_extension', ([], {}), '()\n', (372, 374), True, 'import rinobot_plugin as bot\n')] |
from gluon.contrib.memcache.memcache import Client
import time
"""
example of usage:
cache.memcache = MemcacheClient(request, ['127.0.0.1:11211'], debug=True)
"""
import cPickle as pickle
import thread
locker = thread.allocate_lock()
def MemcacheClient(*a, **b):
locker.acquire()
if not hasattr(MemcacheClient, '__mc_instance'):
MemcacheClient.__mc_instance = _MemcacheClient(*a, **b)
locker.release()
return MemcacheClient.__mc_instance
class _MemcacheClient(Client):
def __init__(self, request, servers, debug=0, pickleProtocol=0,
pickler=pickle.Pickler, unpickler=pickle.Unpickler,
pload=None, pid=None):
self.request=request
Client.__init__(self,servers,debug,pickleProtocol,
pickler,unpickler,pload,pid)
def __call__(self,key,f,time_expire=300):
#key=self.__keyFormat__(key)
value=None
obj=self.get(key)
if obj:
value=obj
elif f is None:
if obj: self.delete(key)
else:
value=f()
self.set(key,value,time_expire)
return value
def increment(self,key,value=1,time_expire=300):
newKey=self.__keyFormat__(key)
obj=self.get(newKey)
if obj:
return Client.incr(self,newKey,value)
else:
self.set(newKey,value,time_expire)
return value
def set(self,key,value,time_expire=300):
newKey = self.__keyFormat__(key)
return Client.set(self,newKey,value,time_expire)
def get(self,key):
newKey = self.__keyFormat__(key)
return Client.get(self,newKey)
def delete(self,key):
newKey = self.__keyFormat__(key)
return Client.delete(self,newKey)
def __keyFormat__(self,key):
return '%s/%s' % (self.request.application,key.replace(' ','_'))
| [
"gluon.contrib.memcache.memcache.Client.delete",
"gluon.contrib.memcache.memcache.Client.incr",
"gluon.contrib.memcache.memcache.Client.get",
"gluon.contrib.memcache.memcache.Client.__init__",
"thread.allocate_lock",
"gluon.contrib.memcache.memcache.Client.set"
] | [((208, 230), 'thread.allocate_lock', 'thread.allocate_lock', ([], {}), '()\n', (228, 230), False, 'import thread\n'), ((707, 796), 'gluon.contrib.memcache.memcache.Client.__init__', 'Client.__init__', (['self', 'servers', 'debug', 'pickleProtocol', 'pickler', 'unpickler', 'pload', 'pid'], {}), '(self, servers, debug, pickleProtocol, pickler, unpickler,\n pload, pid)\n', (722, 796), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1528, 1572), 'gluon.contrib.memcache.memcache.Client.set', 'Client.set', (['self', 'newKey', 'value', 'time_expire'], {}), '(self, newKey, value, time_expire)\n', (1538, 1572), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1654, 1678), 'gluon.contrib.memcache.memcache.Client.get', 'Client.get', (['self', 'newKey'], {}), '(self, newKey)\n', (1664, 1678), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1765, 1792), 'gluon.contrib.memcache.memcache.Client.delete', 'Client.delete', (['self', 'newKey'], {}), '(self, newKey)\n', (1778, 1792), False, 'from gluon.contrib.memcache.memcache import Client\n'), ((1305, 1337), 'gluon.contrib.memcache.memcache.Client.incr', 'Client.incr', (['self', 'newKey', 'value'], {}), '(self, newKey, value)\n', (1316, 1337), False, 'from gluon.contrib.memcache.memcache import Client\n')] |
#!/usr/bin/env python
# encoding: utf-8
import numbers
import os
import re
import sys
from itertools import chain
import numpy as np
import scipy.sparse as sp
import six
import pickle
from .model import get_convo_nn2
from .stop_words import THAI_STOP_WORDS
from .utils import CHAR_TYPES_MAP, CHARS_MAP, create_feature_array
MODULE_PATH = os.path.dirname(__file__)
WEIGHT_PATH = os.path.join(MODULE_PATH, 'weight', 'cnn_without_ne_ab.h5')
TOKENIZER = None
def tokenize(text, custom_dict=None):
"""
Tokenize given Thai text string
Input
=====
text: str, Thai text string
custom_dict: str (or list), path to customized dictionary file
        It prevents words listed in the dictionary from being tokenized incorrectly.
        The file should contain one custom word per line.
        Alternatively, you can provide a list of custom words instead.
Output
======
tokens: list, list of tokenized words
Example
=======
>> deepcut.tokenize('ตัดคำได้ดีมาก')
>> ['ตัดคำ','ได้','ดี','มาก']
"""
global TOKENIZER
if not TOKENIZER:
TOKENIZER = DeepcutTokenizer()
return TOKENIZER.tokenize(text, custom_dict=custom_dict)
def _custom_dict(word, text, word_end):
word_length = len(word)
initial_loc = 0
while True:
try:
start_char = re.search(word, text).start()
first_char = start_char + initial_loc
last_char = first_char + word_length - 1
initial_loc += start_char + word_length
text = text[start_char + word_length:]
word_end[first_char:last_char] = (word_length - 1) * [0]
word_end[last_char] = 1
except:
break
return word_end
def _document_frequency(X):
"""
Count the number of non-zero values for each feature in sparse X.
"""
if sp.isspmatrix_csr(X):
return np.bincount(X.indices, minlength=X.shape[1])
return np.diff(sp.csc_matrix(X, copy=False).indptr)
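# Small illustration (added note, not part of the original module):
#
#     X = sp.csr_matrix([[1, 0], [2, 0], [0, 3]])
#     _document_frequency(X)  # -> array([2, 1]); column 0 is non-zero in two
#                             #    documents, column 1 in only one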
def _check_stop_list(stop):
"""
Check stop words list
ref: https://github.com/scikit-learn/scikit-learn/blob/master/sklearn/feature_extraction/text.py#L87-L95
"""
if stop == "thai":
return THAI_STOP_WORDS
elif isinstance(stop, six.string_types):
raise ValueError("not a built-in stop list: %s" % stop)
elif stop is None:
return None
# assume it's a collection
return frozenset(stop)
def load_model(file_path):
"""
Load saved pickle file of DeepcutTokenizer
Parameters
==========
file_path: str, path to saved model from ``save_model`` method in DeepcutTokenizer
"""
tokenizer = pickle.load(open(file_path, 'rb'))
tokenizer.model = get_convo_nn2()
tokenizer.model = tokenizer.model.load_weights(WEIGHT_PATH)
return tokenizer
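# Round-trip sketch (added note; the file name is illustrative only):
#
#     tokenizer = DeepcutTokenizer(ngram_range=(1, 2))
#     tokenizer.save_model('tokenizer.pickle')    # drops the Keras model before pickling
#     tokenizer = load_model('tokenizer.pickle')  # rebuilds the CNN and reloads WEIGHT_PATH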
class DeepcutTokenizer(object):
"""
Class for tokenizing given Thai text documents using deepcut library
Parameters
==========
    ngram_range : tuple, n-gram range for the vocabulary: (1, 1) for unigrams only,
        (1, 2) for unigrams plus bigrams
    stop_words : list or set, stop words to be removed;
        if None, max_df can be set to a value in [0.7, 1.0) to automatically prune
        overly common vocabulary. If "thai", a pre-populated list of Thai stop words is used
    max_features : int or None, if provided, only keep the top max_features vocabulary
        terms ordered by term frequency
max_df : float in range [0.0, 1.0] or int, default=1.0
ignore terms that have a document frequency higher than the given threshold
min_df : float in range [0.0, 1.0] or int, default=1
ignore terms that have a document frequency lower than the given threshold
dtype : type, optional
Example
=======
raw_documents = ['ฉันอยากกินข้าวของฉัน',
'ฉันอยากกินไก่',
'อยากนอนอย่างสงบ']
tokenizer = DeepcutTokenizer(ngram_range=(1, 1))
X = tokenizer.fit_tranform(raw_documents) # document-term matrix in sparse CSR format
>> X.todense()
>> [[0, 0, 1, 0, 1, 0, 2, 1],
[0, 1, 1, 0, 1, 0, 1, 0],
[1, 0, 0, 1, 1, 1, 0, 0]]
>> tokenizer.vocabulary_
>> {'นอน': 0, 'ไก่': 1, 'กิน': 2, 'อย่าง': 3, 'อยาก': 4, 'สงบ': 5, 'ฉัน': 6, 'ข้าว': 7}
"""
def __init__(self, ngram_range=(1, 1), stop_words=None,
max_df=1.0, min_df=1, max_features=None, dtype=np.dtype('float64')):
self.model = get_convo_nn2()
self.model.load_weights(WEIGHT_PATH)
self.vocabulary_ = {}
self.ngram_range = ngram_range
self.dtype = dtype
self.max_df = max_df
self.min_df = min_df
if max_df < 0 or min_df < 0:
raise ValueError("negative value for max_df or min_df")
self.max_features = max_features
self.stop_words = _check_stop_list(stop_words)
def _word_ngrams(self, tokens):
"""
        Turn a list of tokens into a list of token n-grams
ref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L124-L153
"""
# handle stop words
if self.stop_words is not None:
tokens = [w for w in tokens if w not in self.stop_words]
# handle token n-grams
min_n, max_n = self.ngram_range
if max_n != 1:
original_tokens = tokens
if min_n == 1:
# no need to do any slicing for unigrams
# just iterate through the original tokens
tokens = list(original_tokens)
min_n += 1
else:
tokens = []
n_original_tokens = len(original_tokens)
# bind method outside of loop to reduce overhead
tokens_append = tokens.append
space_join = " ".join
for n in range(min_n,
min(max_n + 1, n_original_tokens + 1)):
for i in range(n_original_tokens - n + 1):
tokens_append(space_join(original_tokens[i: i + n]))
return tokens
def _limit_features(self, X, vocabulary,
high=None, low=None, limit=None):
"""Remove too rare or too common features.
ref: https://github.com/scikit-learn/scikit-learn/blob/ef5cb84a/sklearn/feature_extraction/text.py#L734-L773
"""
if high is None and low is None and limit is None:
return X, set()
# Calculate a mask based on document frequencies
dfs = _document_frequency(X)
mask = np.ones(len(dfs), dtype=bool)
if high is not None:
mask &= dfs <= high
if low is not None:
mask &= dfs >= low
if limit is not None and mask.sum() > limit:
tfs = np.asarray(X.sum(axis=0)).ravel()
mask_inds = (-tfs[mask]).argsort()[:limit]
new_mask = np.zeros(len(dfs), dtype=bool)
new_mask[np.where(mask)[0][mask_inds]] = True
mask = new_mask
new_indices = np.cumsum(mask) - 1 # maps old indices to new
removed_terms = set()
for term, old_index in list(vocabulary.items()):
if mask[old_index]:
vocabulary[term] = new_indices[old_index]
else:
del vocabulary[term]
removed_terms.add(term)
kept_indices = np.where(mask)[0]
if len(kept_indices) == 0:
raise ValueError("After pruning, no terms remain. Try a lower"
" min_df or a higher max_df.")
return X[:, kept_indices], removed_terms
def transform(self, raw_documents, new_document=False):
"""
raw_documents: list, list of new documents to be transformed
        new_document: bool, if True, assume previously unseen documents and build a new self.vocabulary_,
if False, use the previous self.vocabulary_
"""
n_doc = len(raw_documents)
tokenized_documents = []
for doc in raw_documents:
tokens = tokenize(doc) # method in this file
tokens = self._word_ngrams(tokens)
tokenized_documents.append(tokens)
if new_document:
self.vocabulary_ = {v: k for k, v in enumerate(set(chain.from_iterable(tokenized_documents)))}
values, row_indices, col_indices = [], [], []
for r, tokens in enumerate(tokenized_documents):
tokens = self._word_ngrams(tokens)
feature = {}
for token in tokens:
word_index = self.vocabulary_.get(token)
if word_index is not None:
if word_index not in feature.keys():
feature[word_index] = 1
else:
feature[word_index] += 1
for c, v in feature.items():
values.append(v)
row_indices.append(r)
col_indices.append(c)
# document-term matrix in CSR format
X = sp.csr_matrix((values, (row_indices, col_indices)),
shape=(n_doc, len(self.vocabulary_)),
dtype=self.dtype)
# truncate vocabulary by max_df and min_df
if new_document:
max_df = self.max_df
min_df = self.min_df
max_doc_count = (max_df
if isinstance(max_df, numbers.Integral)
else max_df * n_doc)
min_doc_count = (min_df
if isinstance(min_df, numbers.Integral)
else min_df * n_doc)
if max_doc_count < min_doc_count:
raise ValueError(
"max_df corresponds to < documents than min_df")
X, _ = self._limit_features(X, self.vocabulary_,
max_doc_count,
min_doc_count,
self.max_features)
return X
def fit_tranform(self, raw_documents):
"""
Transform given list of raw_documents to document-term matrix in
sparse CSR format (see scipy)
"""
X = self.transform(raw_documents, new_document=True)
return X
def tokenize(self, text, custom_dict=None):
n_pad = 21
if not text:
return [''] # case of empty string
if isinstance(text, str) and sys.version_info.major == 2:
text = text.decode('utf-8')
x_char, x_type = create_feature_array(text, n_pad=n_pad)
word_end = []
# Fix thread-related issue in Keras + TensorFlow + Flask async environment
# ref: https://github.com/keras-team/keras/issues/2397
y_predict = self.model.predict([x_char, x_type])
c = [i[0] for i in y_predict.tolist()]
return list(zip(list(text),c))
def save_model(self, file_path):
"""
Save tokenizer to pickle format
"""
self.model = None # set model to None to successfully save the model
with open(file_path, 'wb') as f:
pickle.dump(self, f) | [
"scipy.sparse.isspmatrix_csr",
"scipy.sparse.csc_matrix",
"pickle.dump",
"numpy.where",
"os.path.join",
"os.path.dirname",
"itertools.chain.from_iterable",
"numpy.cumsum",
"numpy.dtype",
"numpy.bincount",
"re.search"
] | [((341, 366), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (356, 366), False, 'import os\n'), ((381, 440), 'os.path.join', 'os.path.join', (['MODULE_PATH', '"""weight"""', '"""cnn_without_ne_ab.h5"""'], {}), "(MODULE_PATH, 'weight', 'cnn_without_ne_ab.h5')\n", (393, 440), False, 'import os\n'), ((1845, 1865), 'scipy.sparse.isspmatrix_csr', 'sp.isspmatrix_csr', (['X'], {}), '(X)\n', (1862, 1865), True, 'import scipy.sparse as sp\n'), ((1882, 1926), 'numpy.bincount', 'np.bincount', (['X.indices'], {'minlength': 'X.shape[1]'}), '(X.indices, minlength=X.shape[1])\n', (1893, 1926), True, 'import numpy as np\n'), ((4399, 4418), 'numpy.dtype', 'np.dtype', (['"""float64"""'], {}), "('float64')\n", (4407, 4418), True, 'import numpy as np\n'), ((1946, 1974), 'scipy.sparse.csc_matrix', 'sp.csc_matrix', (['X'], {'copy': '(False)'}), '(X, copy=False)\n', (1959, 1974), True, 'import scipy.sparse as sp\n'), ((7020, 7035), 'numpy.cumsum', 'np.cumsum', (['mask'], {}), '(mask)\n', (7029, 7035), True, 'import numpy as np\n'), ((7362, 7376), 'numpy.where', 'np.where', (['mask'], {}), '(mask)\n', (7370, 7376), True, 'import numpy as np\n'), ((11115, 11135), 'pickle.dump', 'pickle.dump', (['self', 'f'], {}), '(self, f)\n', (11126, 11135), False, 'import pickle\n'), ((1326, 1347), 're.search', 're.search', (['word', 'text'], {}), '(word, text)\n', (1335, 1347), False, 'import re\n'), ((6932, 6946), 'numpy.where', 'np.where', (['mask'], {}), '(mask)\n', (6940, 6946), True, 'import numpy as np\n'), ((8247, 8287), 'itertools.chain.from_iterable', 'chain.from_iterable', (['tokenized_documents'], {}), '(tokenized_documents)\n', (8266, 8287), False, 'from itertools import chain\n')] |
# Copyright 2021 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from cumm import tensorview as tv
from contextlib import AbstractContextManager
from spconv.cppconstants import CPU_ONLY_BUILD
from spconv.core_cc.csrc.utils.boxops import BoxOps
from spconv.core_cc.csrc.sparse.all.ops_cpu1d import Point2VoxelCPU as Point2VoxelCPU1d
from spconv.core_cc.csrc.sparse.all.ops_cpu2d import Point2VoxelCPU as Point2VoxelCPU2d
from spconv.core_cc.csrc.sparse.all.ops_cpu3d import Point2VoxelCPU as Point2VoxelCPU3d
from spconv.core_cc.csrc.sparse.all.ops_cpu4d import Point2VoxelCPU as Point2VoxelCPU4d
if not CPU_ONLY_BUILD:
from spconv.core_cc.csrc.sparse.all.ops1d import Point2Voxel as Point2VoxelGPU1d
from spconv.core_cc.csrc.sparse.all.ops2d import Point2Voxel as Point2VoxelGPU2d
from spconv.core_cc.csrc.sparse.all.ops3d import Point2Voxel as Point2VoxelGPU3d
from spconv.core_cc.csrc.sparse.all.ops4d import Point2Voxel as Point2VoxelGPU4d
class nullcontext(AbstractContextManager):
"""Context manager that does no additional processing.
Used as a stand-in for a normal context manager, when a particular
block of code is only sometimes used with a normal context manager:
cm = optional_cm if condition else nullcontext()
with cm:
# Perform operation, using optional_cm if condition is True
"""
def __init__(self, enter_result=None):
self.enter_result = enter_result
def __enter__(self):
return self.enter_result
def __exit__(self, *excinfo):
pass
def rbbox_iou(box_corners: np.ndarray, qbox_corners: np.ndarray,
standup_iou: np.ndarray, standup_thresh: float):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
N = box_corners.shape[0]
K = qbox_corners.shape[0]
overlap = np.zeros((N, K), dtype=box_corners.dtype)
BoxOps.rbbox_iou(tv.from_numpy(box_corners), tv.from_numpy(qbox_corners),
tv.from_numpy(standup_iou), tv.from_numpy(overlap),
standup_thresh, False)
return overlap
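# Note on the call pattern above (added comment): the output array is allocated
# with NumPy, wrapped zero-copy via tv.from_numpy, and filled in place by the
# C++ BoxOps kernel; the trailing boolean is the only difference from
# rbbox_intersection below (False -> IoU, True -> raw intersection area).
#
# Usage sketch (shapes are an assumption inferred from the N x K output):
#
#     overlap = rbbox_iou(box_corners, qbox_corners, standup_iou, standup_thresh=0.1)
#     # overlap[i, j] is the rotated-box IoU of box i against query box j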
def rbbox_intersection(box_corners: np.ndarray, qbox_corners: np.ndarray,
standup_iou: np.ndarray, standup_thresh: float):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
N = box_corners.shape[0]
K = qbox_corners.shape[0]
overlap = np.zeros((N, K), dtype=box_corners.dtype)
BoxOps.rbbox_iou(tv.from_numpy(box_corners), tv.from_numpy(qbox_corners),
tv.from_numpy(standup_iou), tv.from_numpy(overlap),
standup_thresh, True)
return overlap
def rbbox_iou_loss(box_corners: np.ndarray, qbox_corners: np.ndarray):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
N = box_corners.shape[0]
overlap = np.zeros((N, ), dtype=box_corners.dtype)
BoxOps.rbbox_iou_aligned(tv.from_numpy(box_corners),
tv.from_numpy(qbox_corners),
tv.from_numpy(overlap), False)
return overlap
def non_max_suppression_cpu(boxes: np.ndarray,
order: np.ndarray,
thresh: float,
eps: float = 0.0):
return BoxOps.non_max_suppression_cpu(tv.from_numpy(boxes),
tv.from_numpy(order), thresh, eps)
def rotate_non_max_suppression_cpu(boxes: np.ndarray, order: np.ndarray,
standup_iou: np.ndarray, thresh: float):
if not BoxOps.has_boost():
raise NotImplementedError(
"this op require spconv built with boost, download boost, export BOOST_ROOT and rebuild."
)
return BoxOps.rotate_non_max_suppression_cpu(tv.from_numpy(boxes),
tv.from_numpy(order),
tv.from_numpy(standup_iou),
thresh)
| [
"numpy.zeros",
"cumm.tensorview.from_numpy",
"spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost"
] | [((2449, 2490), 'numpy.zeros', 'np.zeros', (['(N, K)'], {'dtype': 'box_corners.dtype'}), '((N, K), dtype=box_corners.dtype)\n', (2457, 2490), True, 'import numpy as np\n'), ((3105, 3146), 'numpy.zeros', 'np.zeros', (['(N, K)'], {'dtype': 'box_corners.dtype'}), '((N, K), dtype=box_corners.dtype)\n', (3113, 3146), True, 'import numpy as np\n'), ((3655, 3694), 'numpy.zeros', 'np.zeros', (['(N,)'], {'dtype': 'box_corners.dtype'}), '((N,), dtype=box_corners.dtype)\n', (3663, 3694), True, 'import numpy as np\n'), ((2209, 2227), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (2225, 2227), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((2513, 2539), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['box_corners'], {}), '(box_corners)\n', (2526, 2539), True, 'from cumm import tensorview as tv\n'), ((2541, 2568), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['qbox_corners'], {}), '(qbox_corners)\n', (2554, 2568), True, 'from cumm import tensorview as tv\n'), ((2591, 2617), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['standup_iou'], {}), '(standup_iou)\n', (2604, 2617), True, 'from cumm import tensorview as tv\n'), ((2619, 2641), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['overlap'], {}), '(overlap)\n', (2632, 2641), True, 'from cumm import tensorview as tv\n'), ((2865, 2883), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (2881, 2883), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((3169, 3195), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['box_corners'], {}), '(box_corners)\n', (3182, 3195), True, 'from cumm import tensorview as tv\n'), ((3197, 3224), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['qbox_corners'], {}), '(qbox_corners)\n', (3210, 3224), True, 'from cumm import tensorview as tv\n'), ((3247, 3273), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['standup_iou'], {}), '(standup_iou)\n', (3260, 3273), True, 'from cumm import tensorview as tv\n'), ((3275, 3297), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['overlap'], {}), '(overlap)\n', (3288, 3297), True, 'from cumm import tensorview as tv\n'), ((3445, 3463), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (3461, 3463), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((3726, 3752), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['box_corners'], {}), '(box_corners)\n', (3739, 3752), True, 'from cumm import tensorview as tv\n'), ((3783, 3810), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['qbox_corners'], {}), '(qbox_corners)\n', (3796, 3810), True, 'from cumm import tensorview as tv\n'), ((3841, 3863), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['overlap'], {}), '(overlap)\n', (3854, 3863), True, 'from cumm import tensorview as tv\n'), ((4119, 4139), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['boxes'], {}), '(boxes)\n', (4132, 4139), True, 'from cumm import tensorview as tv\n'), ((4183, 4203), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['order'], {}), '(order)\n', (4196, 4203), True, 'from cumm import tensorview as tv\n'), ((4380, 4398), 'spconv.core_cc.csrc.utils.boxops.BoxOps.has_boost', 'BoxOps.has_boost', ([], {}), '()\n', (4396, 4398), False, 'from spconv.core_cc.csrc.utils.boxops import BoxOps\n'), ((4596, 4616), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['boxes'], {}), '(boxes)\n', (4609, 4616), True, 'from cumm import tensorview as tv\n'), ((4667, 4687), 'cumm.tensorview.from_numpy', 
'tv.from_numpy', (['order'], {}), '(order)\n', (4680, 4687), True, 'from cumm import tensorview as tv\n'), ((4738, 4764), 'cumm.tensorview.from_numpy', 'tv.from_numpy', (['standup_iou'], {}), '(standup_iou)\n', (4751, 4764), True, 'from cumm import tensorview as tv\n')] |
from torch import nn
def get_fc_discriminator(num_classes, ndf=64):
return nn.Sequential(
nn.Conv2d(num_classes, ndf, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf, ndf * 2, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 8, 1, kernel_size=4, stride=2, padding=1),
)
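# Shape sketch (added note; the 19-class input and 256x512 map are illustrative):
# five stride-2, kernel-4 convolutions halve the spatial size each time, a
# factor-32 downsample overall:
#
#     d_main = get_fc_discriminator(num_classes=19)
#     out = d_main(prob_map)   # prob_map: (B, 19, 256, 512) softmax scores
#     # out: (B, 1, 8, 16), one real/fake logit per image patch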
# def get_fe_discriminator(num_classes, ndf=64): # 256-128-64-32-16
# return nn.Sequential(
# nn.Conv2d(num_classes, ndf * 4, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, ndf * 2, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 2, ndf, kernel_size=2, stride=2, padding=0),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# # nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# # nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf, 1, kernel_size=2, stride=2, padding=0),
# )
# def get_fe_discriminator(num_classes, ndf=64):
# return nn.Sequential(
# nn.Conv2d(num_classes, ndf, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf, ndf * 2, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
# # nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# # nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, 1, kernel_size=1, stride=1, padding=0),
# )
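# The variant below, in contrast to get_fc_discriminator, uses only 1x1, stride-1
# convolutions (channels: num_classes -> ndf*4 -> ndf*2 -> ndf -> 1), so the
# spatial resolution of the input feature map is preserved and every location
# receives its own domain logit. (Comment added for clarity.)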
def get_fe_discriminator(num_classes, ndf=64): # H/8,H/8,(1024 -> 256 -> 128 -> 64 -> 1)
return nn.Sequential(
nn.Conv2d(num_classes, ndf * 4, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 4, ndf * 2, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf * 2, ndf, kernel_size=1, stride=1, padding=0),
# x=self.dropout(x)
nn.LeakyReLU(negative_slope=0.2, inplace=True),
# nn.Conv2d(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1),
# nn.LeakyReLU(negative_slope=0.2, inplace=True),
nn.Conv2d(ndf, 1, kernel_size=1, stride=1, padding=0),
) | [
"torch.nn.LeakyReLU",
"torch.nn.Conv2d"
] | [((104, 167), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', 'ndf'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(num_classes, ndf, kernel_size=4, stride=2, padding=1)\n', (113, 167), False, 'from torch import nn\n'), ((177, 223), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (189, 223), False, 'from torch import nn\n'), ((233, 292), 'torch.nn.Conv2d', 'nn.Conv2d', (['ndf', '(ndf * 2)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf, ndf * 2, kernel_size=4, stride=2, padding=1)\n', (242, 292), False, 'from torch import nn\n'), ((302, 348), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (314, 348), False, 'from torch import nn\n'), ((358, 421), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 2)', '(ndf * 4)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf * 2, ndf * 4, kernel_size=4, stride=2, padding=1)\n', (367, 421), False, 'from torch import nn\n'), ((431, 477), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (443, 477), False, 'from torch import nn\n'), ((487, 550), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 4)', '(ndf * 8)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf * 4, ndf * 8, kernel_size=4, stride=2, padding=1)\n', (496, 550), False, 'from torch import nn\n'), ((560, 606), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (572, 606), False, 'from torch import nn\n'), ((616, 673), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 8)', '(1)'], {'kernel_size': '(4)', 'stride': '(2)', 'padding': '(1)'}), '(ndf * 8, 1, kernel_size=4, stride=2, padding=1)\n', (625, 673), False, 'from torch import nn\n'), ((2201, 2268), 'torch.nn.Conv2d', 'nn.Conv2d', (['num_classes', '(ndf * 4)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(num_classes, ndf * 4, kernel_size=1, stride=1, padding=0)\n', (2210, 2268), False, 'from torch import nn\n'), ((2306, 2352), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (2318, 2352), False, 'from torch import nn\n'), ((2362, 2425), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 4)', '(ndf * 2)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(ndf * 4, ndf * 2, kernel_size=1, stride=1, padding=0)\n', (2371, 2425), False, 'from torch import nn\n'), ((2463, 2509), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (2475, 2509), False, 'from torch import nn\n'), ((2519, 2578), 'torch.nn.Conv2d', 'nn.Conv2d', (['(ndf * 2)', 'ndf'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(ndf * 2, ndf, kernel_size=1, stride=1, padding=0)\n', (2528, 2578), False, 'from torch import nn\n'), ((2616, 2662), 'torch.nn.LeakyReLU', 'nn.LeakyReLU', ([], {'negative_slope': '(0.2)', 'inplace': '(True)'}), '(negative_slope=0.2, inplace=True)\n', (2628, 2662), False, 'from torch import nn\n'), ((2805, 2858), 'torch.nn.Conv2d', 'nn.Conv2d', (['ndf', '(1)'], {'kernel_size': '(1)', 'stride': '(1)', 'padding': '(0)'}), '(ndf, 1, kernel_size=1, stride=1, padding=0)\n', (2814, 2858), False, 'from torch import nn\n')] |
import os
from setuptools import find_packages, setup
# Load version number
__version__ = None
src_dir = os.path.abspath(os.path.dirname(__file__))
version_file = os.path.join(src_dir, 'chemprop', '_version.py')
with open(version_file, encoding='utf-8') as fd:
exec(fd.read())
# Load README
with open('README.md', encoding='utf-8') as f:
long_description = f.read()
setup(
name='chemprop',
version=__version__,
author='<NAME>, <NAME>, <NAME>, <NAME>, <NAME>',
author_email='<EMAIL>',
description='Molecular Property Prediction with Message Passing Neural Networks',
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/chemprop/chemprop',
download_url=f'https://github.com/chemprop/chemprop/v_{__version__}.tar.gz',
project_urls={
'Documentation': 'https://chemprop.readthedocs.io/en/latest/',
'Source': 'https://github.com/chemprop/chemprop',
'PyPi': 'https://pypi.org/project/chemprop/',
'Demo': 'http://chemprop.csail.mit.edu/',
},
license='MIT',
packages=find_packages(),
package_data={'chemprop': ['py.typed']},
entry_points={
'console_scripts': [
'chemprop_train=chemprop.train:chemprop_train',
'chemprop_predict=chemprop.train:chemprop_predict',
'chemprop_fingerprint=chemprop.train:chemprop_fingerprint',
'chemprop_hyperopt=chemprop.hyperparameter_optimization:chemprop_hyperopt',
'chemprop_interpret=chemprop.interpret:chemprop_interpret',
'chemprop_web=chemprop.web.run:chemprop_web',
'sklearn_train=chemprop.sklearn_train:sklearn_train',
'sklearn_predict=chemprop.sklearn_predict:sklearn_predict',
]
},
install_requires=[
'flask>=1.1.2',
'hyperopt>=0.2.3',
'matplotlib>=3.1.3',
'numpy>=1.18.1',
'pandas>=1.0.3',
'pandas-flavor>=0.2.0',
'scikit-learn>=0.22.2.post1',
'scipy>=1.4.1',
'sphinx>=3.1.2',
'tensorboardX>=2.0',
'torch>=1.5.1',
'tqdm>=4.45.0',
'typed-argument-parser>=1.6.1'
],
extras_require={
'test': [
'pytest>=6.2.2',
'parameterized>=0.8.1'
]
},
python_requires='>=3.6',
classifiers=[
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent'
],
keywords=[
'chemistry',
'machine learning',
'property prediction',
'message passing neural network',
'graph neural network'
]
)
| [
"os.path.dirname",
"setuptools.find_packages",
"os.path.join"
] | [((172, 220), 'os.path.join', 'os.path.join', (['src_dir', '"""chemprop"""', '"""_version.py"""'], {}), "(src_dir, 'chemprop', '_version.py')\n", (184, 220), False, 'import os\n'), ((129, 154), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (144, 154), False, 'import os\n'), ((1144, 1159), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1157, 1159), False, 'from setuptools import find_packages, setup\n')] |
"""
Main module.
Implement the central Checker class.
Also, it models the Bindings and Scopes.
"""
import __future__
import doctest
import os
import sys
PY2 = sys.version_info < (3, 0)
PY32 = sys.version_info < (3, 3) # Python 2.5 to 3.2
PY33 = sys.version_info < (3, 4) # Python 2.5 to 3.3
PY34 = sys.version_info < (3, 5) # Python 2.5 to 3.4
try:
sys.pypy_version_info
PYPY = True
except AttributeError:
PYPY = False
builtin_vars = dir(__import__('__builtin__' if PY2 else 'builtins'))
try:
import ast
except ImportError: # Python 2.5
import _ast as ast
if 'decorator_list' not in ast.ClassDef._fields:
# Patch the missing attribute 'decorator_list'
ast.ClassDef.decorator_list = ()
ast.FunctionDef.decorator_list = property(lambda s: s.decorators)
from pyflakes import messages
if PY2:
def getNodeType(node_class):
# workaround str.upper() which is locale-dependent
return str(unicode(node_class.__name__).upper())
else:
def getNodeType(node_class):
return node_class.__name__.upper()
# Python >= 3.3 uses ast.Try instead of (ast.TryExcept + ast.TryFinally)
if PY32:
def getAlternatives(n):
if isinstance(n, (ast.If, ast.TryFinally)):
return [n.body]
if isinstance(n, ast.TryExcept):
return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
else:
def getAlternatives(n):
if isinstance(n, ast.If):
return [n.body]
if isinstance(n, ast.Try):
return [n.body + n.orelse] + [[hdl] for hdl in n.handlers]
if PY34:
LOOP_TYPES = (ast.While, ast.For)
else:
LOOP_TYPES = (ast.While, ast.For, ast.AsyncFor)
class _FieldsOrder(dict):
"""Fix order of AST node fields."""
def _get_fields(self, node_class):
# handle iter before target, and generators before element
fields = node_class._fields
if 'iter' in fields:
key_first = 'iter'.find
elif 'generators' in fields:
key_first = 'generators'.find
else:
key_first = 'value'.find
return tuple(sorted(fields, key=key_first, reverse=True))
def __missing__(self, node_class):
self[node_class] = fields = self._get_fields(node_class)
return fields
def counter(items):
"""
Simplest required implementation of collections.Counter. Required as 2.6
does not have Counter in collections.
"""
results = {}
for item in items:
results[item] = results.get(item, 0) + 1
return results
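# For example (added note): counter(['a', 'b', 'a']) == {'a': 2, 'b': 1},
# matching collections.Counter for this simple use.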
def iter_child_nodes(node, omit=None, _fields_order=_FieldsOrder()):
"""
Yield all direct child nodes of *node*, that is, all fields that
are nodes and all items of fields that are lists of nodes.
"""
for name in _fields_order[node.__class__]:
if name == omit:
continue
field = getattr(node, name, None)
if isinstance(field, ast.AST):
yield field
elif isinstance(field, list):
for item in field:
yield item
def convert_to_value(item):
if isinstance(item, ast.Str):
return item.s
elif hasattr(ast, 'Bytes') and isinstance(item, ast.Bytes):
return item.s
elif isinstance(item, ast.Tuple):
return tuple(convert_to_value(i) for i in item.elts)
elif isinstance(item, ast.Num):
return item.n
elif isinstance(item, ast.Name):
result = VariableKey(item=item)
constants_lookup = {
'True': True,
'False': False,
'None': None,
}
return constants_lookup.get(
result.name,
result,
)
elif (not PY33) and isinstance(item, ast.NameConstant):
# None, True, False are nameconstants in python3, but names in 2
return item.value
else:
return UnhandledKeyType()
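# Illustration (added note): for a dict key written as ('a', 1) the AST is an
# ast.Tuple containing an ast.Str and an ast.Num, so convert_to_value returns the
# plain tuple ('a', 1). Keys that cannot be evaluated come back as fresh
# UnhandledKeyType instances, which never compare equal and are therefore never
# reported as duplicate keys.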
class Binding(object):
"""
Represents the binding of a value to a name.
The checker uses this to keep track of which names have been bound and
which names have not. See L{Assignment} for a special type of binding that
is checked with stricter rules.
@ivar used: pair of (L{Scope}, node) indicating the scope and
the node that this binding was last used.
"""
def __init__(self, name, source):
self.name = name
self.source = source
self.used = False
def __str__(self):
return self.name
def __repr__(self):
return '<%s object %r from line %r at 0x%x>' % (self.__class__.__name__,
self.name,
self.source.lineno,
id(self))
def redefines(self, other):
return isinstance(other, Definition) and self.name == other.name
class Definition(Binding):
"""
A binding that defines a function or a class.
"""
class UnhandledKeyType(object):
"""
A dictionary key of a type that we cannot or do not check for duplicates.
"""
class VariableKey(object):
"""
A dictionary key which is a variable.
@ivar item: The variable AST object.
"""
def __init__(self, item):
self.name = item.id
def __eq__(self, compare):
return (
compare.__class__ == self.__class__
and compare.name == self.name
)
def __hash__(self):
return hash(self.name)
class Importation(Definition):
"""
A binding created by an import statement.
@ivar fullName: The complete name given to the import statement,
possibly including multiple dotted components.
@type fullName: C{str}
"""
def __init__(self, name, source, full_name=None):
self.fullName = full_name or name
self.redefined = []
super(Importation, self).__init__(name, source)
def redefines(self, other):
if isinstance(other, SubmoduleImportation):
# See note in SubmoduleImportation about RedefinedWhileUnused
return self.fullName == other.fullName
return isinstance(other, Definition) and self.name == other.name
def _has_alias(self):
"""Return whether importation needs an as clause."""
return not self.fullName.split('.')[-1] == self.name
@property
def source_statement(self):
"""Generate a source statement equivalent to the import."""
if self._has_alias():
return 'import %s as %s' % (self.fullName, self.name)
else:
return 'import %s' % self.fullName
def __str__(self):
"""Return import full name with alias."""
if self._has_alias():
return self.fullName + ' as ' + self.name
else:
return self.fullName
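# Concrete reading of the fields above (added note): for "import numpy as np" the
# binding has name='np' and fullName='numpy', _has_alias() is True, str() yields
# 'numpy as np' and source_statement yields 'import numpy as np'.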
class SubmoduleImportation(Importation):
"""
A binding created by a submodule import statement.
A submodule import is a special case where the root module is implicitly
imported, without an 'as' clause, and the submodule is also imported.
Python does not restrict which attributes of the root module may be used.
This class is only used when the submodule import is without an 'as' clause.
pyflakes handles this case by registering the root module name in the scope,
allowing any attribute of the root module to be accessed.
RedefinedWhileUnused is suppressed in `redefines` unless the submodule
name is also the same, to avoid false positives.
"""
def __init__(self, name, source):
# A dot should only appear in the name when it is a submodule import
assert '.' in name and (not source or isinstance(source, ast.Import))
package_name = name.split('.')[0]
super(SubmoduleImportation, self).__init__(package_name, source)
self.fullName = name
def redefines(self, other):
if isinstance(other, Importation):
return self.fullName == other.fullName
return super(SubmoduleImportation, self).redefines(other)
def __str__(self):
return self.fullName
@property
def source_statement(self):
return 'import ' + self.fullName
class ImportationFrom(Importation):
def __init__(self, name, source, module, real_name=None):
self.module = module
self.real_name = real_name or name
if module.endswith('.'):
full_name = module + self.real_name
else:
full_name = module + '.' + self.real_name
super(ImportationFrom, self).__init__(name, source, full_name)
def __str__(self):
"""Return import full name with alias."""
if self.real_name != self.name:
return self.fullName + ' as ' + self.name
else:
return self.fullName
@property
def source_statement(self):
if self.real_name != self.name:
return 'from %s import %s as %s' % (self.module,
self.real_name,
self.name)
else:
return 'from %s import %s' % (self.module, self.name)
class StarImportation(Importation):
"""A binding created by a 'from x import *' statement."""
def __init__(self, name, source):
super(StarImportation, self).__init__('*', source)
# Each star importation needs a unique name, and
# may not be the module name otherwise it will be deemed imported
self.name = name + '.*'
self.fullName = name
@property
def source_statement(self):
return 'from ' + self.fullName + ' import *'
def __str__(self):
# When the module ends with a ., avoid the ambiguous '..*'
if self.fullName.endswith('.'):
return self.source_statement
else:
return self.name
class FutureImportation(ImportationFrom):
"""
A binding created by a from `__future__` import statement.
`__future__` imports are implicitly used.
"""
def __init__(self, name, source, scope):
super(FutureImportation, self).__init__(name, source, '__future__')
self.used = (scope, source)
class Argument(Binding):
"""
Represents binding a name as an argument.
"""
class Assignment(Binding):
"""
Represents binding a name with an explicit assignment.
The checker will raise warnings for any Assignment that isn't used. Also,
the checker does not consider assignments in tuple/list unpacking to be
Assignments, rather it treats them as simple Bindings.
"""
class FunctionDefinition(Definition):
pass
class ClassDefinition(Definition):
pass
class ExportBinding(Binding):
"""
A binding created by an C{__all__} assignment. If the names in the list
can be determined statically, they will be treated as names for export and
additional checking applied to them.
The only C{__all__} assignment that can be recognized is one which takes
the value of a literal list containing literal strings. For example::
__all__ = ["foo", "bar"]
Names which are imported and not otherwise used but appear in the value of
C{__all__} will not have an unused import warning reported for them.
"""
def __init__(self, name, source, scope):
if '__all__' in scope and isinstance(source, ast.AugAssign):
self.names = list(scope['__all__'].names)
else:
self.names = []
if isinstance(source.value, (ast.List, ast.Tuple)):
for node in source.value.elts:
if isinstance(node, ast.Str):
self.names.append(node.s)
super(ExportBinding, self).__init__(name, source)
class Scope(dict):
importStarred = False # set to True when import * is found
def __repr__(self):
scope_cls = self.__class__.__name__
return '<%s at 0x%x %s>' % (scope_cls, id(self), dict.__repr__(self))
class ClassScope(Scope):
pass
class FunctionScope(Scope):
"""
I represent a name scope for a function.
@ivar globals: Names declared 'global' in this function.
"""
usesLocals = False
alwaysUsed = set(['__tracebackhide__',
'__traceback_info__', '__traceback_supplement__'])
def __init__(self):
super(FunctionScope, self).__init__()
# Simplify: manage the special locals as globals
self.globals = self.alwaysUsed.copy()
self.returnValue = None # First non-empty return
self.isGenerator = False # Detect a generator
def unusedAssignments(self):
"""
Return a generator for the assignments which have not been used.
"""
for name, binding in self.items():
if (not binding.used and name not in self.globals
and not self.usesLocals
and isinstance(binding, Assignment)):
yield name, binding
class GeneratorScope(Scope):
pass
class ModuleScope(Scope):
"""Scope for a module."""
_futures_allowed = True
class DoctestScope(ModuleScope):
"""Scope for a doctest."""
# Globally defined names which are not attributes of the builtins module, or
# are only present on some platforms.
_MAGIC_GLOBALS = ['__file__', '__builtins__', 'WindowsError']
def getNodeName(node):
# Returns node.id, or node.name, or None
if hasattr(node, 'id'): # One of the many nodes with an id
return node.id
if hasattr(node, 'name'): # an ExceptHandler node
return node.name
class Checker(object):
"""
I check the cleanliness and sanity of Python code.
@ivar _deferredFunctions: Tracking list used by L{deferFunction}. Elements
of the list are two-tuples. The first element is the callable passed
to L{deferFunction}. The second element is a copy of the scope stack
at the time L{deferFunction} was called.
@ivar _deferredAssignments: Similar to C{_deferredFunctions}, but for
callables which are deferred assignment checks.
"""
nodeDepth = 0
offset = None
traceTree = False
builtIns = set(builtin_vars).union(_MAGIC_GLOBALS)
_customBuiltIns = os.environ.get('PYFLAKES_BUILTINS')
if _customBuiltIns:
builtIns.update(_customBuiltIns.split(','))
del _customBuiltIns
def __init__(self, tree, filename='(none)', builtins=None,
withDoctest='PYFLAKES_DOCTEST' in os.environ):
self._nodeHandlers = {}
self._deferredFunctions = []
self._deferredAssignments = []
self.deadScopes = []
self.messages = []
self.filename = filename
if builtins:
self.builtIns = self.builtIns.union(builtins)
self.withDoctest = withDoctest
self.scopeStack = [ModuleScope()]
self.exceptHandlers = [()]
self.root = tree
self.handleChildren(tree)
self.runDeferred(self._deferredFunctions)
# Set _deferredFunctions to None so that deferFunction will fail
# noisily if called after we've run through the deferred functions.
self._deferredFunctions = None
self.runDeferred(self._deferredAssignments)
# Set _deferredAssignments to None so that deferAssignment will fail
# noisily if called after we've run through the deferred assignments.
self._deferredAssignments = None
del self.scopeStack[1:]
self.popScope()
self.checkDeadScopes()
def deferFunction(self, callable):
"""
Schedule a function handler to be called just before completion.
This is used for handling function bodies, which must be deferred
because code later in the file might modify the global scope. When
`callable` is called, the scope at the time this is called will be
restored, however it will contain any new bindings added to it.
"""
self._deferredFunctions.append((callable, self.scopeStack[:], self.offset))
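    # Illustrative case (comment added for clarity): a module-level function may
    # refer to a name that is only bound later in the file; deferring the body
    # check until the whole module has been walked avoids a spurious
    # "undefined name" report here:
    #
    #     def f():
    #         return LIMIT   # resolved when the deferred handler runs
    #     LIMIT = 10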
def deferAssignment(self, callable):
"""
Schedule an assignment handler to be called just after deferred
function handlers.
"""
self._deferredAssignments.append((callable, self.scopeStack[:], self.offset))
def runDeferred(self, deferred):
"""
Run the callables in C{deferred} using their associated scope stack.
"""
for handler, scope, offset in deferred:
self.scopeStack = scope
self.offset = offset
handler()
def _in_doctest(self):
return (len(self.scopeStack) >= 2 and
isinstance(self.scopeStack[1], DoctestScope))
@property
def futuresAllowed(self):
if not all(isinstance(scope, ModuleScope)
for scope in self.scopeStack):
return False
return self.scope._futures_allowed
@futuresAllowed.setter
def futuresAllowed(self, value):
assert value is False
if isinstance(self.scope, ModuleScope):
self.scope._futures_allowed = False
@property
def scope(self):
return self.scopeStack[-1]
def popScope(self):
self.deadScopes.append(self.scopeStack.pop())
def checkDeadScopes(self):
"""
Look at scopes which have been fully examined and report names in them
which were imported but unused.
"""
for scope in self.deadScopes:
# imports in classes are public members
if isinstance(scope, ClassScope):
continue
all_binding = scope.get('__all__')
if all_binding and not isinstance(all_binding, ExportBinding):
all_binding = None
if all_binding:
all_names = set(all_binding.names)
undefined = all_names.difference(scope)
else:
all_names = undefined = []
if undefined:
if not scope.importStarred and \
os.path.basename(self.filename) != '__init__.py':
# Look for possible mistakes in the export list
for name in undefined:
self.report(messages.UndefinedExport,
scope['__all__'].source, name)
# mark all import '*' as used by the undefined in __all__
if scope.importStarred:
for binding in scope.values():
if isinstance(binding, StarImportation):
binding.used = all_binding
# Look for imported names that aren't used.
for value in scope.values():
if isinstance(value, Importation):
used = value.used or value.name in all_names
if not used:
messg = messages.UnusedImport
self.report(messg, value.source, str(value))
for node in value.redefined:
if isinstance(self.getParent(node), ast.For):
messg = messages.ImportShadowedByLoopVar
elif used:
continue
else:
messg = messages.RedefinedWhileUnused
self.report(messg, node, value.name, value.source)
def pushScope(self, scopeClass=FunctionScope):
self.scopeStack.append(scopeClass())
def report(self, messageClass, *args, **kwargs):
self.messages.append(messageClass(self.filename, *args, **kwargs))
def getParent(self, node):
# Lookup the first parent which is not Tuple, List or Starred
while True:
node = node.parent
if not hasattr(node, 'elts') and not hasattr(node, 'ctx'):
return node
def getCommonAncestor(self, lnode, rnode, stop):
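        # Walk up from the deeper of the two nodes until both sit at the same
        # depth, then climb both sides together until they meet (their common
        # ancestor) or the search reaches `stop`.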
if stop in (lnode, rnode) or not (hasattr(lnode, 'parent') and
hasattr(rnode, 'parent')):
return None
if lnode is rnode:
return lnode
if (lnode.depth > rnode.depth):
return self.getCommonAncestor(lnode.parent, rnode, stop)
if (lnode.depth < rnode.depth):
return self.getCommonAncestor(lnode, rnode.parent, stop)
return self.getCommonAncestor(lnode.parent, rnode.parent, stop)
def descendantOf(self, node, ancestors, stop):
for a in ancestors:
if self.getCommonAncestor(node, a, stop):
return True
return False
def differentForks(self, lnode, rnode):
"""True, if lnode and rnode are located on different forks of IF/TRY"""
ancestor = self.getCommonAncestor(lnode, rnode, self.root)
parts = getAlternatives(ancestor)
if parts:
for items in parts:
if self.descendantOf(lnode, items, ancestor) ^ \
self.descendantOf(rnode, items, ancestor):
return True
return False
def addBinding(self, node, value):
"""
Called when a binding is altered.
- `node` is the statement responsible for the change
- `value` is the new value, a Binding instance
"""
# assert value.source in (node, node.parent):
for scope in self.scopeStack[::-1]:
if value.name in scope:
break
existing = scope.get(value.name)
if existing and not self.differentForks(node, existing.source):
parent_stmt = self.getParent(value.source)
if isinstance(existing, Importation) and isinstance(parent_stmt, ast.For):
self.report(messages.ImportShadowedByLoopVar,
node, value.name, existing.source)
elif scope is self.scope:
if (isinstance(parent_stmt, ast.comprehension) and
not isinstance(self.getParent(existing.source),
(ast.For, ast.comprehension))):
self.report(messages.RedefinedInListComp,
node, value.name, existing.source)
elif not existing.used and value.redefines(existing):
self.report(messages.RedefinedWhileUnused,
node, value.name, existing.source)
elif isinstance(existing, Importation) and value.redefines(existing):
existing.redefined.append(node)
if value.name in self.scope:
# then assume the rebound name is used as a global or within a loop
value.used = self.scope[value.name].used
self.scope[value.name] = value
def getNodeHandler(self, node_class):
try:
return self._nodeHandlers[node_class]
except KeyError:
nodeType = getNodeType(node_class)
self._nodeHandlers[node_class] = handler = getattr(self, nodeType)
return handler
def handleNodeLoad(self, node):
name = getNodeName(node)
if not name:
return
in_generators = None
importStarred = None
# try enclosing function scopes and global scope
for scope in self.scopeStack[-1::-1]:
# only generators used in a class scope can access the names
# of the class. this is skipped during the first iteration
if in_generators is False and isinstance(scope, ClassScope):
continue
try:
scope[name].used = (self.scope, node)
except KeyError:
pass
else:
return
importStarred = importStarred or scope.importStarred
if in_generators is not False:
in_generators = isinstance(scope, GeneratorScope)
# look in the built-ins
if name in self.builtIns:
return
if importStarred:
from_list = []
for scope in self.scopeStack[-1::-1]:
for binding in scope.values():
if isinstance(binding, StarImportation):
# mark '*' imports as used for each scope
binding.used = (self.scope, node)
from_list.append(binding.fullName)
# report * usage, with a list of possible sources
from_list = ', '.join(sorted(from_list))
self.report(messages.ImportStarUsage, node, name, from_list)
return
if name == '__path__' and os.path.basename(self.filename) == '__init__.py':
# the special name __path__ is valid only in packages
return
# protected with a NameError handler?
if 'NameError' not in self.exceptHandlers[-1]:
self.report(messages.UndefinedName, node, name)
def handleNodeStore(self, node):
name = getNodeName(node)
if not name:
return
# if the name hasn't already been defined in the current scope
if isinstance(self.scope, FunctionScope) and name not in self.scope:
# for each function or module scope above us
for scope in self.scopeStack[:-1]:
if not isinstance(scope, (FunctionScope, ModuleScope)):
continue
# if the name was defined in that scope, and the name has
# been accessed already in the current scope, and hasn't
# been declared global
used = name in scope and scope[name].used
if used and used[0] is self.scope and name not in self.scope.globals:
# then it's probably a mistake
self.report(messages.UndefinedLocal,
scope[name].used[1], name, scope[name].source)
break
parent_stmt = self.getParent(node)
if isinstance(parent_stmt, (ast.For, ast.comprehension)) or (
parent_stmt != node.parent and
not self.isLiteralTupleUnpacking(parent_stmt)):
binding = Binding(name, node)
elif name == '__all__' and isinstance(self.scope, ModuleScope):
binding = ExportBinding(name, node.parent, self.scope)
else:
binding = Assignment(name, node)
self.addBinding(node, binding)
def handleNodeDelete(self, node):
def on_conditional_branch():
"""
Return `True` if node is part of a conditional body.
"""
current = getattr(node, 'parent', None)
while current:
if isinstance(current, (ast.If, ast.While, ast.IfExp)):
return True
current = getattr(current, 'parent', None)
return False
name = getNodeName(node)
if not name:
return
if on_conditional_branch():
# We cannot predict if this conditional branch is going to
# be executed.
return
if isinstance(self.scope, FunctionScope) and name in self.scope.globals:
self.scope.globals.remove(name)
else:
try:
del self.scope[name]
except KeyError:
self.report(messages.UndefinedName, node, name)
def handleChildren(self, tree, omit=None):
for node in iter_child_nodes(tree, omit=omit):
self.handleNode(node, tree)
def isLiteralTupleUnpacking(self, node):
if isinstance(node, ast.Assign):
for child in node.targets + [node.value]:
if not hasattr(child, 'elts'):
return False
return True
def isDocstring(self, node):
"""
Determine if the given node is a docstring, as long as it is at the
correct place in the node tree.
"""
return isinstance(node, ast.Str) or (isinstance(node, ast.Expr) and
isinstance(node.value, ast.Str))
def getDocstring(self, node):
if isinstance(node, ast.Expr):
node = node.value
if not isinstance(node, ast.Str):
return (None, None)
if PYPY:
doctest_lineno = node.lineno - 1
else:
# Computed incorrectly if the docstring has backslash
doctest_lineno = node.lineno - node.s.count('\n') - 1
return (node.s, doctest_lineno)
def handleNode(self, node, parent):
if node is None:
return
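        # self.offset is only set while checking doctest snippets; it re-bases
        # the snippet's line/column numbers onto the enclosing module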
if self.offset and getattr(node, 'lineno', None) is not None:
node.lineno += self.offset[0]
node.col_offset += self.offset[1]
if self.traceTree:
print(' ' * self.nodeDepth + node.__class__.__name__)
if self.futuresAllowed and not (isinstance(node, ast.ImportFrom) or
self.isDocstring(node)):
self.futuresAllowed = False
self.nodeDepth += 1
node.depth = self.nodeDepth
node.parent = parent
try:
handler = self.getNodeHandler(node.__class__)
handler(node)
finally:
self.nodeDepth -= 1
if self.traceTree:
print(' ' * self.nodeDepth + 'end ' + node.__class__.__name__)
_getDoctestExamples = doctest.DocTestParser().get_examples
def handleDoctests(self, node):
try:
if hasattr(node, 'docstring'):
docstring = node.docstring
# This is just a reasonable guess. In Python 3.7, docstrings no
# longer have line numbers associated with them. This will be
# incorrect if there are empty lines between the beginning
# of the function and the docstring.
node_lineno = node.lineno
if hasattr(node, 'args'):
node_lineno = max([node_lineno] +
[arg.lineno for arg in node.args.args])
else:
(docstring, node_lineno) = self.getDocstring(node.body[0])
examples = docstring and self._getDoctestExamples(docstring)
except (ValueError, IndexError):
# e.g. line 6 of the docstring for <string> has inconsistent
# leading whitespace: ...
return
if not examples:
return
# Place doctest in module scope
saved_stack = self.scopeStack
self.scopeStack = [self.scopeStack[0]]
node_offset = self.offset or (0, 0)
self.pushScope(DoctestScope)
underscore_in_builtins = '_' in self.builtIns
if not underscore_in_builtins:
self.builtIns.add('_')
for example in examples:
try:
tree = compile(example.source, "<doctest>", "exec", ast.PyCF_ONLY_AST)
except SyntaxError:
e = sys.exc_info()[1]
if PYPY:
e.offset += 1
position = (node_lineno + example.lineno + e.lineno,
example.indent + 4 + (e.offset or 0))
self.report(messages.DoctestSyntaxError, node, position)
else:
self.offset = (node_offset[0] + node_lineno + example.lineno,
node_offset[1] + example.indent + 4)
self.handleChildren(tree)
self.offset = node_offset
if not underscore_in_builtins:
self.builtIns.remove('_')
self.popScope()
self.scopeStack = saved_stack
def ignore(self, node):
pass
# "stmt" type nodes
DELETE = PRINT = FOR = ASYNCFOR = WHILE = IF = WITH = WITHITEM = \
ASYNCWITH = ASYNCWITHITEM = RAISE = TRYFINALLY = EXEC = \
EXPR = ASSIGN = handleChildren
PASS = ignore
# "expr" type nodes
BOOLOP = BINOP = UNARYOP = IFEXP = SET = \
COMPARE = CALL = REPR = ATTRIBUTE = SUBSCRIPT = \
STARRED = NAMECONSTANT = handleChildren
NUM = STR = BYTES = ELLIPSIS = ignore
# "slice" type nodes
SLICE = EXTSLICE = INDEX = handleChildren
# expression contexts are node instances too, though being constants
LOAD = STORE = DEL = AUGLOAD = AUGSTORE = PARAM = ignore
# same for operators
AND = OR = ADD = SUB = MULT = DIV = MOD = POW = LSHIFT = RSHIFT = \
BITOR = BITXOR = BITAND = FLOORDIV = INVERT = NOT = UADD = USUB = \
EQ = NOTEQ = LT = LTE = GT = GTE = IS = ISNOT = IN = NOTIN = \
MATMULT = ignore
# additional node types
COMPREHENSION = KEYWORD = FORMATTEDVALUE = JOINEDSTR = handleChildren
def DICT(self, node):
# Complain if there are duplicate keys with different values
# If they have the same value it's not going to cause potentially
# unexpected behaviour so we'll not complain.
keys = [
convert_to_value(key) for key in node.keys
]
key_counts = counter(keys)
duplicate_keys = [
key for key, count in key_counts.items()
if count > 1
]
for key in duplicate_keys:
key_indices = [i for i, i_key in enumerate(keys) if i_key == key]
values = counter(
convert_to_value(node.values[index])
for index in key_indices
)
if any(count == 1 for value, count in values.items()):
for key_index in key_indices:
key_node = node.keys[key_index]
if isinstance(key, VariableKey):
self.report(messages.MultiValueRepeatedKeyVariable,
key_node,
key.name)
else:
self.report(
messages.MultiValueRepeatedKeyLiteral,
key_node,
key,
)
self.handleChildren(node)
def ASSERT(self, node):
if isinstance(node.test, ast.Tuple) and node.test.elts != []:
self.report(messages.AssertTuple, node)
self.handleChildren(node)
def GLOBAL(self, node):
"""
Keep track of globals declarations.
"""
global_scope_index = 1 if self._in_doctest() else 0
global_scope = self.scopeStack[global_scope_index]
# Ignore 'global' statement in global scope.
if self.scope is not global_scope:
# One 'global' statement can bind multiple (comma-delimited) names.
for node_name in node.names:
node_value = Assignment(node_name, node)
# Remove UndefinedName messages already reported for this name.
# TODO: if the global is not used in this scope, it does not
# become a globally defined name. See test_unused_global.
self.messages = [
m for m in self.messages if not
isinstance(m, messages.UndefinedName) or
m.message_args[0] != node_name]
# Bind name to global scope if it doesn't exist already.
global_scope.setdefault(node_name, node_value)
# Bind name to non-global scopes, but as already "used".
node_value.used = (global_scope, node)
for scope in self.scopeStack[global_scope_index + 1:]:
scope[node_name] = node_value
NONLOCAL = GLOBAL
def GENERATOREXP(self, node):
self.pushScope(GeneratorScope)
self.handleChildren(node)
self.popScope()
LISTCOMP = handleChildren if PY2 else GENERATOREXP
DICTCOMP = SETCOMP = GENERATOREXP
def NAME(self, node):
"""
Handle occurrence of Name (which can be a load/store/delete access.)
"""
# Locate the name in locals / function / globals scopes.
if isinstance(node.ctx, (ast.Load, ast.AugLoad)):
self.handleNodeLoad(node)
if (node.id == 'locals' and isinstance(self.scope, FunctionScope)
and isinstance(node.parent, ast.Call)):
# we are doing locals() call in current scope
self.scope.usesLocals = True
elif isinstance(node.ctx, (ast.Store, ast.AugStore)):
self.handleNodeStore(node)
elif isinstance(node.ctx, ast.Del):
self.handleNodeDelete(node)
else:
# must be a Param context -- this only happens for names in function
# arguments, but these aren't dispatched through here
raise RuntimeError("Got impossible expression context: %r" % (node.ctx,))
def CONTINUE(self, node):
# Walk the tree up until we see a loop (OK), a function or class
# definition (not OK), for 'continue', a finally block (not OK), or
# the top module scope (not OK)
n = node
while hasattr(n, 'parent'):
n, n_child = n.parent, n
if isinstance(n, LOOP_TYPES):
# Doesn't apply unless it's in the loop itself
if n_child not in n.orelse:
return
if isinstance(n, (ast.FunctionDef, ast.ClassDef)):
break
# Handle Try/TryFinally difference in Python < and >= 3.3
if hasattr(n, 'finalbody') and isinstance(node, ast.Continue):
if n_child in n.finalbody:
self.report(messages.ContinueInFinally, node)
return
if isinstance(node, ast.Continue):
self.report(messages.ContinueOutsideLoop, node)
else: # ast.Break
self.report(messages.BreakOutsideLoop, node)
BREAK = CONTINUE
def RETURN(self, node):
if isinstance(self.scope, (ClassScope, ModuleScope)):
self.report(messages.ReturnOutsideFunction, node)
return
if (
node.value and
hasattr(self.scope, 'returnValue') and
not self.scope.returnValue
):
self.scope.returnValue = node.value
self.handleNode(node.value, node)
def YIELD(self, node):
if isinstance(self.scope, (ClassScope, ModuleScope)):
self.report(messages.YieldOutsideFunction, node)
return
self.scope.isGenerator = True
self.handleNode(node.value, node)
AWAIT = YIELDFROM = YIELD
def FUNCTIONDEF(self, node):
for deco in node.decorator_list:
self.handleNode(deco, node)
self.LAMBDA(node)
self.addBinding(node, FunctionDefinition(node.name, node))
# doctest does not process doctest within a doctest,
# or in nested functions.
if (self.withDoctest and
not self._in_doctest() and
not isinstance(self.scope, FunctionScope)):
self.deferFunction(lambda: self.handleDoctests(node))
ASYNCFUNCTIONDEF = FUNCTIONDEF
def LAMBDA(self, node):
args = []
annotations = []
if PY2:
def addArgs(arglist):
for arg in arglist:
if isinstance(arg, ast.Tuple):
addArgs(arg.elts)
else:
args.append(arg.id)
addArgs(node.args.args)
defaults = node.args.defaults
else:
for arg in node.args.args + node.args.kwonlyargs:
args.append(arg.arg)
annotations.append(arg.annotation)
defaults = node.args.defaults + node.args.kw_defaults
# Only for Python3 FunctionDefs
is_py3_func = hasattr(node, 'returns')
for arg_name in ('vararg', 'kwarg'):
wildcard = getattr(node.args, arg_name)
if not wildcard:
continue
args.append(wildcard if PY33 else wildcard.arg)
if is_py3_func:
if PY33: # Python 2.5 to 3.3
argannotation = arg_name + 'annotation'
annotations.append(getattr(node.args, argannotation))
else: # Python >= 3.4
annotations.append(wildcard.annotation)
if is_py3_func:
annotations.append(node.returns)
if len(set(args)) < len(args):
for (idx, arg) in enumerate(args):
if arg in args[:idx]:
self.report(messages.DuplicateArgument, node, arg)
for child in annotations + defaults:
if child:
self.handleNode(child, node)
def runFunction():
self.pushScope()
for name in args:
self.addBinding(node, Argument(name, node))
if isinstance(node.body, list):
# case for FunctionDefs
for stmt in node.body:
self.handleNode(stmt, node)
else:
# case for Lambdas
self.handleNode(node.body, node)
def checkUnusedAssignments():
"""
Check to see if any assignments have not been used.
"""
for name, binding in self.scope.unusedAssignments():
self.report(messages.UnusedVariable, binding.source, name)
self.deferAssignment(checkUnusedAssignments)
if PY32:
def checkReturnWithArgumentInsideGenerator():
"""
Check to see if there is any return statement with
arguments but the function is a generator.
"""
if self.scope.isGenerator and self.scope.returnValue:
self.report(messages.ReturnWithArgsInsideGenerator,
self.scope.returnValue)
self.deferAssignment(checkReturnWithArgumentInsideGenerator)
self.popScope()
self.deferFunction(runFunction)
def CLASSDEF(self, node):
"""
Check names used in a class definition, including its decorators, base
classes, and the body of its definition. Additionally, add its name to
the current scope.
"""
for deco in node.decorator_list:
self.handleNode(deco, node)
for baseNode in node.bases:
self.handleNode(baseNode, node)
if not PY2:
for keywordNode in node.keywords:
self.handleNode(keywordNode, node)
self.pushScope(ClassScope)
# doctest does not process doctest within a doctest
# classes within classes are processed.
if (self.withDoctest and
not self._in_doctest() and
not isinstance(self.scope, FunctionScope)):
self.deferFunction(lambda: self.handleDoctests(node))
for stmt in node.body:
self.handleNode(stmt, node)
self.popScope()
self.addBinding(node, ClassDefinition(node.name, node))
def AUGASSIGN(self, node):
self.handleNodeLoad(node.target)
self.handleNode(node.value, node)
self.handleNode(node.target, node)
def TUPLE(self, node):
if not PY2 and isinstance(node.ctx, ast.Store):
# Python 3 advanced tuple unpacking: a, *b, c = d.
# Only one starred expression is allowed, and no more than 1<<8
# assignments are allowed before a stared expression. There is
# also a limit of 1<<24 expressions after the starred expression,
# which is impossible to test due to memory restrictions, but we
# add it here anyway
has_starred = False
star_loc = -1
for i, n in enumerate(node.elts):
if isinstance(n, ast.Starred):
if has_starred:
self.report(messages.TwoStarredExpressions, node)
# The SyntaxError doesn't distinguish two from more
# than two.
break
has_starred = True
star_loc = i
if star_loc >= 1 << 8 or len(node.elts) - star_loc - 1 >= 1 << 24:
self.report(messages.TooManyExpressionsInStarredAssignment, node)
self.handleChildren(node)
LIST = TUPLE
def IMPORT(self, node):
for alias in node.names:
if '.' in alias.name and not alias.asname:
importation = SubmoduleImportation(alias.name, node)
else:
name = alias.asname or alias.name
importation = Importation(name, node, alias.name)
self.addBinding(node, importation)
def IMPORTFROM(self, node):
if node.module == '__future__':
if not self.futuresAllowed:
self.report(messages.LateFutureImport,
node, [n.name for n in node.names])
else:
self.futuresAllowed = False
module = ('.' * node.level) + (node.module or '')
for alias in node.names:
name = alias.asname or alias.name
if node.module == '__future__':
importation = FutureImportation(name, node, self.scope)
if alias.name not in __future__.all_feature_names:
self.report(messages.FutureFeatureNotDefined,
node, alias.name)
elif alias.name == '*':
# Only Python 2, local import * is a SyntaxWarning
if not PY2 and not isinstance(self.scope, ModuleScope):
self.report(messages.ImportStarNotPermitted,
node, module)
continue
self.scope.importStarred = True
self.report(messages.ImportStarUsed, node, module)
importation = StarImportation(module, node)
else:
importation = ImportationFrom(name, node,
module, alias.name)
self.addBinding(node, importation)
def TRY(self, node):
handler_names = []
# List the exception handlers
for i, handler in enumerate(node.handlers):
if isinstance(handler.type, ast.Tuple):
for exc_type in handler.type.elts:
handler_names.append(getNodeName(exc_type))
elif handler.type:
handler_names.append(getNodeName(handler.type))
if handler.type is None and i < len(node.handlers) - 1:
self.report(messages.DefaultExceptNotLast, handler)
# Memorize the except handlers and process the body
self.exceptHandlers.append(handler_names)
for child in node.body:
self.handleNode(child, node)
self.exceptHandlers.pop()
# Process the other nodes: "except:", "else:", "finally:"
self.handleChildren(node, omit='body')
TRYEXCEPT = TRY
def EXCEPTHANDLER(self, node):
if PY2 or node.name is None:
self.handleChildren(node)
return
# 3.x: the name of the exception, which is not a Name node, but
# a simple string, creates a local that is only bound within the scope
# of the except: block.
for scope in self.scopeStack[::-1]:
if node.name in scope:
is_name_previously_defined = True
break
else:
is_name_previously_defined = False
self.handleNodeStore(node)
self.handleChildren(node)
if not is_name_previously_defined:
# See discussion on https://github.com/PyCQA/pyflakes/pull/59
# We're removing the local name since it's being unbound
# after leaving the except: block and it's always unbound
# if the except: block is never entered. This will cause an
# "undefined name" error raised if the checked code tries to
# use the name afterwards.
#
# Unless it's been removed already. Then do nothing.
try:
del self.scope[node.name]
except KeyError:
pass
def ANNASSIGN(self, node):
if node.value:
# Only bind the *targets* if the assignment has a value.
# Otherwise it's not really ast.Store and shouldn't silence
# UndefinedLocal warnings.
self.handleNode(node.target, node)
self.handleNode(node.annotation, node)
if node.value:
# If the assignment has value, handle the *value* now.
self.handleNode(node.value, node)
| [
"sys.exc_info",
"doctest.DocTestParser",
"os.environ.get",
"os.path.basename"
] | [((14727, 14762), 'os.environ.get', 'os.environ.get', (['"""PYFLAKES_BUILTINS"""'], {}), "('PYFLAKES_BUILTINS')\n", (14741, 14762), False, 'import os\n'), ((30327, 30350), 'doctest.DocTestParser', 'doctest.DocTestParser', ([], {}), '()\n', (30348, 30350), False, 'import doctest\n'), ((25408, 25439), 'os.path.basename', 'os.path.basename', (['self.filename'], {}), '(self.filename)\n', (25424, 25439), False, 'import os\n'), ((18645, 18676), 'os.path.basename', 'os.path.basename', (['self.filename'], {}), '(self.filename)\n', (18661, 18676), False, 'import os\n'), ((31945, 31959), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (31957, 31959), False, 'import sys\n')] |
# Project Name: Auto Screenshot
# Description: Take a screenshot of the screen whenever any change takes place.
# Author: Mani (Infinyte7)
# Date: 26-10-2020
# License: MIT
from pyscreenshot import grab
from PIL import ImageChops
import os
import time
import subprocess, sys
from datetime import datetime
import tkinter as tk
from tkinter import *
from tkinter import font
class AutoScreenshot:
def __init__(self, master):
self.root = root
root.title('Auto Screenshot')
root.config(bg="white")
fontRoboto = font.Font(family='Roboto', size=16, weight='bold')
# project name label
projectTitleLabel = Label(root, text="Auto Screenshot v1.0.0")
projectTitleLabel.config(font=fontRoboto, bg="white", fg="#5599ff")
projectTitleLabel.pack(padx="10")
# start button
btn_start = Button(root, text="Start", command=self.start)
btn_start.config(highlightthickness=0, bd=0, fg="white", bg="#5fd38d",
activebackground="#5fd38d", activeforeground="white", font=fontRoboto)
btn_start.pack(padx="10", fill=BOTH)
# close button
        btn_close = Button(root, text="Close", command=self.close)
        btn_close.config(highlightthickness=0, bd=0, fg="white", bg="#f44336",
                activebackground="#ff7043", activeforeground="white", font=fontRoboto)
        btn_close.pack(padx="10", pady="10", fill=BOTH)
def start(self):
# Create folder to store images
directory = "Screenshots"
self.new_folder = directory + "/" + datetime.now().strftime("%Y_%m_%d-%I_%M_%p")
# all images to one folder
if not os.path.exists(directory):
os.makedirs(directory)
# new folder for storing images for current session
if not os.path.exists(self.new_folder):
os.makedirs(self.new_folder)
        # Run GetScreenCoordinates.py and read back the selected screen coordinates
cords_point = subprocess.check_output([sys.executable, "GetScreenCoordinates.py", "-l"])
cord_tuple = tuple(cords_point.decode("utf-8").rstrip().split(","))
        # coordinates used for taking and comparing screenshots
self.cords = (int(cord_tuple[0]), int(cord_tuple[1]), int(cord_tuple[2]), int(cord_tuple[3]))
# save first image
img1 = grab(bbox=self.cords)
now = datetime.now().strftime("%Y_%m_%d-%I_%M_%S_%p")
fname = self.new_folder + "/ScreenShots" + now + ".png"
img1.save(fname)
print("First Screenshot taken")
        # start taking screenshots of subsequent changes
self.take_screenshots()
def take_screenshots(self):
# grab first and second image
img1 = grab(bbox=self.cords)
time.sleep(1)
img2 = grab(bbox=self.cords)
# check difference between images
diff = ImageChops.difference(img1, img2)
bbox = diff.getbbox()
if bbox is not None:
now = datetime.now().strftime("%Y_%m_%d-%I_%M_%S_%p")
fname = self.new_folder + "/ScreenShots" + now + ".png"
img2.save(fname)
print("Screenshot taken")
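        # re-arm the polling loop: Tk calls take_screenshots again after ~5 ms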
root.after(5, self.take_screenshots)
def close(self):
quit()
if __name__ == "__main__":
root = Tk()
gui = AutoScreenshot(root)
root.mainloop()
| [
"subprocess.check_output",
"PIL.ImageChops.difference",
"os.path.exists",
"os.makedirs",
"time.sleep",
"tkinter.font.Font",
"pyscreenshot.grab",
"datetime.datetime.now"
] | [((547, 597), 'tkinter.font.Font', 'font.Font', ([], {'family': '"""Roboto"""', 'size': '(16)', 'weight': '"""bold"""'}), "(family='Roboto', size=16, weight='bold')\n", (556, 597), False, 'from tkinter import font\n'), ((1980, 2054), 'subprocess.check_output', 'subprocess.check_output', (["[sys.executable, 'GetScreenCoordinates.py', '-l']"], {}), "([sys.executable, 'GetScreenCoordinates.py', '-l'])\n", (2003, 2054), False, 'import subprocess, sys\n'), ((2326, 2347), 'pyscreenshot.grab', 'grab', ([], {'bbox': 'self.cords'}), '(bbox=self.cords)\n', (2330, 2347), False, 'from pyscreenshot import grab\n'), ((2714, 2735), 'pyscreenshot.grab', 'grab', ([], {'bbox': 'self.cords'}), '(bbox=self.cords)\n', (2718, 2735), False, 'from pyscreenshot import grab\n'), ((2744, 2757), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (2754, 2757), False, 'import time\n'), ((2773, 2794), 'pyscreenshot.grab', 'grab', ([], {'bbox': 'self.cords'}), '(bbox=self.cords)\n', (2777, 2794), False, 'from pyscreenshot import grab\n'), ((2853, 2886), 'PIL.ImageChops.difference', 'ImageChops.difference', (['img1', 'img2'], {}), '(img1, img2)\n', (2874, 2886), False, 'from PIL import ImageChops\n'), ((1697, 1722), 'os.path.exists', 'os.path.exists', (['directory'], {}), '(directory)\n', (1711, 1722), False, 'import os\n'), ((1736, 1758), 'os.makedirs', 'os.makedirs', (['directory'], {}), '(directory)\n', (1747, 1758), False, 'import os\n'), ((1835, 1866), 'os.path.exists', 'os.path.exists', (['self.new_folder'], {}), '(self.new_folder)\n', (1849, 1866), False, 'import os\n'), ((1880, 1908), 'os.makedirs', 'os.makedirs', (['self.new_folder'], {}), '(self.new_folder)\n', (1891, 1908), False, 'import os\n'), ((2362, 2376), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2374, 2376), False, 'from datetime import datetime\n'), ((1601, 1615), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1613, 1615), False, 'from datetime import datetime\n'), ((2973, 2987), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2985, 2987), False, 'from datetime import datetime\n')] |
# -*- coding: utf-8 -*-
'''
Oracle Database connection module
:maintainer: <NAME> <<EMAIL>>
:maturity: new
:depends: cx_Oracle
:platform: all
:configuration: module provides connections for multiple Oracle DB instances.
**OS Environment**
.. code-block:: text
ORACLE_HOME: path to oracle product
        PATH: Oracle Client libs need to be on the PATH
**pillar**
.. code-block:: text
        oracle.dbs: list of known databases
oracle.dbs.<db>.uri: connection credentials in format:
user/password@host[:port]/sid[ as {sysdba|sysoper}]
'''
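# Example pillar data matching the format above (hypothetical names and
# credentials, shown only for illustration):
#
#     oracle:
#       dbs:
#         my_db:
#           uri: 'scott/tiger@oradb1:1521/XE as sysdba'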
import os
import logging
from salt.utils.decorators import depends
log = logging.getLogger(__name__)
try:
import cx_Oracle
MODE = {
'sysdba': cx_Oracle.SYSDBA,
'sysoper': cx_Oracle.SYSOPER
}
HAS_CX_ORACLE = True
except ImportError:
MODE = {'sysdba': 2, 'sysoper': 4}
HAS_CX_ORACLE = False
__virtualname__ = 'oracle'
def __virtual__():
'''
Load module only if cx_Oracle installed
'''
return __virtualname__ if HAS_CX_ORACLE else False
def _cx_oracle_req():
'''
Fallback function stub
'''
    return 'Need "cx_Oracle" and Oracle Client installed for this function to exist'
def _unicode_output(cursor, name, default_type, size, precision, scale):
'''
    Return string values as Python unicode strings
http://www.oracle.com/technetwork/articles/dsl/tuininga-cx-oracle-084866.html
'''
if default_type in (cx_Oracle.STRING, cx_Oracle.LONG_STRING,
cx_Oracle.FIXED_CHAR, cx_Oracle.CLOB):
return cursor.var(unicode, size, cursor.arraysize)
def _connect(uri):
'''
uri = user/password@host[:port]/sid[ as {sysdba|sysoper}]
Return cx_Oracle.Connection instance
'''
    # cx_Oracle.Connection() does not support the 'as sysdba' syntax
uri_l = uri.rsplit(' as ', 1)
if len(uri_l) == 2:
credentials, mode = uri_l
mode = MODE[mode]
else:
credentials = uri_l[0]
mode = 0
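    # break user/password@host[:port]/sid into its individual parts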
userpass, hostportsid = credentials.split('@')
user, password = userpass.split('/')
hostport, sid = hostportsid.split('/')
hostport_l = hostport.split(':')
if len(hostport_l) == 2:
host, port = hostport_l
else:
host = hostport_l[0]
port = 1521
log.debug('connect: {0}'.format((user, password, host, port, sid, mode)))
# force UTF-8 client encoding
os.environ['NLS_LANG'] = '.AL32UTF8'
conn = cx_Oracle.connect(user, password,
cx_Oracle.makedsn(host, port, sid),
mode)
conn.outputtypehandler = _unicode_output
return conn
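# When cx_Oracle is not importable, @depends substitutes the fallback stub
# for the decorated function instead of executing it.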
@depends('cx_Oracle', fallback_function=_cx_oracle_req)
def run_query(db, query):
'''
Run SQL query and return result
CLI example:
.. code-block:: bash
salt '*' oracle.run_query my_db "select * from my_table"
'''
log.debug('run query on {0}: {1}'.format(db, query))
conn = _connect(show_dbs(db)[db]['uri'])
return conn.cursor().execute(query).fetchall()
def show_dbs(*dbs):
'''
Show databases configuration from pillar. Filter by args
.. code-block:: bash
salt '*' oracle.show_dbs
salt '*' oracle.show_dbs my_db
'''
if dbs:
log.debug('get dbs from pillar: {0}'.format(dbs))
result = {}
for db in dbs:
result[db] = __salt__['pillar.get']('oracle:dbs:' + db)
return result
else:
pillar_dbs = __salt__['pillar.get']('oracle:dbs')
log.debug('get all ({0}) dbs from pillar'.format(len(pillar_dbs)))
return pillar_dbs
@depends('cx_Oracle', fallback_function=_cx_oracle_req)
def version(*dbs):
'''
Server Version (select banner from v$version)
CLI Example:
.. code-block:: bash
salt '*' oracle.version
salt '*' oracle.version my_db
'''
pillar_dbs = __salt__['pillar.get']('oracle:dbs')
get_version = lambda x: [
r[0] for r in run_query(x, "select banner from v$version order by banner")
]
result = {}
if dbs:
log.debug('get db versions for: {0}'.format(dbs))
for db in dbs:
if db in pillar_dbs:
result[db] = get_version(db)
else:
        log.debug('get all({0}) dbs versions'.format(len(pillar_dbs)))
        for db in pillar_dbs:
            result[db] = get_version(db)
return result
@depends('cx_Oracle', fallback_function=_cx_oracle_req)
def client_version():
'''
Oracle Client Version
CLI Example:
.. code-block:: bash
salt '*' oracle.client_version
'''
return '.'.join((str(x) for x in cx_Oracle.clientversion()))
def show_pillar(item=None):
'''
Show Pillar segment oracle.* and subitem with notation "item:subitem"
CLI Example:
.. code-block:: bash
salt '*' oracle.show_pillar
salt '*' oracle.show_pillar dbs:my_db
'''
if item:
return __salt__['pillar.get']('oracle:' + item)
else:
return __salt__['pillar.get']('oracle')
def show_env():
'''
Show Environment used by Oracle Client
CLI Example:
.. code-block:: bash
salt '*' oracle.show_env
.. note::
        on the first _connect() call ``NLS_LANG`` will be forced to '.AL32UTF8'
'''
envs = ['PATH', 'ORACLE_HOME', 'TNS_ADMIN', 'NLS_LANG']
result = {}
for env in envs:
if env in os.environ:
result[env] = os.environ[env]
return result
| [
"logging.getLogger",
"cx_Oracle.makedsn",
"salt.utils.decorators.depends",
"cx_Oracle.clientversion"
] | [((668, 695), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (685, 695), False, 'import logging\n'), ((2680, 2734), 'salt.utils.decorators.depends', 'depends', (['"""cx_Oracle"""'], {'fallback_function': '_cx_oracle_req'}), "('cx_Oracle', fallback_function=_cx_oracle_req)\n", (2687, 2734), False, 'from salt.utils.decorators import depends\n'), ((3649, 3703), 'salt.utils.decorators.depends', 'depends', (['"""cx_Oracle"""'], {'fallback_function': '_cx_oracle_req'}), "('cx_Oracle', fallback_function=_cx_oracle_req)\n", (3656, 3703), False, 'from salt.utils.decorators import depends\n'), ((4428, 4482), 'salt.utils.decorators.depends', 'depends', (['"""cx_Oracle"""'], {'fallback_function': '_cx_oracle_req'}), "('cx_Oracle', fallback_function=_cx_oracle_req)\n", (4435, 4482), False, 'from salt.utils.decorators import depends\n'), ((2545, 2579), 'cx_Oracle.makedsn', 'cx_Oracle.makedsn', (['host', 'port', 'sid'], {}), '(host, port, sid)\n', (2562, 2579), False, 'import cx_Oracle\n'), ((4668, 4693), 'cx_Oracle.clientversion', 'cx_Oracle.clientversion', ([], {}), '()\n', (4691, 4693), False, 'import cx_Oracle\n')] |
from lib.utils.plugin import import_plugin
from .base_executor import parse_exception
from .executors.hive import HiveQueryExecutor
from .executors.presto import PrestoQueryExecutor
from .executors.sqlalchemy import (
MysqlQueryExecutor,
DruidQueryExecutor,
SqliteQueryExecutor,
SnowflakeQueryExecutor,
)
from .executors.bigquery import BigQueryQueryExecutor
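# Executors contributed by any installed "executor_plugin" package; the final
# argument presumably acts as the default ([]) when no plugin provides them.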
ALL_PLUGIN_EXECUTORS = import_plugin("executor_plugin", "ALL_PLUGIN_EXECUTORS", [])
ALL_EXECUTORS = [
HiveQueryExecutor,
PrestoQueryExecutor,
MysqlQueryExecutor,
DruidQueryExecutor,
SqliteQueryExecutor,
BigQueryQueryExecutor,
SnowflakeQueryExecutor,
] + ALL_PLUGIN_EXECUTORS
def get_executor_class(language: str, name: str):
for executor in ALL_EXECUTORS:
if (
executor.EXECUTOR_LANGUAGE() == language
and executor.EXECUTOR_NAME() == name
):
return executor
raise ValueError(f"Unknown executor {name} with language {language}")
# Re-export parse_exception
parse_exception
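# Hypothetical usage sketch (the exact strings depend on what each executor's
# EXECUTOR_LANGUAGE()/EXECUTOR_NAME() return), e.g.:
#     executor_cls = get_executor_class("presto", "presto")
#     assert executor_cls is PrestoQueryExecutor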
| [
"lib.utils.plugin.import_plugin"
] | [((401, 461), 'lib.utils.plugin.import_plugin', 'import_plugin', (['"""executor_plugin"""', '"""ALL_PLUGIN_EXECUTORS"""', '[]'], {}), "('executor_plugin', 'ALL_PLUGIN_EXECUTORS', [])\n", (414, 461), False, 'from lib.utils.plugin import import_plugin\n')] |
# -*- encoding: utf-8
from sqlalchemy.testing import eq_, is_
from sqlalchemy import schema
from sqlalchemy.sql import table, column, quoted_name
from sqlalchemy.dialects import mssql
from sqlalchemy.dialects.mssql import mxodbc
from sqlalchemy.testing import fixtures, AssertsCompiledSQL
from sqlalchemy import sql
from sqlalchemy import Integer, String, Table, Column, select, MetaData,\
update, delete, insert, extract, union, func, PrimaryKeyConstraint, \
UniqueConstraint, Index, Sequence, literal
from sqlalchemy import testing
from sqlalchemy.dialects.mssql import base
class CompileTest(fixtures.TestBase, AssertsCompiledSQL):
__dialect__ = mssql.dialect()
def test_true_false(self):
self.assert_compile(
sql.false(), "0"
)
self.assert_compile(
sql.true(),
"1"
)
def test_select(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(t.select(),
'SELECT sometable.somecolumn FROM sometable')
def test_select_with_nolock(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(
t.select().with_hint(t, 'WITH (NOLOCK)'),
'SELECT sometable.somecolumn FROM sometable WITH (NOLOCK)')
def test_select_with_nolock_schema(self):
m = MetaData()
t = Table('sometable', m, Column('somecolumn', Integer),
schema='test_schema')
self.assert_compile(
t.select().with_hint(t, 'WITH (NOLOCK)'),
'SELECT test_schema.sometable.somecolumn '
'FROM test_schema.sometable WITH (NOLOCK)')
def test_select_w_order_by_collate(self):
m = MetaData()
t = Table('sometable', m, Column('somecolumn', String))
self.assert_compile(
select([t]).
order_by(
t.c.somecolumn.collate("Latin1_General_CS_AS_KS_WS_CI").asc()),
"SELECT sometable.somecolumn FROM sometable "
"ORDER BY sometable.somecolumn COLLATE "
"Latin1_General_CS_AS_KS_WS_CI ASC"
)
def test_join_with_hint(self):
t1 = table('t1',
column('a', Integer),
column('b', String),
column('c', String),
)
t2 = table('t2',
column("a", Integer),
column("b", Integer),
column("c", Integer),
)
join = t1.join(t2, t1.c.a == t2.c.a).\
select().with_hint(t1, 'WITH (NOLOCK)')
self.assert_compile(
join,
'SELECT t1.a, t1.b, t1.c, t2.a, t2.b, t2.c '
'FROM t1 WITH (NOLOCK) JOIN t2 ON t1.a = t2.a'
)
def test_insert(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(t.insert(),
'INSERT INTO sometable (somecolumn) VALUES '
'(:somecolumn)')
def test_update(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(t.update(t.c.somecolumn == 7),
'UPDATE sometable SET somecolumn=:somecolum'
'n WHERE sometable.somecolumn = '
':somecolumn_1', dict(somecolumn=10))
def test_insert_hint(self):
t = table('sometable', column('somecolumn'))
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
t.insert().
values(somecolumn="x").
with_hint("WITH (PAGLOCK)",
selectable=targ,
dialect_name=darg),
"INSERT INTO sometable WITH (PAGLOCK) "
"(somecolumn) VALUES (:somecolumn)"
)
def test_update_hint(self):
t = table('sometable', column('somecolumn'))
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
t.update().where(t.c.somecolumn == "q").
values(somecolumn="x").
with_hint("WITH (PAGLOCK)",
selectable=targ,
dialect_name=darg),
"UPDATE sometable WITH (PAGLOCK) "
"SET somecolumn=:somecolumn "
"WHERE sometable.somecolumn = :somecolumn_1"
)
def test_update_exclude_hint(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(
t.update().where(t.c.somecolumn == "q").
values(somecolumn="x").
with_hint("XYZ", "mysql"),
"UPDATE sometable SET somecolumn=:somecolumn "
"WHERE sometable.somecolumn = :somecolumn_1"
)
def test_delete_hint(self):
t = table('sometable', column('somecolumn'))
for targ in (None, t):
for darg in ("*", "mssql"):
self.assert_compile(
t.delete().where(t.c.somecolumn == "q").
with_hint("WITH (PAGLOCK)",
selectable=targ,
dialect_name=darg),
"DELETE FROM sometable WITH (PAGLOCK) "
"WHERE sometable.somecolumn = :somecolumn_1"
)
def test_delete_exclude_hint(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(
t.delete().
where(t.c.somecolumn == "q").
with_hint("XYZ", dialect_name="mysql"),
"DELETE FROM sometable WHERE "
"sometable.somecolumn = :somecolumn_1"
)
def test_delete_extra_froms(self):
t1 = table('t1', column('c1'))
t2 = table('t2', column('c1'))
q = sql.delete(t1).where(t1.c.c1 == t2.c.c1)
self.assert_compile(
q, "DELETE FROM t1 FROM t1, t2 WHERE t1.c1 = t2.c1"
)
def test_delete_extra_froms_alias(self):
a1 = table('t1', column('c1')).alias('a1')
t2 = table('t2', column('c1'))
q = sql.delete(a1).where(a1.c.c1 == t2.c.c1)
self.assert_compile(
q, "DELETE FROM a1 FROM t1 AS a1, t2 WHERE a1.c1 = t2.c1"
)
self.assert_compile(sql.delete(a1), "DELETE FROM t1 AS a1")
def test_update_from_hint(self):
t = table('sometable', column('somecolumn'))
t2 = table('othertable', column('somecolumn'))
for darg in ("*", "mssql"):
self.assert_compile(
t.update().where(t.c.somecolumn == t2.c.somecolumn).
values(somecolumn="x").
with_hint("WITH (PAGLOCK)",
selectable=t2,
dialect_name=darg),
"UPDATE sometable SET somecolumn=:somecolumn "
"FROM sometable, othertable WITH (PAGLOCK) "
"WHERE sometable.somecolumn = othertable.somecolumn"
)
def test_update_to_select_schema(self):
meta = MetaData()
table = Table(
"sometable", meta,
Column("sym", String),
Column("val", Integer),
schema="schema"
)
other = Table(
"#other", meta,
Column("sym", String),
Column("newval", Integer)
)
stmt = table.update().values(
val=select([other.c.newval]).
where(table.c.sym == other.c.sym).as_scalar())
self.assert_compile(
stmt,
"UPDATE [schema].sometable SET val="
"(SELECT [#other].newval FROM [#other] "
"WHERE [schema].sometable.sym = [#other].sym)",
)
stmt = table.update().values(val=other.c.newval).\
where(table.c.sym == other.c.sym)
self.assert_compile(
stmt,
"UPDATE [schema].sometable SET val="
"[#other].newval FROM [schema].sometable, "
"[#other] WHERE [schema].sometable.sym = [#other].sym",
)
# TODO: not supported yet.
# def test_delete_from_hint(self):
# t = table('sometable', column('somecolumn'))
# t2 = table('othertable', column('somecolumn'))
# for darg in ("*", "mssql"):
# self.assert_compile(
# t.delete().where(t.c.somecolumn==t2.c.somecolumn).
# with_hint("WITH (PAGLOCK)",
# selectable=t2,
# dialect_name=darg),
# ""
# )
def test_strict_binds(self):
"""test the 'strict' compiler binds."""
from sqlalchemy.dialects.mssql.base import MSSQLStrictCompiler
mxodbc_dialect = mxodbc.dialect()
mxodbc_dialect.statement_compiler = MSSQLStrictCompiler
t = table('sometable', column('foo'))
for expr, compile in [
(
select([literal("x"), literal("y")]),
"SELECT 'x' AS anon_1, 'y' AS anon_2",
),
(
select([t]).where(t.c.foo.in_(['x', 'y', 'z'])),
"SELECT sometable.foo FROM sometable WHERE sometable.foo "
"IN ('x', 'y', 'z')",
),
(
t.c.foo.in_([None]),
"sometable.foo IN (NULL)"
)
]:
self.assert_compile(expr, compile, dialect=mxodbc_dialect)
def test_in_with_subqueries(self):
"""Test removal of legacy behavior that converted "x==subquery"
to use IN.
"""
t = table('sometable', column('somecolumn'))
self.assert_compile(t.select().where(t.c.somecolumn
== t.select()),
'SELECT sometable.somecolumn FROM '
'sometable WHERE sometable.somecolumn = '
'(SELECT sometable.somecolumn FROM '
'sometable)')
self.assert_compile(t.select().where(t.c.somecolumn
!= t.select()),
'SELECT sometable.somecolumn FROM '
'sometable WHERE sometable.somecolumn != '
'(SELECT sometable.somecolumn FROM '
'sometable)')
@testing.uses_deprecated
def test_count(self):
t = table('sometable', column('somecolumn'))
self.assert_compile(t.count(),
'SELECT count(sometable.somecolumn) AS '
'tbl_row_count FROM sometable')
def test_noorderby_insubquery(self):
"""test that the ms-sql dialect removes ORDER BY clauses from
subqueries"""
table1 = table('mytable',
column('myid', Integer),
column('name', String),
column('description', String),
)
q = select([table1.c.myid],
order_by=[table1.c.myid]).alias('foo')
crit = q.c.myid == table1.c.myid
self.assert_compile(select(['*'], crit),
"SELECT * FROM (SELECT mytable.myid AS "
"myid FROM mytable) AS foo, mytable WHERE "
"foo.myid = mytable.myid")
def test_force_schema_quoted_name_w_dot_case_insensitive(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer, primary_key=True),
schema=quoted_name("foo.dbo", True)
)
self.assert_compile(
select([tbl]),
"SELECT [foo.dbo].test.id FROM [foo.dbo].test"
)
def test_force_schema_quoted_w_dot_case_insensitive(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer, primary_key=True),
schema=quoted_name("foo.dbo", True)
)
self.assert_compile(
select([tbl]),
"SELECT [foo.dbo].test.id FROM [foo.dbo].test"
)
def test_force_schema_quoted_name_w_dot_case_sensitive(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer, primary_key=True),
schema=quoted_name("Foo.dbo", True)
)
self.assert_compile(
select([tbl]),
"SELECT [Foo.dbo].test.id FROM [Foo.dbo].test"
)
def test_force_schema_quoted_w_dot_case_sensitive(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer, primary_key=True),
schema="[Foo.dbo]"
)
self.assert_compile(
select([tbl]),
"SELECT [Foo.dbo].test.id FROM [Foo.dbo].test"
)
def test_schema_autosplit_w_dot_case_insensitive(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer, primary_key=True),
schema="foo.dbo"
)
self.assert_compile(
select([tbl]),
"SELECT foo.dbo.test.id FROM foo.dbo.test"
)
def test_schema_autosplit_w_dot_case_sensitive(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer, primary_key=True),
schema="Foo.dbo"
)
self.assert_compile(
select([tbl]),
"SELECT [Foo].dbo.test.id FROM [Foo].dbo.test"
)
def test_owner_database_pairs(self):
dialect = mssql.dialect()
for identifier, expected_schema, expected_owner in [
("foo", None, "foo"),
("foo.bar", "foo", "bar"),
("Foo.Bar", "Foo", "Bar"),
("[Foo.Bar]", None, "Foo.Bar"),
("[Foo.Bar].[bat]", "Foo.Bar", "bat"),
]:
schema, owner = base._owner_plus_db(dialect, identifier)
eq_(owner, expected_owner)
eq_(schema, expected_schema)
def test_delete_schema(self):
metadata = MetaData()
tbl = Table('test', metadata, Column('id', Integer,
primary_key=True), schema='paj')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM paj.test WHERE paj.test.id = '
':id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
'DELETE FROM paj.test WHERE paj.test.id IN '
'(SELECT paj.test.id FROM paj.test '
'WHERE paj.test.id = :id_1)')
def test_delete_schema_multipart(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer,
primary_key=True),
schema='banana.paj')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM banana.paj.test WHERE '
'banana.paj.test.id = :id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
'DELETE FROM banana.paj.test WHERE '
'banana.paj.test.id IN (SELECT banana.paj.test.id '
'FROM banana.paj.test WHERE '
'banana.paj.test.id = :id_1)')
def test_delete_schema_multipart_needs_quoting(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('id', Integer, primary_key=True),
schema='banana split.paj')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM [banana split].paj.test WHERE '
'[banana split].paj.test.id = :id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(tbl.delete().where(tbl.c.id.in_(s)),
'DELETE FROM [banana split].paj.test WHERE '
'[banana split].paj.test.id IN ('
'SELECT [banana split].paj.test.id FROM '
'[banana split].paj.test WHERE '
'[banana split].paj.test.id = :id_1)')
def test_delete_schema_multipart_both_need_quoting(self):
metadata = MetaData()
tbl = Table('test', metadata, Column('id', Integer,
primary_key=True),
schema='banana split.paj with a space')
self.assert_compile(tbl.delete(tbl.c.id == 1),
'DELETE FROM [banana split].[paj with a '
'space].test WHERE [banana split].[paj '
'with a space].test.id = :id_1')
s = select([tbl.c.id]).where(tbl.c.id == 1)
self.assert_compile(
tbl.delete().where(tbl.c.id.in_(s)),
"DELETE FROM [banana split].[paj with a space].test "
"WHERE [banana split].[paj with a space].test.id IN "
"(SELECT [banana split].[paj with a space].test.id "
"FROM [banana split].[paj with a space].test "
"WHERE [banana split].[paj with a space].test.id = :id_1)"
)
def test_union(self):
t1 = table(
't1', column('col1'), column('col2'),
column('col3'), column('col4'))
t2 = table(
't2', column('col1'), column('col2'),
column('col3'), column('col4'))
s1, s2 = select(
[t1.c.col3.label('col3'), t1.c.col4.label('col4')],
t1.c.col2.in_(['t1col2r1', 't1col2r2'])), \
select([t2.c.col3.label('col3'), t2.c.col4.label('col4')],
t2.c.col2.in_(['t2col2r2', 't2col2r3']))
u = union(s1, s2, order_by=['col3', 'col4'])
self.assert_compile(u,
'SELECT t1.col3 AS col3, t1.col4 AS col4 '
'FROM t1 WHERE t1.col2 IN (:col2_1, '
':col2_2) UNION SELECT t2.col3 AS col3, '
't2.col4 AS col4 FROM t2 WHERE t2.col2 IN '
'(:col2_3, :col2_4) ORDER BY col3, col4')
self.assert_compile(u.alias('bar').select(),
'SELECT bar.col3, bar.col4 FROM (SELECT '
't1.col3 AS col3, t1.col4 AS col4 FROM t1 '
'WHERE t1.col2 IN (:col2_1, :col2_2) UNION '
'SELECT t2.col3 AS col3, t2.col4 AS col4 '
'FROM t2 WHERE t2.col2 IN (:col2_3, '
':col2_4)) AS bar')
def test_function(self):
self.assert_compile(func.foo(1, 2), 'foo(:foo_1, :foo_2)')
self.assert_compile(func.current_time(), 'CURRENT_TIME')
self.assert_compile(func.foo(), 'foo()')
m = MetaData()
t = Table(
'sometable', m, Column('col1', Integer), Column('col2', Integer))
self.assert_compile(select([func.max(t.c.col1)]),
'SELECT max(sometable.col1) AS max_1 FROM '
'sometable')
def test_function_overrides(self):
self.assert_compile(func.current_date(), "GETDATE()")
self.assert_compile(func.length(3), "LEN(:length_1)")
def test_extract(self):
t = table('t', column('col1'))
for field in 'day', 'month', 'year':
self.assert_compile(
select([extract(field, t.c.col1)]),
'SELECT DATEPART(%s, t.col1) AS anon_1 FROM t' % field)
def test_update_returning(self):
table1 = table(
'mytable',
column('myid', Integer),
column('name', String(128)),
column('description', String(128)))
u = update(
table1,
values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
self.assert_compile(u,
'UPDATE mytable SET name=:name OUTPUT '
'inserted.myid, inserted.name')
u = update(table1, values=dict(name='foo')).returning(table1)
self.assert_compile(u,
'UPDATE mytable SET name=:name OUTPUT '
'inserted.myid, inserted.name, '
'inserted.description')
u = update(
table1,
values=dict(
name='foo')).returning(table1).where(table1.c.name == 'bar')
self.assert_compile(u,
'UPDATE mytable SET name=:name OUTPUT '
'inserted.myid, inserted.name, '
'inserted.description WHERE mytable.name = '
':name_1')
u = update(table1, values=dict(name='foo'
)).returning(func.length(table1.c.name))
self.assert_compile(u,
'UPDATE mytable SET name=:name OUTPUT '
'LEN(inserted.name) AS length_1')
def test_delete_returning(self):
table1 = table(
'mytable', column('myid', Integer),
column('name', String(128)), column('description', String(128)))
d = delete(table1).returning(table1.c.myid, table1.c.name)
self.assert_compile(d,
'DELETE FROM mytable OUTPUT deleted.myid, '
'deleted.name')
d = delete(table1).where(table1.c.name == 'bar'
).returning(table1.c.myid,
table1.c.name)
self.assert_compile(d,
'DELETE FROM mytable OUTPUT deleted.myid, '
'deleted.name WHERE mytable.name = :name_1')
def test_insert_returning(self):
table1 = table(
'mytable', column('myid', Integer),
column('name', String(128)), column('description', String(128)))
i = insert(
table1,
values=dict(name='foo')).returning(table1.c.myid, table1.c.name)
self.assert_compile(i,
'INSERT INTO mytable (name) OUTPUT '
'inserted.myid, inserted.name VALUES '
'(:name)')
i = insert(table1, values=dict(name='foo')).returning(table1)
self.assert_compile(i,
'INSERT INTO mytable (name) OUTPUT '
'inserted.myid, inserted.name, '
'inserted.description VALUES (:name)')
i = insert(table1, values=dict(name='foo'
)).returning(func.length(table1.c.name))
self.assert_compile(i,
'INSERT INTO mytable (name) OUTPUT '
'LEN(inserted.name) AS length_1 VALUES '
'(:name)')
def test_limit_using_top(self):
t = table('t', column('x', Integer), column('y', Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10)
self.assert_compile(
s,
"SELECT TOP 10 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
checkparams={'x_1': 5}
)
def test_limit_zero_using_top(self):
t = table('t', column('x', Integer), column('y', Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0)
self.assert_compile(
s,
"SELECT TOP 0 t.x, t.y FROM t WHERE t.x = :x_1 ORDER BY t.y",
checkparams={'x_1': 5}
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t.c.x in set(c._create_result_map()['x'][1])
def test_offset_using_window(self):
t = table('t', column('x', Integer), column('y', Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).offset(20)
# test that the select is not altered with subsequent compile
# calls
for i in range(2):
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y FROM (SELECT t.x AS x, t.y "
"AS y, ROW_NUMBER() OVER (ORDER BY t.y) AS "
"mssql_rn FROM t WHERE t.x = :x_1) AS "
"anon_1 WHERE mssql_rn > :param_1",
checkparams={'param_1': 20, 'x_1': 5}
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t.c.x in set(c._create_result_map()['x'][1])
def test_limit_offset_using_window(self):
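        # SQL Server (pre-2012 syntax) has no native LIMIT/OFFSET, so the
        # dialect emulates both with a ROW_NUMBER() OVER (ORDER BY ...) window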
t = table('t', column('x', Integer), column('y', Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y "
"FROM (SELECT t.x AS x, t.y AS y, "
"ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
"FROM t "
"WHERE t.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t.c.x in set(c._create_result_map()['x'][1])
assert t.c.y in set(c._create_result_map()['y'][1])
def test_limit_offset_w_ambiguous_cols(self):
t = table('t', column('x', Integer), column('y', Integer))
cols = [t.c.x, t.c.x.label('q'), t.c.x.label('p'), t.c.y]
s = select(cols).where(t.c.x == 5).order_by(t.c.y).limit(10).offset(20)
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.q, anon_1.p, anon_1.y "
"FROM (SELECT t.x AS x, t.x AS q, t.x AS p, t.y AS y, "
"ROW_NUMBER() OVER (ORDER BY t.y) AS mssql_rn "
"FROM t "
"WHERE t.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 4)
result_map = c._create_result_map()
for col in cols:
is_(result_map[col.key][1][0], col)
def test_limit_offset_with_correlated_order_by(self):
t1 = table('t1', column('x', Integer), column('y', Integer))
t2 = table('t2', column('x', Integer), column('y', Integer))
order_by = select([t2.c.y]).where(t1.c.x == t2.c.x).as_scalar()
s = select([t1]).where(t1.c.x == 5).order_by(order_by) \
.limit(10).offset(20)
self.assert_compile(
s,
"SELECT anon_1.x, anon_1.y "
"FROM (SELECT t1.x AS x, t1.y AS y, "
"ROW_NUMBER() OVER (ORDER BY "
"(SELECT t2.y FROM t2 WHERE t1.x = t2.x)"
") AS mssql_rn "
"FROM t1 "
"WHERE t1.x = :x_1) AS anon_1 "
"WHERE mssql_rn > :param_1 AND mssql_rn <= :param_2 + :param_1",
checkparams={'param_1': 20, 'param_2': 10, 'x_1': 5}
)
c = s.compile(dialect=mssql.dialect())
eq_(len(c._result_columns), 2)
assert t1.c.x in set(c._create_result_map()['x'][1])
assert t1.c.y in set(c._create_result_map()['y'][1])
def test_offset_dont_misapply_labelreference(self):
m = MetaData()
t = Table('t', m, Column('x', Integer))
expr1 = func.foo(t.c.x).label('x')
expr2 = func.foo(t.c.x).label('y')
stmt1 = select([expr1]).order_by(expr1.desc()).offset(1)
stmt2 = select([expr2]).order_by(expr2.desc()).offset(1)
self.assert_compile(
stmt1,
"SELECT anon_1.x FROM (SELECT foo(t.x) AS x, "
"ROW_NUMBER() OVER (ORDER BY foo(t.x) DESC) AS mssql_rn FROM t) "
"AS anon_1 WHERE mssql_rn > :param_1"
)
self.assert_compile(
stmt2,
"SELECT anon_1.y FROM (SELECT foo(t.x) AS y, "
"ROW_NUMBER() OVER (ORDER BY foo(t.x) DESC) AS mssql_rn FROM t) "
"AS anon_1 WHERE mssql_rn > :param_1"
)
def test_limit_zero_offset_using_window(self):
t = table('t', column('x', Integer), column('y', Integer))
s = select([t]).where(t.c.x == 5).order_by(t.c.y).limit(0).offset(0)
# render the LIMIT of zero, but not the OFFSET
# of zero, so produces TOP 0
self.assert_compile(
s,
"SELECT TOP 0 t.x, t.y FROM t "
"WHERE t.x = :x_1 ORDER BY t.y",
checkparams={'x_1': 5}
)
def test_primary_key_no_identity(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, autoincrement=False,
primary_key=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL, "
"PRIMARY KEY (id))"
)
def test_primary_key_defaults_to_identity(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, primary_key=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1), "
"PRIMARY KEY (id))"
)
def test_identity_no_primary_key(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, autoincrement=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1)"
")"
)
def test_identity_separate_from_primary_key(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, autoincrement=False,
primary_key=True),
Column('x', Integer, autoincrement=True)
)
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL, "
"x INTEGER NOT NULL IDENTITY(1,1), "
"PRIMARY KEY (id))"
)
def test_identity_illegal_two_autoincrements(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, autoincrement=True),
Column('id2', Integer, autoincrement=True),
)
# this will be rejected by the database, just asserting this is what
# the two autoincrements will do right now
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,1), "
"id2 INTEGER NOT NULL IDENTITY(1,1))"
)
def test_identity_start_0(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, mssql_identity_start=0,
primary_key=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
"PRIMARY KEY (id))"
)
def test_identity_increment_5(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, mssql_identity_increment=5,
primary_key=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(1,5), "
"PRIMARY KEY (id))"
)
def test_sequence_start_0(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, Sequence('', 0), primary_key=True))
with testing.expect_deprecated(
"Use of Sequence with SQL Server in order to affect "):
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(0,1), "
"PRIMARY KEY (id))"
)
def test_sequence_non_primary_key(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, Sequence('', start=5),
primary_key=False))
with testing.expect_deprecated(
"Use of Sequence with SQL Server in order to affect "):
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(5,1))"
)
def test_sequence_ignore_nullability(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer, Sequence('', start=5),
nullable=True))
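        # nullable=True is ignored here: the IDENTITY column is still
        # emitted as NOT NULL in the generated DDL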
with testing.expect_deprecated(
"Use of Sequence with SQL Server in order to affect "):
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (id INTEGER NOT NULL IDENTITY(5,1))"
)
def test_table_pkc_clustering(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('x', Integer, autoincrement=False),
Column('y', Integer, autoincrement=False),
PrimaryKeyConstraint("x", "y", mssql_clustered=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
"PRIMARY KEY CLUSTERED (x, y))"
)
def test_table_pkc_explicit_nonclustered(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('x', Integer, autoincrement=False),
Column('y', Integer, autoincrement=False),
PrimaryKeyConstraint("x", "y", mssql_clustered=False))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NOT NULL, "
"PRIMARY KEY NONCLUSTERED (x, y))"
)
def test_table_idx_explicit_nonclustered(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('x', Integer, autoincrement=False),
Column('y', Integer, autoincrement=False)
)
idx = Index("myidx", tbl.c.x, tbl.c.y, mssql_clustered=False)
self.assert_compile(
schema.CreateIndex(idx),
"CREATE NONCLUSTERED INDEX myidx ON test (x, y)"
)
def test_table_uc_explicit_nonclustered(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('x', Integer, autoincrement=False),
Column('y', Integer, autoincrement=False),
UniqueConstraint("x", "y", mssql_clustered=False))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NULL, y INTEGER NULL, "
"UNIQUE NONCLUSTERED (x, y))"
)
def test_table_uc_clustering(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('x', Integer, autoincrement=False),
Column('y', Integer, autoincrement=False),
PrimaryKeyConstraint("x"),
UniqueConstraint("y", mssql_clustered=True))
self.assert_compile(
schema.CreateTable(tbl),
"CREATE TABLE test (x INTEGER NOT NULL, y INTEGER NULL, "
"PRIMARY KEY (x), UNIQUE CLUSTERED (y))"
)
def test_index_clustering(self):
metadata = MetaData()
tbl = Table('test', metadata,
Column('id', Integer))
idx = Index("foo", tbl.c.id, mssql_clustered=True)
self.assert_compile(schema.CreateIndex(idx),
"CREATE CLUSTERED INDEX foo ON test (id)"
)
def test_index_ordering(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('x', Integer), Column('y', Integer), Column('z', Integer))
idx = Index("foo", tbl.c.x.desc(), "y")
self.assert_compile(schema.CreateIndex(idx),
"CREATE INDEX foo ON test (x DESC, y)"
)
def test_create_index_expr(self):
m = MetaData()
t1 = Table('foo', m,
Column('x', Integer)
)
self.assert_compile(
schema.CreateIndex(Index("bar", t1.c.x > 5)),
"CREATE INDEX bar ON foo (x > 5)"
)
def test_drop_index_w_schema(self):
m = MetaData()
t1 = Table('foo', m,
Column('x', Integer),
schema='bar'
)
self.assert_compile(
schema.DropIndex(Index("idx_foo", t1.c.x)),
"DROP INDEX idx_foo ON bar.foo"
)
def test_index_extra_include_1(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('x', Integer), Column('y', Integer), Column('z', Integer))
idx = Index("foo", tbl.c.x, mssql_include=['y'])
self.assert_compile(schema.CreateIndex(idx),
"CREATE INDEX foo ON test (x) INCLUDE (y)"
)
def test_index_extra_include_2(self):
metadata = MetaData()
tbl = Table(
'test', metadata,
Column('x', Integer), Column('y', Integer), Column('z', Integer))
idx = Index("foo", tbl.c.x, mssql_include=[tbl.c.y])
self.assert_compile(schema.CreateIndex(idx),
"CREATE INDEX foo ON test (x) INCLUDE (y)"
)
class SchemaTest(fixtures.TestBase):
def setup(self):
t = Table('sometable', MetaData(),
Column('pk_column', Integer),
Column('test_column', String)
)
self.column = t.c.test_column
dialect = mssql.dialect()
self.ddl_compiler = dialect.ddl_compiler(dialect,
schema.CreateTable(t))
def _column_spec(self):
return self.ddl_compiler.get_column_specification(self.column)
def test_that_mssql_default_nullability_emits_null(self):
eq_("test_column VARCHAR(max) NULL", self._column_spec())
def test_that_mssql_none_nullability_does_not_emit_nullability(self):
self.column.nullable = None
eq_("test_column VARCHAR(max)", self._column_spec())
def test_that_mssql_specified_nullable_emits_null(self):
self.column.nullable = True
eq_("test_column VARCHAR(max) NULL", self._column_spec())
def test_that_mssql_specified_not_nullable_emits_not_null(self):
self.column.nullable = False
eq_("test_column VARCHAR(max) NOT NULL", self._column_spec())
| [
"sqlalchemy.sql.quoted_name",
"sqlalchemy.delete",
"sqlalchemy.dialects.mssql.base._owner_plus_db",
"sqlalchemy.MetaData",
"sqlalchemy.func.current_date",
"sqlalchemy.String",
"sqlalchemy.dialects.mssql.dialect",
"sqlalchemy.select",
"sqlalchemy.dialects.mssql.mxodbc.dialect",
"sqlalchemy.Column",
"sqlalchemy.func.current_time",
"sqlalchemy.union",
"sqlalchemy.func.max",
"sqlalchemy.sql.column",
"sqlalchemy.Index",
"sqlalchemy.literal",
"sqlalchemy.testing.eq_",
"sqlalchemy.func.foo",
"sqlalchemy.sql.false",
"sqlalchemy.PrimaryKeyConstraint",
"sqlalchemy.UniqueConstraint",
"sqlalchemy.testing.expect_deprecated",
"sqlalchemy.sql.table.update",
"sqlalchemy.func.length",
"sqlalchemy.schema.CreateIndex",
"sqlalchemy.Sequence",
"sqlalchemy.sql.true",
"sqlalchemy.testing.is_",
"sqlalchemy.extract",
"sqlalchemy.sql.delete",
"sqlalchemy.schema.CreateTable"
] | [((663, 678), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (676, 678), False, 'from sqlalchemy.dialects import mssql\n'), ((1360, 1370), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (1368, 1370), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((1729, 1739), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (1737, 1739), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((7163, 7173), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (7171, 7173), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((8854, 8870), 'sqlalchemy.dialects.mssql.mxodbc.dialect', 'mxodbc.dialect', ([], {}), '()\n', (8868, 8870), False, 'from sqlalchemy.dialects.mssql import mxodbc\n'), ((11560, 11570), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (11568, 11570), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((11941, 11951), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (11949, 11951), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12325, 12335), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (12333, 12335), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12704, 12714), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (12712, 12714), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((13065, 13075), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (13073, 13075), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((13418, 13428), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (13426, 13428), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((13757, 13772), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (13770, 13772), False, 'from sqlalchemy.dialects import mssql\n'), ((14257, 14267), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (14265, 14267), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((14947, 14957), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (14955, 14957), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, 
Sequence, literal\n'), ((15749, 15759), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (15757, 15759), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((16630, 16640), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (16638, 16640), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((18090, 18130), 'sqlalchemy.union', 'union', (['s1', 's2'], {'order_by': "['col3', 'col4']"}), "(s1, s2, order_by=['col3', 'col4'])\n", (18095, 18130), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19187, 19197), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (19195, 19197), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((27759, 27769), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (27767, 27769), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((29061, 29071), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (29069, 29071), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((29455, 29465), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (29463, 29465), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((29806, 29816), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (29814, 29816), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((30152, 30162), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (30160, 30162), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((30681, 30691), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (30689, 30691), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((31259, 31269), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (31267, 31269), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((31658, 31668), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (31666, 31668), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((32057, 32067), 'sqlalchemy.MetaData', 'MetaData', ([], {}), 
'()\n', (32065, 32067), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((32558, 32568), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (32566, 32568), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((33059, 33069), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (33067, 33069), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((33549, 33559), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (33557, 33559), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34064, 34074), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (34072, 34074), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34583, 34593), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (34591, 34593), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34779, 34834), 'sqlalchemy.Index', 'Index', (['"""myidx"""', 'tbl.c.x', 'tbl.c.y'], {'mssql_clustered': '(False)'}), "('myidx', tbl.c.x, tbl.c.y, mssql_clustered=False)\n", (34784, 34834), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35043, 35053), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (35051, 35053), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35533, 35543), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (35541, 35543), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36076, 36086), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (36084, 36086), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36182, 36226), 'sqlalchemy.Index', 'Index', (['"""foo"""', 'tbl.c.id'], {'mssql_clustered': '(True)'}), "('foo', tbl.c.id, mssql_clustered=True)\n", (36187, 36226), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36435, 36445), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (36443, 36445), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36824, 36834), 
'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (36832, 36834), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37121, 37131), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (37129, 37131), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37456, 37466), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (37464, 37466), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37610, 37652), 'sqlalchemy.Index', 'Index', (['"""foo"""', 'tbl.c.x'], {'mssql_include': "['y']"}), "('foo', tbl.c.x, mssql_include=['y'])\n", (37615, 37652), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37869, 37879), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (37877, 37879), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((38023, 38069), 'sqlalchemy.Index', 'Index', (['"""foo"""', 'tbl.c.x'], {'mssql_include': '[tbl.c.y]'}), "('foo', tbl.c.x, mssql_include=[tbl.c.y])\n", (38028, 38069), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((38501, 38516), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (38514, 38516), False, 'from sqlalchemy.dialects import mssql\n'), ((752, 763), 'sqlalchemy.sql.false', 'sql.false', ([], {}), '()\n', (761, 763), False, 'from sqlalchemy import sql\n'), ((820, 830), 'sqlalchemy.sql.true', 'sql.true', ([], {}), '()\n', (828, 830), False, 'from sqlalchemy import sql\n'), ((917, 937), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (923, 937), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((1124, 1144), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (1130, 1144), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((1405, 1434), 'sqlalchemy.Column', 'Column', (['"""somecolumn"""', 'Integer'], {}), "('somecolumn', Integer)\n", (1411, 1434), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((1774, 1802), 'sqlalchemy.Column', 'Column', (['"""somecolumn"""', 'String'], {}), "('somecolumn', String)\n", (1780, 1802), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((2211, 2231), 'sqlalchemy.sql.column', 'column', (['"""a"""', 'Integer'], {}), "('a', Integer)\n", (2217, 2231), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((2252, 2271), 'sqlalchemy.sql.column', 'column', (['"""b"""', 'String'], {}), "('b', String)\n", (2258, 2271), False, 'from 
sqlalchemy.sql import table, column, quoted_name\n'), ((2292, 2311), 'sqlalchemy.sql.column', 'column', (['"""c"""', 'String'], {}), "('c', String)\n", (2298, 2311), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((2378, 2398), 'sqlalchemy.sql.column', 'column', (['"""a"""', 'Integer'], {}), "('a', Integer)\n", (2384, 2398), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((2419, 2439), 'sqlalchemy.sql.column', 'column', (['"""b"""', 'Integer'], {}), "('b', Integer)\n", (2425, 2439), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((2460, 2480), 'sqlalchemy.sql.column', 'column', (['"""c"""', 'Integer'], {}), "('c', Integer)\n", (2466, 2480), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((2834, 2854), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (2840, 2854), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((3073, 3093), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (3079, 3093), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((3419, 3439), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (3425, 3439), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((3968, 3988), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (3974, 3988), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((4608, 4628), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (4614, 4628), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((4977, 4997), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (4983, 4997), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((5528, 5548), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (5534, 5548), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((5866, 5878), 'sqlalchemy.sql.column', 'column', (['"""c1"""'], {}), "('c1')\n", (5872, 5878), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((5905, 5917), 'sqlalchemy.sql.column', 'column', (['"""c1"""'], {}), "('c1')\n", (5911, 5917), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((6197, 6209), 'sqlalchemy.sql.column', 'column', (['"""c1"""'], {}), "('c1')\n", (6203, 6209), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((6401, 6415), 'sqlalchemy.sql.delete', 'sql.delete', (['a1'], {}), '(a1)\n', (6411, 6415), False, 'from sqlalchemy import sql\n'), ((6510, 6530), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (6516, 6530), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((6565, 6585), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (6571, 6585), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((7240, 7261), 'sqlalchemy.Column', 'Column', (['"""sym"""', 'String'], {}), "('sym', String)\n", (7246, 7261), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((7275, 7297), 'sqlalchemy.Column', 'Column', (['"""val"""', 'Integer'], {}), "('val', Integer)\n", (7281, 7297), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, 
func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((7400, 7421), 'sqlalchemy.Column', 'Column', (['"""sym"""', 'String'], {}), "('sym', String)\n", (7406, 7421), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((7435, 7460), 'sqlalchemy.Column', 'Column', (['"""newval"""', 'Integer'], {}), "('newval', Integer)\n", (7441, 7460), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((8967, 8980), 'sqlalchemy.sql.column', 'column', (['"""foo"""'], {}), "('foo')\n", (8973, 8980), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((9724, 9744), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (9730, 9744), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((10558, 10578), 'sqlalchemy.sql.column', 'column', (['"""somecolumn"""'], {}), "('somecolumn')\n", (10564, 10578), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((10940, 10963), 'sqlalchemy.sql.column', 'column', (['"""myid"""', 'Integer'], {}), "('myid', Integer)\n", (10946, 10963), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((10988, 11010), 'sqlalchemy.sql.column', 'column', (['"""name"""', 'String'], {}), "('name', String)\n", (10994, 11010), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((11035, 11064), 'sqlalchemy.sql.column', 'column', (['"""description"""', 'String'], {}), "('description', String)\n", (11041, 11064), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((11255, 11274), 'sqlalchemy.select', 'select', (["['*']", 'crit'], {}), "(['*'], crit)\n", (11261, 11274), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((11634, 11673), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (11640, 11673), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((11774, 11787), 'sqlalchemy.select', 'select', (['[tbl]'], {}), '([tbl])\n', (11780, 11787), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12015, 12054), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (12021, 12054), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12155, 12168), 'sqlalchemy.select', 'select', (['[tbl]'], {}), '([tbl])\n', (12161, 12168), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12399, 12438), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", 
(12405, 12438), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12539, 12552), 'sqlalchemy.select', 'select', (['[tbl]'], {}), '([tbl])\n', (12545, 12552), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12778, 12817), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (12784, 12817), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((12901, 12914), 'sqlalchemy.select', 'select', (['[tbl]'], {}), '([tbl])\n', (12907, 12914), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((13139, 13178), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (13145, 13178), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((13260, 13273), 'sqlalchemy.select', 'select', (['[tbl]'], {}), '([tbl])\n', (13266, 13273), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((13492, 13531), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (13498, 13531), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((13613, 13626), 'sqlalchemy.select', 'select', (['[tbl]'], {}), '([tbl])\n', (13619, 13626), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((14081, 14121), 'sqlalchemy.dialects.mssql.base._owner_plus_db', 'base._owner_plus_db', (['dialect', 'identifier'], {}), '(dialect, identifier)\n', (14100, 14121), False, 'from sqlalchemy.dialects.mssql import base\n'), ((14135, 14161), 'sqlalchemy.testing.eq_', 'eq_', (['owner', 'expected_owner'], {}), '(owner, expected_owner)\n', (14138, 14161), False, 'from sqlalchemy.testing import eq_, is_\n'), ((14174, 14202), 'sqlalchemy.testing.eq_', 'eq_', (['schema', 'expected_schema'], {}), '(schema, expected_schema)\n', (14177, 14202), False, 'from sqlalchemy.testing import eq_, is_\n'), ((14306, 14345), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (14312, 14345), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((15021, 15060), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (15027, 15060), False, 'from 
sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((15823, 15862), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (15829, 15862), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((16679, 16718), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (16685, 16718), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((17612, 17626), 'sqlalchemy.sql.column', 'column', (['"""col1"""'], {}), "('col1')\n", (17618, 17626), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((17628, 17642), 'sqlalchemy.sql.column', 'column', (['"""col2"""'], {}), "('col2')\n", (17634, 17642), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((17656, 17670), 'sqlalchemy.sql.column', 'column', (['"""col3"""'], {}), "('col3')\n", (17662, 17670), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((17672, 17686), 'sqlalchemy.sql.column', 'column', (['"""col4"""'], {}), "('col4')\n", (17678, 17686), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((17726, 17740), 'sqlalchemy.sql.column', 'column', (['"""col1"""'], {}), "('col1')\n", (17732, 17740), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((17742, 17756), 'sqlalchemy.sql.column', 'column', (['"""col2"""'], {}), "('col2')\n", (17748, 17756), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((17770, 17784), 'sqlalchemy.sql.column', 'column', (['"""col3"""'], {}), "('col3')\n", (17776, 17784), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((17786, 17800), 'sqlalchemy.sql.column', 'column', (['"""col4"""'], {}), "('col4')\n", (17792, 17800), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((19022, 19036), 'sqlalchemy.func.foo', 'func.foo', (['(1)', '(2)'], {}), '(1, 2)\n', (19030, 19036), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19089, 19108), 'sqlalchemy.func.current_time', 'func.current_time', ([], {}), '()\n', (19106, 19108), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19154, 19164), 'sqlalchemy.func.foo', 'func.foo', ([], {}), '()\n', (19162, 19164), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19245, 19268), 'sqlalchemy.Column', 'Column', (['"""col1"""', 'Integer'], {}), "('col1', Integer)\n", (19251, 19268), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19270, 19293), 'sqlalchemy.Column', 'Column', (['"""col2"""', 'Integer'], {}), "('col2', 
Integer)\n", (19276, 19293), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19534, 19553), 'sqlalchemy.func.current_date', 'func.current_date', ([], {}), '()\n', (19551, 19553), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19596, 19610), 'sqlalchemy.func.length', 'func.length', (['(3)'], {}), '(3)\n', (19607, 19610), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19682, 19696), 'sqlalchemy.sql.column', 'column', (['"""col1"""'], {}), "('col1')\n", (19688, 19696), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((19998, 20021), 'sqlalchemy.sql.column', 'column', (['"""myid"""', 'Integer'], {}), "('myid', Integer)\n", (20004, 20021), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((21186, 21212), 'sqlalchemy.func.length', 'func.length', (['table1.c.name'], {}), '(table1.c.name)\n', (21197, 21212), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((21460, 21483), 'sqlalchemy.sql.column', 'column', (['"""myid"""', 'Integer'], {}), "('myid', Integer)\n", (21466, 21483), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((22213, 22236), 'sqlalchemy.sql.column', 'column', (['"""myid"""', 'Integer'], {}), "('myid', Integer)\n", (22219, 22236), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((23030, 23056), 'sqlalchemy.func.length', 'func.length', (['table1.c.name'], {}), '(table1.c.name)\n', (23041, 23056), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((23322, 23342), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (23328, 23342), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((23344, 23364), 'sqlalchemy.sql.column', 'column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (23350, 23364), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((23665, 23685), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (23671, 23685), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((23687, 23707), 'sqlalchemy.sql.column', 'column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (23693, 23707), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((24151, 24171), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (24157, 24171), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((24173, 24193), 'sqlalchemy.sql.column', 'column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (24179, 24193), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((24969, 24989), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (24975, 24989), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((24991, 25011), 'sqlalchemy.sql.column', 'column', 
(['"""y"""', 'Integer'], {}), "('y', Integer)\n", (24997, 25011), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((25784, 25804), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (25790, 25804), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((25806, 25826), 'sqlalchemy.sql.column', 'column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (25812, 25826), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((26595, 26630), 'sqlalchemy.testing.is_', 'is_', (['result_map[col.key][1][0]', 'col'], {}), '(result_map[col.key][1][0], col)\n', (26598, 26630), False, 'from sqlalchemy.testing import eq_, is_\n'), ((26715, 26735), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (26721, 26735), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((26737, 26757), 'sqlalchemy.sql.column', 'column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (26743, 26757), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((26784, 26804), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (26790, 26804), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((26806, 26826), 'sqlalchemy.sql.column', 'column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (26812, 26826), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((27797, 27817), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (27803, 27817), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((28604, 28624), 'sqlalchemy.sql.column', 'column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (28610, 28624), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((28626, 28646), 'sqlalchemy.sql.column', 'column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (28632, 28646), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((29130, 29190), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'autoincrement': '(False)', 'primary_key': '(True)'}), "('id', Integer, autoincrement=False, primary_key=True)\n", (29136, 29190), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((29260, 29283), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (29278, 29283), False, 'from sqlalchemy import schema\n'), ((29524, 29563), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'primary_key': '(True)'}), "('id', Integer, primary_key=True)\n", (29530, 29563), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((29606, 29629), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (29624, 29629), False, 'from sqlalchemy import schema\n'), ((29875, 29916), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'autoincrement': '(True)'}), "('id', Integer, autoincrement=True)\n", (29881, 29916), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), 
((29959, 29982), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (29977, 29982), False, 'from sqlalchemy import schema\n'), ((30221, 30281), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'autoincrement': '(False)', 'primary_key': '(True)'}), "('id', Integer, autoincrement=False, primary_key=True)\n", (30227, 30281), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((30330, 30370), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {'autoincrement': '(True)'}), "('x', Integer, autoincrement=True)\n", (30336, 30370), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((30434, 30457), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (30452, 30457), False, 'from sqlalchemy import schema\n'), ((30750, 30791), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'autoincrement': '(True)'}), "('id', Integer, autoincrement=True)\n", (30756, 30791), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((30813, 30855), 'sqlalchemy.Column', 'Column', (['"""id2"""', 'Integer'], {'autoincrement': '(True)'}), "('id2', Integer, autoincrement=True)\n", (30819, 30855), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((31048, 31071), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (31066, 31071), False, 'from sqlalchemy import schema\n'), ((31328, 31391), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'mssql_identity_start': '(0)', 'primary_key': '(True)'}), "('id', Integer, mssql_identity_start=0, primary_key=True)\n", (31334, 31391), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((31461, 31484), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (31479, 31484), False, 'from sqlalchemy import schema\n'), ((31727, 31794), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {'mssql_identity_increment': '(5)', 'primary_key': '(True)'}), "('id', Integer, mssql_identity_increment=5, primary_key=True)\n", (31733, 31794), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((31864, 31887), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (31882, 31887), False, 'from sqlalchemy import schema\n'), ((32197, 32282), 'sqlalchemy.testing.expect_deprecated', 'testing.expect_deprecated', (['"""Use of Sequence with SQL Server in order to affect """'], {}), "('Use of Sequence with SQL Server in order to affect '\n )\n", (32222, 32282), False, 'from sqlalchemy import testing\n'), ((32732, 32817), 'sqlalchemy.testing.expect_deprecated', 'testing.expect_deprecated', (['"""Use of Sequence with SQL Server in order to affect """'], {}), "('Use of Sequence with SQL 
Server in order to affect '\n )\n", (32757, 32817), False, 'from sqlalchemy import testing\n'), ((33229, 33314), 'sqlalchemy.testing.expect_deprecated', 'testing.expect_deprecated', (['"""Use of Sequence with SQL Server in order to affect """'], {}), "('Use of Sequence with SQL Server in order to affect '\n )\n", (33254, 33314), False, 'from sqlalchemy import testing\n'), ((33618, 33659), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {'autoincrement': '(False)'}), "('x', Integer, autoincrement=False)\n", (33624, 33659), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((33681, 33722), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {'autoincrement': '(False)'}), "('y', Integer, autoincrement=False)\n", (33687, 33722), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((33744, 33796), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""x"""', '"""y"""'], {'mssql_clustered': '(True)'}), "('x', 'y', mssql_clustered=True)\n", (33764, 33796), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((33839, 33862), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (33857, 33862), False, 'from sqlalchemy import schema\n'), ((34133, 34174), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {'autoincrement': '(False)'}), "('x', Integer, autoincrement=False)\n", (34139, 34174), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34196, 34237), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {'autoincrement': '(False)'}), "('y', Integer, autoincrement=False)\n", (34202, 34237), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34259, 34312), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""x"""', '"""y"""'], {'mssql_clustered': '(False)'}), "('x', 'y', mssql_clustered=False)\n", (34279, 34312), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34355, 34378), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (34373, 34378), False, 'from sqlalchemy import schema\n'), ((34657, 34698), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {'autoincrement': '(False)'}), "('x', Integer, autoincrement=False)\n", (34663, 34698), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34712, 34753), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {'autoincrement': '(False)'}), "('y', Integer, autoincrement=False)\n", (34718, 34753), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, 
PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((34876, 34899), 'sqlalchemy.schema.CreateIndex', 'schema.CreateIndex', (['idx'], {}), '(idx)\n', (34894, 34899), False, 'from sqlalchemy import schema\n'), ((35112, 35153), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {'autoincrement': '(False)'}), "('x', Integer, autoincrement=False)\n", (35118, 35153), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35175, 35216), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {'autoincrement': '(False)'}), "('y', Integer, autoincrement=False)\n", (35181, 35216), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35238, 35287), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""x"""', '"""y"""'], {'mssql_clustered': '(False)'}), "('x', 'y', mssql_clustered=False)\n", (35254, 35287), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35330, 35353), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (35348, 35353), False, 'from sqlalchemy import schema\n'), ((35602, 35643), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {'autoincrement': '(False)'}), "('x', Integer, autoincrement=False)\n", (35608, 35643), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35665, 35706), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {'autoincrement': '(False)'}), "('y', Integer, autoincrement=False)\n", (35671, 35706), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35728, 35753), 'sqlalchemy.PrimaryKeyConstraint', 'PrimaryKeyConstraint', (['"""x"""'], {}), "('x')\n", (35748, 35753), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35775, 35818), 'sqlalchemy.UniqueConstraint', 'UniqueConstraint', (['"""y"""'], {'mssql_clustered': '(True)'}), "('y', mssql_clustered=True)\n", (35791, 35818), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((35861, 35884), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (35879, 35884), False, 'from sqlalchemy import schema\n'), ((36145, 36166), 'sqlalchemy.Column', 'Column', (['"""id"""', 'Integer'], {}), "('id', Integer)\n", (36151, 36166), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36255, 36278), 'sqlalchemy.schema.CreateIndex', 'schema.CreateIndex', (['idx'], {}), '(idx)\n', (36273, 36278), False, 'from sqlalchemy import schema\n'), ((36509, 36529), 'sqlalchemy.Column', 'Column', 
(['"""x"""', 'Integer'], {}), "('x', Integer)\n", (36515, 36529), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36531, 36551), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (36537, 36551), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36553, 36573), 'sqlalchemy.Column', 'Column', (['"""z"""', 'Integer'], {}), "('z', Integer)\n", (36559, 36573), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((36651, 36674), 'sqlalchemy.schema.CreateIndex', 'schema.CreateIndex', (['idx'], {}), '(idx)\n', (36669, 36674), False, 'from sqlalchemy import schema\n'), ((36883, 36903), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (36889, 36903), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37180, 37200), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (37186, 37200), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37530, 37550), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (37536, 37550), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37552, 37572), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (37558, 37572), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37574, 37594), 'sqlalchemy.Column', 'Column', (['"""z"""', 'Integer'], {}), "('z', Integer)\n", (37580, 37594), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37681, 37704), 'sqlalchemy.schema.CreateIndex', 'schema.CreateIndex', (['idx'], {}), '(idx)\n', (37699, 37704), False, 'from sqlalchemy import schema\n'), ((37943, 37963), 'sqlalchemy.Column', 'Column', (['"""x"""', 'Integer'], {}), "('x', Integer)\n", (37949, 37963), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37965, 37985), 'sqlalchemy.Column', 'Column', (['"""y"""', 'Integer'], {}), "('y', Integer)\n", (37971, 37985), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37987, 38007), 'sqlalchemy.Column', 'Column', (['"""z"""', 'Integer'], {}), "('z', Integer)\n", (37993, 38007), False, 'from sqlalchemy import Integer, String, Table, Column, 
select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((38098, 38121), 'sqlalchemy.schema.CreateIndex', 'schema.CreateIndex', (['idx'], {}), '(idx)\n', (38116, 38121), False, 'from sqlalchemy import schema\n'), ((38316, 38326), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (38324, 38326), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((38346, 38374), 'sqlalchemy.Column', 'Column', (['"""pk_column"""', 'Integer'], {}), "('pk_column', Integer)\n", (38352, 38374), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((38394, 38423), 'sqlalchemy.Column', 'Column', (['"""test_column"""', 'String'], {}), "('test_column', String)\n", (38400, 38423), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((38624, 38645), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['t'], {}), '(t)\n', (38642, 38645), False, 'from sqlalchemy import schema\n'), ((5931, 5945), 'sqlalchemy.sql.delete', 'sql.delete', (['t1'], {}), '(t1)\n', (5941, 5945), False, 'from sqlalchemy import sql\n'), ((6223, 6237), 'sqlalchemy.sql.delete', 'sql.delete', (['a1'], {}), '(a1)\n', (6233, 6237), False, 'from sqlalchemy import sql\n'), ((7486, 7500), 'sqlalchemy.sql.table.update', 'table.update', ([], {}), '()\n', (7498, 7500), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((11104, 11153), 'sqlalchemy.select', 'select', (['[table1.c.myid]'], {'order_by': '[table1.c.myid]'}), '([table1.c.myid], order_by=[table1.c.myid])\n', (11110, 11153), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((11694, 11722), 'sqlalchemy.sql.quoted_name', 'quoted_name', (['"""foo.dbo"""', '(True)'], {}), "('foo.dbo', True)\n", (11705, 11722), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((12075, 12103), 'sqlalchemy.sql.quoted_name', 'quoted_name', (['"""foo.dbo"""', '(True)'], {}), "('foo.dbo', True)\n", (12086, 12103), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((12459, 12487), 'sqlalchemy.sql.quoted_name', 'quoted_name', (['"""Foo.dbo"""', '(True)'], {}), "('Foo.dbo', True)\n", (12470, 12487), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((14582, 14600), 'sqlalchemy.select', 'select', (['[tbl.c.id]'], {}), '([tbl.c.id])\n', (14588, 14600), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((15304, 15322), 'sqlalchemy.select', 'select', (['[tbl.c.id]'], {}), '([tbl.c.id])\n', (15310, 15322), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((16109, 16127), 'sqlalchemy.select', 'select', (['[tbl.c.id]'], {}), '([tbl.c.id])\n', (16115, 16127), False, 'from sqlalchemy import Integer, String, Table, Column, 
select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((17092, 17110), 'sqlalchemy.select', 'select', (['[tbl.c.id]'], {}), '([tbl.c.id])\n', (17098, 17110), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((20050, 20061), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (20056, 20061), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((20098, 20109), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (20104, 20109), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((21512, 21523), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (21518, 21523), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((21548, 21559), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (21554, 21559), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((21574, 21588), 'sqlalchemy.delete', 'delete', (['table1'], {}), '(table1)\n', (21580, 21588), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((22265, 22276), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (22271, 22276), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((22301, 22312), 'sqlalchemy.String', 'String', (['(128)'], {}), '(128)\n', (22307, 22312), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((23971, 23986), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (23984, 23986), False, 'from sqlalchemy.dialects import mssql\n'), ((25534, 25549), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (25547, 25549), False, 'from sqlalchemy.dialects import mssql\n'), ((26456, 26471), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (26469, 26471), False, 'from sqlalchemy.dialects import mssql\n'), ((27512, 27527), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (27525, 27527), False, 'from sqlalchemy.dialects import mssql\n'), ((27836, 27851), 'sqlalchemy.func.foo', 'func.foo', (['t.c.x'], {}), '(t.c.x)\n', (27844, 27851), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((27879, 27894), 'sqlalchemy.func.foo', 'func.foo', (['t.c.x'], {}), '(t.c.x)\n', (27887, 27894), False, 'from sqlalchemy import Integer, String, Table, Column, select, 
MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((32148, 32163), 'sqlalchemy.Sequence', 'Sequence', (['""""""', '(0)'], {}), "('', 0)\n", (32156, 32163), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((32345, 32368), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (32363, 32368), False, 'from sqlalchemy import schema\n'), ((32649, 32670), 'sqlalchemy.Sequence', 'Sequence', (['""""""'], {'start': '(5)'}), "('', start=5)\n", (32657, 32670), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((32880, 32903), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (32898, 32903), False, 'from sqlalchemy import schema\n'), ((33150, 33171), 'sqlalchemy.Sequence', 'Sequence', (['""""""'], {'start': '(5)'}), "('', start=5)\n", (33158, 33171), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((33377, 33400), 'sqlalchemy.schema.CreateTable', 'schema.CreateTable', (['tbl'], {}), '(tbl)\n', (33395, 33400), False, 'from sqlalchemy import schema\n'), ((36985, 37009), 'sqlalchemy.Index', 'Index', (['"""bar"""', '(t1.c.x > 5)'], {}), "('bar', t1.c.x > 5)\n", (36990, 37009), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((37313, 37337), 'sqlalchemy.Index', 'Index', (['"""idx_foo"""', 't1.c.x'], {}), "('idx_foo', t1.c.x)\n", (37318, 37337), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((1846, 1857), 'sqlalchemy.select', 'select', (['[t]'], {}), '([t])\n', (1852, 1857), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((6146, 6158), 'sqlalchemy.sql.column', 'column', (['"""c1"""'], {}), "('c1')\n", (6152, 6158), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((19331, 19349), 'sqlalchemy.func.max', 'func.max', (['t.c.col1'], {}), '(t.c.col1)\n', (19339, 19349), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((24775, 24790), 'sqlalchemy.dialects.mssql.dialect', 'mssql.dialect', ([], {}), '()\n', (24788, 24790), False, 'from sqlalchemy.dialects import mssql\n'), ((7846, 7860), 'sqlalchemy.sql.table.update', 'table.update', ([], {}), '()\n', (7858, 7860), False, 'from sqlalchemy.sql import table, column, quoted_name\n'), ((9052, 9064), 'sqlalchemy.literal', 'literal', (['"""x"""'], {}), "('x')\n", (9059, 9064), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((9066, 9078), 
'sqlalchemy.literal', 'literal', (['"""y"""'], {}), "('y')\n", (9073, 9078), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((9182, 9193), 'sqlalchemy.select', 'select', (['[t]'], {}), '([t])\n', (9188, 9193), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((19801, 19825), 'sqlalchemy.extract', 'extract', (['field', 't.c.col1'], {}), '(field, t.c.col1)\n', (19808, 19825), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((21788, 21802), 'sqlalchemy.delete', 'delete', (['table1'], {}), '(table1)\n', (21794, 21802), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((26848, 26864), 'sqlalchemy.select', 'select', (['[t2.c.y]'], {}), '([t2.c.y])\n', (26854, 26864), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((27923, 27938), 'sqlalchemy.select', 'select', (['[expr1]'], {}), '([expr1])\n', (27929, 27938), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((27988, 28003), 'sqlalchemy.select', 'select', (['[expr2]'], {}), '([expr2])\n', (27994, 28003), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((7525, 7549), 'sqlalchemy.select', 'select', (['[other.c.newval]'], {}), '([other.c.newval])\n', (7531, 7549), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((23379, 23390), 'sqlalchemy.select', 'select', (['[t]'], {}), '([t])\n', (23385, 23390), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((23722, 23733), 'sqlalchemy.select', 'select', (['[t]'], {}), '([t])\n', (23728, 23733), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((24208, 24219), 'sqlalchemy.select', 'select', (['[t]'], {}), '([t])\n', (24214, 24219), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((25026, 25037), 'sqlalchemy.select', 'select', (['[t]'], {}), '([t])\n', (25032, 25037), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((25907, 25919), 'sqlalchemy.select', 
'select', (['cols'], {}), '(cols)\n', (25913, 25919), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((26913, 26925), 'sqlalchemy.select', 'select', (['[t1]'], {}), '([t1])\n', (26919, 26925), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n'), ((28661, 28672), 'sqlalchemy.select', 'select', (['[t]'], {}), '([t])\n', (28667, 28672), False, 'from sqlalchemy import Integer, String, Table, Column, select, MetaData, update, delete, insert, extract, union, func, PrimaryKeyConstraint, UniqueConstraint, Index, Sequence, literal\n')] |
"""
This code scrapes ScienceDirect for publication URLs and writes them to
a text file in the current directory for later use.
"""
import selenium
from selenium import webdriver
import numpy as np
import pandas as pd
import bs4
from bs4 import BeautifulSoup
import time
from sklearn.utils import shuffle
def scrape_page(driver):
"""
This method finds all the publication result web elements on the webpage.
Parameters
----------
driver (Selenium webdriver object) : Instance of the webdriver class e.g.
webdriver.Chrome()
Returns
-------
elems (list) : A list of all scraped hrefs from the page
"""
elems = driver.find_elements_by_class_name('ResultItem')
return elems
def clean(elems):
"""
    This method takes a list of scraped selenium web elements and extracts
    the publication URL and title from each result item.
    Parameters
    ----------
    elems (list) : The list of scraped 'ResultItem' web elements
    Returns
    -------
    urls (list) : The list of publication hrefs, matching the results
        displayed on the ScienceDirect GUI
    titles (list) : The corresponding publication titles
"""
titles = []
urls = []
for elem in elems:
href_child = elem.find_element_by_css_selector('a[href]')
url = href_child.get_attribute('href')
title = href_child.text
titles.append(title)
urls.append(url)
return urls, titles
def build_url_list(gui_prefix,search_terms,journal_list):
"""
    This method takes the list of journals and creates a triple-nested dictionary
containing all accessible urls to each page, in each year, for each journal,
for a given search on sciencedirect.
"""
dict1 = {}
years = np.arange(1995,2020)
for journal in journal_list:
dict2 = {}
for year in years:
dict3 = {}
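            # '&show=100' asks for 100 results per page; '&offset=i00' then pages through
            # them 100 at a time (up to 60 pages per journal-year)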
for i in range(60):
url = gui_prefix + search_terms + '&show=100'+ '&articleTypes=FLA%2CREV' + '&years='+ str(year)
if i != 0:
url = url + '&offset=' + str(i) +'00'
url = url + '&pub=' + journal
dict3[i] = url
dict2[year] = dict3
dict1[journal] = dict2
return dict1
def proxify(scraped_urls,uw_prefix):
"""
This method takes a list of scraped urls and turns them into urls that
go through the UW Library proxy so that all of them are full access.
Parameters
----------
scraped_urls (list) : The list of URLs to be converted
uw_prefix (str) : The string that all URLs which go through the UW Library
Proxy start with.
Returns
-------
proxy_urls (list) : The list of converted URLs which go through UW Library
proxy
"""
proxy_urls = []
for url in scraped_urls:
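        # the last 17 characters of a ScienceDirect URL are assumed to be the article PII (e.g. 'S0021979718312345')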
sd_id = url[-17:]
newlink = uw_prefix + sd_id
if sd_id.startswith('S'):
proxy_urls.append(newlink)
return proxy_urls
def write_urls(urls,titles,file,journal,year):
"""
This method takes a list of urls and writes them to a desired text file.
Parameters
----------
    urls (list) : The list of URLs to be saved.
    titles (list) : The publication titles corresponding to each URL.
    file (file object) : The opened .txt file which will be written to.
    journal (str) : The journal the URLs were scraped from.
    year (str or int) : The year associated with the publication date.
Returns
-------
Does not return anything
"""
for link,title in zip(urls,titles):
line = link + ',' + title + ',' + journal + ',' + str(year)
file.write(line)
file.write('\n')
def find_pubTitle(driver,journal):
"""
This method finds the identifying number for a specific journal. This
    identifying number is added to the gui query URL to ensure only publications
from the desired journal are being found.
"""
pub_elems = driver.find_elements_by_css_selector('input[id*=publicationTitles]')
pub_names = []
for elem in pub_elems:
pub_name = elem.get_attribute("name")
if pub_name == journal:
return elem.get_attribute('id')[-6:] #returns the identifying number
#for that journal
df = pd.read_excel('elsevier_journals.xls')
df.Full_Category = df.Full_Category.str.lower() # lowercase topics for searching
df = df.drop_duplicates(subset = 'Journal_Title') # drop any duplicate journals
df = shuffle(df,random_state = 42)
# The set of default strings that will be used to sort which journals we want
journal_strings = ['chemistry','energy','molecular','atomic','chemical','biochem'
                  ,'organic','polymer','chemical engineering','biotech','colloid']
name = df.Full_Category.str.contains # making this an easier command to type
# new dataframe full of only journals whose topic description contained the
# desired keywords
df2 = df[name('polymer') | name('chemistry') | name('energy')
| name('molecular') | name('colloid') | name('biochem')
| name('organic') | name('biotech') | name('chemical')]
journal_list = df2.Journal_Title # Series of only the journals to be searched
gui_prefix = 'https://www.sciencedirect.com/search/advanced?qs='
search_terms = 'chemistry%20OR%20molecule%20OR%20polymer%20OR%20organic'
url_dict = build_url_list(gui_prefix,search_terms,journal_list)
driver = webdriver.Chrome()
uw_prefix = 'https://www-sciencedirect-com.offcampus.lib.washington.edu/science/article/pii/'
filename = input("Input filename with .txt extension for URL storage: ")
url_counter = 0
master_list = []
file = open(filename,'a+')
for journal in journal_list:
for year in np.arange(1995,2020):
for offset in np.arange(60):
page = url_dict[journal][year][offset]
print("journal, year, offset = ",journal,year,offset)
driver.get(page)
time.sleep(2) # need sleep to load the page properly
if offset == 0: # if on page 1, we need to grab the publisher number
try: # we may be at a page which won't have the item we are looking for
                    pubTitles = find_pubTitle(driver, journal)  # 'journal_counter' was undefined; use the current loop variable
                    for offset_key in url_dict[journal][year]:
                        # update every url for this journal/year in place (the original loop only rebound a local name)
                        url_dict[journal][year][offset_key] += '&pubTitles=' + pubTitles
driver.get(url_dict[journal][year][0]) # reload the first page with the new url
except:
pass # if there is an exception, it means we are on the right page
scraped_elems = scrape_page(driver) # scrape the page
scraped_urls, titles = clean(scraped_elems)
proxy_urls = proxify(scraped_urls,uw_prefix) # not even sure this is needed
write_urls(proxy_urls,titles,file,journal,year)
url_counter += len(proxy_urls)
print('Total URLs saved is: ',url_counter)
if len(scraped_elems) < 100: # after content is saved, go to the next year
break # because we know this is the last page of urls for this year
file.close()
driver.quit()
| [
"sklearn.utils.shuffle",
"selenium.webdriver.Chrome",
"time.sleep",
"pandas.read_excel",
"numpy.arange"
] | [((4232, 4270), 'pandas.read_excel', 'pd.read_excel', (['"""elsevier_journals.xls"""'], {}), "('elsevier_journals.xls')\n", (4245, 4270), True, 'import pandas as pd\n'), ((4437, 4465), 'sklearn.utils.shuffle', 'shuffle', (['df'], {'random_state': '(42)'}), '(df, random_state=42)\n', (4444, 4465), False, 'from sklearn.utils import shuffle\n'), ((5368, 5386), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {}), '()\n', (5384, 5386), False, 'from selenium import webdriver\n'), ((1802, 1823), 'numpy.arange', 'np.arange', (['(1995)', '(2020)'], {}), '(1995, 2020)\n', (1811, 1823), True, 'import numpy as np\n'), ((5664, 5685), 'numpy.arange', 'np.arange', (['(1995)', '(2020)'], {}), '(1995, 2020)\n', (5673, 5685), True, 'import numpy as np\n'), ((5708, 5721), 'numpy.arange', 'np.arange', (['(60)'], {}), '(60)\n', (5717, 5721), True, 'import numpy as np\n'), ((5884, 5897), 'time.sleep', 'time.sleep', (['(2)'], {}), '(2)\n', (5894, 5897), False, 'import time\n')] |
#!/usr/bin/env python3
import json
import platform
def make_sys_report(anonymous=False, skipUsb=False, skipPackages=False):
def get_usb():
try:
import usb.core
except ImportError:
yield "NoLib"
return
speeds = ["Unknown", "Low", "Full", "High", "Super", "SuperPlus"]
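        # index into this list with the pyusb device speed code reported by dev.speed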
format_hex = lambda val: f"{val:#0{6}x}"
try:
for dev in usb.core.find(find_all=True):
yield {
"port": dev.port_number,
"vendor_id": format_hex(dev.idVendor),
"product_id": format_hex(dev.idProduct),
"speed": speeds[dev.speed] if dev.speed < len(speeds) else dev.speed
}
except usb.core.NoBackendError:
yield "No USB backend found"
result = {
"architecture": ' '.join(platform.architecture()).strip(),
"machine": platform.machine(),
"platform": platform.platform(),
"processor": platform.processor(),
"python_build": ' '.join(platform.python_build()).strip(),
"python_compiler": platform.python_compiler(),
"python_implementation": platform.python_implementation(),
"python_version": platform.python_version(),
"release": platform.release(),
"system": platform.system(),
"version": platform.version(),
"win32_ver": ' '.join(platform.win32_ver()).strip(),
}
if not skipPackages:
from pip._internal.operations.freeze import freeze
result["packages"] = list(freeze(local_only=True))
if not skipUsb:
result["usb"] = list(get_usb())
if not anonymous:
result["uname"] = ' '.join(platform.uname()).strip(),
return result
if __name__ == "__main__":
data = make_sys_report()
with open("log_system_information.json", "w") as f:
json.dump(data, f, indent=4)
print(json.dumps(data, indent=4))
print("System info gathered successfully - saved as \"log_system_information.json\"")
| [
"platform.python_implementation",
"platform.version",
"platform.win32_ver",
"platform.platform",
"json.dumps",
"platform.python_build",
"platform.uname",
"platform.python_version",
"platform.release",
"platform.system",
"platform.architecture",
"platform.processor",
"pip._internal.operations.freeze.freeze",
"platform.machine",
"platform.python_compiler",
"json.dump"
] | [((928, 946), 'platform.machine', 'platform.machine', ([], {}), '()\n', (944, 946), False, 'import platform\n'), ((968, 987), 'platform.platform', 'platform.platform', ([], {}), '()\n', (985, 987), False, 'import platform\n'), ((1010, 1030), 'platform.processor', 'platform.processor', ([], {}), '()\n', (1028, 1030), False, 'import platform\n'), ((1126, 1152), 'platform.python_compiler', 'platform.python_compiler', ([], {}), '()\n', (1150, 1152), False, 'import platform\n'), ((1187, 1219), 'platform.python_implementation', 'platform.python_implementation', ([], {}), '()\n', (1217, 1219), False, 'import platform\n'), ((1247, 1272), 'platform.python_version', 'platform.python_version', ([], {}), '()\n', (1270, 1272), False, 'import platform\n'), ((1293, 1311), 'platform.release', 'platform.release', ([], {}), '()\n', (1309, 1311), False, 'import platform\n'), ((1331, 1348), 'platform.system', 'platform.system', ([], {}), '()\n', (1346, 1348), False, 'import platform\n'), ((1369, 1387), 'platform.version', 'platform.version', ([], {}), '()\n', (1385, 1387), False, 'import platform\n'), ((1884, 1912), 'json.dump', 'json.dump', (['data', 'f'], {'indent': '(4)'}), '(data, f, indent=4)\n', (1893, 1912), False, 'import json\n'), ((1924, 1950), 'json.dumps', 'json.dumps', (['data'], {'indent': '(4)'}), '(data, indent=4)\n', (1934, 1950), False, 'import json\n'), ((1575, 1598), 'pip._internal.operations.freeze.freeze', 'freeze', ([], {'local_only': '(True)'}), '(local_only=True)\n', (1581, 1598), False, 'from pip._internal.operations.freeze import freeze\n'), ((875, 898), 'platform.architecture', 'platform.architecture', ([], {}), '()\n', (896, 898), False, 'import platform\n'), ((1065, 1088), 'platform.python_build', 'platform.python_build', ([], {}), '()\n', (1086, 1088), False, 'import platform\n'), ((1419, 1439), 'platform.win32_ver', 'platform.win32_ver', ([], {}), '()\n', (1437, 1439), False, 'import platform\n'), ((1717, 1733), 'platform.uname', 'platform.uname', ([], {}), '()\n', (1731, 1733), False, 'import platform\n')] |
"""
Utils for creating xdelta patches.
"""
import logging
from subprocess import check_output, CalledProcessError
from shutil import copyfile
from os import remove, path
class PatchChecksumError(Exception):
def __init__(self, message, errors):
        super(PatchChecksumError, self).__init__(message)
        self.errors = errors
class Patch:
# TODO: Abstract out the need for "edited" by just copying the original
# file.
def __init__(self, original, filename, edited=None, xdelta_dir='.'):
self.original = original
self.edited = edited
self.filename = filename
# Need to have this absolute path for xdelta3 to be found.
self.xdelta_path = path.join(xdelta_dir, 'xdelta3')
# self.xdelta_path = 'xdelta3'
def create(self):
if self.edited is None:
            raise Exception('cannot create a patch: no edited file was provided')
cmd = [
self.xdelta_path,
'-f',
'-s',
self.original,
self.edited,
self.filename,
]
print(cmd)
logging.info(cmd)
try:
check_output(cmd)
except CalledProcessError as e:
raise Exception(e.output)
def apply(self):
if not self.edited:
copyfile(self.original, self.original + "_temp")
self.edited = self.original
self.original = self.original + "_temp"
cmd = [
self.xdelta_path,
'-f',
'-d',
'-s',
self.original,
self.filename,
self.edited,
]
logging.info(cmd)
try:
check_output(cmd)
except CalledProcessError:
raise PatchChecksumError('Target file had incorrect checksum', [])
finally:
if self.original.endswith('_temp'):
remove(self.original)
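
# Example usage (hypothetical file names; assumes an xdelta3 binary in xdelta_dir):
#   Patch('original.bin', 'update.xdelta', edited='modified.bin', xdelta_dir='.').create()   # build the patch
#   Patch('original.bin', 'update.xdelta', edited='restored.bin', xdelta_dir='.').apply()    # re-apply it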
| [
"subprocess.check_output",
"os.path.join",
"shutil.copyfile",
"logging.info",
"os.remove"
] | [((674, 706), 'os.path.join', 'path.join', (['xdelta_dir', '"""xdelta3"""'], {}), "(xdelta_dir, 'xdelta3')\n", (683, 706), False, 'from os import remove, path\n'), ((1027, 1044), 'logging.info', 'logging.info', (['cmd'], {}), '(cmd)\n', (1039, 1044), False, 'import logging\n'), ((1567, 1584), 'logging.info', 'logging.info', (['cmd'], {}), '(cmd)\n', (1579, 1584), False, 'import logging\n'), ((1070, 1087), 'subprocess.check_output', 'check_output', (['cmd'], {}), '(cmd)\n', (1082, 1087), False, 'from subprocess import check_output, CalledProcessError\n'), ((1228, 1276), 'shutil.copyfile', 'copyfile', (['self.original', "(self.original + '_temp')"], {}), "(self.original, self.original + '_temp')\n", (1236, 1276), False, 'from shutil import copyfile\n'), ((1610, 1627), 'subprocess.check_output', 'check_output', (['cmd'], {}), '(cmd)\n', (1622, 1627), False, 'from subprocess import check_output, CalledProcessError\n'), ((1823, 1844), 'os.remove', 'remove', (['self.original'], {}), '(self.original)\n', (1829, 1844), False, 'from os import remove, path\n')] |
from rotor import Rotor
import sys
import getopt
class Enigma:
def __init__(self, key, rotors):
self.key = list(key)
self.rotors = []
for i in range(0, len(rotors)):
self.rotors.append(Rotor(self.key[i], rotors[i]))
def encrypt(self, word):
cipher = ''
for i, char in enumerate(word.upper()):
distance = self.rotors[i % 2].get_distance(char)
cipher += self.rotors[2].rotate((i + 1) % 2, distance)
return cipher
def decrypt(self, cipher):
word = ''
for i, char in enumerate(cipher.upper()):
distance = self.rotors[2].get_distance(char)
word += self.rotors[i % 2].rotate((i + 1) % 2, distance)
return word
def print_help():
print("\ncommand line arguments:\n" +
"-h/--help: all possible options\n" +
"-k/--key KEY: rotor starting key\n" +
"-p/--phrase Phrase: phrase to encrypt/decrypt\n" +
"-d/--decrypt: enables decrypt default is encrypt\n" +
"--r1 ROTOR: sets rotor 1\n" +
"--r2 ROTOR: sets rotor 2\n" +
"--r3 ROTOR: sets rotor 3\n" +
"possible rotors are 50, 51, 60, 61, 70 and 71\n")
def main(argv):
try:
        opts, args = getopt.getopt(argv, "hk:p:d", ["help", "key=", "phrase=", "decrypt", "r1=", "r2=", "r3="])  # 'phrase=' takes an argument
except getopt.GetoptError:
print_help()
sys.exit(2)
key = ''
phrase = ''
encrypt = True
rotors = ['', '', '']
for opt, arg in opts:
if opt in ("-h", "--help"):
print_help()
sys.exit()
elif opt in ("-k", "--key"):
key = arg
elif opt in ("-p", "--phrase"):
phrase = arg
elif opt in ("-d", "--decrypt"):
encrypt = False
elif opt == "--r1":
rotors[0] = arg
elif opt == "--r2":
rotors[1] = arg
elif opt == "--r3":
rotors[2] = arg
if not key == '' and not phrase == '' and not rotors[0] == ''\
and not rotors[1] == '' and not rotors[2] == '':
machine = Enigma(key, rotors)
if encrypt:
print(machine.encrypt(phrase))
else:
print(machine.decrypt(phrase))
else:
print_help()
if __name__ == '__main__':
main(sys.argv[1:])
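
# Example invocation (hypothetical script name; the key needs one character per rotor):
#   python enigma.py --key ABC --phrase HELLOWORLD --r1 50 --r2 60 --r3 70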
| [
"rotor.Rotor",
"getopt.getopt",
"sys.exit"
] | [((1270, 1363), 'getopt.getopt', 'getopt.getopt', (['argv', '"""hk:p:d"""', "['help', 'key=', 'phrase', 'decrypt', 'r1=', 'r2=', 'r3=']"], {}), "(argv, 'hk:p:d', ['help', 'key=', 'phrase', 'decrypt', 'r1=',\n 'r2=', 'r3='])\n", (1283, 1363), False, 'import getopt\n'), ((1420, 1431), 'sys.exit', 'sys.exit', (['(2)'], {}), '(2)\n', (1428, 1431), False, 'import sys\n'), ((1605, 1615), 'sys.exit', 'sys.exit', ([], {}), '()\n', (1613, 1615), False, 'import sys\n'), ((227, 256), 'rotor.Rotor', 'Rotor', (['self.key[i]', 'rotors[i]'], {}), '(self.key[i], rotors[i])\n', (232, 256), False, 'from rotor import Rotor\n')] |
"""
Greedy Word Swap with Word Importance Ranking
===================================================
When WIR method is set to ``unk``, this is a reimplementation of the search
method from the paper: Is BERT Really Robust?
A Strong Baseline for Natural Language Attack on Text Classification and
Entailment by Jin et. al, 2019. See https://arxiv.org/abs/1907.11932 and
https://github.com/jind11/TextFooler.
"""
import numpy as np
import torch
from torch.nn.functional import softmax
from textattack.goal_function_results import GoalFunctionResultStatus
from textattack.search_methods import SearchMethod
from textattack.shared.validators import (
transformation_consists_of_word_swaps_and_deletions,
)
class GreedyWordSwapWIR(SearchMethod):
"""An attack that greedily chooses from a list of possible perturbations in
order of index, after ranking indices by importance.
Args:
wir_method: method for ranking most important words
"""
def __init__(self, wir_method="unk"):
self.wir_method = wir_method
def _get_index_order(self, initial_text):
"""Returns word indices of ``initial_text`` in descending order of
importance."""
len_text = len(initial_text.words)
if self.wir_method == "unk":
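            # "unk": importance of word i = goal-function score of the text with word i replaced by [UNK]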
leave_one_texts = [
initial_text.replace_word_at_index(i, "[UNK]") for i in range(len_text)
]
leave_one_results, search_over = self.get_goal_results(leave_one_texts)
index_scores = np.array([result.score for result in leave_one_results])
elif self.wir_method == "weighted-saliency":
# first, compute word saliency
leave_one_texts = [
initial_text.replace_word_at_index(i, "[UNK]") for i in range(len_text)
]
leave_one_results, search_over = self.get_goal_results(leave_one_texts)
saliency_scores = np.array([result.score for result in leave_one_results])
softmax_saliency_scores = softmax(
torch.Tensor(saliency_scores), dim=0
).numpy()
# compute the largest change in score we can find by swapping each word
delta_ps = []
for idx in range(len_text):
transformed_text_candidates = self.get_transformations(
initial_text,
original_text=initial_text,
indices_to_modify=[idx],
)
if not transformed_text_candidates:
# no valid synonym substitutions for this word
delta_ps.append(0.0)
continue
swap_results, _ = self.get_goal_results(transformed_text_candidates)
score_change = [result.score for result in swap_results]
max_score_change = np.max(score_change)
delta_ps.append(max_score_change)
index_scores = softmax_saliency_scores * np.array(delta_ps)
elif self.wir_method == "delete":
leave_one_texts = [
initial_text.delete_word_at_index(i) for i in range(len_text)
]
leave_one_results, search_over = self.get_goal_results(leave_one_texts)
index_scores = np.array([result.score for result in leave_one_results])
elif self.wir_method == "random":
index_order = np.arange(len_text)
np.random.shuffle(index_order)
search_over = False
else:
raise ValueError(f"Unsupported WIR method {self.wir_method}")
if self.wir_method != "random":
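            # argsort of the negated scores ranks indices from most to least important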
index_order = (-index_scores).argsort()
return index_order, search_over
def _perform_search(self, initial_result):
attacked_text = initial_result.attacked_text
# Sort words by order of importance
index_order, search_over = self._get_index_order(attacked_text)
i = 0
cur_result = initial_result
results = None
while i < len(index_order) and not search_over:
transformed_text_candidates = self.get_transformations(
cur_result.attacked_text,
original_text=initial_result.attacked_text,
indices_to_modify=[index_order[i]],
)
i += 1
if len(transformed_text_candidates) == 0:
continue
results, search_over = self.get_goal_results(transformed_text_candidates)
results = sorted(results, key=lambda x: -x.score)
# Skip swaps which don't improve the score
if results[0].score > cur_result.score:
cur_result = results[0]
else:
continue
# If we succeeded, return the index with best similarity.
if cur_result.goal_status == GoalFunctionResultStatus.SUCCEEDED:
best_result = cur_result
# @TODO: Use vectorwise operations
max_similarity = -float("inf")
for result in results:
if result.goal_status != GoalFunctionResultStatus.SUCCEEDED:
break
candidate = result.attacked_text
try:
similarity_score = candidate.attack_attrs["similarity_score"]
except KeyError:
# If the attack was run without any similarity metrics,
# candidates won't have a similarity score. In this
# case, break and return the candidate that changed
# the original score the most.
break
if similarity_score > max_similarity:
max_similarity = similarity_score
best_result = result
return best_result
return cur_result
def check_transformation_compatibility(self, transformation):
"""Since it ranks words by their importance, GreedyWordSwapWIR is
limited to word swap and deletion transformations."""
return transformation_consists_of_word_swaps_and_deletions(transformation)
def extra_repr_keys(self):
return ["wir_method"]
| [
"torch.Tensor",
"numpy.max",
"numpy.array",
"textattack.shared.validators.transformation_consists_of_word_swaps_and_deletions",
"numpy.arange",
"numpy.random.shuffle"
] | [((6129, 6196), 'textattack.shared.validators.transformation_consists_of_word_swaps_and_deletions', 'transformation_consists_of_word_swaps_and_deletions', (['transformation'], {}), '(transformation)\n', (6180, 6196), False, 'from textattack.shared.validators import transformation_consists_of_word_swaps_and_deletions\n'), ((1520, 1576), 'numpy.array', 'np.array', (['[result.score for result in leave_one_results]'], {}), '([result.score for result in leave_one_results])\n', (1528, 1576), True, 'import numpy as np\n'), ((1921, 1977), 'numpy.array', 'np.array', (['[result.score for result in leave_one_results]'], {}), '([result.score for result in leave_one_results])\n', (1929, 1977), True, 'import numpy as np\n'), ((2851, 2871), 'numpy.max', 'np.max', (['score_change'], {}), '(score_change)\n', (2857, 2871), True, 'import numpy as np\n'), ((2976, 2994), 'numpy.array', 'np.array', (['delta_ps'], {}), '(delta_ps)\n', (2984, 2994), True, 'import numpy as np\n'), ((3272, 3328), 'numpy.array', 'np.array', (['[result.score for result in leave_one_results]'], {}), '([result.score for result in leave_one_results])\n', (3280, 3328), True, 'import numpy as np\n'), ((3397, 3416), 'numpy.arange', 'np.arange', (['len_text'], {}), '(len_text)\n', (3406, 3416), True, 'import numpy as np\n'), ((3429, 3459), 'numpy.random.shuffle', 'np.random.shuffle', (['index_order'], {}), '(index_order)\n', (3446, 3459), True, 'import numpy as np\n'), ((2042, 2071), 'torch.Tensor', 'torch.Tensor', (['saliency_scores'], {}), '(saliency_scores)\n', (2054, 2071), False, 'import torch\n')] |
from lemur import database
def rotate_certificate(endpoint, new_cert):
"""
Rotates a certificate on a given endpoint.
    :param endpoint: endpoint whose certificate should be replaced
    :param new_cert: certificate to deploy to the endpoint
:return:
"""
# ensure that certificate is available for rotation
endpoint.source.plugin.update_endpoint(endpoint, new_cert)
endpoint.certificate = new_cert
database.update(endpoint)
| [
"lemur.database.update"
] | [((351, 376), 'lemur.database.update', 'database.update', (['endpoint'], {}), '(endpoint)\n', (366, 376), False, 'from lemur import database\n')] |
# coding: utf-8
# Copyright Luna Technology 2015
# <NAME> <<EMAIL>>
from __future__ import absolute_import
import os
from celery import Celery
# Set the default Django settings module for the 'celery' program
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pype.settings')
from django.conf import settings
from celery.signals import setup_logging
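# Connecting to the setup_logging signal prevents Celery from overriding the logging
# configuration; configure_logging below applies Django's LOGGING dict instead.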
@setup_logging.connect
def configure_logging(sender=None, **kwargs):
import logging
import logging.config
logging.config.dictConfig(settings.LOGGING)
app = Celery('pype')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
print('Request: {0!r}'.format(self.request))
| [
"os.environ.setdefault",
"celery.Celery",
"logging.config.dictConfig"
] | [((214, 278), 'os.environ.setdefault', 'os.environ.setdefault', (['"""DJANGO_SETTINGS_MODULE"""', '"""pype.settings"""'], {}), "('DJANGO_SETTINGS_MODULE', 'pype.settings')\n", (235, 278), False, 'import os\n'), ((524, 538), 'celery.Celery', 'Celery', (['"""pype"""'], {}), "('pype')\n", (530, 538), False, 'from celery import Celery\n'), ((473, 516), 'logging.config.dictConfig', 'logging.config.dictConfig', (['settings.LOGGING'], {}), '(settings.LOGGING)\n', (498, 516), False, 'import logging\n')] |
from gtrain import Model
import numpy as np
import tensorflow as tf
class NetForHypinv(Model):
"""
Implementaion of the crutial function for the HypINV algorithm.
Warning: Do not use this class but implement its subclass, for example see FCNetForHypinv
"""
def __init__(self, weights):
self.eval_session = None
self.grad_session = None
self.initial_x = None
self.center = None
self.weights = weights
self.out_for_eval = None #(going to be filled in build_for_eval method)
self.boundary_out_for_eval = None
self.trained_x = None
self.training_class_index = None
self.x = None # tf variable for inversion (going to be filled in build method)
self.x_for_eval = None
self.out = None
        self.boundary_out = None # list of tf tensors, one per class, with the softmax class-vs-others output
self.loss = None
self.boundary_loss = None
self.t = None #target
self.boundary_t = None
self.x1 = None # this attribute is used of purposes of modified loss function
def __del__(self):
        # close all open sessions
if self.eval_session:
self.eval_session.close()
if self.grad_session:
self.grad_session.close()
def set_initial_x(self, initial_x):
# sets starting point for the search of the closest point
self.initial_x = initial_x
def set_center(self, center):
# sets center point
self.center = center / np.linalg.norm(center)
def set_x1(self, x1):
# sets x1 to which we want to found the cosest point x0
self.x1 = x1
def has_modified_loss(self):
pass # if uses modified loss then it returns true
def set_initial_x_in_session(self, x, session=None):
# sets initial x in certain session
if session is None:
self.set_initial_x(x)
else:
pass # overide this method
def eval(self, x):
if len(x.shape) == 1:
x = x.reshape((1,len(x)))
if not self.eval_session:
self.eval_session = tf.Session()
with self.eval_session.as_default():
self.build_for_eval()
self.eval_session.run(tf.global_variables_initializer())
return self.eval_session.run(self.out_for_eval, {self.x_for_eval: x})
def boundary_eval(self, x, class_index):
        # evaluates the binary classification of class_index against the other classes
if not self.eval_session:
self.eval_session = tf.Session()
with self.eval_session.as_default():
self.build_for_eval()
self.eval_session.run(tf.global_variables_initializer())
return self.eval_session.run(self.boundary_out_for_eval[class_index], {self.x_for_eval: x})
def get_boundary_gradient(self, x, class_index):
# computes gradient of the boundary for specified class_index
if not self.grad_session:
self.grad_session = tf.Session()
with self.grad_session.as_default():
self.build_for_eval()
self.grad = list()
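                # build one gradient op per output class once; later calls reuse the cached session and ops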
for i in range(len(self.weights[0][-1][0])):
self.grad.append(tf.gradients(self.boundary_out_for_eval[i], [self.x_for_eval])[0])
self.grad_x = self.x_for_eval
return self.grad_session.run(self.grad[class_index], {self.grad_x: x})
def build_for_eval(self):
# build model for evaluation
pass #override this method (fill self.out_for_eval)
def train_ended(self, session):
self.trained_x = session.run(self.x)
def build(self):
# build model for training
pass #override this method (fill self.x, self.out)
def set_train_class(self, class_index):
# sets class of the x1
self.training_class_index = class_index
# overided methods from gtrain.Model
def get_loss(self):
if self.training_class_index is None:
return self.loss
else:
return self.boundary_loss[self.training_class_index]
def get_hits(self):
return self.get_loss()
def get_count(self):
return self.get_loss()
def get_train_summaries(self):
return []
def get_dev_summaries(self):
return []
def get_placeholders(self):
if self.training_class_index is None:
return [self.t]
else:
return [self.boundary_t]
#________________________________________EXAMPLES_OF_NetForHypinv_CLASS_____________________________________________
class FCNetForHypinv(NetForHypinv):
"""
    Implementation of a multilayer perceptron to be used in the HypINV rule extraction algorithm
"""
def __init__(self, weights, function=tf.sigmoid, use_modified_loss=False, mu = 0.01):
"""
:param weights: saved as [list of weights for layers][0 weight, 1 bias]
:param function: tf function for propagation. For example tf.nn.sigmoid, tf.atan
        :param use_modified_loss: whether the modified loss should be used
        :param mu: factor of the penalty terms that control the distance between x0 and x1 and
            the distance of x1 from the boundary
"""
super(FCNetForHypinv, self).__init__(weights)
self.function = function
self.layer_sizes = [len(self.weights[0][0])]
for bias in weights[1]:
self.layer_sizes.append(len(bias))
self.num_classes = self.layer_sizes[-1]
self.initial_x = np.zeros([1, self.layer_sizes[0]])
self.use_modified_loss = use_modified_loss
self.mu = mu
def build(self):
with tf.name_scope("Input"):
if self.center is not None:
self.point_weights = tf.Variable(self.center.reshape((1, len(self.center))),
dtype=tf.float64, trainable=False, name="Boundary_point")
init_factor = self.center
init_factor[init_factor!=0] = self.initial_x[init_factor!=0] / self.center[init_factor!=0]
self.factor = tf.Variable(init_factor.reshape((1, len(self.center))),
dtype=tf.float64, name="factor")
else:
self.point_weights = tf.Variable(self.initial_x.reshape((1, len(self.initial_x))),
dtype=tf.float64, trainable=False, name="Boundary_point")
                self.factor = tf.Variable(np.ones((1, len(self.initial_x))),  # center is None in this branch
                                            dtype=tf.float64, name="factor")
self.x = self.point_weights * self.factor
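            # only the factor is trainable (the boundary point is fixed), so the inversion
            # searches over a per-feature rescaling of the starting point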
with tf.name_scope("Target"):
if self.use_modified_loss:
x1_constant = tf.constant(self.x1.reshape((1, len(self.x1))), dtype=tf.float64)
self.t = tf.placeholder(tf.float64, shape=[None, self.num_classes], name="Target_output")
self.boundary_t = tf.placeholder(tf.float64, shape=[None, 2], name="Target_boundary_output")
with tf.name_scope("FC_net"):
flowing_x = self.x
for i, _ in enumerate(self.weights[0]):
with tf.name_scope("layer_{}".format(i)):
W = tf.constant(self.weights[0][i], name="Weight_{}".format(i), dtype=tf.float64)
b = tf.constant(self.weights[1][i], name="Bias_{}".format(i), dtype=tf.float64)
flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b))
y = flowing_x
self.out = tf.nn.softmax(y)
with tf.name_scope("Binary_class_output"):
self.boundary_out = list()
for i in range(self.num_classes):
mask = True+np.zeros(self.num_classes, dtype=np.bool)
mask[i] = False
x0 = self.out[:,i]
x1 = tf.reduce_max(tf.boolean_mask(self.out, mask, axis=1), axis=1)
s = x0+x1
out = tf.stack([x0/s, x1/s], axis=1)
self.boundary_out.append(out)
with tf.name_scope("Loss_functions"):
self.loss = tf.reduce_mean(
tf.nn.l2_loss(self.out-self.t),
name="loss")
with tf.name_scope("Binary_class_loss"):
self.boundary_loss = list()
if self.use_modified_loss:
for i in range(self.num_classes):
self.boundary_loss.append(
tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i]-self.boundary_t)) +
self.mu * tf.reduce_mean(tf.nn.l2_loss(self.x - x1_constant))
)
else:
for i in range(self.num_classes):
self.boundary_loss.append(
tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i] - self.boundary_t))
)
def set_initial_x_in_session(self, x, session=None):
if session is None:
self.set_initial_x(x)
else:
if self.center is None:
session.run([
self.point_weights.assign(x.reshape((1, len(x)))),
self.factor.assign(np.ones((1, len(x))))
])
else:
init_factor = self.center
init_factor[init_factor!=0] = x[init_factor!=0] / self.center[init_factor!=0]
session.run(self.factor.assign(init_factor.reshape((1,len(init_factor)))))
def build_for_eval(self):
with tf.name_scope("eInput"):
self.x_for_eval = tf.placeholder(tf.float32, shape=[None, len(self.weights[0][0])])#tf.Variable(tf.constant(self.initial_x), name="Boundary_point")
with tf.name_scope("eFC_net"):
flowing_x = self.x_for_eval
for i, _ in enumerate(self.weights[0]):
W = tf.constant(self.weights[0][i], name="eWeight_{}".format(i))
b = tf.constant(self.weights[1][i], name="eBias_{}".format(i))
flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b), name="elayer_{}".format(i))
y = flowing_x
self.out_for_eval = tf.nn.softmax(y)
with tf.name_scope("Binary_class_output"):
self.boundary_out_for_eval = list()
for i in range(self.num_classes):
mask = True+np.zeros(self.num_classes, dtype=np.bool)
mask[i] = False
x0 = self.out_for_eval[:, i]
x1 = tf.reduce_max(tf.boolean_mask(self.out_for_eval, mask, axis=1), axis=1)
s = x0+x1
out = tf.stack([x0/s, x1/s], axis=1)
self.boundary_out_for_eval.append(out)
def has_modified_loss(self):
return self.use_modified_loss
def name(self):
return "Hypinv_FC_net_{}".format("-".join([str(ls) for ls in self.layer_sizes]))
class FCNetForHypinvBinary(FCNetForHypinv):
"""
    Implementation of a multilayer perceptron to be used in the HypINV rule extraction algorithm.
    The task is simplified to the binary classification of base_class_index against the other classes.
"""
def __init__(self, weights, base_class_index, function=tf.sigmoid, use_modified_loss=False, mu = 0.01):
"""
:param weights: saved as [list of weights for layers][0 weight, 1 bias]
:param base_class_index: an index of the class which is used as the base class
:param function: tf function for propagation. For example tf.nn.sigmoid, tf.atan
        :param use_modified_loss: whether the modified loss should be used
        :param mu: factor of the penalty terms that control the distance between x0 and x1 and
            the distance of x1 from the boundary
"""
super(FCNetForHypinvBinary, self).__init__(weights)
self.base_class_index = base_class_index
self.function = function
self.layer_sizes = [len(self.weights[0][0])]
for bias in weights[1]:
self.layer_sizes.append(len(bias))
self.num_classes = self.layer_sizes[-1]
self.initial_x = np.zeros([1, self.layer_sizes[0]])
self.use_modified_loss = use_modified_loss
self.mu = mu
def build(self):
with tf.name_scope("Input"):
self.init_point = tf.Variable(self.initial_x.reshape((1, len(self.initial_x))),
dtype=tf.float64, trainable=False, name="Boundary_point")
self.factor = tf.Variable(np.ones((1, len(self.initial_x))),
dtype=tf.float64, name="factor")
self.x = self.init_point * self.factor
with tf.name_scope("Target"):
if self.use_modified_loss:
x1_constant = tf.constant(self.x1.reshape((1, len(self.x1))), dtype=tf.float64)
self.t = tf.placeholder(tf.float64, shape=[None, 2], name="Target_output")
self.boundary_t = tf.placeholder(tf.float64, shape=[None, 2], name="Target_boundary_output")
with tf.name_scope("FC_net"):
flowing_x = self.x
for i, _ in enumerate(self.weights[0]):
with tf.name_scope("layer_{}".format(i)):
W = tf.constant(self.weights[0][i], name="Weight_{}".format(i), dtype=tf.float64)
b = tf.constant(self.weights[1][i], name="Bias_{}".format(i), dtype=tf.float64)
flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b))
y = flowing_x
full_out = tf.nn.softmax(y)
with tf.name_scope("Binary_class_output"):
self.boundary_out = list()
mask = True+np.zeros(self.num_classes, dtype=np.bool)
mask[self.base_class_index] = False
x0 = full_out[:,self.base_class_index]
x1 = tf.reduce_max(tf.boolean_mask(full_out, mask, axis=1), axis=1)
s = x0+x1
self.out = tf.stack([x0/s, x1/s], axis=1)
self.boundary_out.append(self.out)
self.boundary_out.append(tf.stack([x1/s, x0/s], axis=1))
with tf.name_scope("Loss_functions"):
self.loss = tf.reduce_mean(
tf.nn.l2_loss(self.out-self.t),
name="loss")
with tf.name_scope("Binary_class_loss"):
self.boundary_loss = list()
if self.use_modified_loss:
for i in range(2):
self.boundary_loss.append(
tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i]-self.boundary_t)) +
self.mu * tf.reduce_mean(tf.nn.l2_loss(self.x - x1_constant))
)
else:
for i in range(2):
self.boundary_loss.append(
tf.reduce_mean(tf.nn.l2_loss(self.boundary_out[i] - self.boundary_t))
)
def build_for_eval(self):
with tf.name_scope("eInput"):
self.x_for_eval = tf.placeholder(tf.float32, shape=[None, len(self.weights[0][0])])#tf.Variable(tf.constant(self.initial_x), name="Boundary_point")
with tf.name_scope("eFC_net"):
flowing_x = self.x_for_eval
for i, _ in enumerate(self.weights[0]):
W = tf.constant(self.weights[0][i], name="eWeight_{}".format(i))
b = tf.constant(self.weights[1][i], name="eBias_{}".format(i))
flowing_x = self.function(tf.nn.xw_plus_b(flowing_x, W, b), name="elayer_{}".format(i))
y = flowing_x
full_out = tf.nn.softmax(y)
with tf.name_scope("Binary_class_output"):
self.boundary_out_for_eval = list()
mask = True+np.zeros(self.num_classes, dtype=np.bool)
mask[self.base_class_index] = False
x0 = full_out[:, self.base_class_index]
x1 = tf.reduce_max(tf.boolean_mask(full_out, mask, axis=1), axis=1)
s = x0+x1
self.out_for_eval = tf.stack([x0/s, x1/s], axis=1)
self.boundary_out_for_eval.append(self.out_for_eval)
self.boundary_out_for_eval.append(tf.stack([x1/s, x0/s], axis=1))
def get_boundary_gradient(self, x, class_index):
if not self.grad_session:
self.grad_session = tf.Session()
with self.grad_session.as_default():
self.build_for_eval()
self.grad = list()
for i in range(2):
self.grad.append(tf.gradients(self.boundary_out_for_eval[i], [self.x_for_eval])[0])
self.grad_x = self.x_for_eval
return self.grad_session.run(self.grad[class_index], {self.grad_x: x})
def has_modified_loss(self):
return self.use_modified_loss
def name(self):
return "Hypinv_FC_net_{}".format("-".join([str(ls) for ls in self.layer_sizes]))
| [
"tensorflow.boolean_mask",
"tensorflow.Session",
"tensorflow.placeholder",
"tensorflow.nn.xw_plus_b",
"tensorflow.nn.l2_loss",
"tensorflow.global_variables_initializer",
"tensorflow.gradients",
"numpy.zeros",
"tensorflow.name_scope",
"tensorflow.nn.softmax",
"numpy.linalg.norm",
"tensorflow.stack"
] | [((5555, 5589), 'numpy.zeros', 'np.zeros', (['[1, self.layer_sizes[0]]'], {}), '([1, self.layer_sizes[0]])\n', (5563, 5589), True, 'import numpy as np\n'), ((12110, 12144), 'numpy.zeros', 'np.zeros', (['[1, self.layer_sizes[0]]'], {}), '([1, self.layer_sizes[0]])\n', (12118, 12144), True, 'import numpy as np\n'), ((1532, 1554), 'numpy.linalg.norm', 'np.linalg.norm', (['center'], {}), '(center)\n', (1546, 1554), True, 'import numpy as np\n'), ((2134, 2146), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2144, 2146), True, 'import tensorflow as tf\n'), ((2566, 2578), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (2576, 2578), True, 'import tensorflow as tf\n'), ((3025, 3037), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (3035, 3037), True, 'import tensorflow as tf\n'), ((5697, 5719), 'tensorflow.name_scope', 'tf.name_scope', (['"""Input"""'], {}), "('Input')\n", (5710, 5719), True, 'import tensorflow as tf\n'), ((6710, 6733), 'tensorflow.name_scope', 'tf.name_scope', (['"""Target"""'], {}), "('Target')\n", (6723, 6733), True, 'import tensorflow as tf\n'), ((6891, 6976), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float64'], {'shape': '[None, self.num_classes]', 'name': '"""Target_output"""'}), "(tf.float64, shape=[None, self.num_classes], name='Target_output'\n )\n", (6905, 6976), True, 'import tensorflow as tf\n'), ((7002, 7076), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float64'], {'shape': '[None, 2]', 'name': '"""Target_boundary_output"""'}), "(tf.float64, shape=[None, 2], name='Target_boundary_output')\n", (7016, 7076), True, 'import tensorflow as tf\n'), ((7090, 7113), 'tensorflow.name_scope', 'tf.name_scope', (['"""FC_net"""'], {}), "('FC_net')\n", (7103, 7113), True, 'import tensorflow as tf\n'), ((7587, 7603), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['y'], {}), '(y)\n', (7600, 7603), True, 'import tensorflow as tf\n'), ((7617, 7653), 'tensorflow.name_scope', 'tf.name_scope', (['"""Binary_class_output"""'], {}), "('Binary_class_output')\n", (7630, 7653), True, 'import tensorflow as tf\n'), ((8099, 8130), 'tensorflow.name_scope', 'tf.name_scope', (['"""Loss_functions"""'], {}), "('Loss_functions')\n", (8112, 8130), True, 'import tensorflow as tf\n'), ((8262, 8296), 'tensorflow.name_scope', 'tf.name_scope', (['"""Binary_class_loss"""'], {}), "('Binary_class_loss')\n", (8275, 8296), True, 'import tensorflow as tf\n'), ((9547, 9570), 'tensorflow.name_scope', 'tf.name_scope', (['"""eInput"""'], {}), "('eInput')\n", (9560, 9570), True, 'import tensorflow as tf\n'), ((9745, 9769), 'tensorflow.name_scope', 'tf.name_scope', (['"""eFC_net"""'], {}), "('eFC_net')\n", (9758, 9769), True, 'import tensorflow as tf\n'), ((10185, 10201), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['y'], {}), '(y)\n', (10198, 10201), True, 'import tensorflow as tf\n'), ((10215, 10251), 'tensorflow.name_scope', 'tf.name_scope', (['"""Binary_class_output"""'], {}), "('Binary_class_output')\n", (10228, 10251), True, 'import tensorflow as tf\n'), ((12253, 12275), 'tensorflow.name_scope', 'tf.name_scope', (['"""Input"""'], {}), "('Input')\n", (12266, 12275), True, 'import tensorflow as tf\n'), ((12671, 12694), 'tensorflow.name_scope', 'tf.name_scope', (['"""Target"""'], {}), "('Target')\n", (12684, 12694), True, 'import tensorflow as tf\n'), ((12852, 12917), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float64'], {'shape': '[None, 2]', 'name': '"""Target_output"""'}), "(tf.float64, shape=[None, 2], name='Target_output')\n", (12866, 12917), True, 'import tensorflow as 
tf\n'), ((12948, 13022), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float64'], {'shape': '[None, 2]', 'name': '"""Target_boundary_output"""'}), "(tf.float64, shape=[None, 2], name='Target_boundary_output')\n", (12962, 13022), True, 'import tensorflow as tf\n'), ((13036, 13059), 'tensorflow.name_scope', 'tf.name_scope', (['"""FC_net"""'], {}), "('FC_net')\n", (13049, 13059), True, 'import tensorflow as tf\n'), ((13533, 13549), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['y'], {}), '(y)\n', (13546, 13549), True, 'import tensorflow as tf\n'), ((13563, 13599), 'tensorflow.name_scope', 'tf.name_scope', (['"""Binary_class_output"""'], {}), "('Binary_class_output')\n", (13576, 13599), True, 'import tensorflow as tf\n'), ((13930, 13964), 'tensorflow.stack', 'tf.stack', (['[x0 / s, x1 / s]'], {'axis': '(1)'}), '([x0 / s, x1 / s], axis=1)\n', (13938, 13964), True, 'import tensorflow as tf\n'), ((14090, 14121), 'tensorflow.name_scope', 'tf.name_scope', (['"""Loss_functions"""'], {}), "('Loss_functions')\n", (14103, 14121), True, 'import tensorflow as tf\n'), ((14253, 14287), 'tensorflow.name_scope', 'tf.name_scope', (['"""Binary_class_loss"""'], {}), "('Binary_class_loss')\n", (14266, 14287), True, 'import tensorflow as tf\n'), ((14912, 14935), 'tensorflow.name_scope', 'tf.name_scope', (['"""eInput"""'], {}), "('eInput')\n", (14925, 14935), True, 'import tensorflow as tf\n'), ((15110, 15134), 'tensorflow.name_scope', 'tf.name_scope', (['"""eFC_net"""'], {}), "('eFC_net')\n", (15123, 15134), True, 'import tensorflow as tf\n'), ((15541, 15557), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['y'], {}), '(y)\n', (15554, 15557), True, 'import tensorflow as tf\n'), ((15571, 15607), 'tensorflow.name_scope', 'tf.name_scope', (['"""Binary_class_output"""'], {}), "('Binary_class_output')\n", (15584, 15607), True, 'import tensorflow as tf\n'), ((15957, 15991), 'tensorflow.stack', 'tf.stack', (['[x0 / s, x1 / s]'], {'axis': '(1)'}), '([x0 / s, x1 / s], axis=1)\n', (15965, 15991), True, 'import tensorflow as tf\n'), ((16251, 16263), 'tensorflow.Session', 'tf.Session', ([], {}), '()\n', (16261, 16263), True, 'import tensorflow as tf\n'), ((2268, 2301), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2299, 2301), True, 'import tensorflow as tf\n'), ((2700, 2733), 'tensorflow.global_variables_initializer', 'tf.global_variables_initializer', ([], {}), '()\n', (2731, 2733), True, 'import tensorflow as tf\n'), ((8009, 8043), 'tensorflow.stack', 'tf.stack', (['[x0 / s, x1 / s]'], {'axis': '(1)'}), '([x0 / s, x1 / s], axis=1)\n', (8017, 8043), True, 'import tensorflow as tf\n'), ((8188, 8220), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.out - self.t)'], {}), '(self.out - self.t)\n', (8201, 8220), True, 'import tensorflow as tf\n'), ((10635, 10669), 'tensorflow.stack', 'tf.stack', (['[x0 / s, x1 / s]'], {'axis': '(1)'}), '([x0 / s, x1 / s], axis=1)\n', (10643, 10669), True, 'import tensorflow as tf\n'), ((13664, 13705), 'numpy.zeros', 'np.zeros', (['self.num_classes'], {'dtype': 'np.bool'}), '(self.num_classes, dtype=np.bool)\n', (13672, 13705), True, 'import numpy as np\n'), ((13836, 13875), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['full_out', 'mask'], {'axis': '(1)'}), '(full_out, mask, axis=1)\n', (13851, 13875), True, 'import tensorflow as tf\n'), ((14045, 14079), 'tensorflow.stack', 'tf.stack', (['[x1 / s, x0 / s]'], {'axis': '(1)'}), '([x1 / s, x0 / s], axis=1)\n', (14053, 14079), True, 'import tensorflow as tf\n'), ((14179, 14211), 
'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.out - self.t)'], {}), '(self.out - self.t)\n', (14192, 14211), True, 'import tensorflow as tf\n'), ((15681, 15722), 'numpy.zeros', 'np.zeros', (['self.num_classes'], {'dtype': 'np.bool'}), '(self.num_classes, dtype=np.bool)\n', (15689, 15722), True, 'import numpy as np\n'), ((15854, 15893), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['full_out', 'mask'], {'axis': '(1)'}), '(full_out, mask, axis=1)\n', (15869, 15893), True, 'import tensorflow as tf\n'), ((16099, 16133), 'tensorflow.stack', 'tf.stack', (['[x1 / s, x0 / s]'], {'axis': '(1)'}), '([x1 / s, x0 / s], axis=1)\n', (16107, 16133), True, 'import tensorflow as tf\n'), ((7768, 7809), 'numpy.zeros', 'np.zeros', (['self.num_classes'], {'dtype': 'np.bool'}), '(self.num_classes, dtype=np.bool)\n', (7776, 7809), True, 'import numpy as np\n'), ((7912, 7951), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['self.out', 'mask'], {'axis': '(1)'}), '(self.out, mask, axis=1)\n', (7927, 7951), True, 'import tensorflow as tf\n'), ((10065, 10097), 'tensorflow.nn.xw_plus_b', 'tf.nn.xw_plus_b', (['flowing_x', 'W', 'b'], {}), '(flowing_x, W, b)\n', (10080, 10097), True, 'import tensorflow as tf\n'), ((10375, 10416), 'numpy.zeros', 'np.zeros', (['self.num_classes'], {'dtype': 'np.bool'}), '(self.num_classes, dtype=np.bool)\n', (10383, 10416), True, 'import numpy as np\n'), ((10529, 10577), 'tensorflow.boolean_mask', 'tf.boolean_mask', (['self.out_for_eval', 'mask'], {'axis': '(1)'}), '(self.out_for_eval, mask, axis=1)\n', (10544, 10577), True, 'import tensorflow as tf\n'), ((15430, 15462), 'tensorflow.nn.xw_plus_b', 'tf.nn.xw_plus_b', (['flowing_x', 'W', 'b'], {}), '(flowing_x, W, b)\n', (15445, 15462), True, 'import tensorflow as tf\n'), ((7504, 7536), 'tensorflow.nn.xw_plus_b', 'tf.nn.xw_plus_b', (['flowing_x', 'W', 'b'], {}), '(flowing_x, W, b)\n', (7519, 7536), True, 'import tensorflow as tf\n'), ((13450, 13482), 'tensorflow.nn.xw_plus_b', 'tf.nn.xw_plus_b', (['flowing_x', 'W', 'b'], {}), '(flowing_x, W, b)\n', (13465, 13482), True, 'import tensorflow as tf\n'), ((3258, 3320), 'tensorflow.gradients', 'tf.gradients', (['self.boundary_out_for_eval[i]', '[self.x_for_eval]'], {}), '(self.boundary_out_for_eval[i], [self.x_for_eval])\n', (3270, 3320), True, 'import tensorflow as tf\n'), ((8830, 8883), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.boundary_out[i] - self.boundary_t)'], {}), '(self.boundary_out[i] - self.boundary_t)\n', (8843, 8883), True, 'import tensorflow as tf\n'), ((14791, 14844), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.boundary_out[i] - self.boundary_t)'], {}), '(self.boundary_out[i] - self.boundary_t)\n', (14804, 14844), True, 'import tensorflow as tf\n'), ((16458, 16520), 'tensorflow.gradients', 'tf.gradients', (['self.boundary_out_for_eval[i]', '[self.x_for_eval]'], {}), '(self.boundary_out_for_eval[i], [self.x_for_eval])\n', (16470, 16520), True, 'import tensorflow as tf\n'), ((8513, 8566), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.boundary_out[i] - self.boundary_t)'], {}), '(self.boundary_out[i] - self.boundary_t)\n', (8526, 8566), True, 'import tensorflow as tf\n'), ((14489, 14542), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.boundary_out[i] - self.boundary_t)'], {}), '(self.boundary_out[i] - self.boundary_t)\n', (14502, 14542), True, 'import tensorflow as tf\n'), ((8617, 8652), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.x - x1_constant)'], {}), '(self.x - x1_constant)\n', (8630, 8652), True, 'import tensorflow as tf\n'), ((14593, 
14628), 'tensorflow.nn.l2_loss', 'tf.nn.l2_loss', (['(self.x - x1_constant)'], {}), '(self.x - x1_constant)\n', (14606, 14628), True, 'import tensorflow as tf\n')] |
import numpy
from keras.preprocessing import sequence
from keras.preprocessing.text import Tokenizer
from src.support import support
class PhraseManager:
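    """Load train/test phrases and expose them as padded word-level or
    fixed-length character-level index vectors for Keras models."""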
def __init__(self, configuration):
self.train_phrases, self.train_labels = self._read_train_phrases()
self.test_phrases, self.test_labels = self._read_test_phrases()
self.configuration = configuration
self.tokenizer = None
def get_phrases_train(self):
return self.train_phrases, self.train_labels
def get_phrases_test(self):
return self.test_phrases, self.test_labels
    def get_dataset(self, level=None):
if level == support.WORD_LEVEL:
return self._word_process(self.configuration[support.WORD_MAX_LENGTH])
elif level == support.CHAR_LEVEL:
return self._char_process(self.configuration[support.CHAR_MAX_LENGTH])
else:
return self.train_phrases, self.train_labels, self.test_phrases, self.test_labels
def _word_process(self, word_max_length):
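        # Fit a word tokenizer on the training phrases, map both splits to
        # integer sequences, and pad/truncate them to word_max_length.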
tokenizer = Tokenizer(num_words=self.configuration[support.QUANTITY_WORDS])
tokenizer.fit_on_texts(self.train_phrases)
x_train_sequence = tokenizer.texts_to_sequences(self.train_phrases)
x_test_sequence = tokenizer.texts_to_sequences(self.test_phrases)
x_train = sequence.pad_sequences(x_train_sequence, maxlen=word_max_length, padding='post', truncating='post')
x_test = sequence.pad_sequences(x_test_sequence, maxlen=word_max_length, padding='post', truncating='post')
y_train = numpy.array(self.train_labels)
y_test = numpy.array(self.test_labels)
return x_train, y_train, x_test, y_test
def _char_process(self, max_length):
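        # Encode each phrase as a fixed-length vector of character indices
        # using the one-hot alphabet dictionary (index 0 is reserved for 'UNK').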
embedding_w, embedding_dic = self._onehot_dic_build()
x_train = []
for i in range(len(self.train_phrases)):
doc_vec = self._doc_process(self.train_phrases[i].lower(), embedding_dic, max_length)
x_train.append(doc_vec)
x_train = numpy.asarray(x_train, dtype='int64')
y_train = numpy.array(self.train_labels, dtype='float32')
x_test = []
        for i in range(len(self.test_phrases)):
            doc_vec = self._doc_process(self.test_phrases[i].lower(), embedding_dic, max_length)
x_test.append(doc_vec)
x_test = numpy.asarray(x_test, dtype='int64')
y_test = numpy.array(self.test_labels, dtype='float32')
del embedding_w, embedding_dic
return x_train, y_train, x_test, y_test
def _doc_process(self, doc, embedding_dic, max_length):
min_length = min(max_length, len(doc))
doc_vec = numpy.zeros(max_length, dtype='int64')
for j in range(min_length):
if doc[j] in embedding_dic:
doc_vec[j] = embedding_dic[doc[j]]
else:
doc_vec[j] = embedding_dic['UNK']
return doc_vec
def _onehot_dic_build(self):
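        # Build a char -> index dictionary plus the matching one-hot embedding
        # matrix; the first (all-zero) row stands for unknown characters.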
alphabet = "abcdefghijklmnopqrstuvwxyz0123456789-,;.!?:'\"/\\|_@#$%^&*~`+-=<>()[]{}"
embedding_dic = {}
embedding_w = []
embedding_dic["UNK"] = 0
embedding_w.append(numpy.zeros(len(alphabet), dtype='float32'))
for i, alpha in enumerate(alphabet):
onehot = numpy.zeros(len(alphabet), dtype='float32')
embedding_dic[alpha] = i + 1
onehot[i] = 1
embedding_w.append(onehot)
embedding_w = numpy.array(embedding_w, dtype='float32')
return embedding_w, embedding_dic
def get_tokenizer(self):
if self.tokenizer is None:
self.tokenizer = Tokenizer(num_words=self.configuration[support.QUANTITY_WORDS])
self.tokenizer.fit_on_texts(self.train_phrases)
return self.tokenizer
def text_to_vector_word(self, text):
vector_sequence = self.get_tokenizer().texts_to_sequences([text])
result = sequence.pad_sequences(vector_sequence, maxlen=self.configuration[support.WORD_MAX_LENGTH], padding='post', truncating='post')
return result
def text_to_vector_word_all(self, texts):
vector_sequence = self.get_tokenizer().texts_to_sequences(texts)
result = sequence.pad_sequences(vector_sequence, maxlen=self.configuration[support.WORD_MAX_LENGTH], padding='post', truncating='post')
return result
def text_to_vector_char(self, text):
embedding_dictionary = self._get_embedding_dictionary()
max_length = self.configuration[support.CHAR_MAX_LENGTH]
min_length = min(max_length, len(text))
text_vector = numpy.zeros(max_length, dtype="int64")
for j in range(min_length):
if text[j] in embedding_dictionary:
text_vector[j] = embedding_dictionary[text[j]]
else:
text_vector[j] = embedding_dictionary["UNK"]
return text_vector
def text_to_vector_char_all(self, texts):
embedding_w, embedding_dic = self._onehot_dic_build()
result = []
for i in range(len(texts)):
doc_vec = self.text_to_vector_char(texts[i].lower())
result.append(doc_vec)
result = numpy.asarray(result, dtype="int64")
del embedding_w, embedding_dic
return result
def _get_embedding_dictionary(self):
return {'UNK': 0, 'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5, 'f': 6, 'g': 7, 'h': 8, 'i': 9, 'j': 10,
'k': 11, 'l': 12,
'm': 13, 'n': 14, 'o': 15, 'p': 16, 'q': 17, 'r': 18, 's': 19, 't': 20, 'u': 21, 'v': 22,
'w': 23, 'x': 24,
'y': 25, 'z': 26, '0': 27, '1': 28, '2': 29, '3': 30, '4': 31, '5': 32, '6': 33, '7': 34,
'8': 35, '9': 36,
'-': 60, ',': 38, ';': 39, '.': 40, '!': 41, '?': 42, ':': 43, "'": 44, '"': 45, '/': 46,
'\\': 47, '|': 48,
'_': 49, '@': 50, '#': 51, '$': 52, '%': 53, '^': 54, '&': 55, '*': 56, '~': 57, '`': 58,
'+': 59, '=': 61,
'<': 62, '>': 63, '(': 64, ')': 65, '[': 66, ']': 67, '{': 68, '}': 69}
def get_classes(self):
pass
def _read_train_phrases(self):
pass
def _read_test_phrases(self):
pass
class Phrase:
def __init__(self, text, classification):
self.text = text
self.classification = classification
def __str__(self):
return "Classification: " + str(self.classification) + "\nText: " + self.text
| [
"keras.preprocessing.text.Tokenizer",
"numpy.asarray",
"numpy.array",
"numpy.zeros",
"keras.preprocessing.sequence.pad_sequences"
] | [((1055, 1118), 'keras.preprocessing.text.Tokenizer', 'Tokenizer', ([], {'num_words': 'self.configuration[support.QUANTITY_WORDS]'}), '(num_words=self.configuration[support.QUANTITY_WORDS])\n', (1064, 1118), False, 'from keras.preprocessing.text import Tokenizer\n'), ((1338, 1442), 'keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (['x_train_sequence'], {'maxlen': 'word_max_length', 'padding': '"""post"""', 'truncating': '"""post"""'}), "(x_train_sequence, maxlen=word_max_length, padding=\n 'post', truncating='post')\n", (1360, 1442), False, 'from keras.preprocessing import sequence\n'), ((1455, 1558), 'keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (['x_test_sequence'], {'maxlen': 'word_max_length', 'padding': '"""post"""', 'truncating': '"""post"""'}), "(x_test_sequence, maxlen=word_max_length, padding=\n 'post', truncating='post')\n", (1477, 1558), False, 'from keras.preprocessing import sequence\n'), ((1572, 1602), 'numpy.array', 'numpy.array', (['self.train_labels'], {}), '(self.train_labels)\n', (1583, 1602), False, 'import numpy\n'), ((1620, 1649), 'numpy.array', 'numpy.array', (['self.test_labels'], {}), '(self.test_labels)\n', (1631, 1649), False, 'import numpy\n'), ((2025, 2062), 'numpy.asarray', 'numpy.asarray', (['x_train'], {'dtype': '"""int64"""'}), "(x_train, dtype='int64')\n", (2038, 2062), False, 'import numpy\n'), ((2081, 2128), 'numpy.array', 'numpy.array', (['self.train_labels'], {'dtype': '"""float32"""'}), "(self.train_labels, dtype='float32')\n", (2092, 2128), False, 'import numpy\n'), ((2349, 2385), 'numpy.asarray', 'numpy.asarray', (['x_test'], {'dtype': '"""int64"""'}), "(x_test, dtype='int64')\n", (2362, 2385), False, 'import numpy\n'), ((2403, 2449), 'numpy.array', 'numpy.array', (['self.test_labels'], {'dtype': '"""float32"""'}), "(self.test_labels, dtype='float32')\n", (2414, 2449), False, 'import numpy\n'), ((2663, 2701), 'numpy.zeros', 'numpy.zeros', (['max_length'], {'dtype': '"""int64"""'}), "(max_length, dtype='int64')\n", (2674, 2701), False, 'import numpy\n'), ((3445, 3486), 'numpy.array', 'numpy.array', (['embedding_w'], {'dtype': '"""float32"""'}), "(embedding_w, dtype='float32')\n", (3456, 3486), False, 'import numpy\n'), ((3911, 4042), 'keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (['vector_sequence'], {'maxlen': 'self.configuration[support.WORD_MAX_LENGTH]', 'padding': '"""post"""', 'truncating': '"""post"""'}), "(vector_sequence, maxlen=self.configuration[support.\n WORD_MAX_LENGTH], padding='post', truncating='post')\n", (3933, 4042), False, 'from keras.preprocessing import sequence\n'), ((4197, 4328), 'keras.preprocessing.sequence.pad_sequences', 'sequence.pad_sequences', (['vector_sequence'], {'maxlen': 'self.configuration[support.WORD_MAX_LENGTH]', 'padding': '"""post"""', 'truncating': '"""post"""'}), "(vector_sequence, maxlen=self.configuration[support.\n WORD_MAX_LENGTH], padding='post', truncating='post')\n", (4219, 4328), False, 'from keras.preprocessing import sequence\n'), ((4587, 4625), 'numpy.zeros', 'numpy.zeros', (['max_length'], {'dtype': '"""int64"""'}), "(max_length, dtype='int64')\n", (4598, 4625), False, 'import numpy\n'), ((5163, 5199), 'numpy.asarray', 'numpy.asarray', (['result'], {'dtype': '"""int64"""'}), "(result, dtype='int64')\n", (5176, 5199), False, 'import numpy\n'), ((3623, 3686), 'keras.preprocessing.text.Tokenizer', 'Tokenizer', ([], {'num_words': 'self.configuration[support.QUANTITY_WORDS]'}), 
'(num_words=self.configuration[support.QUANTITY_WORDS])\n', (3632, 3686), False, 'from keras.preprocessing.text import Tokenizer\n')] |
from setuptools import setup
version = "1.0.0"
long_description = """
PayPalHttp is a generic http client designed to be used with code-generated projects.
"""
setup(
name="paypalhttp",
long_description=long_description,
version=version,
author="PayPal",
packages=["paypalhttp", "paypalhttp/testutils", "paypalhttp/serializers"],
install_requires=['requests>=2.0.0', 'six>=1.0.0', 'pyopenssl>=0.15'],
license="MIT",
classifiers=[
'Intended Audience :: Developers',
'Natural Language :: English',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: PyPy',
'Topic :: Software Development :: Libraries :: Python Modules'
],
)
| [
"setuptools.setup"
] | [((164, 1079), 'setuptools.setup', 'setup', ([], {'name': '"""paypalhttp"""', 'long_description': 'long_description', 'version': 'version', 'author': '"""PayPal"""', 'packages': "['paypalhttp', 'paypalhttp/testutils', 'paypalhttp/serializers']", 'install_requires': "['requests>=2.0.0', 'six>=1.0.0', 'pyopenssl>=0.15']", 'license': '"""MIT"""', 'classifiers': "['Intended Audience :: Developers', 'Natural Language :: English',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Software Development :: Libraries :: Python Modules']"}), "(name='paypalhttp', long_description=long_description, version=version,\n author='PayPal', packages=['paypalhttp', 'paypalhttp/testutils',\n 'paypalhttp/serializers'], install_requires=['requests>=2.0.0',\n 'six>=1.0.0', 'pyopenssl>=0.15'], license='MIT', classifiers=[\n 'Intended Audience :: Developers', 'Natural Language :: English',\n 'Operating System :: OS Independent', 'Programming Language :: Python',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 2.6',\n 'Programming Language :: Python :: 2.7',\n 'Programming Language :: Python :: 3',\n 'Programming Language :: Python :: 3.3',\n 'Programming Language :: Python :: 3.4',\n 'Programming Language :: Python :: 3.5',\n 'Programming Language :: Python :: 3.7',\n 'Programming Language :: Python :: Implementation :: PyPy',\n 'Topic :: Software Development :: Libraries :: Python Modules'])\n", (169, 1079), False, 'from setuptools import setup\n')] |
import gym
import gym.spaces as spaces
import sys
import socket
from _thread import *
import os
import numpy as np
import pandas as pd
import math as m
import time
import random
class NetEnv(gym.Env):
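    """Gym-style environment that runs a TCP server and exchanges messages
    with two clients (a main controller and a camera), each on its own thread."""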
def __init__(self):
        # Robot state values that will be exchanged with the connected clients
self.robot_state = None
self.pos = None
self.message = np.array(12345, dtype=np.float32)
# Socket Conneciton
# MAC find WiFi IP - ipconfig getifaddr en0
HOST = '192.168.1.29'
# Port to listen on (non-privileged ports are > 1023)
PORT = 65432
self.ThreadCount = 0
print('Connected')
# Set up Socket
self.s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
self.s.bind((HOST, PORT))
except socket.error as e:
print(str(e))
print('Waiting for connection[s]...')
self.s.listen()
self.start = 0
# Wait for client[s] to join socket
self.conn1, addr1 = self.s.accept()
print('Connected by: ', addr1)
start_new_thread(self.main_client_thread, (self.conn1, ))
self.conn2, addr2 = self.s.accept()
print('Connected by: ', addr2)
start_new_thread(self.cam_client_thread, (self.conn2, ))
def main_client_thread(self, conn):
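        # Receive one message from the main client, log it, and acknowledge with 'Hi'.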
data = conn.recv(1024)
print('Main client says: {}'.format(data.decode('utf-8')))
conn.sendall(str.encode('Hi'))
def cam_client_thread(self, conn):
data = conn.recv(1024)
print('Cam client says: {}'.format(data.decode('utf-8')))
conn.sendall(str.encode('Hi'))
def step(self):
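        # One environment step: exchange one message with each connected client.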
self.main_client_thread(self.conn1)
self.cam_client_thread(self.conn2)
if __name__ == '__main__':
# Construct MAIN SERVER object
env = NetEnv()
# WALK
for i in range(100000):
env.step()
print('Done')
| [
"numpy.array",
"socket.socket"
] | [((347, 380), 'numpy.array', 'np.array', (['(12345)'], {'dtype': 'np.float32'}), '(12345, dtype=np.float32)\n', (355, 380), True, 'import numpy as np\n'), ((621, 670), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_STREAM'], {}), '(socket.AF_INET, socket.SOCK_STREAM)\n', (634, 670), False, 'import socket\n')] |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
import uuid
from datetime import datetime
import logging
from math import nan
from unittest.mock import Mock, patch
import numpy as np
import pandas as pd
import tests.test_app
import superset.viz as viz
from superset import app
from superset.constants import NULL_STRING
from superset.exceptions import SpatialException
from superset.utils.core import DTTM_ALIAS
from .base_tests import SupersetTestCase
from .utils import load_fixture
logger = logging.getLogger(__name__)
class BaseVizTestCase(SupersetTestCase):
def test_constructor_exception_no_datasource(self):
form_data = {}
datasource = None
with self.assertRaises(Exception):
viz.BaseViz(datasource, form_data)
def test_process_metrics(self):
# test TableViz metrics in correct order
form_data = {
"url_params": {},
"row_limit": 500,
"metric": "sum__SP_POP_TOTL",
"entity": "country_code",
"secondary_metric": "sum__SP_POP_TOTL",
"granularity_sqla": "year",
"page_length": 0,
"all_columns": [],
"viz_type": "table",
"since": "2014-01-01",
"until": "2014-01-02",
"metrics": ["sum__SP_POP_TOTL", "SUM(SE_PRM_NENR_MA)", "SUM(SP_URB_TOTL)"],
"country_fieldtype": "cca3",
"percent_metrics": ["count"],
"slice_id": 74,
"time_grain_sqla": None,
"order_by_cols": [],
"groupby": ["country_name"],
"compare_lag": "10",
"limit": "25",
"datasource": "2__table",
"table_timestamp_format": "%Y-%m-%d %H:%M:%S",
"markup_type": "markdown",
"where": "",
"compare_suffix": "o10Y",
}
datasource = Mock()
datasource.type = "table"
test_viz = viz.BaseViz(datasource, form_data)
expect_metric_labels = [
u"sum__SP_POP_TOTL",
u"SUM(SE_PRM_NENR_MA)",
u"SUM(SP_URB_TOTL)",
u"count",
]
self.assertEqual(test_viz.metric_labels, expect_metric_labels)
self.assertEqual(test_viz.all_metrics, expect_metric_labels)
def test_get_df_returns_empty_df(self):
form_data = {"dummy": 123}
query_obj = {"granularity": "day"}
datasource = self.get_datasource_mock()
test_viz = viz.BaseViz(datasource, form_data)
result = test_viz.get_df(query_obj)
self.assertEqual(type(result), pd.DataFrame)
self.assertTrue(result.empty)
def test_get_df_handles_dttm_col(self):
form_data = {"dummy": 123}
query_obj = {"granularity": "day"}
results = Mock()
results.query = Mock()
results.status = Mock()
results.error_message = Mock()
datasource = Mock()
datasource.type = "table"
datasource.query = Mock(return_value=results)
mock_dttm_col = Mock()
datasource.get_column = Mock(return_value=mock_dttm_col)
test_viz = viz.BaseViz(datasource, form_data)
test_viz.df_metrics_to_num = Mock()
test_viz.get_fillna_for_columns = Mock(return_value=0)
results.df = pd.DataFrame(data={DTTM_ALIAS: ["1960-01-01 05:00:00"]})
datasource.offset = 0
mock_dttm_col = Mock()
datasource.get_column = Mock(return_value=mock_dttm_col)
mock_dttm_col.python_date_format = "epoch_ms"
result = test_viz.get_df(query_obj)
import logging
logger.info(result)
pd.testing.assert_series_equal(
result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 5, 0)], name=DTTM_ALIAS)
)
mock_dttm_col.python_date_format = None
result = test_viz.get_df(query_obj)
pd.testing.assert_series_equal(
result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 5, 0)], name=DTTM_ALIAS)
)
datasource.offset = 1
result = test_viz.get_df(query_obj)
pd.testing.assert_series_equal(
result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 6, 0)], name=DTTM_ALIAS)
)
datasource.offset = 0
results.df = pd.DataFrame(data={DTTM_ALIAS: ["1960-01-01"]})
mock_dttm_col.python_date_format = "%Y-%m-%d"
result = test_viz.get_df(query_obj)
pd.testing.assert_series_equal(
result[DTTM_ALIAS], pd.Series([datetime(1960, 1, 1, 0, 0)], name=DTTM_ALIAS)
)
def test_cache_timeout(self):
datasource = self.get_datasource_mock()
datasource.cache_timeout = 0
test_viz = viz.BaseViz(datasource, form_data={})
self.assertEqual(0, test_viz.cache_timeout)
datasource.cache_timeout = 156
test_viz = viz.BaseViz(datasource, form_data={})
self.assertEqual(156, test_viz.cache_timeout)
datasource.cache_timeout = None
datasource.database.cache_timeout = 0
self.assertEqual(0, test_viz.cache_timeout)
datasource.database.cache_timeout = 1666
self.assertEqual(1666, test_viz.cache_timeout)
datasource.database.cache_timeout = None
test_viz = viz.BaseViz(datasource, form_data={})
self.assertEqual(app.config["CACHE_DEFAULT_TIMEOUT"], test_viz.cache_timeout)
class TableVizTestCase(SupersetTestCase):
def test_get_data_applies_percentage(self):
form_data = {
"groupby": ["groupA", "groupB"],
"metrics": [
{
"expressionType": "SIMPLE",
"aggregate": "SUM",
"label": "SUM(value1)",
"column": {"column_name": "value1", "type": "DOUBLE"},
},
"count",
"avg__C",
],
"percent_metrics": [
{
"expressionType": "SIMPLE",
"aggregate": "SUM",
"label": "SUM(value1)",
"column": {"column_name": "value1", "type": "DOUBLE"},
},
"avg__B",
],
}
datasource = self.get_datasource_mock()
df = pd.DataFrame(
{
"SUM(value1)": [15, 20, 25, 40],
"avg__B": [10, 20, 5, 15],
"avg__C": [11, 22, 33, 44],
"count": [6, 7, 8, 9],
"groupA": ["A", "B", "C", "C"],
"groupB": ["x", "x", "y", "z"],
}
)
test_viz = viz.TableViz(datasource, form_data)
data = test_viz.get_data(df)
# Check method correctly transforms data and computes percents
self.assertEqual(
[
"groupA",
"groupB",
"SUM(value1)",
"count",
"avg__C",
"%SUM(value1)",
"%avg__B",
],
list(data["columns"]),
)
expected = [
{
"groupA": "A",
"groupB": "x",
"SUM(value1)": 15,
"count": 6,
"avg__C": 11,
"%SUM(value1)": 0.15,
"%avg__B": 0.2,
},
{
"groupA": "B",
"groupB": "x",
"SUM(value1)": 20,
"count": 7,
"avg__C": 22,
"%SUM(value1)": 0.2,
"%avg__B": 0.4,
},
{
"groupA": "C",
"groupB": "y",
"SUM(value1)": 25,
"count": 8,
"avg__C": 33,
"%SUM(value1)": 0.25,
"%avg__B": 0.1,
},
{
"groupA": "C",
"groupB": "z",
"SUM(value1)": 40,
"count": 9,
"avg__C": 44,
"%SUM(value1)": 0.4,
"%avg__B": 0.3,
},
]
self.assertEqual(expected, data["records"])
def test_parse_adhoc_filters(self):
form_data = {
"metrics": [
{
"expressionType": "SIMPLE",
"aggregate": "SUM",
"label": "SUM(value1)",
"column": {"column_name": "value1", "type": "DOUBLE"},
}
],
"adhoc_filters": [
{
"expressionType": "SIMPLE",
"clause": "WHERE",
"subject": "value2",
"operator": ">",
"comparator": "100",
},
{
"expressionType": "SIMPLE",
"clause": "HAVING",
"subject": "SUM(value1)",
"operator": "<",
"comparator": "10",
},
{
"expressionType": "SQL",
"clause": "HAVING",
"sqlExpression": "SUM(value1) > 5",
},
{
"expressionType": "SQL",
"clause": "WHERE",
"sqlExpression": "value3 in ('North America')",
},
],
}
datasource = self.get_datasource_mock()
test_viz = viz.TableViz(datasource, form_data)
query_obj = test_viz.query_obj()
self.assertEqual(
[{"col": "value2", "val": "100", "op": ">"}], query_obj["filter"]
)
self.assertEqual(
[{"op": "<", "val": "10", "col": "SUM(value1)"}],
query_obj["extras"]["having_druid"],
)
self.assertEqual("(value3 in ('North America'))", query_obj["extras"]["where"])
self.assertEqual("(SUM(value1) > 5)", query_obj["extras"]["having"])
def test_adhoc_filters_overwrite_legacy_filters(self):
form_data = {
"metrics": [
{
"expressionType": "SIMPLE",
"aggregate": "SUM",
"label": "SUM(value1)",
"column": {"column_name": "value1", "type": "DOUBLE"},
}
],
"adhoc_filters": [
{
"expressionType": "SIMPLE",
"clause": "WHERE",
"subject": "value2",
"operator": ">",
"comparator": "100",
},
{
"expressionType": "SQL",
"clause": "WHERE",
"sqlExpression": "value3 in ('North America')",
},
],
"having": "SUM(value1) > 5",
}
datasource = self.get_datasource_mock()
test_viz = viz.TableViz(datasource, form_data)
query_obj = test_viz.query_obj()
self.assertEqual(
[{"col": "value2", "val": "100", "op": ">"}], query_obj["filter"]
)
self.assertEqual([], query_obj["extras"]["having_druid"])
self.assertEqual("(value3 in ('North America'))", query_obj["extras"]["where"])
self.assertEqual("", query_obj["extras"]["having"])
def test_query_obj_merges_percent_metrics(self):
datasource = self.get_datasource_mock()
form_data = {
"metrics": ["sum__A", "count", "avg__C"],
"percent_metrics": ["sum__A", "avg__B", "max__Y"],
}
test_viz = viz.TableViz(datasource, form_data)
query_obj = test_viz.query_obj()
self.assertEqual(
["sum__A", "count", "avg__C", "avg__B", "max__Y"], query_obj["metrics"]
)
def test_query_obj_throws_columns_and_metrics(self):
datasource = self.get_datasource_mock()
form_data = {"all_columns": ["A", "B"], "metrics": ["x", "y"]}
with self.assertRaises(Exception):
test_viz = viz.TableViz(datasource, form_data)
test_viz.query_obj()
del form_data["metrics"]
form_data["groupby"] = ["B", "C"]
with self.assertRaises(Exception):
test_viz = viz.TableViz(datasource, form_data)
test_viz.query_obj()
@patch("superset.viz.BaseViz.query_obj")
def test_query_obj_merges_all_columns(self, super_query_obj):
datasource = self.get_datasource_mock()
form_data = {
"all_columns": ["colA", "colB", "colC"],
"order_by_cols": ['["colA", "colB"]', '["colC"]'],
}
super_query_obj.return_value = {
"columns": ["colD", "colC"],
"groupby": ["colA", "colB"],
}
test_viz = viz.TableViz(datasource, form_data)
query_obj = test_viz.query_obj()
self.assertEqual(form_data["all_columns"], query_obj["columns"])
self.assertEqual([], query_obj["groupby"])
self.assertEqual([["colA", "colB"], ["colC"]], query_obj["orderby"])
def test_query_obj_uses_sortby(self):
datasource = self.get_datasource_mock()
form_data = {
"metrics": ["colA", "colB"],
"order_desc": False,
}
def run_test(metric):
form_data["timeseries_limit_metric"] = metric
test_viz = viz.TableViz(datasource, form_data)
query_obj = test_viz.query_obj()
self.assertEqual(["colA", "colB", metric], query_obj["metrics"])
self.assertEqual([(metric, True)], query_obj["orderby"])
run_test("simple_metric")
run_test(
{
"label": "adhoc_metric",
"expressionType": "SIMPLE",
"aggregate": "SUM",
"column": {"column_name": "sort_column",},
}
)
def test_should_be_timeseries_raises_when_no_granularity(self):
datasource = self.get_datasource_mock()
form_data = {"include_time": True}
with self.assertRaises(Exception):
test_viz = viz.TableViz(datasource, form_data)
test_viz.should_be_timeseries()
def test_adhoc_metric_with_sortby(self):
metrics = [
{
"expressionType": "SIMPLE",
"aggregate": "SUM",
"label": "sum_value",
"column": {"column_name": "value1", "type": "DOUBLE"},
}
]
form_data = {
"metrics": metrics,
"timeseries_limit_metric": {
"expressionType": "SIMPLE",
"aggregate": "SUM",
"label": "SUM(value1)",
"column": {"column_name": "value1", "type": "DOUBLE"},
},
"order_desc": False,
}
df = pd.DataFrame({"SUM(value1)": [15], "sum_value": [15]})
datasource = self.get_datasource_mock()
test_viz = viz.TableViz(datasource, form_data)
data = test_viz.get_data(df)
self.assertEqual(["sum_value"], data["columns"])
class DistBarVizTestCase(SupersetTestCase):
def test_groupby_nulls(self):
form_data = {
"metrics": ["votes"],
"adhoc_filters": [],
"groupby": ["toppings"],
"columns": [],
}
datasource = self.get_datasource_mock()
df = pd.DataFrame(
{
"toppings": ["cheese", "pepperoni", "anchovies", None],
"votes": [3, 5, 1, 2],
}
)
test_viz = viz.DistributionBarViz(datasource, form_data)
data = test_viz.get_data(df)[0]
self.assertEqual("votes", data["key"])
expected_values = [
{"x": "pepperoni", "y": 5},
{"x": "cheese", "y": 3},
{"x": NULL_STRING, "y": 2},
{"x": "anchovies", "y": 1},
]
self.assertEqual(expected_values, data["values"])
def test_groupby_nans(self):
form_data = {
"metrics": ["count"],
"adhoc_filters": [],
"groupby": ["beds"],
"columns": [],
}
datasource = self.get_datasource_mock()
df = pd.DataFrame({"beds": [0, 1, nan, 2], "count": [30, 42, 3, 29]})
test_viz = viz.DistributionBarViz(datasource, form_data)
data = test_viz.get_data(df)[0]
self.assertEqual("count", data["key"])
expected_values = [
{"x": "1.0", "y": 42},
{"x": "0.0", "y": 30},
{"x": "2.0", "y": 29},
{"x": NULL_STRING, "y": 3},
]
self.assertEqual(expected_values, data["values"])
def test_column_nulls(self):
form_data = {
"metrics": ["votes"],
"adhoc_filters": [],
"groupby": ["toppings"],
"columns": ["role"],
}
datasource = self.get_datasource_mock()
df = pd.DataFrame(
{
"toppings": ["cheese", "pepperoni", "cheese", "pepperoni"],
"role": ["engineer", "engineer", None, None],
"votes": [3, 5, 1, 2],
}
)
test_viz = viz.DistributionBarViz(datasource, form_data)
data = test_viz.get_data(df)
expected = [
{
"key": NULL_STRING,
"values": [{"x": "pepperoni", "y": 2}, {"x": "cheese", "y": 1}],
},
{
"key": "engineer",
"values": [{"x": "pepperoni", "y": 5}, {"x": "cheese", "y": 3}],
},
]
self.assertEqual(expected, data)
class PairedTTestTestCase(SupersetTestCase):
def test_get_data_transforms_dataframe(self):
form_data = {
"groupby": ["groupA", "groupB", "groupC"],
"metrics": ["metric1", "metric2", "metric3"],
}
datasource = self.get_datasource_mock()
# Test data
raw = {}
raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300]
raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"]
raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"]
raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"]
raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9]
raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90]
raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900]
df = pd.DataFrame(raw)
pairedTTestViz = viz.viz_types["paired_ttest"](datasource, form_data)
data = pairedTTestViz.get_data(df)
# Check method correctly transforms data
expected = {
"metric1": [
{
"values": [
{"x": 100, "y": 1},
{"x": 200, "y": 2},
{"x": 300, "y": 3},
],
"group": ("a1", "a2", "a3"),
},
{
"values": [
{"x": 100, "y": 4},
{"x": 200, "y": 5},
{"x": 300, "y": 6},
],
"group": ("b1", "b2", "b3"),
},
{
"values": [
{"x": 100, "y": 7},
{"x": 200, "y": 8},
{"x": 300, "y": 9},
],
"group": ("c1", "c2", "c3"),
},
],
"metric2": [
{
"values": [
{"x": 100, "y": 10},
{"x": 200, "y": 20},
{"x": 300, "y": 30},
],
"group": ("a1", "a2", "a3"),
},
{
"values": [
{"x": 100, "y": 40},
{"x": 200, "y": 50},
{"x": 300, "y": 60},
],
"group": ("b1", "b2", "b3"),
},
{
"values": [
{"x": 100, "y": 70},
{"x": 200, "y": 80},
{"x": 300, "y": 90},
],
"group": ("c1", "c2", "c3"),
},
],
"metric3": [
{
"values": [
{"x": 100, "y": 100},
{"x": 200, "y": 200},
{"x": 300, "y": 300},
],
"group": ("a1", "a2", "a3"),
},
{
"values": [
{"x": 100, "y": 400},
{"x": 200, "y": 500},
{"x": 300, "y": 600},
],
"group": ("b1", "b2", "b3"),
},
{
"values": [
{"x": 100, "y": 700},
{"x": 200, "y": 800},
{"x": 300, "y": 900},
],
"group": ("c1", "c2", "c3"),
},
],
}
self.assertEqual(data, expected)
def test_get_data_empty_null_keys(self):
form_data = {"groupby": [], "metrics": ["", None]}
datasource = self.get_datasource_mock()
# Test data
raw = {}
raw[DTTM_ALIAS] = [100, 200, 300]
raw[""] = [1, 2, 3]
raw[None] = [10, 20, 30]
df = pd.DataFrame(raw)
pairedTTestViz = viz.viz_types["paired_ttest"](datasource, form_data)
data = pairedTTestViz.get_data(df)
# Check method correctly transforms data
expected = {
"N/A": [
{
"values": [
{"x": 100, "y": 1},
{"x": 200, "y": 2},
{"x": 300, "y": 3},
],
"group": "All",
}
],
"NULL": [
{
"values": [
{"x": 100, "y": 10},
{"x": 200, "y": 20},
{"x": 300, "y": 30},
],
"group": "All",
}
],
}
self.assertEqual(data, expected)
class PartitionVizTestCase(SupersetTestCase):
@patch("superset.viz.BaseViz.query_obj")
def test_query_obj_time_series_option(self, super_query_obj):
datasource = self.get_datasource_mock()
form_data = {}
test_viz = viz.PartitionViz(datasource, form_data)
super_query_obj.return_value = {}
query_obj = test_viz.query_obj()
self.assertFalse(query_obj["is_timeseries"])
test_viz.form_data["time_series_option"] = "agg_sum"
query_obj = test_viz.query_obj()
self.assertTrue(query_obj["is_timeseries"])
def test_levels_for_computes_levels(self):
raw = {}
raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300]
raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"]
raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"]
raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"]
raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9]
raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90]
raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900]
df = pd.DataFrame(raw)
groups = ["groupA", "groupB", "groupC"]
time_op = "agg_sum"
test_viz = viz.PartitionViz(Mock(), {})
levels = test_viz.levels_for(time_op, groups, df)
self.assertEqual(4, len(levels))
expected = {DTTM_ALIAS: 1800, "metric1": 45, "metric2": 450, "metric3": 4500}
self.assertEqual(expected, levels[0].to_dict())
expected = {
DTTM_ALIAS: {"a1": 600, "b1": 600, "c1": 600},
"metric1": {"a1": 6, "b1": 15, "c1": 24},
"metric2": {"a1": 60, "b1": 150, "c1": 240},
"metric3": {"a1": 600, "b1": 1500, "c1": 2400},
}
self.assertEqual(expected, levels[1].to_dict())
self.assertEqual(["groupA", "groupB"], levels[2].index.names)
self.assertEqual(["groupA", "groupB", "groupC"], levels[3].index.names)
time_op = "agg_mean"
levels = test_viz.levels_for(time_op, groups, df)
self.assertEqual(4, len(levels))
expected = {
DTTM_ALIAS: 200.0,
"metric1": 5.0,
"metric2": 50.0,
"metric3": 500.0,
}
self.assertEqual(expected, levels[0].to_dict())
expected = {
DTTM_ALIAS: {"a1": 200, "c1": 200, "b1": 200},
"metric1": {"a1": 2, "b1": 5, "c1": 8},
"metric2": {"a1": 20, "b1": 50, "c1": 80},
"metric3": {"a1": 200, "b1": 500, "c1": 800},
}
self.assertEqual(expected, levels[1].to_dict())
self.assertEqual(["groupA", "groupB"], levels[2].index.names)
self.assertEqual(["groupA", "groupB", "groupC"], levels[3].index.names)
def test_levels_for_diff_computes_difference(self):
raw = {}
raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300]
raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"]
raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"]
raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"]
raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9]
raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90]
raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900]
df = pd.DataFrame(raw)
groups = ["groupA", "groupB", "groupC"]
test_viz = viz.PartitionViz(Mock(), {})
time_op = "point_diff"
levels = test_viz.levels_for_diff(time_op, groups, df)
expected = {"metric1": 6, "metric2": 60, "metric3": 600}
self.assertEqual(expected, levels[0].to_dict())
expected = {
"metric1": {"a1": 2, "b1": 2, "c1": 2},
"metric2": {"a1": 20, "b1": 20, "c1": 20},
"metric3": {"a1": 200, "b1": 200, "c1": 200},
}
self.assertEqual(expected, levels[1].to_dict())
self.assertEqual(4, len(levels))
self.assertEqual(["groupA", "groupB", "groupC"], levels[3].index.names)
def test_levels_for_time_calls_process_data_and_drops_cols(self):
raw = {}
raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300]
raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"]
raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"]
raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"]
raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9]
raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90]
raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900]
df = pd.DataFrame(raw)
groups = ["groupA", "groupB", "groupC"]
test_viz = viz.PartitionViz(Mock(), {"groupby": groups})
def return_args(df_drop, aggregate):
return df_drop
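        # Stub process_data so it simply returns the (column-dropped) frame unchanged.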
test_viz.process_data = Mock(side_effect=return_args)
levels = test_viz.levels_for_time(groups, df)
self.assertEqual(4, len(levels))
cols = [DTTM_ALIAS, "metric1", "metric2", "metric3"]
self.assertEqual(sorted(cols), sorted(levels[0].columns.tolist()))
cols += ["groupA"]
self.assertEqual(sorted(cols), sorted(levels[1].columns.tolist()))
cols += ["groupB"]
self.assertEqual(sorted(cols), sorted(levels[2].columns.tolist()))
cols += ["groupC"]
self.assertEqual(sorted(cols), sorted(levels[3].columns.tolist()))
self.assertEqual(4, len(test_viz.process_data.mock_calls))
def test_nest_values_returns_hierarchy(self):
raw = {}
raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"]
raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"]
raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"]
raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9]
raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90]
raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900]
df = pd.DataFrame(raw)
test_viz = viz.PartitionViz(Mock(), {})
groups = ["groupA", "groupB", "groupC"]
levels = test_viz.levels_for("agg_sum", groups, df)
nest = test_viz.nest_values(levels)
self.assertEqual(3, len(nest))
for i in range(0, 3):
self.assertEqual("metric" + str(i + 1), nest[i]["name"])
self.assertEqual(3, len(nest[0]["children"]))
self.assertEqual(1, len(nest[0]["children"][0]["children"]))
self.assertEqual(1, len(nest[0]["children"][0]["children"][0]["children"]))
def test_nest_procs_returns_hierarchy(self):
raw = {}
raw[DTTM_ALIAS] = [100, 200, 300, 100, 200, 300, 100, 200, 300]
raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"]
raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"]
raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"]
raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9]
raw["metric2"] = [10, 20, 30, 40, 50, 60, 70, 80, 90]
raw["metric3"] = [100, 200, 300, 400, 500, 600, 700, 800, 900]
df = pd.DataFrame(raw)
test_viz = viz.PartitionViz(Mock(), {})
groups = ["groupA", "groupB", "groupC"]
metrics = ["metric1", "metric2", "metric3"]
procs = {}
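        # Build one pivot table per grouping depth (0..3), indexed by timestamp.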
for i in range(0, 4):
df_drop = df.drop(groups[i:], 1)
pivot = df_drop.pivot_table(
index=DTTM_ALIAS, columns=groups[:i], values=metrics
)
procs[i] = pivot
nest = test_viz.nest_procs(procs)
self.assertEqual(3, len(nest))
for i in range(0, 3):
self.assertEqual("metric" + str(i + 1), nest[i]["name"])
self.assertEqual(None, nest[i].get("val"))
self.assertEqual(3, len(nest[0]["children"]))
self.assertEqual(3, len(nest[0]["children"][0]["children"]))
self.assertEqual(1, len(nest[0]["children"][0]["children"][0]["children"]))
self.assertEqual(
1, len(nest[0]["children"][0]["children"][0]["children"][0]["children"])
)
def test_get_data_calls_correct_method(self):
test_viz = viz.PartitionViz(Mock(), {})
df = Mock()
with self.assertRaises(ValueError):
test_viz.get_data(df)
test_viz.levels_for = Mock(return_value=1)
test_viz.nest_values = Mock(return_value=1)
test_viz.form_data["groupby"] = ["groups"]
test_viz.form_data["time_series_option"] = "not_time"
test_viz.get_data(df)
self.assertEqual("agg_sum", test_viz.levels_for.mock_calls[0][1][0])
test_viz.form_data["time_series_option"] = "agg_sum"
test_viz.get_data(df)
self.assertEqual("agg_sum", test_viz.levels_for.mock_calls[1][1][0])
test_viz.form_data["time_series_option"] = "agg_mean"
test_viz.get_data(df)
self.assertEqual("agg_mean", test_viz.levels_for.mock_calls[2][1][0])
test_viz.form_data["time_series_option"] = "point_diff"
test_viz.levels_for_diff = Mock(return_value=1)
test_viz.get_data(df)
self.assertEqual("point_diff", test_viz.levels_for_diff.mock_calls[0][1][0])
test_viz.form_data["time_series_option"] = "point_percent"
test_viz.get_data(df)
self.assertEqual("point_percent", test_viz.levels_for_diff.mock_calls[1][1][0])
test_viz.form_data["time_series_option"] = "point_factor"
test_viz.get_data(df)
self.assertEqual("point_factor", test_viz.levels_for_diff.mock_calls[2][1][0])
test_viz.levels_for_time = Mock(return_value=1)
test_viz.nest_procs = Mock(return_value=1)
test_viz.form_data["time_series_option"] = "adv_anal"
test_viz.get_data(df)
self.assertEqual(1, len(test_viz.levels_for_time.mock_calls))
self.assertEqual(1, len(test_viz.nest_procs.mock_calls))
test_viz.form_data["time_series_option"] = "time_series"
test_viz.get_data(df)
self.assertEqual("agg_sum", test_viz.levels_for.mock_calls[3][1][0])
self.assertEqual(7, len(test_viz.nest_values.mock_calls))
class RoseVisTestCase(SupersetTestCase):
def test_rose_vis_get_data(self):
raw = {}
t1 = pd.Timestamp("2000")
t2 = pd.Timestamp("2002")
t3 = pd.Timestamp("2004")
raw[DTTM_ALIAS] = [t1, t2, t3, t1, t2, t3, t1, t2, t3]
raw["groupA"] = ["a1", "a1", "a1", "b1", "b1", "b1", "c1", "c1", "c1"]
raw["groupB"] = ["a2", "a2", "a2", "b2", "b2", "b2", "c2", "c2", "c2"]
raw["groupC"] = ["a3", "a3", "a3", "b3", "b3", "b3", "c3", "c3", "c3"]
raw["metric1"] = [1, 2, 3, 4, 5, 6, 7, 8, 9]
df = pd.DataFrame(raw)
fd = {"metrics": ["metric1"], "groupby": ["groupA"]}
test_viz = viz.RoseViz(Mock(), fd)
test_viz.metrics = fd["metrics"]
res = test_viz.get_data(df)
expected = {
946684800000000000: [
{"time": t1, "value": 1, "key": ("a1",), "name": ("a1",)},
{"time": t1, "value": 4, "key": ("b1",), "name": ("b1",)},
{"time": t1, "value": 7, "key": ("c1",), "name": ("c1",)},
],
1009843200000000000: [
{"time": t2, "value": 2, "key": ("a1",), "name": ("a1",)},
{"time": t2, "value": 5, "key": ("b1",), "name": ("b1",)},
{"time": t2, "value": 8, "key": ("c1",), "name": ("c1",)},
],
1072915200000000000: [
{"time": t3, "value": 3, "key": ("a1",), "name": ("a1",)},
{"time": t3, "value": 6, "key": ("b1",), "name": ("b1",)},
{"time": t3, "value": 9, "key": ("c1",), "name": ("c1",)},
],
}
self.assertEqual(expected, res)
class TimeSeriesTableVizTestCase(SupersetTestCase):
def test_get_data_metrics(self):
form_data = {"metrics": ["sum__A", "count"], "groupby": []}
datasource = self.get_datasource_mock()
raw = {}
t1 = pd.Timestamp("2000")
t2 = pd.Timestamp("2002")
raw[DTTM_ALIAS] = [t1, t2]
raw["sum__A"] = [15, 20]
raw["count"] = [6, 7]
df = pd.DataFrame(raw)
test_viz = viz.TimeTableViz(datasource, form_data)
data = test_viz.get_data(df)
# Check method correctly transforms data
self.assertEqual(set(["count", "sum__A"]), set(data["columns"]))
time_format = "%Y-%m-%d %H:%M:%S"
expected = {
t1.strftime(time_format): {"sum__A": 15, "count": 6},
t2.strftime(time_format): {"sum__A": 20, "count": 7},
}
self.assertEqual(expected, data["records"])
def test_get_data_group_by(self):
form_data = {"metrics": ["sum__A"], "groupby": ["groupby1"]}
datasource = self.get_datasource_mock()
raw = {}
t1 = pd.Timestamp("2000")
t2 = pd.Timestamp("2002")
raw[DTTM_ALIAS] = [t1, t1, t1, t2, t2, t2]
raw["sum__A"] = [15, 20, 25, 30, 35, 40]
raw["groupby1"] = ["a1", "a2", "a3", "a1", "a2", "a3"]
df = pd.DataFrame(raw)
test_viz = viz.TimeTableViz(datasource, form_data)
data = test_viz.get_data(df)
# Check method correctly transforms data
self.assertEqual(set(["a1", "a2", "a3"]), set(data["columns"]))
time_format = "%Y-%m-%d %H:%M:%S"
expected = {
t1.strftime(time_format): {"a1": 15, "a2": 20, "a3": 25},
t2.strftime(time_format): {"a1": 30, "a2": 35, "a3": 40},
}
self.assertEqual(expected, data["records"])
@patch("superset.viz.BaseViz.query_obj")
def test_query_obj_throws_metrics_and_groupby(self, super_query_obj):
datasource = self.get_datasource_mock()
form_data = {"groupby": ["a"]}
super_query_obj.return_value = {}
test_viz = viz.TimeTableViz(datasource, form_data)
with self.assertRaises(Exception):
test_viz.query_obj()
form_data["metrics"] = ["x", "y"]
test_viz = viz.TimeTableViz(datasource, form_data)
with self.assertRaises(Exception):
test_viz.query_obj()
class BaseDeckGLVizTestCase(SupersetTestCase):
def test_get_metrics(self):
form_data = load_fixture("deck_path_form_data.json")
datasource = self.get_datasource_mock()
test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data)
result = test_viz_deckgl.get_metrics()
assert result == [form_data.get("size")]
form_data = {}
test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data)
result = test_viz_deckgl.get_metrics()
assert result == []
def test_scatterviz_get_metrics(self):
form_data = load_fixture("deck_path_form_data.json")
datasource = self.get_datasource_mock()
form_data = {}
test_viz_deckgl = viz.DeckScatterViz(datasource, form_data)
test_viz_deckgl.point_radius_fixed = {"type": "metric", "value": "int"}
result = test_viz_deckgl.get_metrics()
assert result == ["int"]
form_data = {}
test_viz_deckgl = viz.DeckScatterViz(datasource, form_data)
test_viz_deckgl.point_radius_fixed = {}
result = test_viz_deckgl.get_metrics()
assert result == []
def test_get_js_columns(self):
form_data = load_fixture("deck_path_form_data.json")
datasource = self.get_datasource_mock()
mock_d = {"a": "dummy1", "b": "dummy2", "c": "dummy3"}
test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data)
result = test_viz_deckgl.get_js_columns(mock_d)
assert result == {"color": None}
def test_get_properties(self):
mock_d = {}
form_data = load_fixture("deck_path_form_data.json")
datasource = self.get_datasource_mock()
test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data)
with self.assertRaises(NotImplementedError) as context:
test_viz_deckgl.get_properties(mock_d)
self.assertTrue("" in str(context.exception))
def test_process_spatial_query_obj(self):
form_data = load_fixture("deck_path_form_data.json")
datasource = self.get_datasource_mock()
mock_key = "spatial_key"
mock_gb = []
test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data)
with self.assertRaises(ValueError) as context:
test_viz_deckgl.process_spatial_query_obj(mock_key, mock_gb)
self.assertTrue("Bad spatial key" in str(context.exception))
test_form_data = {
"latlong_key": {"type": "latlong", "lonCol": "lon", "latCol": "lat"},
"delimited_key": {"type": "delimited", "lonlatCol": "lonlat"},
"geohash_key": {"type": "geohash", "geohashCol": "geo"},
}
datasource = self.get_datasource_mock()
expected_results = {
"latlong_key": ["lon", "lat"],
"delimited_key": ["lonlat"],
"geohash_key": ["geo"],
}
for mock_key in ["latlong_key", "delimited_key", "geohash_key"]:
mock_gb = []
test_viz_deckgl = viz.BaseDeckGLViz(datasource, test_form_data)
test_viz_deckgl.process_spatial_query_obj(mock_key, mock_gb)
assert expected_results.get(mock_key) == mock_gb
def test_geojson_query_obj(self):
form_data = load_fixture("deck_geojson_form_data.json")
datasource = self.get_datasource_mock()
test_viz_deckgl = viz.DeckGeoJson(datasource, form_data)
results = test_viz_deckgl.query_obj()
assert results["metrics"] == []
assert results["groupby"] == []
assert results["columns"] == ["test_col"]
def test_parse_coordinates(self):
form_data = load_fixture("deck_path_form_data.json")
datasource = self.get_datasource_mock()
viz_instance = viz.BaseDeckGLViz(datasource, form_data)
coord = viz_instance.parse_coordinates("1.23, 3.21")
self.assertEqual(coord, (1.23, 3.21))
coord = viz_instance.parse_coordinates("1.23 3.21")
self.assertEqual(coord, (1.23, 3.21))
self.assertEqual(viz_instance.parse_coordinates(None), None)
self.assertEqual(viz_instance.parse_coordinates(""), None)
def test_parse_coordinates_raises(self):
form_data = load_fixture("deck_path_form_data.json")
datasource = self.get_datasource_mock()
test_viz_deckgl = viz.BaseDeckGLViz(datasource, form_data)
with self.assertRaises(SpatialException):
test_viz_deckgl.parse_coordinates("NULL")
with self.assertRaises(SpatialException):
test_viz_deckgl.parse_coordinates("fldkjsalkj,fdlaskjfjadlksj")
@patch("superset.utils.core.uuid.uuid4")
def test_filter_nulls(self, mock_uuid4):
mock_uuid4.return_value = uuid.UUID("12345678123456781234567812345678")
test_form_data = {
"latlong_key": {"type": "latlong", "lonCol": "lon", "latCol": "lat"},
"delimited_key": {"type": "delimited", "lonlatCol": "lonlat"},
"geohash_key": {"type": "geohash", "geohashCol": "geo"},
}
datasource = self.get_datasource_mock()
expected_results = {
"latlong_key": [
{
"clause": "WHERE",
"expressionType": "SIMPLE",
"filterOptionName": "12345678-1234-5678-1234-567812345678",
"comparator": "",
"operator": "IS NOT NULL",
"subject": "lat",
"isExtra": False,
},
{
"clause": "WHERE",
"expressionType": "SIMPLE",
"filterOptionName": "12345678-1234-5678-1234-567812345678",
"comparator": "",
"operator": "IS NOT NULL",
"subject": "lon",
"isExtra": False,
},
],
"delimited_key": [
{
"clause": "WHERE",
"expressionType": "SIMPLE",
"filterOptionName": "12345678-1234-5678-1234-567812345678",
"comparator": "",
"operator": "IS NOT NULL",
"subject": "lonlat",
"isExtra": False,
}
],
"geohash_key": [
{
"clause": "WHERE",
"expressionType": "SIMPLE",
"filterOptionName": "12345678-1234-5678-1234-567812345678",
"comparator": "",
"operator": "IS NOT NULL",
"subject": "geo",
"isExtra": False,
}
],
}
for mock_key in ["latlong_key", "delimited_key", "geohash_key"]:
test_viz_deckgl = viz.BaseDeckGLViz(datasource, test_form_data.copy())
test_viz_deckgl.spatial_control_keys = [mock_key]
test_viz_deckgl.add_null_filters()
adhoc_filters = test_viz_deckgl.form_data["adhoc_filters"]
assert expected_results.get(mock_key) == adhoc_filters
class TimeSeriesVizTestCase(SupersetTestCase):
def test_timeseries_unicode_data(self):
datasource = self.get_datasource_mock()
form_data = {"groupby": ["name"], "metrics": ["sum__payout"]}
raw = {}
raw["name"] = [
"Real Madrid C.F.🇺🇸🇬🇧",
"Real Madrid C.F.🇺🇸🇬🇧",
"Real Madrid Basket",
"Real Madrid Basket",
]
raw["__timestamp"] = [
"2018-02-20T00:00:00",
"2018-03-09T00:00:00",
"2018-02-20T00:00:00",
"2018-03-09T00:00:00",
]
raw["sum__payout"] = [2, 2, 4, 4]
df = pd.DataFrame(raw)
test_viz = viz.NVD3TimeSeriesViz(datasource, form_data)
viz_data = {}
viz_data = test_viz.get_data(df)
expected = [
{
u"values": [
{u"y": 4, u"x": u"2018-02-20T00:00:00"},
{u"y": 4, u"x": u"2018-03-09T00:00:00"},
],
u"key": (u"Real Madrid Basket",),
},
{
u"values": [
{u"y": 2, u"x": u"2018-02-20T00:00:00"},
{u"y": 2, u"x": u"2018-03-09T00:00:00"},
],
u"key": (u"Real Madrid C.F.\U0001f1fa\U0001f1f8\U0001f1ec\U0001f1e7",),
},
]
self.assertEqual(expected, viz_data)
def test_process_data_resample(self):
datasource = self.get_datasource_mock()
df = pd.DataFrame(
{
"__timestamp": pd.to_datetime(
["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"]
),
"y": [1.0, 2.0, 5.0, 7.0],
}
)
self.assertEqual(
viz.NVD3TimeSeriesViz(
datasource,
{"metrics": ["y"], "resample_method": "sum", "resample_rule": "1D"},
)
.process_data(df)["y"]
.tolist(),
[1.0, 2.0, 0.0, 0.0, 5.0, 0.0, 7.0],
)
np.testing.assert_equal(
viz.NVD3TimeSeriesViz(
datasource,
{"metrics": ["y"], "resample_method": "asfreq", "resample_rule": "1D"},
)
.process_data(df)["y"]
.tolist(),
[1.0, 2.0, np.nan, np.nan, 5.0, np.nan, 7.0],
)
def test_apply_rolling(self):
datasource = self.get_datasource_mock()
df = pd.DataFrame(
index=pd.to_datetime(
["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"]
),
data={"y": [1.0, 2.0, 3.0, 4.0]},
)
self.assertEqual(
viz.BigNumberViz(
datasource,
{
"metrics": ["y"],
"rolling_type": "cumsum",
"rolling_periods": 0,
"min_periods": 0,
},
)
.apply_rolling(df)["y"]
.tolist(),
[1.0, 3.0, 6.0, 10.0],
)
self.assertEqual(
viz.BigNumberViz(
datasource,
{
"metrics": ["y"],
"rolling_type": "sum",
"rolling_periods": 2,
"min_periods": 0,
},
)
.apply_rolling(df)["y"]
.tolist(),
[1.0, 3.0, 5.0, 7.0],
)
self.assertEqual(
viz.BigNumberViz(
datasource,
{
"metrics": ["y"],
"rolling_type": "mean",
"rolling_periods": 10,
"min_periods": 0,
},
)
.apply_rolling(df)["y"]
.tolist(),
[1.0, 1.5, 2.0, 2.5],
)
class BigNumberVizTestCase(SupersetTestCase):
def test_get_data(self):
datasource = self.get_datasource_mock()
df = pd.DataFrame(
data={
DTTM_ALIAS: pd.to_datetime(
["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"]
),
"y": [1.0, 2.0, 3.0, 4.0],
}
)
data = viz.BigNumberViz(datasource, {"metrics": ["y"]}).get_data(df)
self.assertEqual(data[2], {DTTM_ALIAS: pd.Timestamp("2019-01-05"), "y": 3})
def test_get_data_with_none(self):
datasource = self.get_datasource_mock()
df = pd.DataFrame(
data={
DTTM_ALIAS: pd.to_datetime(
["2019-01-01", "2019-01-02", "2019-01-05", "2019-01-07"]
),
"y": [1.0, 2.0, None, 4.0],
}
)
data = viz.BigNumberViz(datasource, {"metrics": ["y"]}).get_data(df)
assert np.isnan(data[2]["y"])
| [
"logging.getLogger",
"superset.viz.DistributionBarViz",
"superset.viz.BaseViz",
"superset.viz.DeckScatterViz",
"unittest.mock.patch",
"superset.viz.TableViz",
"pandas.to_datetime",
"datetime.datetime",
"superset.viz.PartitionViz",
"pandas.DataFrame",
"superset.viz.DeckGeoJson",
"unittest.mock.Mock",
"superset.viz.NVD3TimeSeriesViz",
"superset.viz.TimeTableViz",
"numpy.isnan",
"uuid.UUID",
"superset.viz.BigNumberViz",
"superset.viz.BaseDeckGLViz",
"pandas.Timestamp"
] | [((1253, 1280), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1270, 1280), False, 'import logging\n'), ((13055, 13094), 'unittest.mock.patch', 'patch', (['"""superset.viz.BaseViz.query_obj"""'], {}), "('superset.viz.BaseViz.query_obj')\n", (13060, 13094), False, 'from unittest.mock import Mock, patch\n'), ((23258, 23297), 'unittest.mock.patch', 'patch', (['"""superset.viz.BaseViz.query_obj"""'], {}), "('superset.viz.BaseViz.query_obj')\n", (23263, 23297), False, 'from unittest.mock import Mock, patch\n'), ((36882, 36921), 'unittest.mock.patch', 'patch', (['"""superset.viz.BaseViz.query_obj"""'], {}), "('superset.viz.BaseViz.query_obj')\n", (36887, 36921), False, 'from unittest.mock import Mock, patch\n'), ((42026, 42065), 'unittest.mock.patch', 'patch', (['"""superset.utils.core.uuid.uuid4"""'], {}), "('superset.utils.core.uuid.uuid4')\n", (42031, 42065), False, 'from unittest.mock import Mock, patch\n'), ((2623, 2629), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (2627, 2629), False, 'from unittest.mock import Mock, patch\n'), ((2683, 2717), 'superset.viz.BaseViz', 'viz.BaseViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (2694, 2717), True, 'import superset.viz as viz\n'), ((3215, 3249), 'superset.viz.BaseViz', 'viz.BaseViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (3226, 3249), True, 'import superset.viz as viz\n'), ((3526, 3532), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3530, 3532), False, 'from unittest.mock import Mock, patch\n'), ((3557, 3563), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3561, 3563), False, 'from unittest.mock import Mock, patch\n'), ((3589, 3595), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3593, 3595), False, 'from unittest.mock import Mock, patch\n'), ((3628, 3634), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3632, 3634), False, 'from unittest.mock import Mock, patch\n'), ((3656, 3662), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3660, 3662), False, 'from unittest.mock import Mock, patch\n'), ((3724, 3750), 'unittest.mock.Mock', 'Mock', ([], {'return_value': 'results'}), '(return_value=results)\n', (3728, 3750), False, 'from unittest.mock import Mock, patch\n'), ((3775, 3781), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3779, 3781), False, 'from unittest.mock import Mock, patch\n'), ((3814, 3846), 'unittest.mock.Mock', 'Mock', ([], {'return_value': 'mock_dttm_col'}), '(return_value=mock_dttm_col)\n', (3818, 3846), False, 'from unittest.mock import Mock, patch\n'), ((3867, 3901), 'superset.viz.BaseViz', 'viz.BaseViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (3878, 3901), True, 'import superset.viz as viz\n'), ((3939, 3945), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3943, 3945), False, 'from unittest.mock import Mock, patch\n'), ((3988, 4008), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(0)'}), '(return_value=0)\n', (3992, 4008), False, 'from unittest.mock import Mock, patch\n'), ((4031, 4087), 'pandas.DataFrame', 'pd.DataFrame', ([], {'data': "{DTTM_ALIAS: ['1960-01-01 05:00:00']}"}), "(data={DTTM_ALIAS: ['1960-01-01 05:00:00']})\n", (4043, 4087), True, 'import pandas as pd\n'), ((4142, 4148), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (4146, 4148), False, 'from unittest.mock import Mock, patch\n'), ((4181, 4213), 'unittest.mock.Mock', 'Mock', ([], {'return_value': 'mock_dttm_col'}), '(return_value=mock_dttm_col)\n', (4185, 4213), False, 'from unittest.mock import Mock, patch\n'), ((5001, 5048), 
'pandas.DataFrame', 'pd.DataFrame', ([], {'data': "{DTTM_ALIAS: ['1960-01-01']}"}), "(data={DTTM_ALIAS: ['1960-01-01']})\n", (5013, 5048), True, 'import pandas as pd\n'), ((5425, 5462), 'superset.viz.BaseViz', 'viz.BaseViz', (['datasource'], {'form_data': '{}'}), '(datasource, form_data={})\n', (5436, 5462), True, 'import superset.viz as viz\n'), ((5574, 5611), 'superset.viz.BaseViz', 'viz.BaseViz', (['datasource'], {'form_data': '{}'}), '(datasource, form_data={})\n', (5585, 5611), True, 'import superset.viz as viz\n'), ((5979, 6016), 'superset.viz.BaseViz', 'viz.BaseViz', (['datasource'], {'form_data': '{}'}), '(datasource, form_data={})\n', (5990, 6016), True, 'import superset.viz as viz\n'), ((6987, 7184), 'pandas.DataFrame', 'pd.DataFrame', (["{'SUM(value1)': [15, 20, 25, 40], 'avg__B': [10, 20, 5, 15], 'avg__C': [11,\n 22, 33, 44], 'count': [6, 7, 8, 9], 'groupA': ['A', 'B', 'C', 'C'],\n 'groupB': ['x', 'x', 'y', 'z']}"], {}), "({'SUM(value1)': [15, 20, 25, 40], 'avg__B': [10, 20, 5, 15],\n 'avg__C': [11, 22, 33, 44], 'count': [6, 7, 8, 9], 'groupA': ['A', 'B',\n 'C', 'C'], 'groupB': ['x', 'x', 'y', 'z']})\n", (6999, 7184), True, 'import pandas as pd\n'), ((7330, 7365), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (7342, 7365), True, 'import superset.viz as viz\n'), ((10191, 10226), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (10203, 10226), True, 'import superset.viz as viz\n'), ((11655, 11690), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (11667, 11690), True, 'import superset.viz as viz\n'), ((12330, 12365), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (12342, 12365), True, 'import superset.viz as viz\n'), ((13509, 13544), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (13521, 13544), True, 'import superset.viz as viz\n'), ((15551, 15605), 'pandas.DataFrame', 'pd.DataFrame', (["{'SUM(value1)': [15], 'sum_value': [15]}"], {}), "({'SUM(value1)': [15], 'sum_value': [15]})\n", (15563, 15605), True, 'import pandas as pd\n'), ((15673, 15708), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (15685, 15708), True, 'import superset.viz as viz\n'), ((16107, 16204), 'pandas.DataFrame', 'pd.DataFrame', (["{'toppings': ['cheese', 'pepperoni', 'anchovies', None], 'votes': [3, 5, 1, 2]}"], {}), "({'toppings': ['cheese', 'pepperoni', 'anchovies', None],\n 'votes': [3, 5, 1, 2]})\n", (16119, 16204), True, 'import pandas as pd\n'), ((16289, 16334), 'superset.viz.DistributionBarViz', 'viz.DistributionBarViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (16311, 16334), True, 'import superset.viz as viz\n'), ((16929, 16993), 'pandas.DataFrame', 'pd.DataFrame', (["{'beds': [0, 1, nan, 2], 'count': [30, 42, 3, 29]}"], {}), "({'beds': [0, 1, nan, 2], 'count': [30, 42, 3, 29]})\n", (16941, 16993), True, 'import pandas as pd\n'), ((17013, 17058), 'superset.viz.DistributionBarViz', 'viz.DistributionBarViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (17035, 17058), True, 'import superset.viz as viz\n'), ((17651, 17798), 'pandas.DataFrame', 'pd.DataFrame', (["{'toppings': ['cheese', 'pepperoni', 'cheese', 'pepperoni'], 'role': [\n 'engineer', 'engineer', None, None], 'votes': [3, 5, 1, 2]}"], {}), "({'toppings': 
['cheese', 'pepperoni', 'cheese', 'pepperoni'],\n 'role': ['engineer', 'engineer', None, None], 'votes': [3, 5, 1, 2]})\n", (17663, 17798), True, 'import pandas as pd\n'), ((17899, 17944), 'superset.viz.DistributionBarViz', 'viz.DistributionBarViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (17921, 17944), True, 'import superset.viz as viz\n'), ((19180, 19197), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (19192, 19197), True, 'import pandas as pd\n'), ((22351, 22368), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (22363, 22368), True, 'import pandas as pd\n'), ((23454, 23493), 'superset.viz.PartitionViz', 'viz.PartitionViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (23470, 23493), True, 'import superset.viz as viz\n'), ((24357, 24374), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (24369, 24374), True, 'import pandas as pd\n'), ((26583, 26600), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (26595, 26600), True, 'import pandas as pd\n'), ((27881, 27898), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (27893, 27898), True, 'import pandas as pd\n'), ((28118, 28147), 'unittest.mock.Mock', 'Mock', ([], {'side_effect': 'return_args'}), '(side_effect=return_args)\n', (28122, 28147), False, 'from unittest.mock import Mock, patch\n'), ((29256, 29273), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (29268, 29273), True, 'import pandas as pd\n'), ((30394, 30411), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (30406, 30411), True, 'import pandas as pd\n'), ((31482, 31488), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (31486, 31488), False, 'from unittest.mock import Mock, patch\n'), ((31597, 31617), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(1)'}), '(return_value=1)\n', (31601, 31617), False, 'from unittest.mock import Mock, patch\n'), ((31649, 31669), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(1)'}), '(return_value=1)\n', (31653, 31669), False, 'from unittest.mock import Mock, patch\n'), ((32327, 32347), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(1)'}), '(return_value=1)\n', (32331, 32347), False, 'from unittest.mock import Mock, patch\n'), ((32866, 32886), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(1)'}), '(return_value=1)\n', (32870, 32886), False, 'from unittest.mock import Mock, patch\n'), ((32917, 32937), 'unittest.mock.Mock', 'Mock', ([], {'return_value': '(1)'}), '(return_value=1)\n', (32921, 32937), False, 'from unittest.mock import Mock, patch\n'), ((33514, 33534), 'pandas.Timestamp', 'pd.Timestamp', (['"""2000"""'], {}), "('2000')\n", (33526, 33534), True, 'import pandas as pd\n'), ((33548, 33568), 'pandas.Timestamp', 'pd.Timestamp', (['"""2002"""'], {}), "('2002')\n", (33560, 33568), True, 'import pandas as pd\n'), ((33582, 33602), 'pandas.Timestamp', 'pd.Timestamp', (['"""2004"""'], {}), "('2004')\n", (33594, 33602), True, 'import pandas as pd\n'), ((33969, 33986), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (33981, 33986), True, 'import pandas as pd\n'), ((35300, 35320), 'pandas.Timestamp', 'pd.Timestamp', (['"""2000"""'], {}), "('2000')\n", (35312, 35320), True, 'import pandas as pd\n'), ((35334, 35354), 'pandas.Timestamp', 'pd.Timestamp', (['"""2002"""'], {}), "('2002')\n", (35346, 35354), True, 'import pandas as pd\n'), ((35466, 35483), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (35478, 35483), True, 'import pandas as pd\n'), ((35503, 35542), 
'superset.viz.TimeTableViz', 'viz.TimeTableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (35519, 35542), True, 'import superset.viz as viz\n'), ((36145, 36165), 'pandas.Timestamp', 'pd.Timestamp', (['"""2000"""'], {}), "('2000')\n", (36157, 36165), True, 'import pandas as pd\n'), ((36179, 36199), 'pandas.Timestamp', 'pd.Timestamp', (['"""2002"""'], {}), "('2002')\n", (36191, 36199), True, 'import pandas as pd\n'), ((36376, 36393), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (36388, 36393), True, 'import pandas as pd\n'), ((36413, 36452), 'superset.viz.TimeTableViz', 'viz.TimeTableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (36429, 36452), True, 'import superset.viz as viz\n'), ((37144, 37183), 'superset.viz.TimeTableViz', 'viz.TimeTableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (37160, 37183), True, 'import superset.viz as viz\n'), ((37321, 37360), 'superset.viz.TimeTableViz', 'viz.TimeTableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (37337, 37360), True, 'import superset.viz as viz\n'), ((37653, 37693), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (37670, 37693), True, 'import superset.viz as viz\n'), ((37840, 37880), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (37857, 37880), True, 'import superset.viz as viz\n'), ((38159, 38200), 'superset.viz.DeckScatterViz', 'viz.DeckScatterViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (38177, 38200), True, 'import superset.viz as viz\n'), ((38411, 38452), 'superset.viz.DeckScatterViz', 'viz.DeckScatterViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (38429, 38452), True, 'import superset.viz as viz\n'), ((38810, 38850), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (38827, 38850), True, 'import superset.viz as viz\n'), ((39140, 39180), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (39157, 39180), True, 'import superset.viz as viz\n'), ((39588, 39628), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (39605, 39628), True, 'import superset.viz as viz\n'), ((40785, 40823), 'superset.viz.DeckGeoJson', 'viz.DeckGeoJson', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (40800, 40823), True, 'import superset.viz as viz\n'), ((41172, 41212), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (41189, 41212), True, 'import superset.viz as viz\n'), ((41747, 41787), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (41764, 41787), True, 'import superset.viz as viz\n'), ((42145, 42190), 'uuid.UUID', 'uuid.UUID', (['"""12345678123456781234567812345678"""'], {}), "('12345678123456781234567812345678')\n", (42154, 42190), False, 'import uuid\n'), ((45178, 45195), 'pandas.DataFrame', 'pd.DataFrame', (['raw'], {}), '(raw)\n', (45190, 45195), True, 'import pandas as pd\n'), ((45216, 45260), 'superset.viz.NVD3TimeSeriesViz', 'viz.NVD3TimeSeriesViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (45237, 45260), True, 'import superset.viz as viz\n'), ((49374, 49396), 'numpy.isnan', 'np.isnan', (["data[2]['y']"], 
{}), "(data[2]['y'])\n", (49382, 49396), True, 'import numpy as np\n'), ((1484, 1518), 'superset.viz.BaseViz', 'viz.BaseViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (1495, 1518), True, 'import superset.viz as viz\n'), ((12770, 12805), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (12782, 12805), True, 'import superset.viz as viz\n'), ((12980, 13015), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (12992, 13015), True, 'import superset.viz as viz\n'), ((14096, 14131), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (14108, 14131), True, 'import superset.viz as viz\n'), ((14820, 14855), 'superset.viz.TableViz', 'viz.TableViz', (['datasource', 'form_data'], {}), '(datasource, form_data)\n', (14832, 14855), True, 'import superset.viz as viz\n'), ((24487, 24493), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (24491, 24493), False, 'from unittest.mock import Mock, patch\n'), ((26685, 26691), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (26689, 26691), False, 'from unittest.mock import Mock, patch\n'), ((27983, 27989), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (27987, 27989), False, 'from unittest.mock import Mock, patch\n'), ((29310, 29316), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (29314, 29316), False, 'from unittest.mock import Mock, patch\n'), ((30448, 30454), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (30452, 30454), False, 'from unittest.mock import Mock, patch\n'), ((31457, 31463), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (31461, 31463), False, 'from unittest.mock import Mock, patch\n'), ((34079, 34085), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (34083, 34085), False, 'from unittest.mock import Mock, patch\n'), ((40428, 40473), 'superset.viz.BaseDeckGLViz', 'viz.BaseDeckGLViz', (['datasource', 'test_form_data'], {}), '(datasource, test_form_data)\n', (40445, 40473), True, 'import superset.viz as viz\n'), ((46100, 46172), 'pandas.to_datetime', 'pd.to_datetime', (["['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07']"], {}), "(['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07'])\n", (46114, 46172), True, 'import pandas as pd\n'), ((47038, 47110), 'pandas.to_datetime', 'pd.to_datetime', (["['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07']"], {}), "(['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07'])\n", (47052, 47110), True, 'import pandas as pd\n'), ((48794, 48842), 'superset.viz.BigNumberViz', 'viz.BigNumberViz', (['datasource', "{'metrics': ['y']}"], {}), "(datasource, {'metrics': ['y']})\n", (48810, 48842), True, 'import superset.viz as viz\n'), ((48903, 48929), 'pandas.Timestamp', 'pd.Timestamp', (['"""2019-01-05"""'], {}), "('2019-01-05')\n", (48915, 48929), True, 'import pandas as pd\n'), ((49297, 49345), 'superset.viz.BigNumberViz', 'viz.BigNumberViz', (['datasource', "{'metrics': ['y']}"], {}), "(datasource, {'metrics': ['y']})\n", (49313, 49345), True, 'import superset.viz as viz\n'), ((4447, 4473), 'datetime.datetime', 'datetime', (['(1960)', '(1)', '(1)', '(5)', '(0)'], {}), '(1960, 1, 1, 5, 0)\n', (4455, 4473), False, 'from datetime import datetime\n'), ((4679, 4705), 'datetime.datetime', 'datetime', (['(1960)', '(1)', '(1)', '(5)', '(0)'], {}), '(1960, 1, 1, 5, 0)\n', (4687, 4705), False, 'from datetime import datetime\n'), ((4893, 4919), 'datetime.datetime', 'datetime', (['(1960)', '(1)', '(1)', '(6)', '(0)'], {}), '(1960, 
1, 1, 6, 0)\n', (4901, 4919), False, 'from datetime import datetime\n'), ((5230, 5256), 'datetime.datetime', 'datetime', (['(1960)', '(1)', '(1)', '(0)', '(0)'], {}), '(1960, 1, 1, 0, 0)\n', (5238, 5256), False, 'from datetime import datetime\n'), ((48600, 48672), 'pandas.to_datetime', 'pd.to_datetime', (["['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07']"], {}), "(['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07'])\n", (48614, 48672), True, 'import pandas as pd\n'), ((49102, 49174), 'pandas.to_datetime', 'pd.to_datetime', (["['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07']"], {}), "(['2019-01-01', '2019-01-02', '2019-01-05', '2019-01-07'])\n", (49116, 49174), True, 'import pandas as pd\n'), ((46318, 46424), 'superset.viz.NVD3TimeSeriesViz', 'viz.NVD3TimeSeriesViz', (['datasource', "{'metrics': ['y'], 'resample_method': 'sum', 'resample_rule': '1D'}"], {}), "(datasource, {'metrics': ['y'], 'resample_method':\n 'sum', 'resample_rule': '1D'})\n", (46339, 46424), True, 'import superset.viz as viz\n'), ((46631, 46740), 'superset.viz.NVD3TimeSeriesViz', 'viz.NVD3TimeSeriesViz', (['datasource', "{'metrics': ['y'], 'resample_method': 'asfreq', 'resample_rule': '1D'}"], {}), "(datasource, {'metrics': ['y'], 'resample_method':\n 'asfreq', 'resample_rule': '1D'})\n", (46652, 46740), True, 'import superset.viz as viz\n'), ((47236, 47354), 'superset.viz.BigNumberViz', 'viz.BigNumberViz', (['datasource', "{'metrics': ['y'], 'rolling_type': 'cumsum', 'rolling_periods': 0,\n 'min_periods': 0}"], {}), "(datasource, {'metrics': ['y'], 'rolling_type': 'cumsum',\n 'rolling_periods': 0, 'min_periods': 0})\n", (47252, 47354), True, 'import superset.viz as viz\n'), ((47639, 47754), 'superset.viz.BigNumberViz', 'viz.BigNumberViz', (['datasource', "{'metrics': ['y'], 'rolling_type': 'sum', 'rolling_periods': 2,\n 'min_periods': 0}"], {}), "(datasource, {'metrics': ['y'], 'rolling_type': 'sum',\n 'rolling_periods': 2, 'min_periods': 0})\n", (47655, 47754), True, 'import superset.viz as viz\n'), ((48038, 48155), 'superset.viz.BigNumberViz', 'viz.BigNumberViz', (['datasource', "{'metrics': ['y'], 'rolling_type': 'mean', 'rolling_periods': 10,\n 'min_periods': 0}"], {}), "(datasource, {'metrics': ['y'], 'rolling_type': 'mean',\n 'rolling_periods': 10, 'min_periods': 0})\n", (48054, 48155), True, 'import superset.viz as viz\n')] |
#!/usr/bin/env python
import pandas as pd
from pathlib import Path
from torch.utils.data import DataLoader
class ModelContainer(object):
def __init__(self, model, optimizer, loss_fn, scheduler=None):
self.model = model
self.optimizer = optimizer
self.loss_fn = loss_fn
self.scheduler = scheduler
class DataContainer(object):
    def __init__(self, df_with_split: pd.DataFrame, dataset_class, vectorizer_file: Path,
                 batch_size: int, with_test=True, is_load: bool = True) -> None:
self.train_df = df_with_split.loc[df_with_split['split'] == 'train']
self.val_df = df_with_split.loc[df_with_split['split'] == 'val']
self._bs = batch_size
self.with_test = with_test
self.is_load = is_load
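        # Record split sizes and the number of full batches per split.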
self._lengths = {'train_size': len(self.train_df), 'val_size': len(self.val_df)}
        self._n_batches = [self._lengths['train_size'] // self._bs,
                           self._lengths['val_size'] // self._bs]
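        # Build and persist the vectorizer from the training split unless a saved one is being loaded.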
if not self.is_load:
print("Creating and saving vectorizer")
train_ds = dataset_class.load_data_and_create_vectorizer(self.train_df)
train_ds.save_vectorizer(vectorizer_file)
self.train_ds = dataset_class.load_data_and_vectorizer_from_file(self.train_df, vectorizer_file)
self.vectorizer = self.train_ds.vectorizer
self.surname_vocab = self.vectorizer.surname_vocab
self.nationality_vocab = self.vectorizer.nationality_vocab
self.train_dl = DataLoader(self.train_ds, self._bs, shuffle=True, drop_last=True)
self.val_ds = dataset_class.load_data_and_vectorizer(self.val_df, self.vectorizer)
self.val_dl = DataLoader(self.val_ds, self._bs, shuffle=True, drop_last=True)
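        # Optionally build the held-out test split with the same vectorizer.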
if self.with_test:
self.test_df = df_with_split.loc[df_with_split['split'] == 'test']
self._lengths['test_size'] = len(self.test_df)
self._n_batches.append(self._lengths['test_size'] // self._bs)
self.test_ds = dataset_class.load_data_and_vectorizer(self.test_df, self.vectorizer)
self.test_dl = DataLoader(self.test_ds, self._bs, shuffle=True, drop_last=True)
def get_loaders(self):
return self.train_dl, self.val_dl, self.test_dl
@property
def train_batches(self):
return self._n_batches[0]
@property
def val_batches(self):
return self._n_batches[1]
@property
def test_batches(self):
if not self.with_test:
raise NameError("No test dataset was provided")
return self._n_batches[2]
@property
def vocab_size(self):
return len(self.surname_vocab)
@property
def n_classes(self):
return len(self.nationality_vocab)
@property
def sizes(self):
return self._lengths
| [
"torch.utils.data.DataLoader"
] | [((1411, 1476), 'torch.utils.data.DataLoader', 'DataLoader', (['self.train_ds', 'self._bs'], {'shuffle': '(True)', 'drop_last': '(True)'}), '(self.train_ds, self._bs, shuffle=True, drop_last=True)\n', (1421, 1476), False, 'from torch.utils.data import DataLoader\n'), ((1583, 1646), 'torch.utils.data.DataLoader', 'DataLoader', (['self.val_ds', 'self._bs'], {'shuffle': '(True)', 'drop_last': '(True)'}), '(self.val_ds, self._bs, shuffle=True, drop_last=True)\n', (1593, 1646), False, 'from torch.utils.data import DataLoader\n'), ((1978, 2042), 'torch.utils.data.DataLoader', 'DataLoader', (['self.test_ds', 'self._bs'], {'shuffle': '(True)', 'drop_last': '(True)'}), '(self.test_ds, self._bs, shuffle=True, drop_last=True)\n', (1988, 2042), False, 'from torch.utils.data import DataLoader\n')] |
#!/usr/bin/env python
from __future__ import print_function
import logging
import os
def setup_logging():
# Set log level of the messages to show.
level_name = os.environ.get('BUCK_WRAPPER_LOG_LEVEL', 'INFO')
level_name_to_level = {
'CRITICAL': logging.CRITICAL,
'ERROR': logging.ERROR,
'WARNING': logging.WARNING,
'INFO': logging.INFO,
'DEBUG': logging.DEBUG,
'NOTSET': logging.NOTSET,
}
level = level_name_to_level.get(level_name.upper(), logging.INFO)
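    # Unknown level names fall back to INFO; the root logger logs with a timestamped file:line format.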
logging.basicConfig(
level=level,
format=(
'%(asctime)s [%(levelname)s][%(filename)s:%(lineno)d] %(message)s'
))
| [
"logging.basicConfig",
"os.environ.get"
] | [((171, 219), 'os.environ.get', 'os.environ.get', (['"""BUCK_WRAPPER_LOG_LEVEL"""', '"""INFO"""'], {}), "('BUCK_WRAPPER_LOG_LEVEL', 'INFO')\n", (185, 219), False, 'import os\n'), ((530, 642), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'level', 'format': '"""%(asctime)s [%(levelname)s][%(filename)s:%(lineno)d] %(message)s"""'}), "(level=level, format=\n '%(asctime)s [%(levelname)s][%(filename)s:%(lineno)d] %(message)s')\n", (549, 642), False, 'import logging\n')] |
"""
Name: Bondi
References: Bondi, Proc. Roy. Soc. Lond. A, v282, p303, (1964)
Coordinates: Spherical
Symmetry: Spherical
Notes: Outgoing Coordinates
"""
from sympy import Function, diag, sin, symbols
coords = symbols("r v theta phi", real=True)
variables = ()
functions = symbols("C M", cls=Function)
r, v, th, ph = coords
C, M = functions
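# Line element encoded below (coordinate order r, v, theta, phi):
#   ds^2 = -C(r,v)^2 * (1 - 2 M(r,v)/r) dv^2 - 2 C(r,v) dv dr + r^2 dOmega^2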
metric = diag(0, -C(r, v) ** 2 * (1 - 2 * M(r, v) / r), r ** 2, r ** 2 * sin(th) ** 2)
metric[0, 1] = metric[1, 0] = -C(r, v)
| [
"sympy.symbols",
"sympy.sin"
] | [((211, 246), 'sympy.symbols', 'symbols', (['"""r v theta phi"""'], {'real': '(True)'}), "('r v theta phi', real=True)\n", (218, 246), False, 'from sympy import Function, diag, sin, symbols\n'), ((274, 302), 'sympy.symbols', 'symbols', (['"""C M"""'], {'cls': 'Function'}), "('C M', cls=Function)\n", (281, 302), False, 'from sympy import Function, diag, sin, symbols\n'), ((415, 422), 'sympy.sin', 'sin', (['th'], {}), '(th)\n', (418, 422), False, 'from sympy import Function, diag, sin, symbols\n')] |
from __future__ import unicode_literals
import json
from django.apps import apps
from django.core.urlresolvers import NoReverseMatch, reverse
from django.http import Http404, HttpRequest, QueryDict
from django.test import TestCase, override_settings
from django.utils import timezone
from wagtail.wagtailcore.models import Site
from wagtailsharing.models import SharingSite
import mock
from model_mommy import mommy
from ask_cfpb.models import ENGLISH_PARENT_SLUG, SPANISH_PARENT_SLUG
from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search
from v1.util.migrations import get_or_create_page
now = timezone.now()
class AnswerPagePreviewCase(TestCase):
def setUp(self):
from v1.models import HomePage
from ask_cfpb.models import Answer
self.ROOT_PAGE = HomePage.objects.get(slug='cfgov')
self.english_parent_page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerLandingPage',
'Ask CFPB',
ENGLISH_PARENT_SLUG,
self.ROOT_PAGE,
language='en',
live=True)
self.spanish_parent_page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerLandingPage',
'Obtener respuestas',
SPANISH_PARENT_SLUG,
self.ROOT_PAGE,
language='es',
live=True)
self.test_answer = mommy.make(
Answer,
answer="Test answer.",
question="Test question.",
slug='test-question',
update_english_page=True,
update_spanish_page=False)
self.site = mommy.make(
Site,
root_page=self.ROOT_PAGE,
hostname='localhost',
port=8000,
is_default_site=True)
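        # wagtailsharing SharingSite used by the preview test to serve the latest revision at preview.localhost.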
self.sharing_site = mommy.make(
SharingSite,
site=self.site,
hostname='preview.localhost',
port=8000)
@mock.patch('ask_cfpb.views.ServeView.serve_latest_revision')
def test_preview_page(self, mock_serve):
from ask_cfpb.views import view_answer
page = self.test_answer.english_page
revision = page.save_revision()
revision.publish()
test_request = HttpRequest()
test_request.META['SERVER_NAME'] = 'preview.localhost'
test_request.META['SERVER_PORT'] = 8000
view_answer(
test_request, 'test-question', 'en', self.test_answer.pk)
self.assertEqual(mock_serve.call_count, 1)
def test_answer_page_not_live(self):
from ask_cfpb.views import view_answer
page = self.test_answer.english_page
page.live = False
page.save()
test_request = HttpRequest()
with self.assertRaises(Http404):
view_answer(
test_request,
'test-question',
'en',
self.test_answer.pk)
class AnswerViewTestCase(TestCase):
def setUp(self):
from v1.models import HomePage
self.ROOT_PAGE = HomePage.objects.get(slug='cfgov')
self.english_parent_page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerLandingPage',
'Ask CFPB',
ENGLISH_PARENT_SLUG,
self.ROOT_PAGE,
language='en',
live=True)
self.spanish_parent_page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerLandingPage',
'Obtener respuestas',
SPANISH_PARENT_SLUG,
self.ROOT_PAGE,
language='es',
live=True)
def test_annotate_links(self):
mock_answer = (
'<p>Answer with a <a href="http://fake.com">fake link.</a></p>')
(annotated_answer, links) = annotate_links(mock_answer)
self.assertEqual(
annotated_answer,
'<html><body><p>Answer with a <a href="http://fake.com">fake '
'link.</a><sup>1</sup></p></body></html>')
self.assertEqual(links, [(1, str('http://fake.com'))])
def test_annotate_links_no_href(self):
mock_answer = (
'<p>Answer with a <a>fake link.</a></p>')
(annotated_answer, links) = annotate_links(mock_answer)
self.assertEqual(links, [])
def test_annotate_links_no_site(self):
site = Site.objects.get(is_default_site=True)
site.is_default_site = False
site.save()
with self.assertRaises(RuntimeError) as context:
annotate_links('answer')
self.assertIn('no default wagtail site', str(context.exception))
def test_bad_language_search(self):
with self.assertRaises(NoReverseMatch):
self.client.get(reverse(
'ask-search-en',
kwargs={'language': 'zz'}), {'q': 'payday'})
@mock.patch('ask_cfpb.views.SearchQuerySet.filter')
def test_en_search_results_page_not_created(self, mock_filter):
mock_queryset = mock.Mock()
mock_queryset.count.return_value = 0
mock_filter.return_value = [mock_queryset]
response = self.client.get(reverse(
'ask-search-en'), {'q': 'payday'})
self.assertEqual(mock_filter.call_count, 1)
        mock_filter.assert_called_with(language='en', q='payday')
self.assertEqual(response.status_code, 404)
@mock.patch('ask_cfpb.views.SearchQuerySet')
def test_en_search(self, mock_sqs):
from v1.util.migrations import get_or_create_page
mock_page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerResultsPage',
'Mock results page',
'ask-cfpb-search-results',
self.ROOT_PAGE,
language='en')
mock_return = mock.Mock()
mock_return.url = 'mockcfpb.gov'
mock_return.autocomplete = 'A mock question'
mock_return.text = 'Mock answer text.'
mock_queryset = mock.Mock()
mock_queryset.__iter__ = mock.Mock(return_value=iter([mock_return]))
mock_queryset.count.return_value = 1
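        # SearchQuerySet().models(...).filter(...) will now return the single fake result above.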
mock_sqs_instance = mock_sqs.return_value.models.return_value
mock_sqs_instance.filter.return_value = mock_queryset
mock_sqs_instance.spelling_suggestion.return_value = 'payday'
response = self.client.get(reverse(
'ask-search-en'), {'q': 'payday'})
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.context_data['page'],
mock_page)
self.assertEqual(
response.context_data['page'].suggestion,
None)
self.assertEqual(mock_sqs_instance.filter.call_count, 1)
        mock_sqs_instance.filter.assert_called_with(
            language='en', q='payday')
@mock.patch('ask_cfpb.views.SearchQuerySet')
def test_en_search_no_term(self, mock_sqs):
from v1.util.migrations import get_or_create_page
mock_page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerResultsPage',
'Mock results page',
'ask-cfpb-search-results',
self.ROOT_PAGE,
language='en')
response = self.client.get(reverse(
'ask-search-en'), {'q': ''})
self.assertEqual(response.status_code, 200)
self.assertEqual(
response.context_data['page'],
mock_page)
self.assertEqual(
response.context_data['page'].query,
'')
self.assertEqual(
response.context_data['page'].result_query,
'')
@override_settings(FLAGS={'ASK_SEARCH_TYPOS': {'boolean': True}})
@mock.patch('ask_cfpb.views.SearchQuerySet')
def test_en_search_suggestion(self, mock_sqs):
from v1.util.migrations import get_or_create_page
mock_page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerResultsPage',
'Mock results page',
'ask-cfpb-search-results',
self.english_parent_page,
language='en',
live=True)
mock_return = mock.Mock()
mock_return.url = 'mockcfpb.gov'
mock_return.autocomplete = 'A mock question'
mock_return.text = 'Mock answer text.'
mock_queryset = mock.Mock()
mock_queryset.__iter__ = mock.Mock(return_value=iter([mock_return]))
mock_queryset.count.return_value = 0
mock_sqs_instance = mock_sqs.return_value.models.return_value
mock_sqs_instance.filter.return_value = mock_queryset
mock_sqs_instance.spelling_suggestion.return_value = 'payday'
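        # With zero hits for 'paydya', the page should fall back to results for the suggested
        # 'payday' while still reporting the original query term.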
response = self.client.get(reverse(
'ask-search-en'), {'q': 'paydya'})
self.assertEqual(response.status_code, 200)
response_page = response.context_data['page']
self.assertEqual(response_page, mock_page)
self.assertEqual(response_page.suggestion, 'paydya')
self.assertEqual(response_page.result_query, 'payday')
self.assertEqual(response_page.query, 'paydya')
@mock.patch('ask_cfpb.views.redirect_ask_search')
def test_ask_search_encounters_facets(self, mock_redirect):
request = HttpRequest()
request.GET['selected_facets'] = 'category_exact:my_category'
ask_search(request)
self.assertEqual(mock_redirect.call_count, 1)
@mock.patch('ask_cfpb.views.redirect')
def test_redirect_ask_search_passes_query_string(self, mock_redirect):
request = HttpRequest()
request.GET['q'] = 'hoodoo'
redirect_ask_search(request)
self.assertEqual(mock_redirect.call_count, 1)
@mock.patch('ask_cfpb.views.redirect')
def test_spanish_redirect_ask_search_passes_query_string(
self, mock_redirect):
request = HttpRequest()
request.GET['selected_facets'] = 'category_exact:my_categoria'
redirect_ask_search(request, language='es')
self.assertEqual(mock_redirect.call_count, 1)
@mock.patch('ask_cfpb.views.SearchQuerySet.filter')
def test_es_search(self, mock_filter):
get_or_create_page(
apps,
'ask_cfpb',
'AnswerResultsPage',
'Mock Spanish results page',
'respuestas',
self.spanish_parent_page,
language='es',
live=True)
mock_return = mock.Mock()
mock_return.url = 'mockcfpb.gov'
mock_return.autocomplete = 'A mock question'
mock_return.text = 'Mock answer text.'
mock_queryset = mock.Mock()
mock_queryset.__iter__ = mock.Mock(return_value=iter([mock_return]))
mock_queryset.count.return_value = 1
mock_filter.return_value = mock_queryset
self.client.get(reverse(
'ask-search-es', kwargs={'language': 'es'}), {'q': 'payday'})
self.assertEqual(mock_filter.call_count, 1)
        mock_filter.assert_called_with(language='es', q='payday')
@mock.patch('ask_cfpb.views.SearchQuerySet.filter')
def test_search_page_en_selection(self, mock_filter):
page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerResultsPage',
'Mock results page',
'ask-cfpb-search-results',
self.english_parent_page,
language='en',
live=True)
mock_return = mock.Mock()
mock_return.url = 'url'
mock_return.autocomplete = 'question text'
mock_queryset = mock.Mock()
mock_queryset.__iter__ = mock.Mock(return_value=iter([mock_return]))
mock_queryset.count.return_value = 1
mock_filter.return_value = mock_queryset
self.client.get(reverse(
'ask-search-en'), {'q': 'tuition'})
self.assertEqual(mock_filter.call_count, 1)
self.assertEqual(page.language, 'en')
self.assertEqual(page.answers, [])
self.assertEqual(
page.get_template(HttpRequest()),
'ask-cfpb/answer-search-results.html')
@mock.patch('ask_cfpb.views.SearchQuerySet.filter')
def test_search_page_es_selection(self, mock_filter):
page = get_or_create_page(
apps,
'ask_cfpb',
'AnswerResultsPage',
'Mock Spanish results page',
'respuestas',
self.spanish_parent_page,
language='es',
live=True)
mock_return = mock.Mock()
mock_return.url = 'url'
mock_return.autocomplete = 'question text'
mock_queryset = mock.Mock()
mock_queryset.__iter__ = mock.Mock(return_value=iter([mock_return]))
mock_queryset.count.return_value = 1
mock_filter.return_value = mock_queryset
self.client.get(reverse(
'ask-search-es', kwargs={'language': 'es'}), {'q': 'hipotecas'})
self.assertEqual(mock_filter.call_count, 1)
self.assertEqual(page.language, 'es')
self.assertEqual(page.answers, [])
self.assertEqual(
page.get_template(HttpRequest()),
'ask-cfpb/answer-search-spanish-results.html')
@mock.patch('ask_cfpb.views.SearchQuerySet.filter')
def test_json_response(self, mock_filter):
get_or_create_page(
apps,
'ask_cfpb',
'AnswerResultsPage',
'Mock results page',
'ask-cfpb-search-results',
self.english_parent_page,
language='en',
live=True)
mock_return = mock.Mock()
mock_return.url = "inscisive_url.com"
mock_return.autocomplete = "inscisive question"
mock_return.text = "inscisive text"
mock_queryset = mock.Mock()
mock_queryset.__iter__ = mock.Mock(return_value=iter([mock_return]))
mock_queryset.count.return_value = 1
mock_filter.return_value = mock_queryset
response = self.client.get(reverse(
'ask-search-en-json',
kwargs={'as_json': 'json'}), {'q': 'tuition'})
self.assertEqual(response.status_code, 200)
self.assertEqual(mock_filter.call_count, 1)
self.assertEqual(json.loads(response.content)['query'], 'tuition')
def test_autocomplete_en_blank_term(self):
result = self.client.get(reverse(
'ask-autocomplete-en'), {'term': ''})
output = json.loads(result.content)
self.assertEqual(output, [])
def test_autocomplete_es_blank_term(self):
result = self.client.get(reverse(
'ask-autocomplete-es',
kwargs={'language': 'es'}), {'term': ''})
output = json.loads(result.content)
self.assertEqual(output, [])
@mock.patch('ask_cfpb.views.SearchQuerySet.autocomplete')
def test_autocomplete_en(self, mock_autocomplete):
mock_search_result = mock.Mock()
mock_search_result.autocomplete = 'question'
mock_search_result.url = 'url'
mock_autocomplete.return_value = [mock_search_result]
result = self.client.get(reverse(
'ask-autocomplete-en'), {'term': 'question'})
self.assertEqual(mock_autocomplete.call_count, 1)
output = json.loads(result.content)
self.assertEqual(
sorted(output[0].keys()),
['question', 'url'])
@mock.patch('ask_cfpb.views.SearchQuerySet.autocomplete')
def test_autocomplete_es(self, mock_autocomplete):
mock_search_result = mock.Mock()
mock_search_result.autocomplete = 'question'
mock_search_result.url = 'url'
mock_autocomplete.return_value = [mock_search_result]
result = self.client.get(reverse(
'ask-autocomplete-es',
kwargs={'language': 'es'}), {'term': 'question'})
self.assertEqual(mock_autocomplete.call_count, 1)
output = json.loads(result.content)
self.assertEqual(
sorted(output[0].keys()),
['question', 'url'])
class RedirectAskSearchTestCase(TestCase):
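    # Facet-based search querystrings should redirect to category, audience, or tag landing URLs;
    # missing queries and unrecognized facets raise 404.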
def test_redirect_search_no_facets(self):
request = HttpRequest()
with self.assertRaises(Http404):
redirect_ask_search(request)
def test_redirect_search_blank_facets(self):
request = HttpRequest()
request.GET['selected_facets'] = ''
with self.assertRaises(Http404):
redirect_ask_search(request)
def test_redirect_search_no_query(self):
request = HttpRequest()
request.GET['q'] = ' '
with self.assertRaises(Http404):
redirect_ask_search(request)
def test_redirect_search_with_category(self):
category_querystring = (
'selected_facets=category_exact:my_category'
'&selected_facets=category_exact:my_category2'
'&selected_facets=audience_exact:Older+Americans'
'&selected_facets=audience_exact:my_audience2'
'&selected_facets=tag_exact:mytag1'
'&selected_facets=tag_exact:mytag2')
request = HttpRequest()
request.GET = QueryDict(category_querystring)
result = redirect_ask_search(request)
self.assertEqual(result.get('location'),
'/ask-cfpb/category-my_category/')
def test_redirect_search_with_audience(self):
audience_querystring = (
'selected_facets=audience_exact:Older+Americans'
'&selected_facets=audience_exact:my_audience2')
request = HttpRequest()
request.GET = QueryDict(audience_querystring)
result = redirect_ask_search(request)
self.assertEqual(
result.get('location'),
'/ask-cfpb/audience-older-americans/')
def test_spanish_redirect_search_with_tag(self):
target_tag = 'spanishtag1'
tag_querystring = (
'selected_facets=tag_exact:{}'
'&selected_facets=tag_exact:spanishtag2'.format(target_tag))
request = HttpRequest()
request.GET = QueryDict(tag_querystring)
result = redirect_ask_search(request, language='es')
self.assertEqual(
result.get('location'),
'/es/obtener-respuestas/buscar-por-etiqueta/{}/'.format(
target_tag))
def test_english_redirect_search_with_tag(self):
target_tag = 'englishtag1'
tag_querystring = (
'selected_facets=tag_exact:{}'
'&selected_facets=tag_exact:englishtag2'.format(target_tag))
request = HttpRequest()
request.GET = QueryDict(tag_querystring)
result = redirect_ask_search(request, language='en')
self.assertEqual(
result.get('location'),
'/ask-cfpb/search-by-tag/{}/'.format(
target_tag))
def test_redirect_search_with_unrecognized_facet_raises_404(self):
querystring = \
'sort=-updated_at&selected_facets=imtkfidycqszgfdb&page=60'
request = HttpRequest()
request.GET = QueryDict(querystring)
with self.assertRaises(Http404):
redirect_ask_search(request)
| [
"ask_cfpb.views.redirect_ask_search",
"model_mommy.mommy.make",
"mock.patch",
"v1.util.migrations.get_or_create_page",
"json.loads",
"django.http.QueryDict",
"mock.Mock",
"ask_cfpb.views.view_answer",
"wagtail.wagtailcore.models.Site.objects.get",
"django.utils.timezone.now",
"django.test.override_settings",
"v1.models.HomePage.objects.get",
"django.core.urlresolvers.reverse",
"ask_cfpb.views.annotate_links",
"django.http.HttpRequest",
"ask_cfpb.views.ask_search"
] | [((623, 637), 'django.utils.timezone.now', 'timezone.now', ([], {}), '()\n', (635, 637), False, 'from django.utils import timezone\n'), ((1970, 2030), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.ServeView.serve_latest_revision"""'], {}), "('ask_cfpb.views.ServeView.serve_latest_revision')\n", (1980, 2030), False, 'import mock\n'), ((4849, 4899), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet.filter"""'], {}), "('ask_cfpb.views.SearchQuerySet.filter')\n", (4859, 4899), False, 'import mock\n'), ((5377, 5420), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet"""'], {}), "('ask_cfpb.views.SearchQuerySet')\n", (5387, 5420), False, 'import mock\n'), ((6806, 6849), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet"""'], {}), "('ask_cfpb.views.SearchQuerySet')\n", (6816, 6849), False, 'import mock\n'), ((7623, 7687), 'django.test.override_settings', 'override_settings', ([], {'FLAGS': "{'ASK_SEARCH_TYPOS': {'boolean': True}}"}), "(FLAGS={'ASK_SEARCH_TYPOS': {'boolean': True}})\n", (7640, 7687), False, 'from django.test import TestCase, override_settings\n'), ((7693, 7736), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet"""'], {}), "('ask_cfpb.views.SearchQuerySet')\n", (7703, 7736), False, 'import mock\n'), ((9095, 9143), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.redirect_ask_search"""'], {}), "('ask_cfpb.views.redirect_ask_search')\n", (9105, 9143), False, 'import mock\n'), ((9398, 9435), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.redirect"""'], {}), "('ask_cfpb.views.redirect')\n", (9408, 9435), False, 'import mock\n'), ((9676, 9713), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.redirect"""'], {}), "('ask_cfpb.views.redirect')\n", (9686, 9713), False, 'import mock\n'), ((10025, 10075), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet.filter"""'], {}), "('ask_cfpb.views.SearchQuerySet.filter')\n", (10035, 10075), False, 'import mock\n'), ((11000, 11050), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet.filter"""'], {}), "('ask_cfpb.views.SearchQuerySet.filter')\n", (11010, 11050), False, 'import mock\n'), ((12054, 12104), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet.filter"""'], {}), "('ask_cfpb.views.SearchQuerySet.filter')\n", (12064, 12104), False, 'import mock\n'), ((13140, 13190), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet.filter"""'], {}), "('ask_cfpb.views.SearchQuerySet.filter')\n", (13150, 13190), False, 'import mock\n'), ((14691, 14747), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet.autocomplete"""'], {}), "('ask_cfpb.views.SearchQuerySet.autocomplete')\n", (14701, 14747), False, 'import mock\n'), ((15303, 15359), 'mock.patch', 'mock.patch', (['"""ask_cfpb.views.SearchQuerySet.autocomplete"""'], {}), "('ask_cfpb.views.SearchQuerySet.autocomplete')\n", (15313, 15359), False, 'import mock\n'), ((808, 842), 'v1.models.HomePage.objects.get', 'HomePage.objects.get', ([], {'slug': '"""cfgov"""'}), "(slug='cfgov')\n", (828, 842), False, 'from v1.models import HomePage\n'), ((878, 1014), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerLandingPage"""', '"""Ask CFPB"""', 'ENGLISH_PARENT_SLUG', 'self.ROOT_PAGE'], {'language': '"""en"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerLandingPage', 'Ask CFPB',\n ENGLISH_PARENT_SLUG, self.ROOT_PAGE, language='en', live=True)\n", (896, 1014), False, 'from v1.util.migrations import get_or_create_page\n'), ((1143, 1294), 
'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerLandingPage"""', '"""Obtener respuestas"""', 'SPANISH_PARENT_SLUG', 'self.ROOT_PAGE'], {'language': '"""es"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerLandingPage',\n 'Obtener respuestas', SPANISH_PARENT_SLUG, self.ROOT_PAGE, language=\n 'es', live=True)\n", (1161, 1294), False, 'from v1.util.migrations import get_or_create_page\n'), ((1410, 1558), 'model_mommy.mommy.make', 'mommy.make', (['Answer'], {'answer': '"""Test answer."""', 'question': '"""Test question."""', 'slug': '"""test-question"""', 'update_english_page': '(True)', 'update_spanish_page': '(False)'}), "(Answer, answer='Test answer.', question='Test question.', slug=\n 'test-question', update_english_page=True, update_spanish_page=False)\n", (1420, 1558), False, 'from model_mommy import mommy\n'), ((1647, 1748), 'model_mommy.mommy.make', 'mommy.make', (['Site'], {'root_page': 'self.ROOT_PAGE', 'hostname': '"""localhost"""', 'port': '(8000)', 'is_default_site': '(True)'}), "(Site, root_page=self.ROOT_PAGE, hostname='localhost', port=8000,\n is_default_site=True)\n", (1657, 1748), False, 'from model_mommy import mommy\n'), ((1834, 1919), 'model_mommy.mommy.make', 'mommy.make', (['SharingSite'], {'site': 'self.site', 'hostname': '"""preview.localhost"""', 'port': '(8000)'}), "(SharingSite, site=self.site, hostname='preview.localhost', port=8000\n )\n", (1844, 1919), False, 'from model_mommy import mommy\n'), ((2258, 2271), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (2269, 2271), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((2391, 2460), 'ask_cfpb.views.view_answer', 'view_answer', (['test_request', '"""test-question"""', '"""en"""', 'self.test_answer.pk'], {}), "(test_request, 'test-question', 'en', self.test_answer.pk)\n", (2402, 2460), False, 'from ask_cfpb.views import view_answer\n'), ((2728, 2741), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (2739, 2741), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((3054, 3088), 'v1.models.HomePage.objects.get', 'HomePage.objects.get', ([], {'slug': '"""cfgov"""'}), "(slug='cfgov')\n", (3074, 3088), False, 'from v1.models import HomePage\n'), ((3124, 3260), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerLandingPage"""', '"""Ask CFPB"""', 'ENGLISH_PARENT_SLUG', 'self.ROOT_PAGE'], {'language': '"""en"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerLandingPage', 'Ask CFPB',\n ENGLISH_PARENT_SLUG, self.ROOT_PAGE, language='en', live=True)\n", (3142, 3260), False, 'from v1.util.migrations import get_or_create_page\n'), ((3389, 3540), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerLandingPage"""', '"""Obtener respuestas"""', 'SPANISH_PARENT_SLUG', 'self.ROOT_PAGE'], {'language': '"""es"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerLandingPage',\n 'Obtener respuestas', SPANISH_PARENT_SLUG, self.ROOT_PAGE, language=\n 'es', live=True)\n", (3407, 3540), False, 'from v1.util.migrations import get_or_create_page\n'), ((3802, 3829), 'ask_cfpb.views.annotate_links', 'annotate_links', (['mock_answer'], {}), '(mock_answer)\n', (3816, 3829), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((4237, 4264), 'ask_cfpb.views.annotate_links', 'annotate_links', (['mock_answer'], {}), '(mock_answer)\n', (4251, 4264), False, 'from ask_cfpb.views import annotate_links, 
ask_search, redirect_ask_search\n'), ((4360, 4398), 'wagtail.wagtailcore.models.Site.objects.get', 'Site.objects.get', ([], {'is_default_site': '(True)'}), '(is_default_site=True)\n', (4376, 4398), False, 'from wagtail.wagtailcore.models import Site\n'), ((4992, 5003), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (5001, 5003), False, 'import mock\n'), ((5539, 5683), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerResultsPage"""', '"""Mock results page"""', '"""ask-cfpb-search-results"""', 'self.ROOT_PAGE'], {'language': '"""en"""'}), "(apps, 'ask_cfpb', 'AnswerResultsPage',\n 'Mock results page', 'ask-cfpb-search-results', self.ROOT_PAGE,\n language='en')\n", (5557, 5683), False, 'from v1.util.migrations import get_or_create_page\n'), ((5784, 5795), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (5793, 5795), False, 'import mock\n'), ((5962, 5973), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (5971, 5973), False, 'import mock\n'), ((6976, 7120), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerResultsPage"""', '"""Mock results page"""', '"""ask-cfpb-search-results"""', 'self.ROOT_PAGE'], {'language': '"""en"""'}), "(apps, 'ask_cfpb', 'AnswerResultsPage',\n 'Mock results page', 'ask-cfpb-search-results', self.ROOT_PAGE,\n language='en')\n", (6994, 7120), False, 'from v1.util.migrations import get_or_create_page\n'), ((7866, 8032), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerResultsPage"""', '"""Mock results page"""', '"""ask-cfpb-search-results"""', 'self.english_parent_page'], {'language': '"""en"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerResultsPage',\n 'Mock results page', 'ask-cfpb-search-results', self.\n english_parent_page, language='en', live=True)\n", (7884, 8032), False, 'from v1.util.migrations import get_or_create_page\n'), ((8144, 8155), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (8153, 8155), False, 'import mock\n'), ((8322, 8333), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (8331, 8333), False, 'import mock\n'), ((9226, 9239), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (9237, 9239), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((9318, 9337), 'ask_cfpb.views.ask_search', 'ask_search', (['request'], {}), '(request)\n', (9328, 9337), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((9529, 9542), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (9540, 9542), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((9587, 9615), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {}), '(request)\n', (9606, 9615), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((9828, 9841), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (9839, 9841), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((9921, 9964), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {'language': '"""es"""'}), "(request, language='es')\n", (9940, 9964), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((10127, 10287), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerResultsPage"""', '"""Mock Spanish results page"""', '"""respuestas"""', 'self.spanish_parent_page'], {'language': '"""es"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 
'AnswerResultsPage',\n 'Mock Spanish results page', 'respuestas', self.spanish_parent_page,\n language='es', live=True)\n", (10145, 10287), False, 'from v1.util.migrations import get_or_create_page\n'), ((10399, 10410), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (10408, 10410), False, 'import mock\n'), ((10576, 10587), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (10585, 10587), False, 'import mock\n'), ((11124, 11290), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerResultsPage"""', '"""Mock results page"""', '"""ask-cfpb-search-results"""', 'self.english_parent_page'], {'language': '"""en"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerResultsPage',\n 'Mock results page', 'ask-cfpb-search-results', self.\n english_parent_page, language='en', live=True)\n", (11142, 11290), False, 'from v1.util.migrations import get_or_create_page\n'), ((11401, 11412), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11410, 11412), False, 'import mock\n'), ((11520, 11531), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (11529, 11531), False, 'import mock\n'), ((12178, 12338), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerResultsPage"""', '"""Mock Spanish results page"""', '"""respuestas"""', 'self.spanish_parent_page'], {'language': '"""es"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerResultsPage',\n 'Mock Spanish results page', 'respuestas', self.spanish_parent_page,\n language='es', live=True)\n", (12196, 12338), False, 'from v1.util.migrations import get_or_create_page\n'), ((12450, 12461), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (12459, 12461), False, 'import mock\n'), ((12569, 12580), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (12578, 12580), False, 'import mock\n'), ((13246, 13412), 'v1.util.migrations.get_or_create_page', 'get_or_create_page', (['apps', '"""ask_cfpb"""', '"""AnswerResultsPage"""', '"""Mock results page"""', '"""ask-cfpb-search-results"""', 'self.english_parent_page'], {'language': '"""en"""', 'live': '(True)'}), "(apps, 'ask_cfpb', 'AnswerResultsPage',\n 'Mock results page', 'ask-cfpb-search-results', self.\n english_parent_page, language='en', live=True)\n", (13264, 13412), False, 'from v1.util.migrations import get_or_create_page\n'), ((13523, 13534), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (13532, 13534), False, 'import mock\n'), ((13705, 13716), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (13714, 13716), False, 'import mock\n'), ((14361, 14387), 'json.loads', 'json.loads', (['result.content'], {}), '(result.content)\n', (14371, 14387), False, 'import json\n'), ((14621, 14647), 'json.loads', 'json.loads', (['result.content'], {}), '(result.content)\n', (14631, 14647), False, 'import json\n'), ((14832, 14843), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (14841, 14843), False, 'import mock\n'), ((15173, 15199), 'json.loads', 'json.loads', (['result.content'], {}), '(result.content)\n', (15183, 15199), False, 'import json\n'), ((15444, 15455), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (15453, 15455), False, 'import mock\n'), ((15824, 15850), 'json.loads', 'json.loads', (['result.content'], {}), '(result.content)\n', (15834, 15850), False, 'import json\n'), ((16058, 16071), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (16069, 16071), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((16222, 16235), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (16233, 16235), False, 'from django.http import Http404, HttpRequest, 
QueryDict\n'), ((16426, 16439), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (16437, 16439), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((16989, 17002), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (17000, 17002), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((17025, 17056), 'django.http.QueryDict', 'QueryDict', (['category_querystring'], {}), '(category_querystring)\n', (17034, 17056), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((17074, 17102), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {}), '(request)\n', (17093, 17102), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((17435, 17448), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (17446, 17448), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((17471, 17502), 'django.http.QueryDict', 'QueryDict', (['audience_querystring'], {}), '(audience_querystring)\n', (17480, 17502), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((17520, 17548), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {}), '(request)\n', (17539, 17548), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((17913, 17926), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (17924, 17926), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((17949, 17975), 'django.http.QueryDict', 'QueryDict', (['tag_querystring'], {}), '(tag_querystring)\n', (17958, 17975), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((17993, 18036), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {'language': '"""es"""'}), "(request, language='es')\n", (18012, 18036), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((18448, 18461), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (18459, 18461), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((18484, 18510), 'django.http.QueryDict', 'QueryDict', (['tag_querystring'], {}), '(tag_querystring)\n', (18493, 18510), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((18528, 18571), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {'language': '"""en"""'}), "(request, language='en')\n", (18547, 18571), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((18899, 18912), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (18910, 18912), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((18935, 18957), 'django.http.QueryDict', 'QueryDict', (['querystring'], {}), '(querystring)\n', (18944, 18957), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((2795, 2864), 'ask_cfpb.views.view_answer', 'view_answer', (['test_request', '"""test-question"""', '"""en"""', 'self.test_answer.pk'], {}), "(test_request, 'test-question', 'en', self.test_answer.pk)\n", (2806, 2864), False, 'from ask_cfpb.views import view_answer\n'), ((4525, 4549), 'ask_cfpb.views.annotate_links', 'annotate_links', (['"""answer"""'], {}), "('answer')\n", (4539, 4549), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((5135, 5159), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-en"""'], {}), "('ask-search-en')\n", (5142, 5159), False, 'from django.core.urlresolvers import 
NoReverseMatch, reverse\n'), ((6335, 6359), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-en"""'], {}), "('ask-search-en')\n", (6342, 6359), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((7234, 7258), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-en"""'], {}), "('ask-search-en')\n", (7241, 7258), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((8695, 8719), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-en"""'], {}), "('ask-search-en')\n", (8702, 8719), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((10783, 10834), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-es"""'], {'kwargs': "{'language': 'es'}"}), "('ask-search-es', kwargs={'language': 'es'})\n", (10790, 10834), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((11727, 11751), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-en"""'], {}), "('ask-search-en')\n", (11734, 11751), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((12776, 12827), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-es"""'], {'kwargs': "{'language': 'es'}"}), "('ask-search-es', kwargs={'language': 'es'})\n", (12783, 12827), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((13923, 13980), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-en-json"""'], {'kwargs': "{'as_json': 'json'}"}), "('ask-search-en-json', kwargs={'as_json': 'json'})\n", (13930, 13980), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((14285, 14315), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-autocomplete-en"""'], {}), "('ask-autocomplete-en')\n", (14292, 14315), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((14506, 14563), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-autocomplete-es"""'], {'kwargs': "{'language': 'es'}"}), "('ask-autocomplete-es', kwargs={'language': 'es'})\n", (14513, 14563), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((15031, 15061), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-autocomplete-en"""'], {}), "('ask-autocomplete-en')\n", (15038, 15061), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((15643, 15700), 'django.core.urlresolvers.reverse', 'reverse', (['"""ask-autocomplete-es"""'], {'kwargs': "{'language': 'es'}"}), "('ask-autocomplete-es', kwargs={'language': 'es'})\n", (15650, 15700), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((16125, 16153), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {}), '(request)\n', (16144, 16153), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((16333, 16361), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {}), '(request)\n', (16352, 16361), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((16524, 16552), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {}), '(request)\n', (16543, 16552), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((19011, 19039), 'ask_cfpb.views.redirect_ask_search', 'redirect_ask_search', (['request'], {}), '(request)\n', (19030, 19039), False, 'from ask_cfpb.views import annotate_links, ask_search, redirect_ask_search\n'), ((4740, 4791), 
'django.core.urlresolvers.reverse', 'reverse', (['"""ask-search-en"""'], {'kwargs': "{'language': 'zz'}"}), "('ask-search-en', kwargs={'language': 'zz'})\n", (4747, 4791), False, 'from django.core.urlresolvers import NoReverseMatch, reverse\n'), ((11981, 11994), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (11992, 11994), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((13059, 13072), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (13070, 13072), False, 'from django.http import Http404, HttpRequest, QueryDict\n'), ((14154, 14182), 'json.loads', 'json.loads', (['response.content'], {}), '(response.content)\n', (14164, 14182), False, 'import json\n')] |