| content | avg_line_length | max_line_length | alphanum_fraction | licenses | repository_name | path | size | lang |
|---|---|---|---|---|---|---|---|---|
| stringlengths 7..928k | float64 3.5..33.8k | int64 6..139k | float64 0.08..0.96 | sequence | stringlengths 7..104 | stringlengths 4..230 | int64 7..928k | stringclasses 1 value |
# Copyright 2020 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python AsyncIO implementation of the GRPC helloworld.Greeter server."""
import logging
import asyncio
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
class Greeter(helloworld_pb2_grpc.GreeterServicer):
async def SayHello(
self, request: helloworld_pb2.HelloRequest,
context: grpc.aio.ServicerContext) -> helloworld_pb2.HelloReply:
return helloworld_pb2.HelloReply(message='Hello, %s!' % request.name)
async def serve() -> None:
server = grpc.aio.server()
helloworld_pb2_grpc.add_GreeterServicer_to_server(Greeter(), server)
listen_addr = '[::]:50051'
server.add_insecure_port(listen_addr)
logging.info("Starting server on %s", listen_addr)
await server.start()
try:
await server.wait_for_termination()
except KeyboardInterrupt:
        # Shuts down the server with 0 seconds of grace period, so any
        # in-flight RPCs are aborted immediately. With a non-zero grace
        # period the server would stop accepting new RPCs but let existing
        # ones finish within that window.
await server.stop(0)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
asyncio.run(serve())
| 34.215686 | 78 | 0.731805 | ["Apache-2.0"] | 1261385937/grpc | examples/python/helloworld/async_greeter_server.py | 1,745 | Python |
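
A matching async client sketch for the server above (not part of the original file; it assumes the same generated helloworld_pb2 modules and a server listening on localhost:50051):

```python
import asyncio
import logging

import grpc
import helloworld_pb2
import helloworld_pb2_grpc


async def run() -> None:
    # An insecure channel is fine for local experimentation; production code
    # would use grpc.aio.secure_channel with proper credentials.
    async with grpc.aio.insecure_channel('localhost:50051') as channel:
        stub = helloworld_pb2_grpc.GreeterStub(channel)
        response = await stub.SayHello(helloworld_pb2.HelloRequest(name='you'))
        print('Greeter client received: ' + response.message)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    asyncio.run(run())
```
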
import scipy.sparse as sp
import numpy as np
import torch
import time
import os
from configparser import ConfigParser
import sys
sys.path.append('/home/shiyan/project/gcn_for_prediction_of_protein_interactions/')
from src.util.load_data import load_data, sparse_to_tuple, mask_test_edges, preprocess_graph
from src.util.loss import arga_loss_function, varga_loss_function
from src.util.metrics import get_roc_score
from src.util import define_optimizer
from src.graph_nheads_att_gan.model import NHGATModelGAN
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
class Train():
def __init__(self):
pass
def train_model(self, config_path):
if os.path.exists(config_path) and (os.path.split(config_path)[1].split('.')[0] == 'config') and (
os.path.splitext(config_path)[1].split('.')[1] == 'cfg'):
# load config file
config = ConfigParser()
config.read(config_path)
section = config.sections()[0]
# data catalog path
data_catalog = config.get(section, "data_catalog")
# train file path
train_file_name = config.get(section, "train_file_name")
# model save/load path
model_path = config.get(section, "model_path")
# model param config
hidden_dim1 = config.getint(section, "hidden_dim1")
hidden_dim2 = config.getint(section, "hidden_dim2")
hidden_dim3 = config.getint(section, 'hidden_dim3')
num_heads = config.getint(section, 'num_heads')
dropout = config.getfloat(section, "dropout")
vae_bool = config.getboolean(section, 'vae_bool')
alpha = config.getfloat(section, 'alpha')
lr = config.getfloat(section, "lr")
lr_decay = config.getfloat(section, 'lr_decay')
weight_decay = config.getfloat(section, "weight_decay")
gamma = config.getfloat(section, "gamma")
momentum = config.getfloat(section, "momentum")
eps = config.getfloat(section, "eps")
clip = config.getfloat(section, "clip")
epochs = config.getint(section, "epochs")
optimizer_name = config.get(section, "optimizer")
            # load the data
adj = load_data(os.path.join(data_catalog, train_file_name))
num_nodes = adj.shape[0]
num_edges = adj.sum()
features = sparse_to_tuple(sp.identity(num_nodes))
num_features = features[2][1]
            # remove diagonal elements
            # The subtracted term extracts adj_orig's diagonal (as a 1-D array), adds an
            # axis, and builds a diagonal matrix containing only those diagonal entries.
adj_orig = adj - sp.dia_matrix((adj.diagonal()[np.newaxis, :], [0]), shape=adj.shape)
adj_orig.eliminate_zeros()
adj_train, train_edges, val_edges, val_edges_false, test_edges, test_edges_false = mask_test_edges(adj_orig)
adj = adj_train
            # returns the coords, data and shape of D^{-0.5} S D^{-0.5}, where S = A + I
adj_norm = preprocess_graph(adj)
adj_label = adj_train + sp.eye(adj_train.shape[0])
# adj_label = sparse_to_tuple(adj_label)
adj_label = torch.FloatTensor(adj_label.toarray()).to(DEVICE)
            '''
            Note that every entry of adj is either 1 or 0. pos_weight is the ratio of negative
            (absent) edges to positive (present) edges in the training adjacency matrix, and it
            is used by the weighted binary cross-entropy loss. When the two classes are
            imbalanced (e.g. far more negative edges than positive ones), passing a weight lets
            the loss up-weight the minority class and down-weight the majority class, which
            usually improves results. Reference: https://www.zhihu.com/question/383567632
            Every negative edge gets weight 1 and every positive edge gets weight pos_weight.
            '''
pos_weight = float(adj.shape[0] * adj.shape[0] - num_edges) / num_edges
norm = adj.shape[0] * adj.shape[0] / float((adj.shape[0] * adj.shape[0] - adj.sum()) * 2)
# create model
print('create model ...')
model = NHGATModelGAN(num_features, hidden_dim1=hidden_dim1, hidden_dim2=hidden_dim2, hidden_dim3=hidden_dim3, num_heads=num_heads, dropout=dropout, alpha=alpha, vae_bool=vae_bool)
# define optimizer
if optimizer_name == 'adam':
optimizer = define_optimizer.define_optimizer_adam(model, lr=lr, weight_decay=weight_decay)
elif optimizer_name == 'adamw':
optimizer = define_optimizer.define_optimizer_adamw(model, lr=lr, weight_decay=weight_decay)
elif optimizer_name == 'sgd':
optimizer = define_optimizer.define_optimizer_sgd(model, lr=lr, momentum=momentum,
weight_decay=weight_decay)
elif optimizer_name == 'adagrad':
optimizer = define_optimizer.define_optimizer_adagrad(model, lr=lr, lr_decay=lr_decay,
weight_decay=weight_decay)
elif optimizer_name == 'rmsprop':
optimizer = define_optimizer.define_optimizer_rmsprop(model, lr=lr, weight_decay=weight_decay,
momentum=momentum)
elif optimizer_name == 'adadelta':
optimizer = define_optimizer.define_optimizer_adadelta(model, lr=lr, weight_decay=weight_decay)
else:
                raise NameError('Unknown optimizer name: %s' % optimizer_name)
model = model.to(DEVICE)
            # A sparse tensor is represented by a pair of dense tensors: a 2-D
            # index tensor and a 1-D value tensor; providing both constructs it.
adj_norm = torch.sparse.FloatTensor(torch.LongTensor(adj_norm[0].T),
torch.FloatTensor(adj_norm[1]),
torch.Size(adj_norm[2]))
features = torch.sparse.FloatTensor(torch.LongTensor(features[0].T),
torch.FloatTensor(features[1]),
torch.Size(features[2])).to_dense()
adj_norm = adj_norm.to(DEVICE)
features = features.to(DEVICE)
norm = torch.FloatTensor(np.array(norm)).to(DEVICE)
pos_weight = torch.tensor(pos_weight).to(DEVICE)
num_nodes = torch.tensor(num_nodes).to(DEVICE)
print('start training...')
best_valid_roc_score = float('-inf')
hidden_emb = None
model.train()
for epoch in range(epochs):
t = time.time()
optimizer.zero_grad()
                # decoded adjacency matrix plus the discriminator outputs
recovered, dis_real, dis_fake, mu, logvar = model(features, adj_norm)
if vae_bool:
loss = varga_loss_function(preds=recovered, labels=adj_label,
mu=mu, logvar=logvar,
dis_real=dis_real, dis_fake=dis_fake,
n_nodes=num_nodes,
norm=norm, pos_weight=pos_weight)
else:
loss = arga_loss_function(preds=recovered, labels=adj_label,
dis_real=dis_real, dis_fake=dis_fake,
norm=norm, pos_weight=pos_weight)
loss.backward()
torch.nn.utils.clip_grad_norm_(model.parameters(), clip)
cur_loss = loss.item()
optimizer.step()
hidden_emb = mu.data.cpu().numpy()
                # evaluate on the validation set
roc_score, ap_score = get_roc_score(hidden_emb, adj_orig, val_edges, val_edges_false)
                # keep the best ROC score
if roc_score > best_valid_roc_score:
best_valid_roc_score = roc_score
                    # No need to save the whole model: hidden_emb is enough,
                    # because inference decodes via inner products of hidden_emb.
np.save(model_path, hidden_emb)
print("Epoch:", '%04d' % (epoch + 1), "train_loss = ", "{:.5f}".format(cur_loss),
"val_roc_score = ", "{:.5f}".format(roc_score),
"average_precision_score = ", "{:.5f}".format(ap_score),
"time=", "{:.5f}".format(time.time() - t)
)
print("Optimization Finished!")
            # evaluate on the test set
roc_score, ap_score = get_roc_score(hidden_emb, adj_orig, test_edges, test_edges_false)
print('test roc score: {}'.format(roc_score))
print('test ap score: {}'.format(ap_score))
else:
raise FileNotFoundError('File config.cfg not found : ' + config_path)
if __name__ == '__main__':
config_path = os.path.join(os.getcwd(), 'config.cfg')
train = Train()
train.train_model(config_path)
| 47.351351 | 192 | 0.574543 | ["Apache-2.0"] | jiangnanboy/gcn_for_prediction_of_protein_interactions | src/graph_nheads_att_gan/train.py | 9,490 | Python |
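
To make the pos_weight/norm bookkeeping in the training script above concrete, here is a small self-contained sketch with toy numbers (illustrative only, not code from this repository) of how such values typically feed a weighted reconstruction loss:

```python
import torch
import torch.nn.functional as F

# Toy 3-node graph with 2 positive entries out of 9 -> pos_weight = (9 - 2) / 2 = 3.5.
adj_label = torch.tensor([[0., 1., 0.],
                          [1., 0., 0.],
                          [0., 0., 0.]])
logits = torch.randn(3, 3)  # decoder output before the sigmoid

num_entries = adj_label.numel()
num_pos = adj_label.sum()
pos_weight = (num_entries - num_pos) / num_pos           # 3.5
norm = num_entries / float((num_entries - num_pos) * 2)  # 9 / 14

# Positive edges contribute pos_weight times as much as negative ones,
# balancing the two classes in the reconstruction term.
loss = norm * F.binary_cross_entropy_with_logits(
    logits, adj_label, pos_weight=pos_weight)
print(loss.item())
```
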
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for backend_context."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import operator
try:
# python version >= 3.3
from unittest import mock
except ImportError:
import mock # pylint: disable=unused-import
import tensorflow as tf
from google.protobuf import text_format
from tensorboard import context
from tensorboard.backend.event_processing import data_provider
from tensorboard.backend.event_processing import event_accumulator
from tensorboard.backend.event_processing import plugin_event_multiplexer
from tensorboard.compat.proto import summary_pb2
from tensorboard.plugins import base_plugin
from tensorboard.plugins.hparams import api_pb2
from tensorboard.plugins.hparams import backend_context
from tensorboard.plugins.hparams import metadata
from tensorboard.plugins.hparams import plugin_data_pb2
from tensorboard.plugins.scalar import metadata as scalars_metadata
DATA_TYPE_EXPERIMENT = "experiment"
DATA_TYPE_SESSION_START_INFO = "session_start_info"
DATA_TYPE_SESSION_END_INFO = "session_end_info"
class BackendContextTest(tf.test.TestCase):
# Make assertProtoEquals print all the diff.
maxDiff = None # pylint: disable=invalid-name
def setUp(self):
self._mock_tb_context = base_plugin.TBContext()
# TODO(#3425): Remove mocking or switch to mocking data provider
# APIs directly.
self._mock_multiplexer = mock.create_autospec(
plugin_event_multiplexer.EventMultiplexer
)
self._mock_tb_context.multiplexer = self._mock_multiplexer
self._mock_multiplexer.PluginRunToTagToContent.side_effect = (
self._mock_plugin_run_to_tag_to_content
)
self._mock_multiplexer.AllSummaryMetadata.side_effect = (
self._mock_all_summary_metadata
)
self._mock_multiplexer.SummaryMetadata.side_effect = (
self._mock_summary_metadata
)
self._mock_tb_context.data_provider = data_provider.MultiplexerDataProvider(
self._mock_multiplexer, "/path/to/logs"
)
self.session_1_start_info_ = ""
self.session_2_start_info_ = ""
self.session_3_start_info_ = ""
def _mock_all_summary_metadata(self):
result = {}
hparams_content = {
"exp/session_1": {
metadata.SESSION_START_INFO_TAG: self._serialized_plugin_data(
DATA_TYPE_SESSION_START_INFO, self.session_1_start_info_
),
},
"exp/session_2": {
metadata.SESSION_START_INFO_TAG: self._serialized_plugin_data(
DATA_TYPE_SESSION_START_INFO, self.session_2_start_info_
),
},
"exp/session_3": {
metadata.SESSION_START_INFO_TAG: self._serialized_plugin_data(
DATA_TYPE_SESSION_START_INFO, self.session_3_start_info_
),
},
}
scalars_content = {
"exp/session_1": {"loss": b"", "accuracy": b""},
"exp/session_1/eval": {"loss": b"",},
"exp/session_1/train": {"loss": b"",},
"exp/session_2": {"loss": b"", "accuracy": b"",},
"exp/session_2/eval": {"loss": b"",},
"exp/session_2/train": {"loss": b"",},
"exp/session_3": {"loss": b"", "accuracy": b"",},
"exp/session_3/eval": {"loss": b"",},
"exp/session_3xyz/": {"loss2": b"",},
}
for (run, tag_to_content) in hparams_content.items():
result.setdefault(run, {})
for (tag, content) in tag_to_content.items():
m = summary_pb2.SummaryMetadata()
m.data_class = summary_pb2.DATA_CLASS_TENSOR
m.plugin_data.plugin_name = metadata.PLUGIN_NAME
m.plugin_data.content = content
result[run][tag] = m
for (run, tag_to_content) in scalars_content.items():
result.setdefault(run, {})
for (tag, content) in tag_to_content.items():
m = summary_pb2.SummaryMetadata()
m.data_class = summary_pb2.DATA_CLASS_SCALAR
m.plugin_data.plugin_name = scalars_metadata.PLUGIN_NAME
m.plugin_data.content = content
result[run][tag] = m
return result
def _mock_plugin_run_to_tag_to_content(self, plugin_name):
result = {}
for (
run,
tag_to_metadata,
) in self._mock_multiplexer.AllSummaryMetadata().items():
            for (tag, summary_metadata) in tag_to_metadata.items():
                if summary_metadata.plugin_data.plugin_name != plugin_name:
                    continue
                result.setdefault(run, {})
                result[run][tag] = summary_metadata.plugin_data.content
return result
def _mock_summary_metadata(self, run, tag):
return self._mock_multiplexer.AllSummaryMetadata()[run][tag]
def test_experiment_with_experiment_tag(self):
experiment = """
description: 'Test experiment'
metric_infos: [
{ name: { tag: 'current_temp' } }
]
"""
run = "exp"
tag = metadata.EXPERIMENT_TAG
m = summary_pb2.SummaryMetadata()
m.data_class = summary_pb2.DATA_CLASS_TENSOR
m.plugin_data.plugin_name = metadata.PLUGIN_NAME
m.plugin_data.content = self._serialized_plugin_data(
DATA_TYPE_EXPERIMENT, experiment
)
self._mock_multiplexer.AllSummaryMetadata.side_effect = None
self._mock_multiplexer.AllSummaryMetadata.return_value = {run: {tag: m}}
ctxt = backend_context.Context(self._mock_tb_context)
request_ctx = context.RequestContext()
self.assertProtoEquals(
experiment,
ctxt.experiment_from_metadata(
request_ctx, "123", ctxt.hparams_metadata(request_ctx, "123")
),
)
def test_experiment_without_experiment_tag(self):
self.session_1_start_info_ = """
hparams: [
{key: 'batch_size' value: {number_value: 100}},
{key: 'lr' value: {number_value: 0.01}},
{key: 'model_type' value: {string_value: 'CNN'}}
]
"""
self.session_2_start_info_ = """
hparams:[
{key: 'batch_size' value: {number_value: 200}},
{key: 'lr' value: {number_value: 0.02}},
{key: 'model_type' value: {string_value: 'LATTICE'}}
]
"""
self.session_3_start_info_ = """
hparams:[
{key: 'batch_size' value: {number_value: 300}},
{key: 'lr' value: {number_value: 0.05}},
{key: 'model_type' value: {string_value: 'CNN'}}
]
"""
expected_exp = """
hparam_infos: {
name: 'batch_size'
type: DATA_TYPE_FLOAT64
},
hparam_infos: {
name: 'lr'
type: DATA_TYPE_FLOAT64
},
hparam_infos: {
name: 'model_type'
type: DATA_TYPE_STRING
domain_discrete: {
values: [{string_value: 'CNN'},
{string_value: 'LATTICE'}]
}
}
metric_infos: {
name: {group: '', tag: 'accuracy'}
}
metric_infos: {
name: {group: '', tag: 'loss'}
}
metric_infos: {
name: {group: 'eval', tag: 'loss'}
}
metric_infos: {
name: {group: 'train', tag: 'loss'}
}
"""
ctxt = backend_context.Context(self._mock_tb_context)
request_ctx = context.RequestContext()
actual_exp = ctxt.experiment_from_metadata(
request_ctx, "123", ctxt.hparams_metadata(request_ctx, "123")
)
_canonicalize_experiment(actual_exp)
self.assertProtoEquals(expected_exp, actual_exp)
def test_experiment_without_experiment_tag_different_hparam_types(self):
self.session_1_start_info_ = """
hparams:[
{key: 'batch_size' value: {number_value: 100}},
{key: 'lr' value: {string_value: '0.01'}}
]
"""
self.session_2_start_info_ = """
hparams:[
{key: 'lr' value: {number_value: 0.02}},
{key: 'model_type' value: {string_value: 'LATTICE'}}
]
"""
self.session_3_start_info_ = """
hparams:[
{key: 'batch_size' value: {bool_value: true}},
{key: 'model_type' value: {string_value: 'CNN'}}
]
"""
expected_exp = """
hparam_infos: {
name: 'batch_size'
type: DATA_TYPE_STRING
domain_discrete: {
values: [{string_value: '100.0'},
{string_value: 'true'}]
}
}
hparam_infos: {
name: 'lr'
type: DATA_TYPE_STRING
domain_discrete: {
values: [{string_value: '0.01'},
{string_value: '0.02'}]
}
}
hparam_infos: {
name: 'model_type'
type: DATA_TYPE_STRING
domain_discrete: {
values: [{string_value: 'CNN'},
{string_value: 'LATTICE'}]
}
}
metric_infos: {
name: {group: '', tag: 'accuracy'}
}
metric_infos: {
name: {group: '', tag: 'loss'}
}
metric_infos: {
name: {group: 'eval', tag: 'loss'}
}
metric_infos: {
name: {group: 'train', tag: 'loss'}
}
"""
ctxt = backend_context.Context(self._mock_tb_context)
request_ctx = context.RequestContext()
actual_exp = ctxt.experiment_from_metadata(
request_ctx, "123", ctxt.hparams_metadata(request_ctx, "123")
)
_canonicalize_experiment(actual_exp)
self.assertProtoEquals(expected_exp, actual_exp)
def test_experiment_without_experiment_tag_many_distinct_values(self):
self.session_1_start_info_ = """
hparams:[
{key: 'batch_size' value: {number_value: 100}},
{key: 'lr' value: {string_value: '0.01'}}
]
"""
self.session_2_start_info_ = """
hparams:[
{key: 'lr' value: {number_value: 0.02}},
{key: 'model_type' value: {string_value: 'CNN'}}
]
"""
self.session_3_start_info_ = """
hparams:[
{key: 'batch_size' value: {bool_value: true}},
{key: 'model_type' value: {string_value: 'CNN'}}
]
"""
expected_exp = """
hparam_infos: {
name: 'batch_size'
type: DATA_TYPE_STRING
}
hparam_infos: {
name: 'lr'
type: DATA_TYPE_STRING
}
hparam_infos: {
name: 'model_type'
type: DATA_TYPE_STRING
domain_discrete: {
values: [{string_value: 'CNN'}]
}
}
metric_infos: {
name: {group: '', tag: 'accuracy'}
}
metric_infos: {
name: {group: '', tag: 'loss'}
}
metric_infos: {
name: {group: 'eval', tag: 'loss'}
}
metric_infos: {
name: {group: 'train', tag: 'loss'}
}
"""
ctxt = backend_context.Context(
self._mock_tb_context, max_domain_discrete_len=1
)
request_ctx = context.RequestContext()
actual_exp = ctxt.experiment_from_metadata(
request_ctx, "123", ctxt.hparams_metadata(request_ctx, "123"),
)
_canonicalize_experiment(actual_exp)
self.assertProtoEquals(expected_exp, actual_exp)
def _serialized_plugin_data(self, data_oneof_field, text_protobuffer):
oneof_type_dict = {
DATA_TYPE_EXPERIMENT: api_pb2.Experiment,
DATA_TYPE_SESSION_START_INFO: plugin_data_pb2.SessionStartInfo,
DATA_TYPE_SESSION_END_INFO: plugin_data_pb2.SessionEndInfo,
}
protobuffer = text_format.Merge(
text_protobuffer, oneof_type_dict[data_oneof_field]()
)
plugin_data = plugin_data_pb2.HParamsPluginData()
getattr(plugin_data, data_oneof_field).CopyFrom(protobuffer)
return metadata.create_summary_metadata(plugin_data).plugin_data.content
def _canonicalize_experiment(exp):
"""Sorts the repeated fields of an Experiment message."""
exp.hparam_infos.sort(key=operator.attrgetter("name"))
exp.metric_infos.sort(key=operator.attrgetter("name.group", "name.tag"))
for hparam_info in exp.hparam_infos:
if hparam_info.HasField("domain_discrete"):
hparam_info.domain_discrete.values.sort(
key=operator.attrgetter("string_value")
)
if __name__ == "__main__":
tf.test.main()
| 37.452128 | 84 | 0.569805 | ["Apache-2.0"] | aryaman4/tensorboard | tensorboard/plugins/hparams/backend_context_test.py | 14,082 | Python |
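
The test helpers above lean on protobuf text format throughout. As a quick standalone illustration of the same text_format.Merge pattern (using the well-known Struct message rather than TensorBoard's protos):

```python
from google.protobuf import struct_pb2
from google.protobuf import text_format

# Merge parses a text-format protobuf into the supplied message and returns
# it; this is the same mechanism _serialized_plugin_data uses above.
msg = text_format.Merge(
    'fields { key: "lr" value { number_value: 0.01 } }',
    struct_pb2.Struct())
print(msg.fields['lr'].number_value)  # 0.01
```
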
#!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
# Create one-input, one-output, no-fee transaction:
class RawTransactionsTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 4)
def setup_network(self, split=False):
self.nodes = start_nodes(4, self.options.tmpdir)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
def run_test(self):
print "Mining blocks..."
min_relay_tx_fee = self.nodes[0].getnetworkinfo()['relayfee']
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
        # If the fee's positive delta is higher than this value, the tests
        # fail; a negative delta always fails them.
        # The size of the signature of every input may be at most 2 bytes
        # larger than a minimum-sized signature.
        # = 2 bytes * minRelayTxFeePerByte
feeTolerance = 2 * min_relay_tx_fee/1000
self.nodes[2].generate(1)
self.sync_all()
self.nodes[0].generate(121)
self.sync_all()
watchonly_address = self.nodes[0].getnewaddress()
watchonly_pubkey = self.nodes[0].validateaddress(watchonly_address)["pubkey"]
watchonly_amount = Decimal(2000)
self.nodes[3].importpubkey(watchonly_pubkey, "", True)
watchonly_txid = self.nodes[0].sendtoaddress(watchonly_address, watchonly_amount)
self.nodes[0].sendtoaddress(self.nodes[3].getnewaddress(), watchonly_amount / 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 15)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 10)
self.nodes[0].sendtoaddress(self.nodes[2].getnewaddress(), 50)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
###############
# simple test #
###############
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
        assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 22 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0) #test if we have enough inputs
##############################
# simple test with two coins #
##############################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 26 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
################################
# simple test with two outputs #
################################
inputs = [ ]
outputs = { self.nodes[0].getnewaddress() : 26, self.nodes[1].getnewaddress() : 25 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert(len(dec_tx['vin']) > 0)
assert_equal(dec_tx['vin'][0]['scriptSig']['hex'], '')
#########################################################################
# test a fundrawtransaction with a VIN greater than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 50:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#####################################################################
        # test a fundrawtransaction which will not get a change output      #
#####################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 50:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : Decimal(50) - fee - feeTolerance }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
for out in dec_tx['vout']:
totalOut += out['value']
assert_equal(rawtxfund['changepos'], -1)
assert_equal(fee + totalOut, utx['amount']) #compare vin total and totalout+fee
#########################################################################
# test a fundrawtransaction with a VIN smaller than the required amount #
#########################################################################
utx = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
break
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']}]
outputs = { self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
        # Craft a one-byte scriptSig by hand: the raw hex is 4-byte version
        # (8 chars) + 1-byte vin count (2 chars) + 36-byte prevout (72 chars),
        # so the script-length byte sits at hex chars 82-84. "0100" sets the
        # length to 1 and injects a single 0x00 byte as the scriptSig.
        rawtx = rawtx[:82] + "0100" + rawtx[84:]
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for i, out in enumerate(dec_tx['vout']):
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
else:
assert_equal(i, rawtxfund['changepos'])
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
assert_equal("00", dec_tx['vin'][0]['scriptSig']['hex'])
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
###########################################
# test a fundrawtransaction with two VINs #
###########################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
if aUtx['amount'] == 50:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 60 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 1)
assert_equal(len(dec_tx['vout']), 2)
matchingIns = 0
for vinOut in dec_tx['vin']:
for vinIn in inputs:
if vinIn['txid'] == vinOut['txid']:
matchingIns+=1
assert_equal(matchingIns, 2) #we now must see two vins identical to vins given as params
#########################################################
# test a fundrawtransaction with two VINs and two vOUTs #
#########################################################
utx = False
utx2 = False
listunspent = self.nodes[2].listunspent()
for aUtx in listunspent:
if aUtx['amount'] == 10:
utx = aUtx
if aUtx['amount'] == 50:
utx2 = aUtx
assert(utx!=False)
inputs = [ {'txid' : utx['txid'], 'vout' : utx['vout']},{'txid' : utx2['txid'], 'vout' : utx2['vout']} ]
outputs = { self.nodes[0].getnewaddress() : 60, self.nodes[0].getnewaddress() : 10 }
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(utx['txid'], dec_tx['vin'][0]['txid'])
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
fee = rawtxfund['fee']
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
totalOut = 0
matchingOuts = 0
for out in dec_tx['vout']:
totalOut += out['value']
if out['scriptPubKey']['addresses'][0] in outputs:
matchingOuts+=1
assert_equal(matchingOuts, 2)
assert_equal(len(dec_tx['vout']), 3)
##############################################
# test a fundrawtransaction with invalid vin #
##############################################
listunspent = self.nodes[2].listunspent()
inputs = [ {'txid' : "1c7f966dab21119bac53213a2bc7532bff1fa844c124fd750a7d0b1332440bd1", 'vout' : 0} ] #invalid vin!
outputs = { self.nodes[0].getnewaddress() : 10}
rawtx = self.nodes[2].createrawtransaction(inputs, outputs)
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
try:
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
raise AssertionError("Spent more than available")
except JSONRPCException as e:
assert("Insufficient" in e.error['message'])
############################################################
#compare fee of a standard pubkeyhash transaction
inputs = []
outputs = {self.nodes[1].getnewaddress():11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction with multiple outputs
inputs = []
outputs = {self.nodes[1].getnewaddress():11,self.nodes[1].getnewaddress():12,self.nodes[1].getnewaddress():1,self.nodes[1].getnewaddress():13,self.nodes[1].getnewaddress():2,self.nodes[1].getnewaddress():3}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendmany("", outputs)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a 2of2 multisig p2sh transaction
# create 2of2 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
mSigObj = self.nodes[1].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
inputs = []
outputs = {mSigObj:11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
#compare fee of a standard pubkeyhash transaction
# create 4of5 addr
addr1 = self.nodes[1].getnewaddress()
addr2 = self.nodes[1].getnewaddress()
addr3 = self.nodes[1].getnewaddress()
addr4 = self.nodes[1].getnewaddress()
addr5 = self.nodes[1].getnewaddress()
addr1Obj = self.nodes[1].validateaddress(addr1)
addr2Obj = self.nodes[1].validateaddress(addr2)
addr3Obj = self.nodes[1].validateaddress(addr3)
addr4Obj = self.nodes[1].validateaddress(addr4)
addr5Obj = self.nodes[1].validateaddress(addr5)
mSigObj = self.nodes[1].addmultisigaddress(4, [addr1Obj['pubkey'], addr2Obj['pubkey'], addr3Obj['pubkey'], addr4Obj['pubkey'], addr5Obj['pubkey']])
inputs = []
outputs = {mSigObj:11}
rawTx = self.nodes[0].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[0].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[0].sendtoaddress(mSigObj, 11)
signedFee = self.nodes[0].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance)
############################################################
############################################################
# spend a 2of2 multisig transaction over fundraw
# create 2of2 addr
addr1 = self.nodes[2].getnewaddress()
addr2 = self.nodes[2].getnewaddress()
addr1Obj = self.nodes[2].validateaddress(addr1)
addr2Obj = self.nodes[2].validateaddress(addr2)
mSigObj = self.nodes[2].addmultisigaddress(2, [addr1Obj['pubkey'], addr2Obj['pubkey']])
# send 12 ESMA to msig addr
txId = self.nodes[0].sendtoaddress(mSigObj, 12)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
oldBalance = self.nodes[1].getbalance()
inputs = []
outputs = {self.nodes[1].getnewaddress():11}
rawTx = self.nodes[2].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[2].fundrawtransaction(rawTx)
signedTx = self.nodes[2].signrawtransaction(fundedTx['hex'])
txId = self.nodes[2].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
# make sure funds are received at node1
assert_equal(oldBalance+Decimal('11.0000000'), self.nodes[1].getbalance())
############################################################
# locked wallet test
self.nodes[1].encryptwallet("test")
self.nodes.pop(1)
stop_nodes(self.nodes)
wait_bitcoinds()
self.nodes = start_nodes(4, self.options.tmpdir)
# This test is not meant to test fee estimation and we'd like
# to be sure all txs are sent at a consistent desired feerate
for node in self.nodes:
node.settxfee(min_relay_tx_fee)
connect_nodes_bi(self.nodes,0,1)
connect_nodes_bi(self.nodes,1,2)
connect_nodes_bi(self.nodes,0,2)
connect_nodes_bi(self.nodes,0,3)
self.is_network_split=False
self.sync_all()
try:
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), 12)
raise AssertionError("Wallet unlocked without passphrase")
except JSONRPCException as e:
assert('walletpassphrase' in e.error['message'])
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():11}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#now we need to unlock
self.nodes[1].walletpassphrase("test", 100)
signedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(signedTx['hex'])
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
        # make sure funds are received at node0
assert_equal(oldBalance+Decimal('511.0000000'), self.nodes[0].getbalance())
###############################################
# multiple (~19) inputs tx test | Compare fee #
###############################################
#empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
#create same transaction over sendtoaddress
txId = self.nodes[1].sendmany("", outputs)
signedFee = self.nodes[1].getrawmempool(True)[txId]['fee']
#compare fee
feeDelta = Decimal(fundedTx['fee']) - Decimal(signedFee)
assert(feeDelta >= 0 and feeDelta <= feeTolerance*19) #~19 inputs
#############################################
# multiple (~19) inputs tx test | sign/send #
#############################################
#again, empty node1, send some small coins from node0 to node1
self.nodes[1].sendtoaddress(self.nodes[0].getnewaddress(), self.nodes[1].getbalance(), "", "", True)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
for i in range(0,20):
self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 0.01)
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
#fund a tx with ~20 small inputs
oldBalance = self.nodes[0].getbalance()
inputs = []
outputs = {self.nodes[0].getnewaddress():0.15,self.nodes[0].getnewaddress():0.04}
rawTx = self.nodes[1].createrawtransaction(inputs, outputs)
fundedTx = self.nodes[1].fundrawtransaction(rawTx)
fundedAndSignedTx = self.nodes[1].signrawtransaction(fundedTx['hex'])
txId = self.nodes[1].sendrawtransaction(fundedAndSignedTx['hex'])
self.sync_all()
self.nodes[0].generate(1)
self.sync_all()
assert_equal(oldBalance+Decimal('500.19000000'), self.nodes[0].getbalance()) #0.19+block reward
#####################################################
# test fundrawtransaction with OP_RETURN and no vin #
#####################################################
rawtx = "0100000000010000000000000000066a047465737400000000"
dec_tx = self.nodes[2].decoderawtransaction(rawtx)
assert_equal(len(dec_tx['vin']), 0)
assert_equal(len(dec_tx['vout']), 1)
rawtxfund = self.nodes[2].fundrawtransaction(rawtx)
dec_tx = self.nodes[2].decoderawtransaction(rawtxfund['hex'])
assert_greater_than(len(dec_tx['vin']), 0) # at least one vin
assert_equal(len(dec_tx['vout']), 2) # one change output added
##################################################
# test a fundrawtransaction using only watchonly #
##################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount / 2}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 1)
assert_equal(res_dec["vin"][0]["txid"], watchonly_txid)
assert("fee" in result.keys())
assert_greater_than(result["changepos"], -1)
###############################################################
# test fundrawtransaction using the entirety of watched funds #
###############################################################
inputs = []
outputs = {self.nodes[2].getnewaddress() : watchonly_amount}
rawtx = self.nodes[3].createrawtransaction(inputs, outputs)
result = self.nodes[3].fundrawtransaction(rawtx, True)
res_dec = self.nodes[0].decoderawtransaction(result["hex"])
assert_equal(len(res_dec["vin"]), 2)
assert(res_dec["vin"][0]["txid"] == watchonly_txid or res_dec["vin"][1]["txid"] == watchonly_txid)
assert_greater_than(result["fee"], 0)
assert_greater_than(result["changepos"], -1)
assert_equal(result["fee"] + res_dec["vout"][result["changepos"]]["value"], watchonly_amount / 10)
signedtx = self.nodes[3].signrawtransaction(result["hex"])
assert(not signedtx["complete"])
signedtx = self.nodes[0].signrawtransaction(signedtx["hex"])
assert(signedtx["complete"])
self.nodes[0].sendrawtransaction(signedtx["hex"])
if __name__ == '__main__':
RawTransactionsTest().main()
| 40.438017 | 214 | 0.556019 | ["MIT"] | alik918/esmacoin | qa/rpc-tests/fundrawtransaction.py | 24,465 | Python |
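
One step worth unpacking is the raw-hex splice rawtx[:82] + "0100" + rawtx[84:] in the small-VIN test above. A sketch of the offset arithmetic, assuming standard pre-segwit transaction serialization:

```python
# Hex layout of a pre-segwit raw transaction up to the first scriptSig:
#   version      4 bytes ->  8 hex chars (offset 0)
#   vin count    1 byte  ->  2 hex chars (offset 8)
#   prev txid   32 bytes -> 64 hex chars (offset 10)
#   prev vout    4 bytes ->  8 hex chars (offset 74)
#   script len   1 byte  ->  2 hex chars (offset 82)
script_len_offset = (4 + 1 + 32 + 4) * 2
assert script_len_offset == 82
# Replacing hex chars 82:84 with "0100" sets the script length to 1 and
# injects a single 0x00 byte as the scriptSig, which the test then asserts.
```
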
# Copyright (c) 2017, John Skinner
import unittest
import numpy as np
import arvet.database.tests.database_connection as dbconn
from arvet.config.path_manager import PathManager
import arvet.batch_analysis.task as task
class MockTask(task.Task):
def run_task(self, path_manager: PathManager):
pass
def get_unique_name(self) -> str:
return "mock_task_{0}".format(self.pk)
class TestTaskDatabase(unittest.TestCase):
@classmethod
def setUpClass(cls):
dbconn.connect_to_test_db()
def setUp(self):
        # Remove the collection at the start of the test, so that we're sure it's empty
task.Task._mongometa.collection.drop()
@classmethod
def tearDownClass(cls):
# Clean up after ourselves by dropping the collection for this model
task.Task._mongometa.collection.drop()
def test_stores_and_loads_simple(self):
obj = MockTask(state=task.JobState.UNSTARTED)
obj.save()
# Load all the entities
all_entities = list(task.Task.objects.all())
self.assertGreaterEqual(len(all_entities), 1)
self.assertEqual(all_entities[0], obj)
all_entities[0].delete()
def test_stores_and_loads_all_params(self):
obj = MockTask(
state=task.JobState.RUNNING,
node_id='test-hpc',
job_id=15,
num_cpus=3,
num_gpus=150,
memory_requirements='4KB',
expected_duration='100:00:00'
)
obj.save()
# Load all the entities
all_entities = list(task.Task.objects.all())
self.assertGreaterEqual(len(all_entities), 1)
self.assertEqual(all_entities[0], obj)
all_entities[0].delete()
def test_stores_and_loads_after_change_state(self):
obj = MockTask(
state=task.JobState.RUNNING,
node_id='test-hpc',
job_id=15,
num_cpus=3,
num_gpus=150,
memory_requirements='4KB',
expected_duration='100:00:00'
)
obj.save()
all_entities = list(task.Task.objects.all())
self.assertGreaterEqual(len(all_entities), 1)
self.assertEqual(all_entities[0], obj)
obj.mark_job_failed()
obj.save()
all_entities = list(task.Task.objects.all())
self.assertGreaterEqual(len(all_entities), 1)
self.assertEqual(all_entities[0], obj)
obj.mark_job_started('test_node', 143)
obj.save()
all_entities = list(task.Task.objects.all())
self.assertGreaterEqual(len(all_entities), 1)
self.assertEqual(all_entities[0], obj)
obj.mark_job_complete()
obj.save()
all_entities = list(task.Task.objects.all())
self.assertGreaterEqual(len(all_entities), 1)
self.assertEqual(all_entities[0], obj)
class TestTask(unittest.TestCase):
def test_mark_job_started_changes_unstarted_to_running(self):
subject = MockTask(state=task.JobState.UNSTARTED)
self.assertTrue(subject.is_unstarted)
self.assertFalse(subject.is_running)
subject.mark_job_started('test', 12)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
def test_mark_job_started_doesnt_affect_already_running_jobs(self):
subject = MockTask(state=task.JobState.RUNNING, node_id='external', job_id=3)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
self.assertFalse(subject.is_finished)
subject.mark_job_started('test', 12)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
self.assertFalse(subject.is_finished)
self.assertEqual('external', subject.node_id)
self.assertEqual(3, subject.job_id)
def test_mark_job_started_doesnt_affect_finished_jobs(self):
subject = MockTask(state=task.JobState.DONE)
self.assertFalse(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertTrue(subject.is_finished)
subject.mark_job_started('test', 12)
self.assertFalse(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertTrue(subject.is_finished)
self.assertIsNone(subject.node_id)
self.assertIsNone(subject.job_id)
def test_mark_job_failed_changes_running_to_unstarted(self):
subject = MockTask(state=task.JobState.RUNNING, node_id='test', job_id=5)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
subject.mark_job_failed()
self.assertTrue(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertIsNone(subject.node_id)
self.assertIsNone(subject.job_id)
def test_mark_job_failed_increases_failed_count(self):
subject = MockTask(state=task.JobState.RUNNING, node_id='test', job_id=5, failure_count=4)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
subject.mark_job_failed()
self.assertTrue(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertEqual(5, subject.failure_count)
def test_mark_job_failed_doesnt_affect_unstarted_jobs(self):
subject = MockTask(state=task.JobState.UNSTARTED)
self.assertTrue(subject.is_unstarted)
self.assertFalse(subject.is_finished)
subject.mark_job_failed()
self.assertTrue(subject.is_unstarted)
self.assertFalse(subject.is_finished)
self.assertEqual(0, subject.failure_count)
def test_mark_job_failed_doesnt_affect_finished_jobs(self):
subject = MockTask(state=task.JobState.DONE)
self.assertFalse(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertTrue(subject.is_finished)
subject.mark_job_failed()
self.assertFalse(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertTrue(subject.is_finished)
self.assertIsNone(subject.node_id)
self.assertIsNone(subject.job_id)
def test_mark_job_complete_changes_running_to_finished(self):
subject = MockTask(state=task.JobState.RUNNING, node_id='test', job_id=5)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
subject.mark_job_complete()
self.assertFalse(subject.is_running)
self.assertTrue(subject.is_finished)
self.assertIsNone(subject.node_id)
self.assertIsNone(subject.job_id)
def test_mark_job_complete_doesnt_affect_unstarted_jobs(self):
subject = MockTask(state=task.JobState.UNSTARTED)
self.assertTrue(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertFalse(subject.is_finished)
subject.mark_job_complete()
self.assertTrue(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertFalse(subject.is_finished)
def test_mark_job_complete_doesnt_affect_finished_jobs(self):
subject = MockTask(state=task.JobState.DONE)
self.assertFalse(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertTrue(subject.is_finished)
subject.mark_job_complete()
self.assertFalse(subject.is_unstarted)
self.assertFalse(subject.is_running)
self.assertTrue(subject.is_finished)
self.assertIsNone(subject.node_id)
self.assertIsNone(subject.job_id)
def test_change_job_id_doesnt_affect_state(self):
subject = MockTask(state=task.JobState.RUNNING)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
self.assertFalse(subject.is_finished)
subject.change_job_id('test', 12)
self.assertFalse(subject.is_unstarted)
self.assertTrue(subject.is_running)
self.assertFalse(subject.is_finished)
def test_change_job_id_changes_job_info(self):
subject = MockTask(state=task.JobState.RUNNING, node_id='external', job_id=3)
self.assertEqual('external', subject.node_id)
self.assertEqual(3, subject.job_id)
subject.change_job_id('test', 12)
self.assertEqual('test', subject.node_id)
self.assertEqual(12, subject.job_id)
def test_change_job_id_doesnt_affect_unstarted_jobs(self):
subject = MockTask(state=task.JobState.UNSTARTED)
self.assertTrue(subject.is_unstarted)
subject.change_job_id('test', 12)
self.assertTrue(subject.is_unstarted)
self.assertIsNone(subject.node_id)
self.assertIsNone(subject.job_id)
def test_change_job_id_doesnt_affect_finished_jobs(self):
subject = MockTask(state=task.JobState.DONE, node_id='external', job_id=3)
self.assertTrue(subject.is_finished)
self.assertEqual('external', subject.node_id)
self.assertEqual(3, subject.job_id)
subject.change_job_id('test', 12)
self.assertTrue(subject.is_finished)
self.assertEqual('external', subject.node_id)
self.assertEqual(3, subject.job_id)
def test_state_remains_consistent(self):
random = np.random.RandomState(144135)
subject = MockTask(state=task.JobState.UNSTARTED)
for idx in range(50):
change = random.randint(0, 4 if idx > 30 else 3)
if idx > 30 and change == 3:
subject.mark_job_complete()
elif change == 2:
subject.change_job_id('external', random.randint(0, 1000))
elif change == 1:
subject.mark_job_started('test', random.randint(0, 1000))
else:
subject.mark_job_failed()
# Make sure that the node id and job id match the state
if subject.is_unstarted or subject.is_finished:
self.assertIsNone(subject.node_id)
self.assertIsNone(subject.job_id)
else:
self.assertIsNotNone(subject.node_id)
self.assertIsNotNone(subject.job_id)
| 38.830769 | 98 | 0.682448 | ["BSD-2-Clause"] | jskinn/arvet | arvet/batch_analysis/tests/test_task.py | 10,096 | Python |
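
A condensed usage sketch of the state machine these tests pin down (hypothetical flow; it assumes the MockTask and task definitions from the file above and mirrors the transitions asserted in the tests):

```python
t = MockTask(state=task.JobState.UNSTARTED)
t.mark_job_started('node-1', 42)  # UNSTARTED -> RUNNING, records node_id/job_id
t.mark_job_failed()               # RUNNING -> UNSTARTED, clears ids, bumps failure_count
t.mark_job_started('node-2', 43)
t.mark_job_complete()             # RUNNING -> DONE, clears ids again
assert t.is_finished and t.node_id is None and t.job_id is None
```
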
from datetime import datetime
import uuid
from django.db import models
import django.forms as forms
import django_filters.fields as filter_fields
from apps.ineedstudent.models import Hospital
from .filters import StudentJobRequirementsFilter
from .models import * # noqa: F401, F403
from .models import COUNTRY_CODE_CHOICES
class LocationFilterModel(models.Model):
plz = models.CharField(max_length=5, null=True)
distance = models.IntegerField(default=0)
countrycode = models.CharField(max_length=2, choices=COUNTRY_CODE_CHOICES, default="DE",)
uuid = models.CharField(max_length=100, blank=True, unique=True, default=uuid.uuid4)
class StudentListFilterModel(models.Model):
hospital = models.ForeignKey(Hospital, on_delete=models.CASCADE)
    # LocationFilterModel is assigned directly as a class attribute here, not as
    # a database relation; a ForeignKey (e.g. models.ForeignKey(LocationFilterModel,
    # on_delete=models.CASCADE)) is presumably what was intended.
    location = LocationFilterModel
uuid = models.CharField(max_length=100, blank=True, unique=True, default=uuid.uuid4)
registration_date = models.DateTimeField(default=datetime.now, blank=True, null=True)
name = models.CharField(max_length=100)
jrf = StudentJobRequirementsFilter()
for f_name, jr_filter in jrf.base_filters.items():
if type(jr_filter.field) == forms.NullBooleanField:
StudentListFilterModel.add_to_class(
f_name, models.NullBooleanField(default=None, null=True)
)
elif type(jr_filter.field) == forms.DecimalField:
StudentListFilterModel.add_to_class(f_name, models.IntegerField(default=0))
elif type(jr_filter.field) == filter_fields.ChoiceField:
StudentListFilterModel.add_to_class(
f_name, models.IntegerField(default=0, choices=jr_filter.field.choices)
)
elif type(jr_filter.field) == forms.DateField:
StudentListFilterModel.add_to_class(
f_name, models.DateField(null=True, default=datetime.now)
)
else:
raise ValueError(
"I do not know what to do with field type '%s' for '%s'"
% (type(jr_filter.field), f_name)
)
| 34.666667 | 93 | 0.730263 | ["MIT"] | match4healthcare/match4healthcare | backend/apps/iamstudent/models_persistent_filter.py | 1,976 | Python |
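
The loop above mutates the model class at import time via add_to_class. A minimal standalone sketch of that Django pattern (hypothetical model and app label, with throwaway settings so it runs outside a project):

```python
import django
from django.conf import settings

if not settings.configured:
    # Minimal configuration so the sketch runs standalone.
    settings.configure(INSTALLED_APPS=['django.contrib.contenttypes'])
    django.setup()

from django.db import models


class DemoModel(models.Model):
    class Meta:
        app_label = 'demo'  # hypothetical app label


# add_to_class contributes the field exactly as if it had been declared in
# the class body; this is how the filter fields above end up on
# StudentListFilterModel.
DemoModel.add_to_class('flag', models.BooleanField(default=False))
print([f.name for f in DemoModel._meta.get_fields()])  # e.g. ['id', 'flag']
```
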
# coding: utf-8
"""
vautoscaling
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from ncloud_vautoscaling.model.process import Process # noqa: F401,E501
class ResumeProcessesResponse(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'request_id': 'str',
'return_code': 'str',
'return_message': 'str',
'total_rows': 'int',
'process_list': 'list[Process]'
}
attribute_map = {
'request_id': 'requestId',
'return_code': 'returnCode',
'return_message': 'returnMessage',
'total_rows': 'totalRows',
'process_list': 'processList'
}
def __init__(self, request_id=None, return_code=None, return_message=None, total_rows=None, process_list=None): # noqa: E501
"""ResumeProcessesResponse - a model defined in Swagger""" # noqa: E501
self._request_id = None
self._return_code = None
self._return_message = None
self._total_rows = None
self._process_list = None
self.discriminator = None
if request_id is not None:
self.request_id = request_id
if return_code is not None:
self.return_code = return_code
if return_message is not None:
self.return_message = return_message
if total_rows is not None:
self.total_rows = total_rows
if process_list is not None:
self.process_list = process_list
@property
def request_id(self):
"""Gets the request_id of this ResumeProcessesResponse. # noqa: E501
:return: The request_id of this ResumeProcessesResponse. # noqa: E501
:rtype: str
"""
return self._request_id
@request_id.setter
def request_id(self, request_id):
"""Sets the request_id of this ResumeProcessesResponse.
:param request_id: The request_id of this ResumeProcessesResponse. # noqa: E501
:type: str
"""
self._request_id = request_id
@property
def return_code(self):
"""Gets the return_code of this ResumeProcessesResponse. # noqa: E501
:return: The return_code of this ResumeProcessesResponse. # noqa: E501
:rtype: str
"""
return self._return_code
@return_code.setter
def return_code(self, return_code):
"""Sets the return_code of this ResumeProcessesResponse.
:param return_code: The return_code of this ResumeProcessesResponse. # noqa: E501
:type: str
"""
self._return_code = return_code
@property
def return_message(self):
"""Gets the return_message of this ResumeProcessesResponse. # noqa: E501
:return: The return_message of this ResumeProcessesResponse. # noqa: E501
:rtype: str
"""
return self._return_message
@return_message.setter
def return_message(self, return_message):
"""Sets the return_message of this ResumeProcessesResponse.
:param return_message: The return_message of this ResumeProcessesResponse. # noqa: E501
:type: str
"""
self._return_message = return_message
@property
def total_rows(self):
"""Gets the total_rows of this ResumeProcessesResponse. # noqa: E501
:return: The total_rows of this ResumeProcessesResponse. # noqa: E501
:rtype: int
"""
return self._total_rows
@total_rows.setter
def total_rows(self, total_rows):
"""Sets the total_rows of this ResumeProcessesResponse.
:param total_rows: The total_rows of this ResumeProcessesResponse. # noqa: E501
:type: int
"""
self._total_rows = total_rows
@property
def process_list(self):
"""Gets the process_list of this ResumeProcessesResponse. # noqa: E501
:return: The process_list of this ResumeProcessesResponse. # noqa: E501
:rtype: list[Process]
"""
return self._process_list
@process_list.setter
def process_list(self, process_list):
"""Sets the process_list of this ResumeProcessesResponse.
:param process_list: The process_list of this ResumeProcessesResponse. # noqa: E501
:type: list[Process]
"""
self._process_list = process_list
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResumeProcessesResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 28.453704 | 129 | 0.607062 | ["MIT"] | NaverCloudPlatform/ncloud-sdk-python | lib/services/vautoscaling/ncloud_vautoscaling/model/resume_processes_response.py | 6,146 | Python |
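
Since the model above is generated boilerplate, a short usage sketch may help (illustrative values only):

```python
resp = ResumeProcessesResponse(
    request_id='req-123',
    return_code='0',
    return_message='success',
    total_rows=0,
    process_list=[],
)
# to_dict() walks swagger_types and recursively serializes nested models.
print(resp.to_dict())
# {'request_id': 'req-123', 'return_code': '0', 'return_message': 'success',
#  'total_rows': 0, 'process_list': []}
```
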
#!/usr/bin/env python3
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from . import css_checker
from os import path as os_path
import re
from sys import path as sys_path
import unittest
_HERE = os_path.dirname(os_path.abspath(__file__))
sys_path.append(os_path.join(_HERE, '..', '..'))
from PRESUBMIT_test_mocks import MockInputApi, MockOutputApi, MockFile
class CssCheckerTest(unittest.TestCase):
def setUp(self):
super(CssCheckerTest, self).setUp()
self.input_api = MockInputApi()
self.checker = css_checker.CSSChecker(self.input_api, MockOutputApi())
def _create_file(self, contents, filename):
self.input_api.files.append(MockFile(filename, contents.splitlines()))
def VerifyContentIsValid(self, contents, filename='fake.css'):
self._create_file(contents, filename)
results = self.checker.RunChecks()
self.assertEqual(len(results), 0)
def VerifyContentsProducesOutput(self, contents, output, filename='fake.css'):
self._create_file(contents, filename)
results = self.checker.RunChecks()
self.assertEqual(len(results), 1)
self.assertEqual(results[0].message, filename + ':\n' + output.strip())
def testCssAlphaWithAtBlock(self):
self.VerifyContentsProducesOutput("""
<include src="../shared/css/cr/ui/overlay.css">
<include src="chrome://resources/totally-cool.css" />
/* A hopefully safely ignored comment and @media statement. /**/
@media print {
div {
display: block;
color: red;
}
}
.rule {
z-index: 5;
<if expr="not is macosx">
background-image: url(chrome://resources/BLAH); /* TODO(dbeam): Fix this. */
background-color: rgb(235, 239, 249);
</if>
<if expr="is_macosx">
background-color: white;
background-image: url(chrome://resources/BLAH2);
</if>
color: black;
}
<if expr="is_macosx">
.language-options-right {
visibility: hidden;
opacity: 1; /* TODO(dbeam): Fix this. */
}
</if>
@media (prefers-color-scheme: dark) {
a[href] {
z-index: 3;
color: blue;
}
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
display: block;
color: red;
z-index: 5;
color: black;
z-index: 3;
color: blue;""")
def testCssStringWithAt(self):
self.VerifyContentIsValid("""
#logo {
background-image: url(images/google_logo.png@2x);
}
body.alternate-logo #logo {
-webkit-mask-image: url(images/google_logo.png@2x);
background: none;
}
div {
margin-inline-start: 5px;
}
.stuff1 {
}
.stuff2 {
}
""")
def testCssAlphaWithNonStandard(self):
self.VerifyContentsProducesOutput("""
div {
/* A hopefully safely ignored comment and @media statement. /**/
color: red;
-webkit-margin-before-collapse: discard;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
color: red;
-webkit-margin-before-collapse: discard;""")
def testCssAlphaWithLongerDashedProps(self):
self.VerifyContentsProducesOutput("""
div {
border-inline-start: 5px; /* A hopefully removed comment. */
border: 5px solid red;
}""", """
- Alphabetize properties and list vendor specific (i.e. -webkit) above standard.
border-inline-start: 5px;
border: 5px solid red;""")
def testCssAlphaWithVariables(self):
self.VerifyContentIsValid("""
#id {
--zzyxx-xylophone: 3px;
--aardvark-animal: var(--zzyxz-xylophone);
}
""")
def testCssBracesHaveSpaceBeforeAndNothingAfter(self):
self.VerifyContentsProducesOutput("""
/* Hello! */div/* Comment here*/{
display: block;
}
blah /* hey! */
{
rule: value;
}
.mixed-in {
display: none;
}
.this.is { /* allowed */
rule: value;
}""", """
- Start braces ({) end a selector, have a space before them and no rules after.
div{
{""")
def testMixins(self):
self.VerifyContentsProducesOutput(
"""
.mixed-in {
--css-mixin: {
color: red;
}
}""", """
- Avoid using CSS mixins. Use CSS shadow parts, CSS variables, or common CSS \
classes instead.
--css-mixin: {""")
def testCssClassesUseDashes(self):
self.VerifyContentsProducesOutput("""
.className,
.ClassName,
.class-name /* We should not catch this. */,
.class_name,
[i18n-values*='.innerHTML:'] {
display: block;
}""", """
- Classes use .dash-form.
.className,
.ClassName,
.class_name,""")
def testCssCloseBraceOnNewLine(self):
self.VerifyContentsProducesOutput("""
@-webkit-keyframe blah {
from { height: rotate(-10turn); }
100% { height: 500px; }
}
#id { /* $i18n{*} and $i18nRaw{*} should be ignored. */
rule: $i18n{someValue};
rule2: $i18nRaw{someValue};
}
#rule {
rule: value; }""", """
- Always put a rule closing brace (}) on a new line.
rule: value; }""")
def testCssColonsHaveSpaceAfter(self):
self.VerifyContentsProducesOutput("""
div:not(.class):not([attr=5]), /* We should not catch this. */
div:not(.class):not([attr]) /* Nor this. */ {
background: url(data:image/jpeg,asdfasdfsadf); /* Ignore this. */
background: -webkit-linear-gradient(left, red,
80% blah blee blar);
color: red;
display:block;
}""", """
- Colons (:) should have a space after them.
display:block;
- Don't use data URIs in source files. Use grit instead.
background: url(data:image/jpeg,asdfasdfsadf);""")
def testCssFavorSingleQuotes(self):
self.VerifyContentsProducesOutput("""
html[dir="rtl"] body,
html[dir=ltr] body /* TODO(dbeam): Require '' around rtl in future? */ {
font-family: "Open Sans";
<if expr="is_macosx">
blah: blee;
</if>
}""", """
- Use single quotes (') instead of double quotes (") in strings.
html[dir="rtl"] body,
font-family: "Open Sans";""")
def testCssHexCouldBeShorter(self):
self.VerifyContentsProducesOutput("""
#abc,
#abc-,
#abc-ghij,
#abcdef-,
#abcdef-ghij,
#aaaaaa,
#bbaacc {
background-color: #336699; /* Ignore short hex rule if not gray. */
color: #999999;
color: #666;
}""", """
- Use abbreviated hex (#rgb) when in form #rrggbb.
color: #999999; (replace with #999)
- Use rgb() over #hex when not a shade of gray (like #333).
background-color: #336699; (replace with rgb(51, 102, 153))""")
def testCssUseMillisecondsForSmallTimes(self):
self.VerifyContentsProducesOutput("""
.transition-0s /* This is gross but may happen. */ {
transform: one 0.2s;
transform: two .1s;
transform: tree 1s;
transform: four 300ms;
}""", """
- Use milliseconds for time measurements under 1 second.
transform: one 0.2s; (replace with 200ms)
transform: two .1s; (replace with 100ms)""")
def testCssNoDataUrisInSourceFiles(self):
self.VerifyContentsProducesOutput("""
img {
background: url( data:image/jpeg,4\/\/350|\/|3|2 );
}""", """
- Don't use data URIs in source files. Use grit instead.
background: url( data:image/jpeg,4\/\/350|\/|3|2 );""")
def testCssNoMixinShims(self):
self.VerifyContentsProducesOutput("""
:host {
--good-property: red;
--not-okay-mixin_-_not-okay-property: green;
}""", """
- Don't override custom properties created by Polymer's mixin shim. Set \
mixins or documented custom properties directly.
--not-okay-mixin_-_not-okay-property: green;""")
def testCssNoQuotesInUrl(self):
self.VerifyContentsProducesOutput("""
img {
background: url('chrome://resources/images/blah.jpg');
background: url("../../folder/hello.png");
}""", """
- Use single quotes (') instead of double quotes (") in strings.
background: url("../../folder/hello.png");
- Don't use quotes in url().
background: url('chrome://resources/images/blah.jpg');
background: url("../../folder/hello.png");""")
def testCssOneRulePerLine(self):
self.VerifyContentsProducesOutput("""
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type,
a:not([hidden]):not(.custom-appearance):not([version=1]):first-of-type ~
input[type='checkbox']:not([hidden]),
div {
background: url(chrome://resources/BLAH);
rule: value; /* rule: value; */
rule: value; rule: value;
}
""", """
- One rule per line (what not to do: color: red; margin: 0;).
rule: value; rule: value;""")
def testCssOneSelectorPerLine(self):
self.VerifyContentsProducesOutput(
"""
a,
div,a,
div,/* Hello! */ span,
#id.class([dir=rtl]):not(.class):any(a, b, d),
div :is(:not(a), #b, .c) {
rule: value;
}
a,
div,a {
some-other: rule here;
}""", """
- One selector per line (what not to do: a, b {}).
div,a,
div, span,
div,a {""")
def testCssPseudoElementDoubleColon(self):
self.VerifyContentsProducesOutput("""
a:href,
br::after,
::-webkit-scrollbar-thumb,
a:not([empty]):hover:focus:active, /* shouldn't catch here and above */
abbr:after,
.tree-label:empty:after,
b:before,
:-WebKit-ScrollBar {
rule: value;
}""", """
- Pseudo-elements should use double colon (i.e. ::after).
:after (should be ::after)
:after (should be ::after)
:before (should be ::before)
:-WebKit-ScrollBar (should be ::-WebKit-ScrollBar)
""")
def testCssRgbIfNotGray(self):
self.VerifyContentsProducesOutput(
"""
#abc,
#aaa,
#aabbcc {
background: -webkit-linear-gradient(left, from(#abc), to(#def));
color: #bad;
color: #bada55;
}""", """
- Use rgb() over #hex when not a shade of gray (like #333).
background: -webkit-linear-gradient(left, from(#abc), to(#def)); """
"""(replace with rgb(170, 187, 204), rgb(221, 238, 255))
color: #bad; (replace with rgb(187, 170, 221))
color: #bada55; (replace with rgb(186, 218, 85))""")
def testPrefixedLogicalAxis(self):
self.VerifyContentsProducesOutput("""
.test {
-webkit-logical-height: 50%;
-webkit-logical-width: 50%;
-webkit-max-logical-height: 200px;
-webkit-max-logical-width: 200px;
-webkit-min-logical-height: 100px;
-webkit-min-logical-width: 100px;
}
""", """
- Unprefix logical axis property.
-webkit-logical-height: 50%; (replace with block-size)
-webkit-logical-width: 50%; (replace with inline-size)
-webkit-max-logical-height: 200px; (replace with max-block-size)
-webkit-max-logical-width: 200px; (replace with max-inline-size)
-webkit-min-logical-height: 100px; (replace with min-block-size)
-webkit-min-logical-width: 100px; (replace with min-inline-size)""")
def testPrefixedLogicalSide(self):
self.VerifyContentsProducesOutput("""
.test {
-webkit-border-after: 1px solid blue;
-webkit-border-after-color: green;
-webkit-border-after-style: dotted;
-webkit-border-after-width: 10px;
-webkit-border-before: 2px solid blue;
-webkit-border-before-color: green;
-webkit-border-before-style: dotted;
-webkit-border-before-width: 20px;
-webkit-border-end: 3px solid blue;
-webkit-border-end-color: green;
-webkit-border-end-style: dotted;
-webkit-border-end-width: 30px;
-webkit-border-start: 4px solid blue;
-webkit-border-start-color: green;
-webkit-border-start-style: dotted;
-webkit-border-start-width: 40px;
-webkit-margin-after: 1px;
-webkit-margin-after-collapse: discard;
-webkit-margin-before: 2px;
-webkit-margin-before-collapse: discard;
-webkit-margin-end: 3px;
-webkit-margin-end-collapse: discard;
-webkit-margin-start: 4px;
-webkit-margin-start-collapse: discard;
-webkit-padding-after: 1px;
-webkit-padding-before: 2px;
-webkit-padding-end: 3px;
-webkit-padding-start: 4px;
}
""", """
- Unprefix logical side property.
-webkit-border-after: 1px solid blue; (replace with border-block-end)
-webkit-border-after-color: green; (replace with border-block-end-color)
-webkit-border-after-style: dotted; (replace with border-block-end-style)
-webkit-border-after-width: 10px; (replace with border-block-end-width)
-webkit-border-before: 2px solid blue; (replace with border-block-start)
-webkit-border-before-color: green; (replace with border-block-start-color)
-webkit-border-before-style: dotted; (replace with border-block-start-style)
-webkit-border-before-width: 20px; (replace with border-block-start-width)
-webkit-border-end: 3px solid blue; (replace with border-inline-end)
-webkit-border-end-color: green; (replace with border-inline-end-color)
-webkit-border-end-style: dotted; (replace with border-inline-end-style)
-webkit-border-end-width: 30px; (replace with border-inline-end-width)
-webkit-border-start: 4px solid blue; (replace with border-inline-start)
-webkit-border-start-color: green; (replace with border-inline-start-color)
-webkit-border-start-style: dotted; (replace with border-inline-start-style)
-webkit-border-start-width: 40px; (replace with border-inline-start-width)
-webkit-margin-after: 1px; (replace with margin-block-end)
-webkit-margin-before: 2px; (replace with margin-block-start)
-webkit-margin-end: 3px; (replace with margin-inline-end)
-webkit-margin-start: 4px; (replace with margin-inline-start)
-webkit-padding-after: 1px; (replace with padding-block-end)
-webkit-padding-before: 2px; (replace with padding-block-start)
-webkit-padding-end: 3px; (replace with padding-inline-end)
-webkit-padding-start: 4px; (replace with padding-inline-start)""")
def testStartEndInsteadOfLeftRight(self):
self.VerifyContentsProducesOutput("""
.inline-node {
--var-is-ignored-left: 10px;
--var-is-ignored-right: 10px;
border-left-color: black;
border-right: 1px solid blue; /* csschecker-disable-line left-right */
margin-right: 5px;
padding-left: 10px; /* csschecker-disable-line some-other-thing */
text-align: right;
}""", """
- Use -start/end instead of -left/right (https://goo.gl/gQYY7z, add /* csschecker-disable-line left-right */ to suppress)
border-left-color: black; (replace with border-inline-start-color)
margin-right: 5px; (replace with margin-inline-end)
padding-left: 10px; (replace with padding-inline-start)
text-align: right; (replace with text-align: end)
""")
def testCssZeroWidthLengths(self):
self.VerifyContentsProducesOutput("""
@-webkit-keyframe anim {
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}
}
#logo {
background-image: url(images/google_logo.png@2x);
}
body.alternate-logo #logo {
-webkit-mask-image: url(images/google_logo.png@2x);
}
/* http://crbug.com/359682 */
#spinner-container #spinner {
-webkit-animation-duration: 1.0s;
background-image: url(images/google_logo0.svg);
}
.media-button.play > .state0.active,
.media-button[state='0'] > .state0.normal /* blah */, /* blee */
.media-button[state='0']:not(.disabled):hover > .state0.hover {
-webkit-animation: anim 0s;
-webkit-animation-duration: anim 0ms;
-webkit-transform: scale(0%);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
color: hsl(0, 0%, 85%); /* Shouldn't trigger error. */
opacity: .0;
opacity: 0.0;
opacity: 0.;
}
@page {
border-width: 0mm;
height: 0cm;
width: 0in;
}""", """
- Use "0" for zero-width lengths (i.e. 0px -> 0)
width: 0px;
-webkit-transform: scale(0%);
background-position-x: 0em;
background-position-y: 0ex;
border-width: 0em;
opacity: .0;
opacity: 0.0;
opacity: 0.;
border-width: 0mm;
height: 0cm;
width: 0in;
""")
def testInlineStyleInHtml(self):
self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<head>
<!-- Don't warn about problems outside of style tags
html,
body {
margin: 0;
height: 100%;
}
-->
<style>
body {
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testInlineStyleInHtmlWithIncludes(self):
self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<style include="fake-shared-css other-shared-css">
body {
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testInlineStyleInHtmlWithTagsInComments(self):
self.VerifyContentsProducesOutput("""<!doctype html>
<html>
<style>
body {
/* You better ignore the <tag> in this comment! */
flex-direction:column;
}
</style>
</head>
</html>""", """
- Colons (:) should have a space after them.
flex-direction:column;
""", filename='test.html')
def testRemoveAtBlocks(self):
self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.magic {
color: #000;
}
}"""), """
.magic {
color: #000;
}""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.magic {
--mixin-definition: {
color: red;
};
}
}"""), """
.magic {
--mixin-definition: {
color: red;
};
}""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@keyframes jiggle {
from { left: 0; }
50% { left: 100%; }
to { left: 10%; }
}"""), """
from { left: 0; }
50% { left: 100%; }
to { left: 10%; }""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@media print {
.rule1 {
color: black;
}
.rule2 {
margin: 1in;
}
}"""), """
.rule1 {
color: black;
}
.rule2 {
margin: 1in;
}""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@media (prefers-color-scheme: dark) {
.rule1 {
color: gray;
}
.rule2 {
margin: .5in;
}
@keyframe dark-fade {
0% { background: black; }
100% { background: darkgray; }
}
}"""), """
.rule1 {
color: gray;
}
.rule2 {
margin: .5in;
}
0% { background: black; }
100% { background: darkgray; }""")
self.assertEqual(self.checker.RemoveAtBlocks("""
@-webkit-keyframe anim {
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}
}"""), """
0% { /* Ignore key frames */
width: 0px;
}
10% {
width: 10px;
}
50% { background-image: url(blah.svg); }
100% {
width: 100px;
}""")
if __name__ == '__main__':
unittest.main()
| 27.007418 | 121 | 0.658683 | ["BSD-3-Clause-No-Nuclear-License-2014", "BSD-3-Clause"] | Alan-love/chromium | tools/web_dev_style/css_checker_test.py | 18,203 | Python |
from typing import Dict, List, Optional
from db.sql.dal.general import sanitize
from db.sql.utils import query_to_dicts
class Region:
admin: str
admin_id: str
region_type: str
country: str
country_id: str
admin1: Optional[str]
admin1_id: Optional[str]
admin2: Optional[str]
admin2_id: Optional[str]
admin3: Optional[str]
admin3_id: Optional[str]
region_coordinate: Optional[str]
alias: Optional[str]
COUNTRY = 'Q6256'
ADMIN1 = 'Q10864048'
ADMIN2 = 'Q13220204'
ADMIN3 = 'Q13221722'
def __init__(self, **kwargs):
self.admin = kwargs['admin']
self.admin_id = kwargs['admin_id']
self.region_type = kwargs['region_type']
self.country = kwargs['country']
self.country_id = kwargs['country_id']
self.admin1 = kwargs.get('admin1')
self.admin1_id = kwargs.get('admin1_id')
self.admin2 = kwargs.get('admin2')
self.admin2_id = kwargs.get('admin2_id')
self.admin3 = kwargs.get('admin3')
self.admin3_id = kwargs.get('admin3_id')
self.region_coordinate = kwargs.get('region_coordinate')
self.alias = kwargs.get('alias')
        # country, admin1 and admin2 queries return the admin fields as well as the
        # country/admin1/admin2 fields. admin3 queries do not, so we fill those
        # fields ourselves.
        if self.region_type == Region.ADMIN3:
            self.admin3_id, self.admin3 = self.admin_id, self.admin
def __getitem__(self, key: str) -> str:
return getattr(self, key)
def query_country_qnodes(countries: List[str]) -> Dict[str, Optional[str]]:
# Translates countries to Q-nodes. Returns a dictionary of each input country and its QNode (None if not found)
# We look for countries in a case-insensitive fashion.
if not countries:
return {}
regions = query_countries(countries)
result_dict: Dict[str, Optional[str]] = {region.country: region.country_id for region in regions}
# The result dictionary contains all the countries we have found, we need to add those we did not find
found_countries = set([country.lower() for country in result_dict.keys()])
for country in countries:
if country.lower() not in found_countries:
result_dict[country] = None
return result_dict
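# Example (hypothetical data): if 'Canada' exists in the database with QNode 'Q16'
# and 'Atlantis' does not, query_country_qnodes(['Canada', 'Atlantis']) returns
# {'Canada': 'Q16', 'Atlantis': None}.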
def list_to_where(field: str, elements: List[str], lower=False) -> Optional[str]:
if not elements:
return None
if lower:
elements = [element.lower() for element in elements]
field = f"LOWER({field})"
    sanitized = [sanitize(element) for element in elements]
    quoted = [f"'{element}'" for element in sanitized]
joined = ', '.join(quoted)
return f"{field} IN ({joined})"
def region_where_clause(region_field: str, region_list: List[str], region_id_field: str,
region_id_list: List[str], alias_field: Optional[str] = None) -> str:
if not region_list and not region_id_list:
return "1=1"
region_where = list_to_where(region_field, region_list, lower=True) or "0=1"
if alias_field:
alias_where = list_to_where(alias_field, region_list, lower=True) or "0=1"
else:
alias_where = "0=1"
region_id_where = list_to_where(region_id_field, region_id_list) or "0=1"
return f'({region_where} OR {region_id_where} OR {alias_where})'
def _query_regions(query: str) -> List[Region]:
dicts = query_to_dicts(query)
return [Region(**d) for d in dicts]
def query_countries(countries: List[str] = [], country_ids: List[str] = []) -> List[Region]:
""" Returns a list of countries:
If countries or country_ids are not empty, only those countries are returned (all of those in both lists)
Otherwise, all countries are returned
"""
where = region_where_clause('s_country_label.text', countries, 'e_country.node1', country_ids)
query = f'''
SELECT e_country.node1 AS admin_id,
s_country_label.text AS admin,
'Q6256' AS region_type,
e_country.node1 AS country_id,
s_country_label.text AS country,
NULL as admin1_id,
NULL as admin1,
NULL as admin2_id,
NULL as admin2,
NULL as admin3_id,
NULL as admin3
FROM edges e_country
JOIN edges e_country_label JOIN strings s_country_label ON (e_country_label.id=s_country_label.edge_id)
ON (e_country.node1=e_country_label.node1 AND e_country_label.label='label')
WHERE e_country.label='P31' AND e_country.node2='Q6256' AND {where}
ORDER BY country
'''
return _query_regions(query)
def query_admin1s(country: Optional[str] = None, country_id: Optional[str] = None, admin1s: List[str] = [],
admin1_ids: List[str] = []) -> List[Region]:
"""
Returns a list of admin1s. If country or country_id is specified, return the admin1s only of that country.
If admin1s or admin1_ids are provided, only those admins are returned.
If all arguments are empty, all admin1s in the system are returned.
"""
if country and country_id:
raise ValueError('Only one of country, country_id may be specified')
if country_id:
country_where = f"e_country.node2='{country_id}'"
elif country: # We are certain country is not None here, but need an `elif` because mypy isn't certain
country_where = f"LOWER(s_country_label.text)='{country.lower()}'"
else:
country_where = "1=1"
admin1_where = region_where_clause('s_admin1_label.text', admin1s, 'e_admin1.node1', admin1_ids)
query = f'''
SELECT e_admin1.node1 AS admin_id,
s_admin1_label.text AS admin,
'Q10864048' AS region_type,
e_country.node2 AS country_id,
s_country_label.text AS country,
e_admin1.node1 as admin1_id,
s_admin1_label.text as admin1,
NULL as admin2_id,
NULL as admin2,
NULL as admin3_id,
NULL as admin3
FROM edges e_admin1
JOIN edges e_admin1_label JOIN strings s_admin1_label ON (e_admin1_label.id=s_admin1_label.edge_id)
ON (e_admin1.node1=e_admin1_label.node1 AND e_admin1_label.label='label')
JOIN edges e_country ON (e_country.node1=e_admin1.node1 AND e_country.label='P17')
JOIN edges e_country_label JOIN strings s_country_label ON (e_country_label.id=s_country_label.edge_id)
ON (e_country.node2=e_country_label.node1 AND e_country_label.label='label')
WHERE e_admin1.label='P31' AND e_admin1.node2='Q10864048' AND {country_where} AND {admin1_where}
ORDER BY admin1
'''
return _query_regions(query)
def query_admin2s(admin1: Optional[str] = None, admin1_id: Optional[str] = None, admin2s: List[str] = [],
admin2_ids: List[str] = []) -> List[Region]:
"""
Returns a list of admin2s. If admin1 or admin1_id is specified, return the admin2s only of that admin1.
If admin2s or admin2_ids are provided, only those admins are returned.
If all arguments are empty, all admin2s in the system are returned.
"""
if admin1 and admin1_id:
raise ValueError('Only one of admin1, admin1_id may be specified')
if admin1_id:
admin1_where = f"e_admin1.node2='{admin1_id}'"
elif admin1:
admin1_where = f"LOWER(s_admin1_label.text)=LOWER('{admin1}')"
else:
admin1_where = "1=1"
admin2_where = region_where_clause('s_admin2_label.text', admin2s, 'e_admin2.node1', admin2_ids)
query = f'''
SELECT e_admin2.node1 AS admin_id,
s_admin2_label.text AS admin,
'Q13220204' AS region_type,
e_country.node2 AS country_id,
s_country_label.text AS country,
e_admin1.node2 AS admin1_id,
s_admin1_label.text AS admin1,
e_admin2.node1 AS admin2_id,
s_admin2_label.text AS admin2,
NULL as admin3_id,
NULL as admin3
FROM edges e_admin2
JOIN edges e_admin2_label JOIN strings s_admin2_label ON (e_admin2_label.id=s_admin2_label.edge_id)
ON (e_admin2.node1=e_admin2_label.node1 AND e_admin2_label.label='label')
JOIN edges e_admin1 ON (e_admin1.node1=e_admin2.node1 AND e_admin1.label='P2006190001')
JOIN edges e_admin1_label JOIN strings s_admin1_label ON (e_admin1_label.id=s_admin1_label.edge_id)
ON (e_admin1.node2=e_admin1_label.node1 AND e_admin1_label.label='label')
JOIN edges e_country ON (e_country.node1=e_admin1.node2 AND e_country.label='P17')
JOIN edges e_country_label JOIN strings s_country_label ON (e_country_label.id=s_country_label.edge_id)
ON (e_country.node2=e_country_label.node1 AND e_country_label.label='label')
WHERE e_admin2.label='P31' AND e_admin2.node2='Q13220204' AND {admin1_where} AND {admin2_where}
ORDER BY admin2
'''
return _query_regions(query)
def query_admin3s(admin2: Optional[str] = None, admin2_id: Optional[str] = None, admin3s: List[str] = [],
admin3_ids: List[str] = [], debug=False) -> List[Region]:
"""
Returns a list of admin3s. If admin2 or admin2_id is specified, return the admin3s only of that admin2.
If admin3s or admin3_ids are provided, only those admins are returned.
If all arguments are empty, all admin3s in the system are returned.
"""
if admin2 and admin2_id:
raise ValueError('Only one of admin2, admin2_id may be specified')
if admin2_id:
admin2_where = f"e_admin2.node2='{admin2_id}'"
elif admin2:
admin2_where = f"LOWER(s_admin2_label.text)=LOWER('{admin2}')"
else:
admin2_where = "1=1"
admin3_where = region_where_clause('s_admin3_label.text', admin3s, 'e_admin3.node1', admin3_ids)
query = f'''
SELECT e_admin3.node1 AS admin_id,
s_admin3_label.text AS admin,
'Q13221722' AS region_type,
e_country.node2 AS country_id,
s_country_label.text AS country,
e_admin1.node2 AS admin1_id,
s_admin1_label.text AS admin1,
e_admin2.node2 AS admin2_id,
s_admin2_label.text AS admin2,
           e_admin3.node1 AS admin3_id,
s_admin3_label.text AS admin3
FROM
edges e_admin3
JOIN edges e_admin3_label JOIN strings s_admin3_label ON (e_admin3_label.id=s_admin3_label.edge_id)
ON (e_admin3.node1=e_admin3_label.node1 AND e_admin3_label.label='label')
JOIN edges e_admin2 ON (e_admin2.node1=e_admin3.node1 AND e_admin2.label='P2006190002')
JOIN edges e_admin2_label JOIN strings s_admin2_label ON (e_admin2_label.id=s_admin2_label.edge_id)
ON (e_admin2.node2=e_admin2_label.node1 AND e_admin2_label.label='label')
JOIN edges e_admin1 ON (e_admin1.node1=e_admin2.node1 AND e_admin1.label='P2006190001')
JOIN edges e_admin1_label JOIN strings s_admin1_label ON (e_admin1_label.id=s_admin1_label.edge_id)
ON (e_admin1.node2=e_admin1_label.node1 AND e_admin1_label.label='label')
JOIN edges e_country ON (e_country.node1=e_admin1.node2 AND e_country.label='P17')
JOIN edges e_country_label JOIN strings s_country_label ON (e_country_label.id=s_country_label.edge_id)
ON (e_country.node2=e_country_label.node1 AND e_country_label.label='label')
WHERE e_admin3.label='P31' AND e_admin3.node2='Q13221722' AND {admin2_where} AND {admin3_where}
ORDER BY admin3
'''
if debug:
print(query)
return _query_regions(query)
def query_admins(admins: List[str] = [], admin_ids: List[str] = [], debug=False) -> List[Region]:
where = region_where_clause('s_region_label.text', admins, 'e_region.node1', admin_ids, 's_region_alias.text')
query = f'''
SELECT e_region.node1 AS admin_id, s_region_label.text AS admin, e_region.node2 AS region_type,
e_country.node2 AS country_id, s_country_label.text AS country,
e_admin1.node2 AS admin1_id, s_admin1_label.text AS admin1,
e_admin2.node2 AS admin2_id, s_admin2_label.text AS admin2,
'POINT(' || c_coordinate.longitude || ' ' || c_coordinate.latitude || ')' as region_coordinate,
s_region_alias.text AS alias
FROM edges e_region
JOIN edges e_region_label ON (e_region_label.node1=e_region.node1 AND e_region_label.label='label')
JOIN strings s_region_label ON (e_region_label.id=s_region_label.edge_id)
JOIN edges e_country
JOIN edges e_country_label
JOIN strings s_country_label
ON (s_country_label.edge_id=e_country_label.id)
ON (e_country.node2=e_country_label.node1 AND e_country_label.label='label')
ON (e_region.node1=e_country.node1 AND e_country.label='P17')
LEFT JOIN edges e_admin1
JOIN edges e_admin1_label
JOIN strings s_admin1_label
ON (s_admin1_label.edge_id=e_admin1_label.id)
ON (e_admin1.node2=e_admin1_label.node1 AND e_admin1_label.label='label')
ON (e_region.node1=e_admin1.node1 AND e_admin1.label='P2006190001')
LEFT JOIN edges e_admin2
JOIN edges e_admin2_label
JOIN strings s_admin2_label
ON (s_admin2_label.edge_id=e_admin2_label.id)
ON (e_admin2.node2=e_admin2_label.node1 AND e_admin2_label.label='label')
ON (e_region.node1=e_admin2.node1 AND e_admin2.label='P2006190002')
LEFT JOIN edges e_coordinate
JOIN coordinates c_coordinate
ON (c_coordinate.edge_id=e_coordinate.id)
ON (e_region.node1=e_coordinate.node1 AND e_coordinate.label='P625')
LEFT JOIN edges e_region_alias
JOIN strings s_region_alias
ON (s_region_alias.edge_id=e_region_alias.id)
ON (e_region.node1=e_region_alias.node1 AND e_region_alias.label='alias')
WHERE e_region.label='P31' AND e_region.node2 IN ('Q6256', 'Q10864048', 'Q13220204', 'Q13221722') AND {where}
'''
if debug:
print(query)
return _query_regions(query)
| 44.449686 | 115 | 0.678882 | ["MIT"] | Otamio/datamart-api | db/sql/dal/regions.py | 14,135 | Python |
# -*- coding: utf-8 -*-
import json
import scrapy
import re
from locations.items import GeojsonPointItem
COOKIES = {
"bm_sz": "04B124C1C96D68082A9F61BAAAF0B6D5~YAAQdjsvF22E8Xl6AQAACr1VfAxPEt+enarZyrOZrBaNvyuX71lK5QPuDR/FgDEWBZVMRhjiIf000W7Z1PiAjxobrz2Y5LcYMH3CvUNvpdS3MjVLUMGwMEBCf9L5nD5Gs9ho2YL8T7Tz7lYvpolvaOlJnKrHyhCFxxk/uyBZ2G/0QrGKLwSaCQShDsz7ink=",
"_abck": "440E40C406E69413DCCC08ABAA3E9022~-1~YAAQdjsvF26E8Xl6AQAACr1VfAYznoJdJhX7TNIZW1Rfh6qRhzquXg+L1TWoaL7nZUjXlNls2iPIKFQrCdrWqY/CNXW+mHyXibInMflIXJi5VVB/Swq53kABYJDuXYSlCunYvJAzMSr1q12NOYswz134Y8HRNzVWhkb2jMS5whmHxS/v0vniIvS1TQtKjEQlMGzQYmN41CmLX0JobipQhDtUB4VyNwztb2DCAZiqDX8BLwWg7h/DtPd4158qU69hNhayFTgWmD76/MiR8/T536tMmcoRyWLl4fEtP/XUmKOcksuZO7dbfNxXBffTxIXPYwf1eO77LNuZTCQq5kfsGZLJX8ODju2KSjnIF1vdnyHAe98FDIm+hw==~-1~-1~-1"
}
HEADERS = {
'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
'accept-encoding': 'gzip, deflate, br',
'cache-control': 'max-age=0',
'referer': 'https://www.aldi.co.uk/store-finder',
'sec-ch-ua': '" Not;A Brand";v="99", "Google Chrome";v="91", "Chromium";v="91"',
'sec-ch-ua-mobile': '?0',
'sec-fetch-dest': 'document',
'sec-fetch-mode': 'navigate',
'sec-fetch-site': 'same-origin',
'sec-fetch-user': '?1',
'upgrade-insecure-requests': '1',
'user-agent': 'Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.84 Safari/537.36',
}
class AldiUKSpider(scrapy.Spider):
name = "aldi_uk"
item_attributes = {'brand': "Aldi"}
allowed_domains = ['aldi.co.uk']
download_delay = 0.5
custom_settings = {
'USER_AGENT': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36',
}
def start_requests(self):
url = 'https://www.aldi.co.uk/sitemap/store-en_gb-gbp'
yield scrapy.http.FormRequest(
url=url,
method='GET',
dont_filter=True,
cookies=COOKIES,
headers=HEADERS,
callback=self.parse
)
def parse(self, response):
response.selector.remove_namespaces()
store_urls = response.xpath('//url/loc/text()').extract()
for store_url in store_urls:
yield scrapy.http.FormRequest(
url=store_url,
method='GET',
dont_filter=True,
cookies=COOKIES,
headers=HEADERS,
callback=self.parse_store
)
def parse_store(self, response):
store_js = response.xpath('//script[@type="text/javascript"]/text()').extract_first()
json_data = re.search('gtmData =(.+?);', store_js).group(1)
data = json.loads(json_data)
geojson_data = response.xpath('//script[@class="js-store-finder-initial-state"][@type="application/json"]/text()').extract_first()
geodata = json.loads(geojson_data)
properties = {
'name': data['seoData']['name'],
'ref': data['seoData']['name'],
'addr_full': data['seoData']['address']['streetAddress'],
'city': data['seoData']['address']['addressLocality'],
'postcode': data['seoData']['address']['postalCode'],
'country': data['seoData']['address']['addressCountry'],
'website': response.request.url,
'opening_hours': str(data['seoData']['openingHours']).replace('[','').replace(']','').replace("'",''),
'lat': geodata['store']['latlng']['lat'],
'lon': geodata['store']['latlng']['lng'],
}
yield GeojsonPointItem(**properties)
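# Run sketch (assumes this file sits in the alltheplaces Scrapy project, which
# defines GeojsonPointItem):
#
#     scrapy crawl aldi_uk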
| 42.988636 | 436 | 0.636532 | ["MIT"] | ChrisSoderberg/alltheplaces | locations/spiders/aldi_uk.py | 3,783 | Python |
import argparse
import logging
from pprint import pformat
from . import guide
from . import settings
log = logging.getLogger(__name__)
def cli(settingsobject=None):
parser = argparse.ArgumentParser(description='Create a CSS/LESS/SASS style guide.')
if not settingsobject:
parser.add_argument('-f', '--settingsfile',
dest='settingsfile', default='vitalstyles.json',
help='Path to settings file. Defaults to "vitalstyles.json".')
parser.add_argument('-l', '--loglevel',
dest='loglevel', default='INFO',
choices=['DEBUG', 'INFO', 'ERROR'], help='Loglevel.')
args = parser.parse_args()
loglevel = getattr(logging, args.loglevel)
logging.basicConfig(
format='[%(name)s] %(levelname)s: %(message)s',
level=loglevel
)
if loglevel > logging.DEBUG:
markdownlogger = logging.getLogger('MARKDOWN')
markdownlogger.setLevel(logging.WARNING)
if not settingsobject:
settingsobject = settings.Settings(args.settingsfile)
logging.debug('Creating vitalstyles styleguide with the following settings:\n%s',
pformat(settingsobject.settings))
guide.Guide(settingsobject).render()
if __name__ == '__main__':
cli()
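# Invocation sketch (the settings file name is the parser default shown above;
# the relative imports mean this must be run as a module):
#
#     python -m vitalstyles.cli -f vitalstyles.json -l DEBUG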
| 29.880952 | 87 | 0.67012 | ["BSD-3-Clause"] | appressoas/vitalstyles | vitalstyles/cli.py | 1,255 | Python |
class Player():
def __init__(self):
print("PLYR FAK SUM BODIE")
| 15.4 | 35 | 0.597403 | ["MIT"] | reecebenson/DADSA-Tennis-PartA | .history/classes/Player_20171106170937.py | 77 | Python |
#!/usr/bin/env python
# vim: set sts=4 sw=4 et:
import time
import xmlrpc.client
from . import players
from . import rpc
from .common import GameState, CardSet, GameError, RuleError, ProtocolError, simple_decorator
from .events import EventList, CardPlayedEvent, MessageEvent, TrickPlayedEvent, TurnEvent, StateChangedEvent
@simple_decorator
def error2fault(func):
"""
Catch known exceptions and translate them to
XML-RPC faults.
"""
def catcher(*args):
try:
return func(*args)
except GameError as error:
raise xmlrpc.client.Fault(GameError.rpc_code, str(error))
except RuleError as error:
raise xmlrpc.client.Fault(RuleError.rpc_code, str(error))
except ProtocolError as error:
raise xmlrpc.client.Fault(ProtocolError.rpc_code, str(error))
return catcher
@simple_decorator
def fault2error(func):
"""
Catch known XML-RPC faults and translate them to
custom exceptions.
"""
def catcher(*args):
try:
return func(*args)
except xmlrpc.client.Fault as error:
error_classes = (GameError, RuleError, ProtocolError)
for klass in error_classes:
if error.faultCode == klass.rpc_code:
raise klass(error.faultString)
raise error
return catcher
class XMLRPCCliPlayer(players.CliPlayer):
"""
XML-RPC command line interface human player.
"""
def __init__(self, player_name):
players.CliPlayer.__init__(self, player_name)
self.game_state = GameState()
self.hand = None
def handle_event(self, event):
if isinstance(event, CardPlayedEvent):
self.card_played(event.player, event.card, event.game_state)
elif isinstance(event, MessageEvent):
self.send_message(event.sender, event.message)
elif isinstance(event, TrickPlayedEvent):
self.trick_played(event.player, event.game_state)
elif isinstance(event, TurnEvent):
self.game_state.update(event.game_state)
state = self.controller.get_state(self.id)
self.hand = state['hand']
self.game_state.update(state['game_state'])
elif isinstance(event, StateChangedEvent):
self.game_state.update(event.game_state)
else:
print("unknown event: %s" % event)
def wait_for_turn(self):
"""
Wait for this player's turn.
"""
while True:
time.sleep(0.5)
if self.controller is not None:
events = self.controller.get_events(self.id)
for event in events:
self.handle_event(event)
if self.game_state.turn_id == self.id:
break
class XMLRPCProxyController():
"""
Client-side proxy object for the server/GameController.
"""
def __init__(self, server_uri):
super(XMLRPCProxyController, self).__init__()
if not server_uri.startswith('http://') and \
not server_uri.startswith('https://'):
server_uri = 'http://' + server_uri
self.server = xmlrpc.client.ServerProxy(server_uri)
self.game_id = None
self.akey = None
@fault2error
def play_card(self, _player, card):
self.server.game.play_card(self.akey, self.game_id, rpc.rpc_encode(card))
@fault2error
def get_events(self, _player_id):
return rpc.rpc_decode(EventList, self.server.get_events(self.akey))
@fault2error
def get_state(self, _player_id):
state = self.server.game.get_state(self.akey, self.game_id)
state['game_state'] = rpc.rpc_decode(GameState, state['game_state'])
state['hand'] = rpc.rpc_decode(CardSet, state['hand'])
return state
@fault2error
def player_quit(self, _player_id):
self.server.player.quit(self.akey)
@fault2error
def register_player(self, player):
player.controller = self
plr_data = self.server.player.register(rpc.rpc_encode(player))
player.id = plr_data['id']
self.akey = plr_data['akey']
@fault2error
def start_game_with_bots(self):
return self.server.game.start_with_bots(self.akey, self.game_id)
@fault2error
def create_game(self):
self.game_id = self.server.game.create(self.akey)
return self.game_id
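# Usage sketch (the server URI and player name are assumptions; requires a
# running tupelo XML-RPC server):
#
#     controller = XMLRPCProxyController('localhost:8052')
#     player = XMLRPCCliPlayer('Alice')
#     controller.register_player(player)
#     controller.create_game()
#     controller.start_game_with_bots()
#     player.wait_for_turn()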
| 31.621429 | 108 | 0.63813 | ["BSD-3-Clause"] | jait/tupelo | tupelo/xmlrpc.py | 4,427 | Python |
import time
import numpy as np
import tensorflow as tf
import layers as L
import vat
FLAGS = tf.app.flags.FLAGS
tf.app.flags.DEFINE_string('device', '/gpu:0', "device")
tf.app.flags.DEFINE_string('dataset', 'cifar10', "{cifar10, svhn}")
tf.app.flags.DEFINE_string('log_dir', "", "log_dir")
tf.app.flags.DEFINE_integer('seed', 1, "initial random seed")
tf.app.flags.DEFINE_bool('validation', False, "")
tf.app.flags.DEFINE_integer('batch_size', 32, "the number of examples in a batch")
tf.app.flags.DEFINE_integer('ul_batch_size', 128, "the number of unlabeled examples in a batch")
tf.app.flags.DEFINE_integer('eval_batch_size', 100, "the number of eval examples in a batch")
tf.app.flags.DEFINE_integer('eval_freq', 5, "")
tf.app.flags.DEFINE_integer('num_epochs', 120, "the number of epochs for training")
tf.app.flags.DEFINE_integer('epoch_decay_start', 80, "epoch of starting learning rate decay")
tf.app.flags.DEFINE_integer('num_iter_per_epoch', 400, "the number of updates per epoch")
tf.app.flags.DEFINE_float('learning_rate', 0.001, "initial leanring rate")
tf.app.flags.DEFINE_float('mom1', 0.9, "initial momentum rate")
tf.app.flags.DEFINE_float('mom2', 0.5, "momentum rate after epoch_decay_start")
tf.app.flags.DEFINE_string('method', 'vat', "{vat, vatent, baseline}")
if FLAGS.dataset == 'cifar10':
from cifar10 import inputs, unlabeled_inputs
elif FLAGS.dataset == 'svhn':
from svhn import inputs, unlabeled_inputs
else:
raise NotImplementedError
NUM_EVAL_EXAMPLES = 5000
def build_training_graph(x, y, ul_x, ul_u, lr, mom):
global_step = tf.get_variable(
name="global_step",
shape=[],
dtype=tf.float32,
initializer=tf.constant_initializer(0.0),
trainable=False,
)
logit = vat.forward(x)
nll_loss = L.ce_loss(logit, y)
with tf.variable_scope(tf.get_variable_scope(), reuse=True):
if FLAGS.method == 'vat':
ul_logit = vat.forward(ul_x, is_training=True, update_batch_stats=False)
vat_loss, ul_u_updated = vat.virtual_adversarial_loss(ul_x, ul_u, ul_logit)
additional_loss = vat_loss
elif FLAGS.method == 'vatent':
ul_logit = vat.forward(ul_x, is_training=True, update_batch_stats=False)
vat_loss, ul_u_updated = vat.virtual_adversarial_loss(ul_x, ul_u, ul_logit)
ent_loss = L.entropy_y_x(ul_logit)
additional_loss = vat_loss + ent_loss
elif FLAGS.method == 'baseline':
additional_loss = 0
else:
raise NotImplementedError
loss = nll_loss + additional_loss
opt = tf.train.AdamOptimizer(learning_rate=lr, beta1=mom)
tvars = tf.trainable_variables()
grads_and_vars = opt.compute_gradients(loss, tvars)
train_op = opt.apply_gradients(grads_and_vars, global_step=global_step)
return loss, train_op, global_step, ul_u_updated
def build_eval_graph(x, y, ul_x, ul_u):
losses = {}
logit = vat.forward(x, is_training=False, update_batch_stats=False)
nll_loss = L.ce_loss(logit, y)
losses['NLL'] = nll_loss
acc = L.accuracy(logit, y)
losses['Acc'] = acc
scope = tf.get_variable_scope()
scope.reuse_variables()
# at_loss = vat.adversarial_loss(x, y, nll_loss, is_training=False)
# losses['AT_loss'] = at_loss
ul_logit = vat.forward(ul_x, is_training=False, update_batch_stats=False)
vat_loss = vat.virtual_adversarial_loss(ul_x, ul_u, ul_logit, is_training=False)
losses['VAT_loss'] = vat_loss
return losses
def main(_):
print(FLAGS.epsilon, FLAGS.top_bn)
np.random.seed(seed=FLAGS.seed)
tf.set_random_seed(np.random.randint(1234))
with tf.Graph().as_default() as g:
with tf.device("/cpu:0"):
images, labels = inputs(batch_size=FLAGS.batch_size,
train=True,
validation=FLAGS.validation,
shuffle=True)
ul_images = tf.placeholder(shape=images.shape, dtype=tf.float32)
'''unlabeled_inputs(batch_size=FLAGS.ul_batch_size,
validation=FLAGS.validation,
shuffle=True)'''
images_eval_train, labels_eval_train = inputs(batch_size=FLAGS.eval_batch_size,
train=True,
validation=FLAGS.validation,
shuffle=True)
ul_images_eval_train = unlabeled_inputs(batch_size=FLAGS.eval_batch_size,
validation=FLAGS.validation,
shuffle=True)
images_eval_test, labels_eval_test = inputs(batch_size=FLAGS.eval_batch_size,
train=False,
validation=FLAGS.validation,
shuffle=True)
def placeholder_like(x, name=None):
return tf.placeholder(shape=x.shape, dtype=tf.float32, name=name)
def random_sphere(shape):
n = tf.random_normal(shape=shape, dtype=tf.float32)
n = tf.reshape(n, shape=(int(shape[0]), -1))
n = tf.nn.l2_normalize(n, dim=1)
n = tf.reshape(n, shape)
return n
def random_sphere_numpy(shape):
n = np.random.normal(size=shape)
proj_shape = tuple([n.shape[0]] + [1 for _ in range(len(shape) - 1)])
return n / np.linalg.norm(n.reshape((n.shape[0], -1)), axis=1).reshape(proj_shape)
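            # Sanity note: every sample returned by random_sphere_numpy() has unit
            # L2 norm, e.g. np.linalg.norm(random_sphere_numpy((4, 3)), axis=1)
            # is approximately [1., 1., 1., 1.].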
print(ul_images.shape)
# ul_u = random_sphere(ul_images.shape)
# ul_u_eval_train = random_sphere(ul_images_eval_train.shape)
# ul_u_eval_test = random_sphere(images_eval_test.shape)
ul_u = placeholder_like(ul_images, "ul_u")
ul_u_eval_train = placeholder_like(ul_images_eval_train, "ul_u_eval_train")
ul_u_eval_test = placeholder_like(images_eval_test, "ul_u_eval_test")
with tf.device(FLAGS.device):
lr = tf.placeholder(tf.float32, shape=[], name="learning_rate")
mom = tf.placeholder(tf.float32, shape=[], name="momentum")
with tf.variable_scope("CNN") as scope:
# Build training graph
loss, train_op, global_step, ul_u_updated = build_training_graph(
images, labels, ul_images, ul_u, lr, mom)
scope.reuse_variables()
# Build eval graph
losses_eval_train = build_eval_graph(images_eval_train, labels_eval_train, ul_images_eval_train, ul_u_eval_train)
losses_eval_test = build_eval_graph(images_eval_test, labels_eval_test, images_eval_test, ul_u_eval_test)
init_op = tf.global_variables_initializer()
if not FLAGS.log_dir:
logdir = None
writer_train = None
writer_test = None
else:
logdir = FLAGS.log_dir
writer_train = tf.summary.FileWriter(FLAGS.log_dir + "/train", g)
writer_test = tf.summary.FileWriter(FLAGS.log_dir + "/test", g)
saver = tf.train.Saver(tf.global_variables())
sv = tf.train.Supervisor(
is_chief=True,
logdir=logdir,
init_op=init_op,
init_feed_dict={lr: FLAGS.learning_rate, mom: FLAGS.mom1},
saver=saver,
global_step=global_step,
summary_op=None,
summary_writer=None,
save_model_secs=150, recovery_wait_secs=0)
ul_images_np = np.load("train_images.npy").reshape((-1, 32, 32, 3))
print("TRUNCATING UL DATA")
ul_images_np = ul_images_np[:FLAGS.batch_size]
ul_u_np = random_sphere_numpy(ul_images_np.shape)
print(ul_images_np.shape, ul_u_np.shape)
print("Training...")
with sv.managed_session() as sess:
for ep in range(FLAGS.num_epochs):
if sv.should_stop():
break
if ep < FLAGS.epoch_decay_start:
feed_dict = {lr: FLAGS.learning_rate, mom: FLAGS.mom1}
else:
decayed_lr = ((FLAGS.num_epochs - ep) / float(
FLAGS.num_epochs - FLAGS.epoch_decay_start)) * FLAGS.learning_rate
feed_dict = {lr: decayed_lr, mom: FLAGS.mom2}
sum_loss = 0
start = time.time()
for i in range(FLAGS.num_iter_per_epoch):
picked = range(FLAGS.batch_size) # np.random.choice(len(ul_images_np), size=FLAGS.batch_size, replace=False)
feed_dict[ul_images] = ul_images_np[picked]
feed_dict[ul_u] = ul_u_np[picked]
ul_u_updated_np, _, batch_loss, _ = sess.run([ul_u_updated, train_op, loss, global_step],
feed_dict=feed_dict)
delta = ul_u_updated_np - ul_u_np[picked]
# print("pos", ul_u_updated_np.reshape((FLAGS.batch_size, -1))[0, :4])
# print("delta", np.linalg.norm(delta.reshape((FLAGS.batch_size, -1)), axis=1)[:4])
print(np.linalg.norm(ul_u_updated_np - ul_u_np[picked]), ul_u_updated_np.reshape((FLAGS.batch_size, -1))[0, :3])
ul_u_np[picked] = ul_u_updated_np
sum_loss += batch_loss
end = time.time()
print("Epoch:", ep, "CE_loss_train:", sum_loss / FLAGS.num_iter_per_epoch, "elapsed_time:", end - start)
if (ep + 1) % FLAGS.eval_freq == 0 or ep + 1 == FLAGS.num_epochs:
# Eval on training data
act_values_dict = {}
feed_dict = {ul_u_eval_train: random_sphere_numpy(ul_u_eval_train.shape)}
                    for key, _ in losses_eval_train.items():
                        act_values_dict[key] = 0
                    n_iter_per_epoch = NUM_EVAL_EXAMPLES // FLAGS.eval_batch_size
                    for i in range(n_iter_per_epoch):
                        values = list(losses_eval_train.values())
act_values = sess.run(values, feed_dict=feed_dict)
for key, value in zip(act_values_dict.keys(), act_values):
act_values_dict[key] += value
summary = tf.Summary()
current_global_step = sess.run(global_step)
                    for key, value in act_values_dict.items():
print("train-" + key, value / n_iter_per_epoch)
summary.value.add(tag=key, simple_value=value / n_iter_per_epoch)
if writer_train is not None:
writer_train.add_summary(summary, current_global_step)
# Eval on test data
act_values_dict = {}
print("HOW COME THIS DOES NOT DEPEND ON ul_images_eval_train? SOMETHING'S WRONG HERE.")
feed_dict = {ul_u_eval_test: random_sphere_numpy(ul_u_eval_test.shape)}
                    for key, _ in losses_eval_test.items():
                        act_values_dict[key] = 0
                    n_iter_per_epoch = NUM_EVAL_EXAMPLES // FLAGS.eval_batch_size
                    for i in range(n_iter_per_epoch):
                        values = list(losses_eval_test.values())
act_values = sess.run(values, feed_dict=feed_dict)
for key, value in zip(act_values_dict.keys(), act_values):
act_values_dict[key] += value
summary = tf.Summary()
current_global_step = sess.run(global_step)
                    for key, value in act_values_dict.items():
print("test-" + key, value / n_iter_per_epoch)
summary.value.add(tag=key, simple_value=value / n_iter_per_epoch)
if writer_test is not None:
writer_test.add_summary(summary, current_global_step)
saver.save(sess, sv.save_path, global_step=global_step)
sv.stop()
if __name__ == "__main__":
tf.app.run()
| 47.778626 | 132 | 0.582202 | ["MIT"] | danielvarga/vat_tf | train_semisup.py | 12,518 | Python |
"""
File: Milestone1.py
Name: 黃科諺
-----------------------
This file tests milestone 1 of
our babyname.py project.
"""
import sys
def add_data_for_name(name_data, year, rank, name):
name_info = {year: rank}
if name in name_data:
if year in name_data[name]:
exist_rank = int(name_data[name][year])
if int(rank) < exist_rank:
name_data[name][year] = rank
else:
name_data[name][year] = rank
else:
name_data[name] = name_info
# ------------- DO NOT EDIT THE CODE BELOW THIS LINE ---------------- #
def test1():
name_data = {'Kylie': {'2010': '57'}, 'Nick': {'2010': '37'}}
add_data_for_name(name_data, '2010', '208', 'Kate')
print('--------------------test1----------------------')
print(str(name_data))
print('-----------------------------------------------')
def test2():
name_data = {'Kylie': {'2010': '57'}, 'Nick': {'2010': '37'}}
add_data_for_name(name_data, '2000', '104', 'Kylie')
print('--------------------test2----------------------')
print(str(name_data))
print('-----------------------------------------------')
def test3():
name_data = {'Kylie': {'2010': '57'}, 'Sammy': {'1980': '451', '1990': '200'}, 'Kate': {'2000': '100'}}
add_data_for_name(name_data, '1990', '900', 'Sammy')
add_data_for_name(name_data, '2010', '400', 'Kylie')
add_data_for_name(name_data, '2000', '20', 'Kate')
print('-------------------test3-----------------------')
print(str(name_data))
print('-----------------------------------------------')
def test4():
name_data = {'Kylie': {'2010': '57'}, 'Nick': {'2010': '37'}}
add_data_for_name(name_data, '2010', '208', 'Kate')
add_data_for_name(name_data, '2000', '108', 'Kate')
add_data_for_name(name_data, '1990', '200', 'Sammy')
add_data_for_name(name_data, '1990', '90', 'Sammy')
add_data_for_name(name_data, '2000', '104', 'Kylie')
print('--------------------test4----------------------')
print(str(name_data))
print('-----------------------------------------------')
def main():
args = sys.argv[1:]
if len(args) == 1 and args[0] == 'test1':
test1()
elif len(args) == 1 and args[0] == 'test2':
test2()
elif len(args) == 1 and args[0] == 'test3':
test3()
elif len(args) == 1 and args[0] == 'test4':
test4()
if __name__ == "__main__":
main()
| 31.307692 | 107 | 0.47502 | ["MIT"] | kenhuang1204/stanCode_projects | stanCode_projects/name_searching_system/milestone1.py | 2,448 | Python |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from compas.utilities import await_callback
from compas_fab.backends.interfaces import AddCollisionMesh
from compas_fab.backends.ros.messages import ApplyPlanningSceneRequest
from compas_fab.backends.ros.messages import ApplyPlanningSceneResponse
from compas_fab.backends.ros.messages import CollisionObject
from compas_fab.backends.ros.messages import PlanningScene
from compas_fab.backends.ros.messages import PlanningSceneWorld
from compas_fab.backends.ros.service_description import ServiceDescription
__all__ = [
'MoveItAddCollisionMesh',
]
class MoveItAddCollisionMesh(AddCollisionMesh):
"""Callable to add a collision mesh to the planning scene."""
APPLY_PLANNING_SCENE = ServiceDescription('/apply_planning_scene',
'ApplyPlanningScene',
ApplyPlanningSceneRequest,
ApplyPlanningSceneResponse,
)
def __init__(self, ros_client):
self.ros_client = ros_client
def add_collision_mesh(self, collision_mesh, options=None):
"""Add a collision mesh to the planning scene.
Parameters
----------
collision_mesh : :class:`compas_fab.robots.CollisionMesh`
Object containing the collision mesh to be added.
options : dict, optional
Unused parameter.
Returns
-------
``None``
"""
kwargs = {}
kwargs['collision_mesh'] = collision_mesh
kwargs['errback_name'] = 'errback'
return await_callback(self.add_collision_mesh_async, **kwargs)
def add_collision_mesh_async(self, callback, errback, collision_mesh):
co = CollisionObject.from_collision_mesh(collision_mesh)
co.operation = CollisionObject.ADD
world = PlanningSceneWorld(collision_objects=[co])
scene = PlanningScene(world=world, is_diff=True)
request = scene.to_request(self.ros_client.ros_distro)
self.APPLY_PLANNING_SCENE(self.ros_client, request, callback, errback)
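# Usage sketch via the high-level scene API (assumes a running ROS/MoveIt backend
# and an STL file on disk; names follow the compas_fab documentation):
#
#     from compas.datastructures import Mesh
#     from compas_fab.backends import RosClient
#     from compas_fab.robots import CollisionMesh, PlanningScene
#
#     with RosClient() as client:
#         robot = client.load_robot()
#         scene = PlanningScene(robot)
#         mesh = Mesh.from_stl('floor.stl')
#         scene.add_collision_mesh(CollisionMesh(mesh, 'floor'))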
| 38.362069 | 78 | 0.681798 | ["MIT"] | gramaziokohler/compas_fab | src/compas_fab/backends/ros/backend_features/move_it_add_collision_mesh.py | 2,225 | Python |
"""
libquantum example 3: 03_sweep_linear.py
Construct classic linear chirp and illustrate CWT and STFT TRFs.
"""
import os
from pathlib import Path
import numpy as np
import scipy.io.wavfile
import matplotlib.pyplot as plt
from libquantum import atoms, entropy, scales, spectra, utils, synthetics
import libquantum.plot_templates.plot_time_frequency_reps as pltq
if __name__ == "__main__":
"""
Exercises with classic linear sweep
Option of exporting to wav
"""
# Do you want to export a wav file? True or False
do_save_wave = False
# If True, saves to home directory
home_dir: str = str(Path.home())
# Or can specify a preferred wav file directory
# home_dir: str = "/Users/mgarces/Documents/DATA_API_M/synthetics"
output_wav_directory = os.path.join(home_dir, "wav")
EVENT_NAME = "redshift_linear_sweep"
print("Event Name: " + EVENT_NAME)
wav_filename = EVENT_NAME
order_number_input = 3
station_id_str = 'Synth'
run_time_epoch_s = utils.datetime_now_epoch_s()
# Chirp type
is_redshift = True
sig_wf_sample_rate_hz = 8000.
sig_frequency_hz_start = 40.
sig_frequency_hz_end = 400.
sig_duration_s = 13.19675
head_s = 0.5
# Blueshift sweep
sig_wf_blu, sig_wf_epoch_s = synthetics.chirp_linear_in_noise(snr_bits=12.,
sample_rate_hz=sig_wf_sample_rate_hz,
duration_s=sig_duration_s,
frequency_start_hz=sig_frequency_hz_start,
frequency_end_hz=sig_frequency_hz_end,
intro_s=head_s,
outro_s=head_s)
sig_wf_red = np.flipud(sig_wf_blu)
# Choose origin and red/blue shift
sig_wf_epoch_s += run_time_epoch_s
sig_wf = np.copy(sig_wf_red)
# Antialias filter synthetic
synthetics.antialias_halfNyquist(synth=sig_wf)
# Export to wav directory
if do_save_wave:
wav_sample_rate_hz = 8000.
export_filename = os.path.join(output_wav_directory, wav_filename + "_8kz.wav")
synth_wav = 0.9 * np.real(sig_wf) / np.max(np.abs((np.real(sig_wf))))
scipy.io.wavfile.write(export_filename, int(wav_sample_rate_hz), synth_wav)
# Frame to mic start and end and plot
event_reference_time_epoch_s = sig_wf_epoch_s[0]
max_time_s, min_frequency_hz = scales.from_duration(band_order_Nth=order_number_input,
sig_duration_s=sig_duration_s)
print('\nRequest Order N=', order_number_input)
print('Lowest frequency in hz that can support this order for this signal duration is ', min_frequency_hz)
print('Scale with signal duration and to Nyquist, default G2 base re F1')
# Select plot frequencies
fmin = np.ceil(min_frequency_hz)
fmax = sig_wf_sample_rate_hz/2.
# TFR SECTION
# Compute complex wavelet transform (cwt) from signal duration
if is_redshift:
mic_cwt, mic_cwt_bits, mic_cwt_time_s, mic_cwt_frequency_hz = \
atoms.cwt_chirp_from_sig(sig_wf=sig_wf,
frequency_sample_rate_hz=sig_wf_sample_rate_hz,
band_order_Nth=order_number_input,
dictionary_type="tone",
index_shift=-1)
else:
mic_cwt, mic_cwt_bits, mic_cwt_time_s, mic_cwt_frequency_hz = \
atoms.cwt_chirp_from_sig(sig_wf=sig_wf,
frequency_sample_rate_hz=sig_wf_sample_rate_hz,
band_order_Nth=order_number_input,
dictionary_type="tone")
mic_cwt_snr, mic_cwt_snr_bits, mic_cwt_snr_entropy = entropy.snr_mean_max(tfr_coeff_complex=mic_cwt)
pltq.plot_wf_mesh_mesh_vert(redvox_id=station_id_str,
wf_panel_2_sig=sig_wf,
wf_panel_2_time=sig_wf_epoch_s,
mesh_time=mic_cwt_time_s,
mesh_frequency=mic_cwt_frequency_hz,
mesh_panel_1_trf=mic_cwt_bits,
mesh_panel_1_colormap_scaling="range",
mesh_panel_0_tfr=mic_cwt_snr_entropy,
wf_panel_2_units="Norm",
mesh_panel_1_cbar_units="bits",
mesh_panel_0_cbar_units="eSNR bits",
start_time_epoch=event_reference_time_epoch_s,
figure_title="CWT for " + EVENT_NAME,
frequency_hz_ymin=fmin,
frequency_hz_ymax=fmax)
# Compute short term Fourier transform (STFT) from segmented signal duration
mic_stft, mic_stft_bits, mic_stft_time_s, mic_stft_frequency_hz = \
spectra.stft_from_sig(sig_wf=sig_wf,
frequency_sample_rate_hz=sig_wf_sample_rate_hz,
band_order_Nth=order_number_input)
mic_stft_snr, mic_stft_snr_bits, mic_stft_snr_entropy = entropy.snr_mean_max(tfr_coeff_complex=mic_stft)
# Log frequency is the default, for linear use frequency_scaling="linear",
pltq.plot_wf_mesh_mesh_vert(frequency_scaling="log",
redvox_id=station_id_str,
wf_panel_2_sig=sig_wf,
wf_panel_2_time=sig_wf_epoch_s,
mesh_time=mic_stft_time_s,
mesh_frequency=mic_stft_frequency_hz,
mesh_panel_1_trf=mic_stft_bits,
mesh_panel_1_colormap_scaling="range",
mesh_panel_0_tfr=mic_stft_snr_entropy,
wf_panel_2_units="Norm",
mesh_panel_1_cbar_units="bits",
mesh_panel_0_cbar_units="eSNR bits",
figure_title="STFT for " + EVENT_NAME,
frequency_hz_ymin=fmin,
frequency_hz_ymax=fmax)
plt.show()
| 44.13245 | 110 | 0.579232 | ["Apache-2.0"] | RedVoxInc/libquantum | examples/03_sweep_linear.py | 6,664 | Python |
from fastecdsa import keys, curve, ecdsa
from hashlib import sha256
from uuid import uuid4
class Transaction:
def __init__(self, from_address, to_address, amount):
self.from_address = from_address
self.to_address = to_address
self.amount = amount
self.id = str(uuid4()).replace('-', '')
self.signature = None
def calculate_hash(self):
return sha256((str(self.from_address) + str(self.to_address) + str(self.amount) + self.id).encode()).hexdigest()
def sign_tx(self, priv_key):
hash_tx = self.calculate_hash()
self.signature = ecdsa.sign(hash_tx, priv_key, hashfunc=sha256)
def is_valid(self):
if self.signature is None:
return True
if len(self.signature) == 0 and self.to_address is None:
return False
hash_tx = self.calculate_hash()
pubkey = keys.get_public_keys_from_sig(self.signature, hash_tx, curve=curve.P256, hashfunc=sha256)
valid = ecdsa.verify(self.signature, hash_tx, pubkey[0], hashfunc=sha256)
return valid
def serialize(self):
return {
'id': self.id,
'from_address': self.from_address,
'to_address': self.to_address,
'amount': self.amount
}
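# Usage sketch (key helpers are from fastecdsa.keys; passing str(pub_key) as the
# address is an assumption -- a real wallet would serialize the key properly):
if __name__ == '__main__':
    priv_key = keys.gen_private_key(curve.P256)
    pub_key = keys.get_public_key(priv_key, curve.P256)
    tx = Transaction(str(pub_key), 'recipient-address', 10)
    tx.sign_tx(priv_key)
    print(tx.serialize())
    print('valid:', tx.is_valid())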
| 30.928571 | 120 | 0.635874 | ["MIT"] | ephremdeme/voting | blockchain/Transaction.py | 1,299 | Python |
# -*- coding: utf-8 -*-
from app.libs.utils import data_decode
import socket, socketserver, threading
import traceback
class ThreadedTCPRequestHandler(socketserver.BaseRequestHandler):
ip = ""
port = 0
timeOut = 100
def __init__(self, request, client_address, server):
from app.service.device import Device
self.socket = None
self.addr = None
self.cloud_id = None
self.device = Device()
self.sign = None
self.device_id = None
self.timestamp = None
super().__init__(request, client_address, server)
def setup(self):
self.ip = self.client_address[0].strip()
self.port = self.client_address[1]
self.request.settimeout(self.timeOut)
self.addr = self.ip + str(self.port)
self.socket = self.request
print(self.ip)
def handle(self):
try:
while True:
try:
# time.sleep(1)
data = self.request.recv(1024)
except socket.timeout:
print(self.ip + ":" + str(self.port) + "接收超时")
break
if data:
data = data_decode(data)
self.device.parse_data(data, self)
else:
break
except Exception as e:
with open("err_log.log", "a+") as f:
f.write(traceback.format_exc()+'\r\r')
            print(self.client_address, "connection closed")
finally:
self.request.close()
def finish(self):
if self.cloud_id is None:
            print(self.ip + ":" + str(self.port) + " disconnected!")
else:
get_instance().remove_client(self.cloud_id)
            print(self.ip + ":" + str(self.port) + " " + self.cloud_id + " disconnected!")
class ThreadedTCPServer(socketserver.ThreadingMixIn, socketserver.TCPServer):
pass
class TCPServer:
instance = None
@staticmethod
def get_instance():
print("start")
if TCPServer.instance is None:
TCPServer.instance = TCPServer()
return TCPServer.instance
def __init__(self):
self.clients = {}
self.server = None
try:
self.server = ThreadedTCPServer(("0.0.0.0", 5002), ThreadedTCPRequestHandler)
server_thread = threading.Thread(target=self.server.serve_forever)
server_thread.daemon = True
server_thread.start()
# server_thread.join()
except (KeyboardInterrupt, SystemExit, Exception) as e:
print(e)
print("end")
self.server.shutdown()
            self.server.server_close()
def add_client(self, cloud, sock):
self.clients[cloud] = sock
print("this is clients", self.clients)
def remove_client(self, cloud):
if cloud in self.clients:
print("删除设备" + cloud)
from app.service.device import Device
Device.offline_alarm(self.clients[cloud])
self.clients.pop(cloud)
def get_instance():
return TCPServer.get_instance()
| 30.126214 | 89 | 0.565259 | [
"Apache-2.0"
] | mohansd/cyx-xElec-server | app/service/socketservice.py | 3,147 | Python |
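A hedged smoke test for the server above: start the singleton (which listens on 0.0.0.0:5002) and open a raw client socket to it. The payload is illustrative; real traffic must match whatever Device.parse_data expects.
import socket
import time
server = TCPServer.get_instance()   # starts the ThreadedTCPServer in a daemon thread
time.sleep(0.5)                     # give the listener a moment to bind
with socket.create_connection(("127.0.0.1", 5002)) as client:
    client.sendall(b"hello")        # illustrative payload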
from django.conf.urls import patterns, url
from django.contrib.contenttypes.models import ContentType
from kitsune.questions.feeds import (
QuestionsFeed, AnswersFeed, TaggedQuestionsFeed)
from kitsune.questions.models import Question, Answer
from kitsune.flagit import views as flagit_views
urlpatterns = patterns(
'kitsune.questions.views',
url(r'^$', 'product_list', name='questions.home'),
url(r'^/answer-preview-async$', 'answer_preview_async',
name='questions.answer_preview_async'),
url(r'^/dashboard/metrics$', 'metrics', name='questions.metrics'),
url(r'^/dashboard/metrics/(?P<locale_code>[^/]+)$', 'metrics',
name='questions.locale_metrics'),
# AAQ
url(r'^/new$', 'aaq', name='questions.aaq_step1'),
url(r'^/new/confirm$', 'aaq_confirm', name='questions.aaq_confirm'),
url(r'^/new/(?P<product_key>[\w\-]+)$',
'aaq_step2', name='questions.aaq_step2'),
url(r'^/new/(?P<product_key>[\w\-]+)/(?P<category_key>[\w\-]+)$',
'aaq_step3', name='questions.aaq_step3'),
url(r'^/new/(?P<product_key>[\w\-]+)/(?P<category_key>[\w\-]+)/search$',
'aaq_step4', name='questions.aaq_step4'),
url(r'^/new/(?P<product_key>[\w\-]+)/(?P<category_key>[\w\-]+)/form$',
'aaq_step5', name='questions.aaq_step5'),
# AAQ flow for Marketplace
url(r'^/marketplace$', 'marketplace', name='questions.marketplace_aaq'),
url(r'^/marketplace/success$',
'marketplace_success', name='questions.marketplace_aaq_success'),
url(r'^/marketplace/refund$', 'marketplace_refund',
name='questions.marketplace_refund'),
url(r'^/marketplace/developer-request$', 'marketplace_developer_request',
name='questions.marketplace_developer_request'),
url(r'^/marketplace/(?P<category_slug>[\w\-]+)$',
'marketplace_category', name='questions.marketplace_aaq_category'),
# TODO: Factor out `/(?P<question_id>\d+)` below
url(r'^/(?P<question_id>\d+)$', 'question_details',
name='questions.details'),
url(r'^/(?P<question_id>\d+)/edit$',
'edit_question', name='questions.edit_question'),
url(r'^/(?P<question_id>\d+)/edit-details$',
'edit_details', name='questions.edit_details'),
url(r'^/(?P<question_id>\d+)/reply$', 'reply', name='questions.reply'),
url(r'^/(?P<question_id>\d+)/delete$', 'delete_question',
name='questions.delete'),
url(r'^/(?P<question_id>\d+)/lock$', 'lock_question',
name='questions.lock'),
url(r'^/(?P<question_id>\d+)/archive$', 'archive_question',
name='questions.archive'),
url(r'^/(?P<question_id>\d+)/delete/(?P<answer_id>\d+)$',
'delete_answer', name='questions.delete_answer'),
url(r'^/(?P<question_id>\d+)/edit/(?P<answer_id>\d+)$', 'edit_answer',
name='questions.edit_answer'),
url(r'^/(?P<question_id>\d+)/solve/(?P<answer_id>\d+)$', 'solve',
name='questions.solve'),
url(r'^/(?P<question_id>\d+)/unsolve/(?P<answer_id>\d+)$', 'unsolve',
name='questions.unsolve'),
url(r'^/(?P<question_id>\d+)/vote$', 'question_vote',
name='questions.vote'),
url(r'^/(?P<question_id>\d+)/vote/(?P<answer_id>\d+)$',
'answer_vote', name='questions.answer_vote'),
url(r'^/(?P<question_id>\d+)/add-tag$', 'add_tag',
name='questions.add_tag'),
url(r'^/(?P<question_id>\d+)/remove-tag$', 'remove_tag',
name='questions.remove_tag'),
url(r'^/(?P<question_id>\d+)/add-tag-async$', 'add_tag_async',
name='questions.add_tag_async'),
url(r'^/(?P<question_id>\d+)/remove-tag-async$', 'remove_tag_async',
name='questions.remove_tag_async'),
# Feeds
# Note: this needs to be above questions.list because "feed"
# matches the product slug regex.
url(r'^/feed$', QuestionsFeed(), name='questions.feed'),
url(r'^/(?P<question_id>\d+)/feed$', AnswersFeed(),
name='questions.answers.feed'),
url(r'^/tagged/(?P<tag_slug>[\w\-]+)/feed$', TaggedQuestionsFeed(),
name='questions.tagged_feed'),
# Mark as spam
url(r'^/mark_spam$', 'mark_spam', name='questions.mark_spam'),
url(r'^/unmark_spam$', 'unmark_spam', name='questions.unmark_spam'),
# Question lists
url(r'^/(?P<product_slug>[\w+\-\,]+)$', 'question_list',
name='questions.list'),
# Flag content ("Report this post")
url(r'^/(?P<object_id>\d+)/flag$', flagit_views.flag,
{'content_type': ContentType.objects.get_for_model(Question).id},
name='questions.flag'),
url(r'^/(?P<question_id>\d+)/flag/(?P<object_id>\d+)$', flagit_views.flag,
{'content_type': ContentType.objects.get_for_model(Answer).id},
name='questions.answer_flag'),
    # Subscribe by email
url(r'^/(?P<question_id>\d+)/watch$', 'watch_question',
name='questions.watch'),
url(r'^/(?P<question_id>\d+)/unwatch$', 'unwatch_question',
name='questions.unwatch'),
url(r'^/confirm/(?P<watch_id>\d+)/(?P<secret>\w+)$', 'activate_watch',
name='questions.activate_watch'),
url(r'^/unsubscribe/(?P<watch_id>\d+)/(?P<secret>\w+)$',
'unsubscribe_watch', name='questions.unsubscribe'),
)
| 45.778761 | 78 | 0.626329 | [
"BSD-3-Clause"
] | 983834572/kitsune | kitsune/questions/urls.py | 5,173 | Python |
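A hedged sketch of resolving the named routes above; the old-style urlresolvers import matches the legacy patterns() syntax used in this file, and the question id is illustrative.
from django.core.urlresolvers import reverse
detail_path = reverse('questions.details', kwargs={'question_id': 123})
watch_path = reverse('questions.watch', kwargs={'question_id': 123})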
# -*- coding: utf-8 -*-
"""Autogenerated file - DO NOT EDIT
If you spot a bug, please report it on the mailing list and/or change the generator."""
from nipype.interfaces.base import (
CommandLine,
CommandLineInputSpec,
SEMLikeCommandLine,
TraitedSpec,
File,
Directory,
traits,
isdefined,
InputMultiPath,
OutputMultiPath,
)
import os
class ExtractSkeletonInputSpec(CommandLineInputSpec):
InputImageFileName = File(position=-2, desc="Input image", exists=True, argstr="%s")
OutputImageFileName = traits.Either(
traits.Bool,
File(),
position=-1,
hash_files=False,
desc="Skeleton of the input image",
argstr="%s",
)
type = traits.Enum(
"1D", "2D", desc="Type of skeleton to create", argstr="--type %s"
)
dontPrune = traits.Bool(
desc="Return the full skeleton, not just the maximal skeleton",
argstr="--dontPrune ",
)
numPoints = traits.Int(
desc="Number of points used to represent the skeleton", argstr="--numPoints %d"
)
pointsFile = traits.Str(
desc="Name of the file to store the coordinates of the central (1D) skeleton points",
argstr="--pointsFile %s",
)
class ExtractSkeletonOutputSpec(TraitedSpec):
OutputImageFileName = File(
position=-1, desc="Skeleton of the input image", exists=True
)
class ExtractSkeleton(SEMLikeCommandLine):
"""title: Extract Skeleton
category: Filtering
description: Extract the skeleton of a binary object. The skeleton can be limited to being a 1D curve or allowed to be a full 2D manifold. The branches of the skeleton can be pruned so that only the maximal center skeleton is returned.
version: 0.1.0.$Revision: 2104 $(alpha)
documentation-url: http://wiki.slicer.org/slicerWiki/index.php/Documentation/4.1/Modules/ExtractSkeleton
contributor: Pierre Seroul (UNC), Martin Styner (UNC), Guido Gerig (UNC), Stephen Aylward (Kitware)
acknowledgements: The original implementation of this method was provided by ETH Zurich, Image Analysis Laboratory of Profs Olaf Kuebler, Gabor Szekely and Guido Gerig. Martin Styner at UNC, Chapel Hill made enhancements. Wrapping for Slicer was provided by Pierre Seroul and Stephen Aylward at Kitware, Inc.
"""
input_spec = ExtractSkeletonInputSpec
output_spec = ExtractSkeletonOutputSpec
_cmd = "ExtractSkeleton "
_outputs_filenames = {"OutputImageFileName": "OutputImageFileName.nii"}
| 33.986486 | 310 | 0.702584 | [
"Apache-2.0"
] | AnnaD15/nipype | nipype/interfaces/slicer/filtering/extractskeleton.py | 2,515 | Python |
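A hedged usage sketch for the generated interface above; the file names are illustrative, and an actual run() would require Slicer's ExtractSkeleton binary on PATH.
skeleton = ExtractSkeleton()
skeleton.inputs.InputImageFileName = "mask.nii"        # assumed input volume
skeleton.inputs.OutputImageFileName = "skeleton.nii"   # requested output name
skeleton.inputs.type = "1D"
print(skeleton.cmdline)   # roughly: ExtractSkeleton --type 1D mask.nii skeleton.nii
# result = skeleton.run()  # would invoke the Slicer CLI module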
import subprocess
import time
import os
TEST_TYPE = os.getenv("TEST_TYPE", "bdd")
def before_scenario(context, scenario):
if f"{TEST_TYPE}" == "bdd":
proc = subprocess.Popen(["make", "start"])
time.sleep(4)
context.proc = proc
context.root_url = "http://localhost:5000"
else:
context.root_url = os.getenv("ROOT_ENDPOINT")
def after_scenario(context, scenario):
if f"{TEST_TYPE}" == "bdd":
proc = context.proc
proc.terminate()
| 21.73913 | 53 | 0.622 | [
"MIT"
] | abhisheksr01/zero-2-hero-python-flask-microservice | features/environment.py | 500 | Python |
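A hedged sketch of a step implementation consuming the context.root_url set above; the step text and /health endpoint are illustrative, and requests is assumed to be available.
from behave import given
import requests
@given("the service is running")
def step_impl(context):
    response = requests.get(context.root_url + "/health")  # illustrative endpoint
    assert response.status_code == 200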
class Solution:
def coinChange(self, coins, amount):
"""
:type coins: List[int]
:type amount: int
:rtype: int
"""
        # amount+1 acts as "infinity": no answer ever needs more than amount coins (the all-ones worst case)
res = [amount+1]*(amount+1)
res[0] = 0
for i in range(1, amount+1):
for j in coins:
if j <= i:
res[i] = min(res[i], res[i-j] + 1)
if res[amount] > amount:
return -1
else:
return res[amount]
| 27.238095 | 101 | 0.461538 | [
"MIT"
] | vedantc6/LCode | Session1_2018/coinChange.py | 572 | Python |
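Quick checks of the DP above, using the usual textbook cases:
solver = Solution()
assert solver.coinChange([1, 2, 5], 11) == 3   # 5 + 5 + 1
assert solver.coinChange([2], 3) == -1         # 3 cannot be formed from 2s
assert solver.coinChange([1], 0) == 0          # zero amount needs zero coins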
import xml.etree.ElementTree as ET
from .exceptions import UnpopulatedPropertyError
from .property_decorators import property_not_nullable, property_is_boolean
from .tag_item import TagItem
from ..datetime_helpers import parse_datetime
import copy
class FlowItem(object):
def __init__(self, project_id, name=None):
self._webpage_url = None
self._created_at = None
self._id = None
self._initial_tags = set()
self._project_name = None
self._updated_at = None
self.name = name
self.owner_id = None
self.project_id = project_id
self.tags = set()
self.description = None
self._connections = None
self._permissions = None
@property
def connections(self):
if self._connections is None:
error = 'Flow item must be populated with connections first.'
raise UnpopulatedPropertyError(error)
return self._connections()
@property
def permissions(self):
if self._permissions is None:
error = "Project item must be populated with permissions first."
raise UnpopulatedPropertyError(error)
return self._permissions()
@property
def webpage_url(self):
return self._webpage_url
@property
def created_at(self):
return self._created_at
@property
def id(self):
return self._id
@property
def project_id(self):
return self._project_id
@project_id.setter
@property_not_nullable
def project_id(self, value):
self._project_id = value
@property
def description(self):
return self._description
@description.setter
def description(self, value):
self._description = value
@property
def project_name(self):
return self._project_name
@property
def flow_type(self):
return self._flow_type
@property
def updated_at(self):
return self._updated_at
def _set_connections(self, connections):
self._connections = connections
def _set_permissions(self, permissions):
self._permissions = permissions
def _parse_common_elements(self, flow_xml, ns):
if not isinstance(flow_xml, ET.Element):
flow_xml = ET.fromstring(flow_xml).find('.//t:flow', namespaces=ns)
if flow_xml is not None:
(_, _, _, _, _, updated_at, _, project_id, project_name, owner_id) = self._parse_element(flow_xml, ns)
self._set_values(None, None, None, None, None, updated_at, None, project_id,
project_name, owner_id)
return self
def _set_values(self, id, name, description, webpage_url, created_at,
updated_at, tags, project_id, project_name, owner_id):
if id is not None:
self._id = id
if name:
self.name = name
if description:
self.description = description
if webpage_url:
self._webpage_url = webpage_url
if created_at:
self._created_at = created_at
if updated_at:
self._updated_at = updated_at
if tags:
self.tags = tags
self._initial_tags = copy.copy(tags)
if project_id:
self.project_id = project_id
if project_name:
self._project_name = project_name
if owner_id:
self.owner_id = owner_id
@classmethod
def from_response(cls, resp, ns):
all_flow_items = list()
parsed_response = ET.fromstring(resp)
all_flow_xml = parsed_response.findall('.//t:flow', namespaces=ns)
for flow_xml in all_flow_xml:
(id_, name, description, webpage_url, created_at, updated_at,
tags, project_id, project_name, owner_id) = cls._parse_element(flow_xml, ns)
flow_item = cls(project_id)
flow_item._set_values(id_, name, description, webpage_url, created_at, updated_at,
tags, None, project_name, owner_id)
all_flow_items.append(flow_item)
return all_flow_items
@staticmethod
def _parse_element(flow_xml, ns):
id_ = flow_xml.get('id', None)
name = flow_xml.get('name', None)
description = flow_xml.get('description', None)
webpage_url = flow_xml.get('webpageUrl', None)
created_at = parse_datetime(flow_xml.get('createdAt', None))
updated_at = parse_datetime(flow_xml.get('updatedAt', None))
tags = None
tags_elem = flow_xml.find('.//t:tags', namespaces=ns)
if tags_elem is not None:
tags = TagItem.from_xml_element(tags_elem, ns)
project_id = None
project_name = None
project_elem = flow_xml.find('.//t:project', namespaces=ns)
if project_elem is not None:
project_id = project_elem.get('id', None)
project_name = project_elem.get('name', None)
owner_id = None
owner_elem = flow_xml.find('.//t:owner', namespaces=ns)
if owner_elem is not None:
owner_id = owner_elem.get('id', None)
return (id_, name, description, webpage_url, created_at, updated_at, tags, project_id,
project_name, owner_id)
| 32.484663 | 114 | 0.628706 | [
"CC0-1.0",
"MIT"
] | BenevolentHighPriestess/server-client-python | tableauserverclient/models/flow_item.py | 5,295 | Python |
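A hedged sketch of feeding from_response a minimal payload; the namespace URI follows Tableau's public API convention, but the XML itself is fabricated for illustration.
NS = {'t': 'http://tableau.com/api'}
FLOWS_XML = (
    '<tsResponse xmlns="http://tableau.com/api">'
    '<flows>'
    '<flow id="flow-1" name="Cleaning Flow" webpageUrl="https://example.com/flow-1">'
    '<project id="project-1" name="Default"/>'
    '<owner id="owner-1"/>'
    '</flow>'
    '</flows>'
    '</tsResponse>'
)
flows = FlowItem.from_response(FLOWS_XML, NS)
print(flows[0].name, flows[0].project_id, flows[0].owner_id)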
from interpolate import interpolate_doc
foo = """
hello
world
"""
bar = "foo bar\nbaz"
class Foo:
# cf matplotlib's kwdoc.
__kw__ = "the kw of foo"
@interpolate_doc
def func():
"""
this is a docstring
{interpolate_example.foo}
{bar}
{Foo!K}
"""
try:
@interpolate_doc
def bad_doc():
"""
fields {must} be preceded by whitespace
"""
except ValueError:
print("error correctly caught")
| 12.756757 | 47 | 0.576271 | [
"MIT"
] | anntzer/structured-docstrings | interpolate_example.py | 472 | Python |
import discord
from discord.ext import commands
# Set slash commands=True when constructing your bot to enable all slash commands
# if your bot is only for a couple of servers, you can use the parameter
# `slash_command_guilds=[list, of, guild, ids]` to specify this,
# then the commands will be much faster to upload.
bot = commands.Bot("!", intents=discord.Intents(guilds=True, messages=True), slash_commands=True)
@bot.event
async def on_ready():
print(f"Logged in as {bot.user} (ID: {bot.user.id})")
print("------")
@bot.command()
# You can use commands.Option to define descriptions for your options, and converters will still work fine.
async def ping(
ctx: commands.Context, emoji: bool = commands.Option(description="whether to use an emoji when responding")
):
# This command can be used with slash commands or message commands
if emoji:
await ctx.send("\U0001f3d3")
else:
await ctx.send("Pong!")
@bot.command(message_command=False)
async def only_slash(ctx: commands.Context):
# This command can only be used with slash commands
await ctx.send("Hello from slash commands!")
@bot.command(slash_command=False)
async def only_message(ctx: commands.Context):
# This command can only be used with a message
await ctx.send("Hello from message commands!")
bot.run("token")
| 31.97619 | 111 | 0.724497 | [
"MIT"
] | Astrea49/enhanced-discord.py | examples/slash_commands.py | 1,343 | Python |
from io import BytesIO
from typing import Tuple, Sequence
import attr
from PIL import Image
from ebl.changelog import Changelog
from ebl.ebl_ai_client import EblAiClient
from ebl.files.application.file_repository import FileRepository
from ebl.fragmentarium.application.annotations_repository import AnnotationsRepository
from ebl.fragmentarium.application.annotations_schema import AnnotationsSchema
from ebl.fragmentarium.application.cropped_sign_image import CroppedSign
from ebl.fragmentarium.application.cropped_sign_images_repository import (
CroppedSignImage,
CroppedSignImagesRepository,
)
from ebl.fragmentarium.application.fragment_repository import FragmentRepository
from ebl.fragmentarium.domain.annotation import (
Annotations,
AnnotationValueType,
)
from ebl.transliteration.domain.line_label import LineLabel
from ebl.transliteration.domain.museum_number import MuseumNumber
from ebl.users.domain.user import User
@attr.attrs(auto_attribs=True, frozen=True)
class AnnotationsService:
_ebl_ai_client: EblAiClient
_annotations_repository: AnnotationsRepository
_photo_repository: FileRepository
_changelog: Changelog
_fragments_repository: FragmentRepository
_photos_repository: FileRepository
_cropped_sign_images_repository: CroppedSignImagesRepository
def generate_annotations(
self, number: MuseumNumber, threshold: float = 0.3
) -> Annotations:
fragment_image = self._photo_repository.query_by_file_name(f"{number}.jpg")
return self._ebl_ai_client.generate_annotations(
number, fragment_image, threshold
)
def find(self, number: MuseumNumber) -> Annotations:
return self._annotations_repository.query_by_museum_number(number)
def _label_by_line_number(
self, line_number_to_match: int, labels: Sequence[LineLabel]
) -> str:
matching_label = None
for label in labels:
label_line_number = label.line_number
if label_line_number and label_line_number.is_matching_number(
line_number_to_match
):
matching_label = label
return matching_label.formatted_label if matching_label else ""
def _cropped_image_from_annotations_helper(
self,
annotations: Annotations,
image: Image.Image,
script: str,
labels: Sequence[LineLabel],
) -> Tuple[Annotations, Sequence[CroppedSignImage]]:
cropped_sign_images = []
updated_cropped_annotations = []
for annotation in annotations.annotations:
label = (
self._label_by_line_number(annotation.data.path[0], labels)
if annotation.data.type != AnnotationValueType.BLANK
else ""
)
cropped_image = annotation.crop_image(image)
cropped_sign_image = CroppedSignImage.create(cropped_image)
cropped_sign_images.append(cropped_sign_image)
updated_cropped_annotation = attr.evolve(
annotation,
cropped_sign=CroppedSign(
cropped_sign_image.image_id,
script,
label,
),
)
updated_cropped_annotations.append(updated_cropped_annotation)
return (
attr.evolve(annotations, annotations=updated_cropped_annotations),
cropped_sign_images,
)
def _cropped_image_from_annotations(
self, annotations: Annotations
) -> Tuple[Annotations, Sequence[CroppedSignImage]]:
fragment = self._fragments_repository.query_by_museum_number(
annotations.fragment_number
)
fragment_image = self._photos_repository.query_by_file_name(
f"{annotations.fragment_number}.jpg"
)
image_bytes = fragment_image.read()
image = Image.open(BytesIO(image_bytes), mode="r")
return self._cropped_image_from_annotations_helper(
annotations, image, fragment.script, fragment.text.labels
)
def update(self, annotations: Annotations, user: User) -> Annotations:
old_annotations = self._annotations_repository.query_by_museum_number(
annotations.fragment_number
)
_id = str(annotations.fragment_number)
schema = AnnotationsSchema()
(
annotations_with_image_ids,
cropped_sign_images,
) = self._cropped_image_from_annotations(annotations)
self._annotations_repository.create_or_update(annotations_with_image_ids)
self._cropped_sign_images_repository.create_many(cropped_sign_images)
self._changelog.create(
"annotations",
user.profile,
{"_id": _id, **schema.dump(old_annotations)},
{"_id": _id, **schema.dump(annotations_with_image_ids)},
)
return annotations_with_image_ids
| 37.755725 | 86 | 0.697736 | [
"MIT"
] | ElectronicBabylonianLiterature/dictionary | ebl/fragmentarium/application/annotations_service.py | 4,946 | Python |
def number_of_occurrences(s, xs):
return xs.count(s) | 28 | 33 | 0.732143 | [
"MIT"
] | anubhab-code/Competitive-Programming | CodeWars/7 Kyu/Number Of Occurrences.py | 56 | Python |
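For example:
assert number_of_occurrences("a", ["a", "b", "a"]) == 2
assert number_of_occurrences("c", ["a", "b", "a"]) == 0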
# Python program to get the size of an object in bytes
import sys
Object = input("Enter any object: ")
print(f'The size of the object {Object} is {sys.getsizeof(Object)} bytes')
| 29.666667 | 74 | 0.735955 | [
"MIT"
] | CodedLadiesInnovateTech/-python-challenge-solutions | GodwillOnyewuchi/Phase 1/Python Basic 2/day 9 task/task 9.py | 178 | Python |
from .chain import *
from .element import *
| 14.666667 | 22 | 0.727273 | [
"MIT"
] | ArcletProject/Edoves | arclet/edoves/main/message/__init__.py | 44 | Python |
from importlib import import_module, reload
import pytest
import sys
from unittest.mock import patch
from rest_framework import status
from django.contrib.auth.models import Permission, Group
from django.conf import settings
from django.urls import clear_url_caches
from django.urls import reverse
from .factories import UserFactory
from groups.models import GroupInfo
from users.models import UserProfile
BLANK_CHOICE = ('', '---------')
USER_DETAILS = {
'username': 'test',
'email': 'test@test.com',
'first_name': 'Foo',
'last_name': 'Bar',
}
USER_DETAILS_CREATE = USER_DETAILS.copy()
USER_DETAILS_CREATE.update(password1='pass', password2='pass')
USER_DETAILS_CHANGING = {
'username': 'johnsmith',
'email': 'john@smiths.com',
'first_name': 'John',
'last_name': 'Smith',
}
@pytest.mark.django_db
def test_create_user_view_get(admin_client):
url = reverse('wagtailusers_users:add')
response = admin_client.get(url)
assert response.status_code == status.HTTP_200_OK
@pytest.mark.django_db
def test_create_user_view(admin_client):
url = reverse('wagtailusers_users:add')
response = admin_client.post(url, data=USER_DETAILS_CREATE)
assert response.context['message'] == 'User test created.'
assert response.status_code == status.HTTP_302_FOUND
assert response.url == reverse('wagtailusers_users:index')
@pytest.mark.django_db
def test_create_user_view_invalid_form(admin_client):
url = reverse('wagtailusers_users:add')
post_data = USER_DETAILS.copy()
post_data.update(email='This is not an email address')
response = admin_client.post(url, post_data)
message = response.context['message']
assert message == 'The user could not be created due to errors.'
assert response.status_code == status.HTTP_200_OK
@pytest.mark.django_db
def test_get_edit_user_view(admin_client):
user = UserFactory(**USER_DETAILS)
url = reverse('wagtailusers_users:edit', kwargs={'pk': user.pk})
response = admin_client.get(url)
assert response.status_code == status.HTTP_200_OK
assert response.context['can_delete'] is True
@pytest.mark.django_db
def test_edit_user_view(team_leaders_group, admin_client):
user = UserFactory(**USER_DETAILS)
url = reverse('wagtailusers_users:edit', kwargs={'pk': user.pk})
# We'll add the user to a group, as well as changing their details
post_data = USER_DETAILS_CHANGING.copy()
post_data['groups'] = [team_leaders_group.pk]
response = admin_client.post(url, data=post_data)
assert response.context['message'] == 'User johnsmith updated.'
assert response.status_code == status.HTTP_302_FOUND
assert response.url == reverse('wagtailusers_users:index')
# The user's details should have changed to reflect the posted values
user.refresh_from_db()
for field_name, changed_value in USER_DETAILS_CHANGING.items():
assert getattr(user, field_name) == changed_value
# And they should have been added to a group
group_ids = set(user.groups.values_list('id', flat=True))
assert group_ids == {team_leaders_group.pk}
@pytest.mark.django_db
def test_edit_user_view_invalid_form(admin_client, approved_user):
url = reverse('wagtailusers_users:edit', kwargs={'pk': approved_user.pk})
post_data = USER_DETAILS.copy()
post_data.update(email='This is not an email address')
response = admin_client.post(url, post_data)
message = response.context['message']
assert message == 'The user could not be saved due to errors.'
assert response.status_code == status.HTTP_200_OK
@pytest.mark.django_db
def test_edit_user_view_cannot_change_personal_details_when_sso_enforced(
admin_client
):
# Set this flag to True and repeat previous test
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = True
user = UserFactory(**USER_DETAILS)
# Post changes to the view
url = reverse('wagtailusers_users:edit', kwargs={'pk': user.pk})
admin_client.post(url, data=USER_DETAILS_CHANGING)
# The users details should remain unchanged, because the
# personal detail fields should all be disabled
user.refresh_from_db()
for field_name, original_value in USER_DETAILS.items():
assert getattr(user, field_name) == original_value
# Change this back to avoid cross-test pollution
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = False
reload_urlconf()
@pytest.mark.django_db
def test_edit_user_view_preserves_ability_to_update_is_active(admin_client):
    # Set this flag to True and repeat the actions of the previous test
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = True
# Create an 'inactive' user to test with
user = UserFactory(**USER_DETAILS)
user.is_active = False
user.save()
# Post using the same details + 'is_active=on'
post_data = USER_DETAILS.copy()
post_data.update(is_active='on')
url = reverse('wagtailusers_users:edit', kwargs={'pk': user.pk})
admin_client.post(url, data=post_data)
# The change to 'is_active' should have been applied, because that field
# is not disabled along with the personal detail ones
user.refresh_from_db()
assert user.is_active is True
# Reset flag to avoid cross-test pollution
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = False
reload_urlconf()
@pytest.mark.django_db
def test_edit_user_view_warns_administrator_if_user_is_awaiting_approval(
admin_client, user_awaiting_approval
):
# This flag must be set for the warning to show
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = True
user = user_awaiting_approval
url = reverse('wagtailusers_users:edit', kwargs={'pk': user.pk})
response = admin_client.get(url)
message = response.context['message']
assert "This user is awaiting approval" in message
assert "requested to be added to the 'Moderators' group" in message
# Reset flag to avoid cross-test pollution
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = False
reload_urlconf()
@pytest.mark.django_db
def test_edit_user_view_marks_user_as_approved_if_added_to_group(
admin_client, admin_user, user_awaiting_approval
):
# This flag must be set for the warning to show
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = True
user = user_awaiting_approval
profile = user_awaiting_approval.userprofile
url = reverse('wagtailusers_users:edit', kwargs={'pk': user.pk})
group = Group.objects.get(pk=profile.self_assigned_group_id)
group.permissions.add(Permission.objects.get(codename='access_admin'))
with patch('users.views.notify_user_of_access_request_approval', autospec=True) as mocked_method:
response = admin_client.post(url, {
'username': user.username,
'first_name': user.first_name,
'last_name': user.last_name,
'email': user.email,
'is_active': True,
'groups': [group.pk],
})
# Ensure the post was successful
assert response.context['message'] == 'User %s updated.' % user.username
assert response.status_code == status.HTTP_302_FOUND
assert response.url == reverse('wagtailusers_users:index')
# The UserProfile should have been updated
profile.refresh_from_db()
assert profile.assignment_status == UserProfile.STATUS_APPROVED
assert profile.approved_by_id == admin_user.id
assert profile.approved_at is not None
# A notification should have been triggered for the user
expected_call_args = dict(
request=response.wsgi_request,
user_email=user.email,
user_name=user.first_name,
reviewer_name=admin_user.get_full_name(),
)
mocked_method.assert_called_with(**expected_call_args)
# Reset flag to avoid cross-test pollution
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = False
reload_urlconf()
@pytest.mark.django_db
def test_edit_user_view_does_not_mark_user_as_approved_if_not_added_to_a_group(admin_client, groups_with_info):
user = UserFactory(username='some-user')
profile = user.userprofile
profile.assignment_status = UserProfile.STATUS_AWAITING_APPROVAL
profile.self_assigned_group_id = groups_with_info[0].id
profile.save()
url = reverse('wagtailusers_users:edit', kwargs={'pk': user.pk})
with patch(
'users.views.notify_user_of_access_request_approval'
) as mocked_method:
response = admin_client.post(url, {
'username': user.username,
'first_name': user.first_name,
'last_name': user.last_name,
'email': user.email,
'is_active': True,
})
# Ensure the post was successful
assert response.context['message'] == 'User %s updated.' % user.username
assert response.status_code == status.HTTP_302_FOUND
assert response.url == reverse('wagtailusers_users:index')
# The UserProfile should NOT have been updated
profile.refresh_from_db()
assert profile.assignment_status == UserProfile.STATUS_AWAITING_APPROVAL
assert profile.approved_by_id is None
assert profile.approved_at is None
# no notification should have been triggered
mocked_method.assert_not_called()
def reload_urlconf(urlconf=None):
clear_url_caches()
if urlconf is None:
urlconf = settings.ROOT_URLCONF
if urlconf in sys.modules:
reload(sys.modules[urlconf])
else:
import_module(urlconf)
@pytest.mark.django_db
def test_force_staff_sso(client):
"""Test that URLs and redirects are in place."""
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = True
settings.AUTHBROKER_CLIENT_ID = 'debug'
settings.AUTHBROKER_CLIENT_SECRET = 'debug'
settings.AUTHBROKER_URL = 'https://test.com'
reload_urlconf()
assert reverse('authbroker_client:login') == '/auth/login/'
assert reverse('authbroker_client:callback') == '/auth/callback/'
response = client.get('/admin/login/')
assert response.status_code == 302
assert response.url == '/auth/login/'
settings.FEATURE_FLAGS['ENFORCE_STAFF_SSO_ON'] = False
reload_urlconf()
@pytest.mark.parametrize('assignment_status, expected_status_code', (
(UserProfile.STATUS_CREATED, 200),
(UserProfile.STATUS_AWAITING_APPROVAL, 302),
(UserProfile.STATUS_APPROVED, 302)
))
@pytest.mark.django_db
def test_ssorequestaccessview_responds_based_on_assignment_status(
admin_client, admin_user, assignment_status, expected_status_code
):
url = reverse('sso:request_access')
profile = admin_user.userprofile
profile.assignment_status = assignment_status
profile.save()
response = admin_client.get(url)
assert response.status_code == expected_status_code
@pytest.mark.django_db
def test_ssorequestaccessview_shows_unlimited_visibility_groups_only(
admin_client, groups_with_info
):
url = reverse('sso:request_access')
    # Visibility is set to 'unrestricted' for all groups in `groups_with_info`,
# so choices should reflect that by default
expected_choices = tuple(
(g.id, g.info.name_singular) for g in groups_with_info
)
# Confirm the choices in the form are as expected
response = admin_client.get(url)
group_field = response.context['form']['self_assigned_group'].field
actual_choices = tuple(group_field.choices)
assert actual_choices == expected_choices
# Change the visibility of groups and try again
GroupInfo.objects.all().update(
visibility=GroupInfo.VISIBILITY_MANAGERS_ONLY)
# Choices should be empty now
response = admin_client.get(url)
group_field = response.context['form']['self_assigned_group'].field
assert tuple(group_field.choices) == ()
@pytest.mark.django_db
def test_ssorequestaccessview_with_no_team_leaders_group(admin_client):
# If no 'team leaders group' has been designated, the 'team_leaders'
# field should only have a 'blank' option
url = reverse('sso:request_access')
response = admin_client.get(url)
team_leader_field = response.context['form']['team_leader'].field
assert tuple(team_leader_field.choices) == (BLANK_CHOICE,)
@pytest.mark.django_db
def test_ssorequestaccessview_with_team_leaders_group_but_no_members(
admin_client, team_leaders_group
):
# If the designated 'team leaders group' has no members, the 'team_leaders'
# field should only have a 'blank' option
url = reverse('sso:request_access')
response = admin_client.get(url)
team_leader_field = response.context['form']['team_leader'].field
assert team_leaders_group.user_set.all().exists() is False
assert tuple(team_leader_field.choices) == (BLANK_CHOICE,)
@pytest.mark.django_db
def test_ssorequestaccessview_with_team_leaders(
admin_client, team_leaders_group, team_leaders
):
url = reverse('sso:request_access')
# When team leaders are defined, they will appear as choices
# for the 'team_leaders' field
expected_choices = [BLANK_CHOICE]
expected_choices.extend(list(
(tl.id, "{} <{}>".format(tl.get_full_name(), tl.email))
for tl in team_leaders
))
# Confirm the choices in the form are as expected
response = admin_client.get(url)
team_leader_field = response.context['form']['team_leader'].field
actual_choices = list(team_leader_field.choices)
assert actual_choices == expected_choices
@pytest.mark.django_db
def test_ssorequestaccessview_fails_validation_if_form_incomplete(
admin_client, groups_with_info, team_leaders
):
url = reverse('sso:request_access')
response = admin_client.post(url, data={})
# Should still be on the same view
assert response.status_code == 200
# Both form fields should have errors
assert 'self_assigned_group' in response.context['form'].errors
assert 'team_leader' in response.context['form'].errors
@pytest.mark.django_db
def test_ssorequestaccessview_post_with_complete_data(
admin_client, admin_user, groups_with_info, team_leaders
):
group = groups_with_info[0]
team_leader = team_leaders[0]
with patch(
'users.views.notify_team_leader_of_pending_access_request',
autospec=True
) as mocked_method:
response = admin_client.post(
reverse('sso:request_access'),
data={
'self_assigned_group': group.id,
'team_leader': team_leader.id,
}
)
# Should be redirected to the success url
success_url = reverse('sso:request_access_success')
assert response.url == success_url
# The UserProfile for `admin_user` should have been updated
profile = admin_user.userprofile
assert profile.self_assigned_group_id == group.id
assert profile.team_leader_id == team_leader.id
assert profile.assignment_status == UserProfile.STATUS_AWAITING_APPROVAL # noqa
# A notification should have been triggered for the user
expected_call_args = dict(
request=response.wsgi_request,
team_leader_email=team_leader.email,
team_leader_name=team_leader.first_name,
user_id=admin_user.id,
user_name=admin_user.get_full_name(),
user_email=admin_user.email,
user_role=group.info.name_singular,
)
mocked_method.assert_called_with(**expected_call_args)
@pytest.mark.django_db
@pytest.mark.parametrize('url', (
reverse('sso:request_access'),
reverse('sso:request_access_success'),
))
def test_ssorequestaccess_views_only_available_to_authenticated_users(
client, admin_client, url
):
# When not authenticated, the user is redirected to the login page
response = client.get(url)
assert response.status_code == 302
assert response.url.startswith(settings.LOGIN_URL)
# When authenticated, things work fine
response = admin_client.get(url)
assert response.status_code == 200
| 34.45 | 111 | 0.730927 | [
"MIT"
] | uktrade/directory-cms | tests/users/test_views.py | 15,847 | Python |
# -*- coding: utf-8 -*-
# !/usr/bin/env python
"""
-------------------------------------------------
File Name: DbClient.py
Description : DB factory class
Author : JHao
date: 2016/12/2
-------------------------------------------------
Change Activity:
2016/12/02: DB factory class
2020/07/03: stopped storing raw_proxy
-------------------------------------------------
"""
__author__ = 'JHao'
import os
import sys
from util.six import urlparse, withMetaclass
from util.singleton import Singleton
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
class DbClient(withMetaclass(Singleton)):
"""
DbClient DB工厂类 提供get/put/update/pop/delete/exists/getAll/clean/getCount/changeTable方法
抽象方法定义:
get(): 随机返回一个proxy;
put(proxy): 存入一个proxy;
pop(): 顺序返回并删除一个proxy;
update(proxy): 更新指定proxy信息;
delete(proxy): 删除指定proxy;
exists(proxy): 判断指定proxy是否存在;
getAll(): 返回所有代理;
clean(): 清除所有proxy信息;
getCount(): 返回proxy统计信息;
changeTable(name): 切换操作对象
所有方法需要相应类去具体实现:
ssdb: ssdbClient.py
redis: redisClient.py
mongodb: mongodbClient.py
"""
def __init__(self, db_conn):
"""
init
:return:
"""
self.parseDbConn(db_conn)
self.__initDbClient()
@classmethod
def parseDbConn(cls, db_conn):
db_conf = urlparse(db_conn)
cls.db_type = db_conf.scheme.upper().strip()
cls.db_host = db_conf.hostname
cls.db_port = db_conf.port
cls.db_user = db_conf.username
cls.db_pwd = db_conf.password
cls.db_name = db_conf.path[1:]
return cls
def __initDbClient(self):
"""
init DB Client
:return:
"""
__type = None
if "SSDB" == self.db_type:
__type = "ssdbClient"
elif "REDIS" == self.db_type:
__type = "redisClient"
elif "POSTGRESQL" == self.db_type:
__type = "postgresqlClient"
else:
pass
assert __type, 'type error, Not support DB type: {}'.format(self.db_type)
self.client = getattr(__import__(__type), "%sClient" % self.db_type.title())(host=self.db_host,
port=self.db_port,
username=self.db_user,
password=self.db_pwd,
db=self.db_name)
def get(self, **kwargs):
return self.client.get(**kwargs)
def put(self, key, **kwargs):
return self.client.put(key, **kwargs)
def update(self, key, value, **kwargs):
return self.client.update(key, value, **kwargs)
def delete(self, key, **kwargs):
return self.client.delete(key, **kwargs)
def exists(self, key, **kwargs):
return self.client.exists(key, **kwargs)
def pop(self, **kwargs):
return self.client.pop(**kwargs)
def getAll(self):
return self.client.getAll()
def clear(self):
return self.client.clear()
def changeTable(self, name):
self.client.changeTable(name)
def getCount(self):
return self.client.getCount()
def test(self):
return self.client.test()
| 28.487805 | 107 | 0.50742 | [
"MIT"
] | dota2heqiuzhi/proxy_pool | db/dbClient.py | 3,708 | Python |
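A hedged usage sketch; the connection string and table name are illustrative, and a reachable Redis instance (plus the redisClient backend module) is assumed.
db = DbClient("redis://:password@127.0.0.1:6379/0")
db.changeTable("use_proxy")   # switch to the pool of verified proxies
print(db.getCount())
proxy = db.get()              # one proxy at random, served by the Redis backend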
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.13.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1alpha1_priority_class import V1alpha1PriorityClass
class TestV1alpha1PriorityClass(unittest.TestCase):
""" V1alpha1PriorityClass unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1alpha1PriorityClass(self):
"""
Test V1alpha1PriorityClass
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1alpha1_priority_class.V1alpha1PriorityClass()
pass
if __name__ == '__main__':
unittest.main()
| 22.266667 | 105 | 0.720559 | [
"Apache-2.0"
] | MiaoRachelYu/python | kubernetes/test/test_v1alpha1_priority_class.py | 1,002 | Python |
"""Forms related to import operations."""
from __future__ import unicode_literals
from django import forms
from django.utils.translation import ugettext_lazy
class ImportDataForm(forms.Form):
"""Base form to import objects."""
sourcefile = forms.FileField(label=ugettext_lazy("Select a file"))
sepchar = forms.CharField(
label=ugettext_lazy("Separator"),
max_length=1,
required=False,
widget=forms.TextInput(attrs={"class": "form-control"})
)
continue_if_exists = forms.BooleanField(
label=ugettext_lazy("Continue on error"), required=False,
help_text=ugettext_lazy("Don't treat duplicated objects as error")
)
def __init__(self, *args, **kwargs):
super(ImportDataForm, self).__init__(*args, **kwargs)
self.fields["sepchar"].widget.attrs = {"class": "col-md-1 form-control"}
def clean_sepchar(self):
if self.cleaned_data["sepchar"] == "":
return ";"
return self.cleaned_data["sepchar"]
class ImportIdentitiesForm(ImportDataForm):
"""A form to import identities."""
crypt_password = forms.BooleanField(
label=ugettext_lazy("Crypt passwords"), required=False,
help_text=ugettext_lazy(
"Check this option if passwords contained in your file "
"are not crypted"
)
)
| 29.521739 | 80 | 0.662003 | [
"ISC"
] | antoniotrento/modoboa | modoboa/admin/forms/import_.py | 1,358 | Python |
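A hedged sketch of the sepchar fallback above; it assumes a configured Django settings module, and the uploaded payload is illustrative.
from django.core.files.uploadedfile import SimpleUploadedFile
form = ImportDataForm(
    data={"sepchar": "", "continue_if_exists": False},
    files={"sourcefile": SimpleUploadedFile("identities.csv", b"account;user@test.com")},
)
assert form.is_valid()
assert form.cleaned_data["sepchar"] == ";"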