from django.test.utils import modify_settings
from django.urls import reverse
from weblate.lang.models import get_default_lang
from weblate.trans.tests.test_views import ViewTestCase
from weblate.trans.tests.utils import create_test_billing, get_test_file
from weblate.vcs.git import GitRepository
TEST_ZIP = get_test_file("translations.zip")
TEST_INVALID_ZIP = get_test_file("invalid.zip")
TEST_HTML = get_test_file("cs.html")
class CreateTest(ViewTestCase):
@classmethod
def setUpClass(cls):
super().setUpClass()
# Global setup to configure git committer
GitRepository.global_setup()
def assert_create_project(self, result):
response = self.client.get(reverse("create-project"))
match = "not have permission to create project"
if result:
self.assertNotContains(response, match)
else:
self.assertContains(response, match)
def client_create_project(self, result, **kwargs):
params = {
"name": "Create Project",
"slug": "create-project",
"web": "https://weblate.org/",
}
params.update(kwargs)
response = self.client.post(reverse("create-project"), params)
if isinstance(result, str):
self.assertRedirects(response, result)
elif result:
self.assertEqual(response.status_code, 302)
else:
self.assertEqual(response.status_code, 200)
return response
@modify_settings(INSTALLED_APPS={"append": "weblate.billing"})
def test_create_project_billing(self):
# No permissions without billing
self.assert_create_project(False)
self.client_create_project(reverse("create-project"))
# Create empty billing
billing = create_test_billing(self.user)
self.assert_create_project(True)
# Create one project
self.client_create_project(False, billing=0)
self.client_create_project(True, billing=billing.pk)
# No more billings left
self.client_create_project(
reverse("create-project"), name="p2", slug="p2", billing=billing.pk
)
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_project_admin(self):
# No permissions without superuser
self.assert_create_project(False)
self.client_create_project(reverse("create-project"))
# Make superuser
self.user.is_superuser = True
self.user.save()
# Now can create
self.assert_create_project(True)
self.client_create_project(True)
self.client_create_project(True, name="p2", slug="p2")
def assert_create_component(self, result):
response = self.client.get(reverse("create-component-vcs"))
match = "not have permission to create component"
if result:
self.assertNotContains(response, match)
else:
self.assertContains(response, match)
def client_create_component(self, result, **kwargs):
params = {
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"vcs": "git",
"repo": self.component.get_repo_link_url(),
"file_format": "po",
"filemask": "po/*.po",
"new_base": "po/project.pot",
"new_lang": "add",
"language_regex": "^[^.]+$",
"source_language": get_default_lang(),
}
params.update(kwargs)
response = self.client.post(reverse("create-component-vcs"), params)
if result:
self.assertEqual(response.status_code, 302)
else:
self.assertEqual(response.status_code, 200)
return response
@modify_settings(INSTALLED_APPS={"append": "weblate.billing"})
def test_create_component_billing(self):
# No permissions without billing
self.assert_create_component(False)
self.client_create_component(False)
# Create billing and add permissions
billing = create_test_billing(self.user)
billing.projects.add(self.project)
self.project.add_user(self.user, "@Administration")
self.assert_create_component(True)
# Create two components
self.client_create_component(True)
self.client_create_component(True, name="c2", slug="c2")
# Restrict plan to test nothing more can be created
billing.plan.limit_strings = 1
billing.plan.save()
self.client_create_component(False, name="c3", slug="c3")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_component_admin(self):
# No permissions without superuser
self.assert_create_component(False)
self.client_create_component(False)
# Make superuser
self.user.is_superuser = True
self.user.save()
# Now can create
self.assert_create_component(True)
self.client_create_component(True)
self.client_create_component(True, name="c2", slug="c2")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_component_wizard(self):
# Make superuser
self.user.is_superuser = True
self.user.save()
# First step
params = {
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"vcs": "git",
"repo": self.component.repo,
"source_language": get_default_lang(),
}
response = self.client.post(reverse("create-component-vcs"), params)
self.assertContains(response, self.component.get_repo_link_url())
self.assertContains(response, "po/*.po")
# Display form
params["discovery"] = "4"
response = self.client.post(reverse("create-component-vcs"), params)
self.assertContains(response, self.component.get_repo_link_url())
self.assertContains(response, "po/*.po")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_component_existing(self):
# Make superuser
self.user.is_superuser = True
self.user.save()
response = self.client.post(
reverse("create-component"),
{
"origin": "existing",
"name": "Create Component",
"slug": "create-component",
"component": self.component.pk,
},
follow=True,
)
self.assertContains(response, self.component.get_repo_link_url())
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_component_branch_fail(self):
# Make superuser
self.user.is_superuser = True
self.user.save()
response = self.client.post(
reverse("create-component"),
{
"origin": "branch",
"name": "Create Component",
"slug": "create-component",
"component": self.component.pk,
"branch": "translations",
},
follow=True,
)
self.assertContains(response, "The filemask did not match any files")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_component_branch(self):
# Make superuser
self.user.is_superuser = True
self.user.save()
component = self.create_android(
project=self.project, name="Android", slug="android"
)
response = self.client.post(
reverse("create-component"),
{
"origin": "branch",
"name": "Create Component",
"slug": "create-component",
"component": component.pk,
"branch": "translations",
},
follow=True,
)
self.assertContains(response, "Return to the component")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_invalid_zip(self):
self.user.is_superuser = True
self.user.save()
with open(TEST_INVALID_ZIP, "rb") as handle:
response = self.client.post(
reverse("create-component-zip"),
{
"zipfile": handle,
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"source_language": get_default_lang(),
},
)
self.assertContains(response, "Failed to parse uploaded ZIP file.")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_zip(self):
self.user.is_superuser = True
self.user.save()
with open(TEST_ZIP, "rb") as handle:
response = self.client.post(
reverse("create-component-zip"),
{
"zipfile": handle,
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"source_language": get_default_lang(),
},
)
self.assertContains(response, "*.po")
response = self.client.post(
reverse("create-component-zip"),
{
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"vcs": "local",
"repo": "local:",
"discovery": "0",
"source_language": get_default_lang(),
},
)
self.assertContains(response, "Adding new translation")
self.assertContains(response, "*.po")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_doc(self):
self.user.is_superuser = True
self.user.save()
with open(TEST_HTML, "rb") as handle:
response = self.client.post(
reverse("create-component-doc"),
{
"docfile": handle,
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"source_language": get_default_lang(),
},
)
self.assertContains(response, "*.html")
response = self.client.post(
reverse("create-component-doc"),
{
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"vcs": "local",
"repo": "local:",
"discovery": "0",
"source_language": get_default_lang(),
},
)
self.assertContains(response, "Adding new translation")
self.assertContains(response, "*.html")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_scratch(self):
def create():
return self.client.post(
reverse("create-component"),
{
"origin": "scratch",
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"file_format": "po-mono",
"source_language": get_default_lang(),
},
follow=True,
)
# Make superuser
self.user.is_superuser = True
self.user.save()
response = create()
self.assertContains(response, "Test/Create Component")
response = create()
self.assertContains(response, "Entry by the same name already exists.")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_scratch_android(self):
# Make superuser
self.user.is_superuser = True
self.user.save()
response = self.client.post(
reverse("create-component"),
{
"origin": "scratch",
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"file_format": "aresource",
"source_language": get_default_lang(),
},
follow=True,
)
self.assertContains(response, "Test/Create Component")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_scratch_bilingual(self):
# Make superuser
self.user.is_superuser = True
self.user.save()
response = self.client.post(
reverse("create-component"),
{
"origin": "scratch",
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"file_format": "po",
"source_language": get_default_lang(),
},
follow=True,
)
self.assertContains(response, "Test/Create Component")
@modify_settings(INSTALLED_APPS={"remove": "weblate.billing"})
def test_create_scratch_strings(self):
# Make superuser
self.user.is_superuser = True
self.user.save()
response = self.client.post(
reverse("create-component"),
{
"origin": "scratch",
"name": "Create Component",
"slug": "create-component",
"project": self.project.pk,
"file_format": "strings",
"source_language": get_default_lang(),
},
follow=True,
)
self.assertContains(response, "Test/Create Component")
|
from math import sqrt
import numpy as np
from scipy import linalg
from ..utils import check_random_state, logger, verbose, fill_doc
@fill_doc
def power_iteration_kron(A, C, max_iter=1000, tol=1e-3, random_state=0):
"""Find the largest singular value for the matrix kron(C.T, A).
It uses power iterations.
Parameters
----------
A : array
The array A in kron(C.T, A).
C : array
The array C in kron(C.T, A).
max_iter : int
Maximum number of iterations.
tol : float
Tolerance for convergence of the singular value estimate.
%(random_state)s
Returns
-------
L : float
Largest singular value.
Notes
-----
http://en.wikipedia.org/wiki/Power_iteration
"""
AS_size = C.shape[0]
rng = check_random_state(random_state)
B = rng.randn(AS_size, AS_size)
B /= linalg.norm(B, 'fro')
ATA = np.dot(A.T, A)
CCT = np.dot(C, C.T)
L0 = np.inf
for _ in range(max_iter):
Y = np.dot(np.dot(ATA, B), CCT)
L = linalg.norm(Y, 'fro')
if abs(L - L0) < tol:
break
B = Y / L
L0 = L
return L
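# A minimal usage sketch (not part of the original module; shapes are
# illustrative only). The only requirement is that A.shape[1] == C.shape[0]
# so that the products (A.T A) B (C C.T) used above are well defined:
#
#     >>> rng = np.random.RandomState(0)
#     >>> A = rng.randn(10, 6)
#     >>> C = rng.randn(6, 20)
#     >>> L = power_iteration_kron(A, C)  # approximates the largest singular value of kron(C.T, A)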
@verbose
def compute_bias(M, G, X, max_iter=1000, tol=1e-6, n_orient=1, verbose=None):
"""Compute scaling to correct amplitude bias.
It solves the following optimization problem using FISTA:
min 1/2 * (|| M - GDX ||fro)^2
s.t. D >= 1 and D is a diagonal matrix
Reference for the FISTA algorithm:
Amir Beck and Marc Teboulle
A Fast Iterative Shrinkage-Thresholding Algorithm for Linear Inverse
Problems, SIAM J. Imaging Sci., 2(1), 183-202. (20 pages)
http://epubs.siam.org/doi/abs/10.1137/080716542
Parameters
----------
M : array
Measurement data.
G : array
Leadfield matrix.
X : array
Reconstructed time courses with amplitude bias.
max_iter : int
Maximum number of iterations.
tol : float
The tolerance on convergence.
n_orient : int
The number of orientations (1 for fixed and 3 otherwise).
%(verbose)s
Returns
-------
D : array
Debiasing weights.
"""
n_sources = X.shape[0]
lipschitz_constant = 1.1 * power_iteration_kron(G, X)
# initializations
D = np.ones(n_sources)
Y = np.ones(n_sources)
t = 1.0
for i in range(max_iter):
D0 = D
# gradient step
R = M - np.dot(G * Y, X)
D = Y + np.sum(np.dot(G.T, R) * X, axis=1) / lipschitz_constant
# Equivalent but faster than:
# D = Y + np.diag(np.dot(np.dot(G.T, R), X.T)) / lipschitz_constant
# prox ie projection on constraint
if n_orient != 1: # take care of orientations
# The scaling has to be the same for all orientations
D = np.mean(D.reshape(-1, n_orient), axis=1)
D = np.tile(D, [n_orient, 1]).T.ravel()
D = np.maximum(D, 1.0)
t0 = t
t = 0.5 * (1.0 + sqrt(1.0 + 4.0 * t ** 2))
Y.fill(0.0)
dt = (t0 - 1.0) / t
Y = D + dt * (D - D0)
Ddiff = linalg.norm(D - D0, np.inf)
if Ddiff < tol:
logger.info("Debiasing converged after %d iterations "
"max(|D - D0| = %e < %e)" % (i, Ddiff, tol))
break
else:
Ddiff = linalg.norm(D - D0, np.inf)
logger.info("Debiasing did not converge after %d iterations! "
"max(|D - D0| = %e >= %e)" % (max_iter, Ddiff, tol))
return D
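# Illustrative shapes for compute_bias (a sketch, not part of the original
# module): with M of shape (n_channels, n_times), G of shape
# (n_channels, n_sources) and X of shape (n_sources, n_times), the returned D
# has shape (n_sources,), and, following the model M ~ G D X in the docstring,
# the debiased time courses are D[:, np.newaxis] * X.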
|
import glob
from importlib import import_module
import os
from os import path as op
from mne.utils import _replace_md5, ArgvSetter
def setup(app):
app.connect('builder-inited', generate_commands_rst)
def setup_module():
# HACK: Stop nosetests running setup() above
pass
# Header markings go:
# 1. =/= : Page title
# 2. = : Command name
# 3. -/- : Command description
# 4. - : Command sections (Examples, Notes)
header = """\
:orphan:
.. _python_commands:
===============================
Command line tools using Python
===============================
.. contents:: Page contents
:local:
:depth: 1
"""
command_rst = """
.. _{0}:
{0}
{1}
.. rst-class:: callout
{2}
"""
def generate_commands_rst(app=None):
from sphinx_gallery import sphinx_compatibility
out_dir = op.abspath(op.join(op.dirname(__file__), '..', 'generated'))
if not op.isdir(out_dir):
os.mkdir(out_dir)
out_fname = op.join(out_dir, 'commands.rst.new')
command_path = op.abspath(
op.join(os.path.dirname(__file__), '..', '..', 'mne', 'commands'))
fnames = sorted([
op.basename(fname)
for fname in glob.glob(op.join(command_path, 'mne_*.py'))])
iterator = sphinx_compatibility.status_iterator(
fnames, 'generating MNE command help ... ', length=len(fnames))
with open(out_fname, 'w', encoding='utf8') as f:
f.write(header)
for fname in iterator:
cmd_name = fname[:-3]
module = import_module('.' + cmd_name, 'mne.commands')
with ArgvSetter(('mne', cmd_name, '--help')) as out:
try:
module.run()
except SystemExit: # this is how these terminate
pass
output = out.stdout.getvalue().splitlines()
# Swap usage and title lines
output[0], output[2] = output[2], output[0]
# Add header marking
for idx in (1, 0):
output.insert(idx, '-' * len(output[0]))
# Add code styling for the "Usage: " line
for li, line in enumerate(output):
if line.startswith('Usage: mne '):
output[li] = 'Usage: ``%s``' % line[7:]
break
# Turn "Options:" into field list
if 'Options:' in output:
ii = output.index('Options:')
output[ii] = 'Options'
output.insert(ii + 1, '-------')
output.insert(ii + 2, '')
output.insert(ii + 3, '.. rst-class:: field-list cmd-list')
output.insert(ii + 4, '')
output = '\n'.join(output)
cmd_name_space = cmd_name.replace('mne_', 'mne ')
f.write(command_rst.format(
cmd_name_space, '=' * len(cmd_name_space), output))
_replace_md5(out_fname)
# This is useful for testing/iterating to see what the result looks like
if __name__ == '__main__':
generate_commands_rst()
|
from lark import Lark, Transformer, v_args
from lark.lexer import Lexer, Token
class TypeLexer(Lexer):
def __init__(self, lexer_conf):
pass
def lex(self, data):
for obj in data:
if isinstance(obj, int):
yield Token('INT', obj)
elif isinstance(obj, (type(''), type(u''))):
yield Token('STR', obj)
else:
raise TypeError(obj)
parser = Lark("""
start: data_item+
data_item: STR INT*
%declare STR INT
""", parser='lalr', lexer=TypeLexer)
class ParseToDict(Transformer):
@v_args(inline=True)
def data_item(self, name, *numbers):
return name.value, [n.value for n in numbers]
start = dict
def test():
data = ['alice', 1, 27, 3, 'bob', 4, 'carrie', 'dan', 8, 6]
print(data)
tree = parser.parse(data)
res = ParseToDict().transform(tree)
print('-->')
print(res) # prints {'alice': [1, 27, 3], 'bob': [4], 'carrie': [], 'dan': [8, 6]}
if __name__ == '__main__':
test()
|
import logging
from functools import wraps
from flask import request, Response
from kalliope import Utils
from kalliope.core.ConfigurationManager import SettingLoader
logging.basicConfig()
logger = logging.getLogger("kalliope")
def check_auth(username, password):
"""This function is called to check if a username /
password combination is valid.
"""
sl = SettingLoader()
settings = sl.settings
return username == settings.rest_api.login and password == settings.rest_api.password
def authenticate():
"""Sends a 401 response that enables basic auth"""
return Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
sl = SettingLoader()
settings = sl.settings
if settings.rest_api.password_protected:
auth = request.authorization
if not auth or not check_auth(auth.username, auth.password):
return authenticate()
return f(*args, **kwargs)
return decorated
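# A minimal usage sketch (hypothetical Flask route; "app" is assumed to be an
# existing flask.Flask instance, not defined in this module):
#
#     >>> @app.route("/synapses")
#     ... @requires_auth
#     ... def list_synapses():
#     ...     return Response(status=200)
#
# When settings.rest_api.password_protected is enabled, requests without valid
# basic-auth credentials receive the 401 response built by authenticate().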
def get_parameters_from_request(http_request):
"""
Get "parameters" object from the http_request
:param http_request: flask http request
:return: dict of parameters from the received json
"""
parameters = None
try:
# silent=True so that a missing or invalid JSON body does not raise an error
received_json = http_request.get_json(silent=True, force=True)
if 'parameters' in received_json:
parameters = received_json['parameters']
except TypeError:
logger.debug("[FlaskAPI] no parameters received in http request")
pass
return parameters
def get_value_flag_from_request(http_request, flag_to_find, is_boolean=False):
"""
Get the value of a flag from the request if it exists, None otherwise.
:param http_request: flask http request
:param flag_to_find: json flag to find in the http_request
:param is_boolean: True if the expected value is a boolean, False otherwise.
:return: the value of the flag found in the request, or None
"""
flag_value = None
try:
received_json = http_request.get_json(force=True, silent=True, cache=True)
if flag_to_find in received_json:
flag_value = received_json[flag_to_find]
if is_boolean:
flag_value = Utils.str_to_bool(flag_value)
except TypeError:
# no json received
pass
return flag_value
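# A minimal usage sketch (the JSON body is illustrative): for a request whose
# body is {"mute": "true"},
#
#     >>> get_value_flag_from_request(request, flag_to_find="mute", is_boolean=True)
#
# returns the value converted by Utils.str_to_bool (presumably True here), and
# None when the flag or the JSON body is missing.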
|
from collections import defaultdict
from itertools import chain
from pgmpy.models import BayesianModel
from pgmpy.models import MarkovModel
from pgmpy.models import FactorGraph
from pgmpy.models import JunctionTree
from pgmpy.models import DynamicBayesianNetwork
from pgmpy.factors.discrete import TabularCPD
class Inference(object):
"""
Base class for all inference algorithms.
Converts BayesianModel and MarkovModel to a uniform representation so that inference
algorithms can be applied. Also it checks if all the associated CPDs / Factors are
consistent with the model.
Initialize inference for a model.
Parameters
----------
model: pgmpy.models.BayesianModel or pgmpy.models.MarkovModel or pgmpy.models.NoisyOrModel
model for which to initialize the inference object.
Examples
--------
>>> from pgmpy.inference import Inference
>>> from pgmpy.models import BayesianModel
>>> from pgmpy.factors.discrete import TabularCPD
>>> student = BayesianModel([('diff', 'grade'), ('intel', 'grade')])
>>> diff_cpd = TabularCPD('diff', 2, [[0.2, 0.8]])
>>> intel_cpd = TabularCPD('intel', 2, [[0.3, 0.7]])
>>> grade_cpd = TabularCPD('grade', 3, [[0.1, 0.1, 0.1, 0.1],
... [0.1, 0.1, 0.1, 0.1],
... [0.8, 0.8, 0.8, 0.8]],
... evidence=['diff', 'intel'], evidence_card=[2, 2])
>>> student.add_cpds(diff_cpd, intel_cpd, grade_cpd)
>>> model = Inference(student)
>>> from pgmpy.models import MarkovModel
>>> from pgmpy.factors import DiscreteFactor
>>> import numpy as np
>>> student = MarkovModel([('Alice', 'Bob'), ('Bob', 'Charles'),
... ('Charles', 'Debbie'), ('Debbie', 'Alice')])
>>> factor_a_b = DiscreteFactor(['Alice', 'Bob'], cardinality=[2, 2], value=np.random.rand(4))
>>> factor_b_c = DiscreteFactor(['Bob', 'Charles'], cardinality=[2, 2], value=np.random.rand(4))
>>> factor_c_d = DiscreteFactor(['Charles', 'Debbie'], cardinality=[2, 2], value=np.random.rand(4))
>>> factor_d_a = DiscreteFactor(['Debbie', 'Alice'], cardinality=[2, 2], value=np.random.rand(4))
>>> student.add_factors(factor_a_b, factor_b_c, factor_c_d, factor_d_a)
>>> model = Inference(student)
"""
def __init__(self, model):
self.model = model
model.check_model()
if isinstance(model, JunctionTree):
self.variables = set(chain(*model.nodes()))
else:
self.variables = model.nodes()
self.cardinality = {}
self.factors = defaultdict(list)
if isinstance(model, BayesianModel):
self.state_names_map = {}
for node in model.nodes():
cpd = model.get_cpds(node)
if isinstance(cpd, TabularCPD):
self.cardinality[node] = cpd.variable_card
cpd = cpd.to_factor()
for var in cpd.scope():
self.factors[var].append(cpd)
self.state_names_map.update(cpd.no_to_name)
elif isinstance(model, (MarkovModel, FactorGraph, JunctionTree)):
self.cardinality = model.get_cardinality()
for factor in model.get_factors():
for var in factor.variables:
self.factors[var].append(factor)
elif isinstance(model, DynamicBayesianNetwork):
self.start_bayesian_model = BayesianModel(model.get_intra_edges(0))
self.start_bayesian_model.add_cpds(*model.get_cpds(time_slice=0))
cpd_inter = [model.get_cpds(node) for node in model.get_interface_nodes(1)]
self.interface_nodes = model.get_interface_nodes(0)
self.one_and_half_model = BayesianModel(
model.get_inter_edges() + model.get_intra_edges(1)
)
self.one_and_half_model.add_cpds(
*(model.get_cpds(time_slice=1) + cpd_inter)
)
|
import pytest
from homeassistant import config_entries, data_entry_flow, setup
from homeassistant.components.solarlog import config_flow
from homeassistant.components.solarlog.const import DEFAULT_HOST, DOMAIN
from homeassistant.const import CONF_HOST, CONF_NAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
NAME = "Solarlog test 1 2 3"
HOST = "http://1.1.1.1"
async def test_form(hass):
"""Test we get the form."""
await setup.async_setup_component(hass, "persistent_notification", {})
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
assert result["type"] == "form"
assert result["errors"] == {}
with patch(
"homeassistant.components.solarlog.config_flow.SolarLogConfigFlow._test_connection",
return_value={"title": "solarlog test 1 2 3"},
), patch(
"homeassistant.components.solarlog.async_setup", return_value=True
) as mock_setup, patch(
"homeassistant.components.solarlog.async_setup_entry",
return_value=True,
) as mock_setup_entry:
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"], {"host": HOST, "name": NAME}
)
await hass.async_block_till_done()
assert result2["type"] == "create_entry"
assert result2["title"] == "solarlog_test_1_2_3"
assert result2["data"] == {"host": "http://1.1.1.1"}
assert len(mock_setup.mock_calls) == 1
assert len(mock_setup_entry.mock_calls) == 1
@pytest.fixture(name="test_connect")
def mock_controller():
"""Mock a successful _host_in_configuration_exists."""
with patch(
"homeassistant.components.solarlog.config_flow.SolarLogConfigFlow._test_connection",
return_value=True,
):
yield
def init_config_flow(hass):
"""Init a configuration flow."""
flow = config_flow.SolarLogConfigFlow()
flow.hass = hass
return flow
async def test_user(hass, test_connect):
"""Test user config."""
flow = init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
# test with all values provided
result = await flow.async_step_user({CONF_NAME: NAME, CONF_HOST: HOST})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solarlog_test_1_2_3"
assert result["data"][CONF_HOST] == HOST
async def test_import(hass, test_connect):
"""Test import step."""
flow = init_config_flow(hass)
# import with only host
result = await flow.async_step_import({CONF_HOST: HOST})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solarlog"
assert result["data"][CONF_HOST] == HOST
# import with only name
result = await flow.async_step_import({CONF_NAME: NAME})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solarlog_test_1_2_3"
assert result["data"][CONF_HOST] == DEFAULT_HOST
# import with host and name
result = await flow.async_step_import({CONF_HOST: HOST, CONF_NAME: NAME})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solarlog_test_1_2_3"
assert result["data"][CONF_HOST] == HOST
async def test_abort_if_already_setup(hass, test_connect):
"""Test we abort if the device is already setup."""
flow = init_config_flow(hass)
MockConfigEntry(
domain="solarlog", data={CONF_NAME: NAME, CONF_HOST: HOST}
).add_to_hass(hass)
# Should fail, same HOST different NAME (default)
result = await flow.async_step_import(
{CONF_HOST: HOST, CONF_NAME: "solarlog_test_7_8_9"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
# Should fail, same HOST and NAME
result = await flow.async_step_user({CONF_HOST: HOST, CONF_NAME: NAME})
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_HOST: "already_configured"}
# SHOULD pass, diff HOST (without http://), different NAME
result = await flow.async_step_import(
{CONF_HOST: "2.2.2.2", CONF_NAME: "solarlog_test_7_8_9"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solarlog_test_7_8_9"
assert result["data"][CONF_HOST] == "http://2.2.2.2"
# SHOULD pass, diff HOST, same NAME
result = await flow.async_step_import(
{CONF_HOST: "http://2.2.2.2", CONF_NAME: NAME}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "solarlog_test_1_2_3"
assert result["data"][CONF_HOST] == "http://2.2.2.2"
|
class FileDisposition(object):
"""A simple value type for recording what to do with a file."""
pass
# FileDisposition "methods": FileDisposition is a pure value object, so it can
# be implemented in either C or Python. Acting on them is done with these
# functions.
def disposition_init(cls, original_filename):
"""Construct and initialize a new FileDisposition object."""
disp = cls()
disp.original_filename = original_filename
disp.canonical_filename = original_filename
disp.source_filename = None
disp.trace = False
disp.reason = ""
disp.file_tracer = None
disp.has_dynamic_filename = False
return disp
def disposition_debug_msg(disp):
"""Make a nice debug message of what the FileDisposition is doing."""
if disp.trace:
msg = "Tracing %r" % (disp.original_filename,)
if disp.file_tracer:
msg += ": will be traced by %r" % disp.file_tracer
else:
msg = "Not tracing %r: %s" % (disp.original_filename, disp.reason)
return msg
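# A minimal usage sketch (illustrative file name only):
#
#     >>> disp = disposition_init(FileDisposition, "project/module.py")
#     >>> disp.trace = True
#     >>> disposition_debug_msg(disp)
#     "Tracing 'project/module.py'"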
|
import argparse
import os
import re
import sys
from paasta_tools.cli.utils import lazy_choices_completer
from paasta_tools.secret_tools import get_secret_provider
from paasta_tools.secret_tools import SHARED_SECRET_SERVICE
from paasta_tools.utils import _log_audit
from paasta_tools.utils import list_clusters
from paasta_tools.utils import list_services
from paasta_tools.utils import load_system_paasta_config
SECRET_NAME_REGEX = r"([A-Za-z0-9_-]*)"
def check_secret_name(secret_name_arg: str):
pattern = re.compile(SECRET_NAME_REGEX)
if (
not secret_name_arg.startswith("-")
and not secret_name_arg.startswith("_")
and "".join(pattern.findall(secret_name_arg)) == secret_name_arg
):
return secret_name_arg
raise argparse.ArgumentTypeError(
"--secret-name argument should only contain letters, numbers, "
"dashes and underscores characters and cannot start from latter two"
)
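# A minimal usage sketch (example values only): check_secret_name("my-secret")
# returns "my-secret" unchanged, while check_secret_name("-leading-dash") or
# check_secret_name("has space") raises argparse.ArgumentTypeError.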
def add_subparser(subparsers):
secret_parser = subparsers.add_parser(
"secret",
help="Add/update PaaSTA service secrets",
description=(
"This script allows you to add secrets to your services "
"as environment variables. This script modifies your local "
"checkout of yelpsoa-configs and you must then commit and "
"push the changes back to git."
),
)
secret_parser.add_argument(
"action", help="should be add/update", choices=["add", "update", "decrypt"]
)
secret_parser.add_argument(
"-n",
"--secret-name",
type=check_secret_name,
required=True,
help="The name of the secret to create/update, "
"this is the name you will reference in your "
"services yaml files and should "
"be unique per service.",
)
# Must choose valid service or act on a shared secret
service_group = secret_parser.add_mutually_exclusive_group(required=True)
service_group.add_argument(
"-s", "--service", help="The name of the service on which you wish to act"
).completer = lazy_choices_completer(list_services)
service_group.add_argument(
"--shared",
help="Act on a secret that can be shared by all services",
action="store_true",
)
secret_parser.add_argument(
"-c",
"--clusters",
help="A comma-separated list of clusters to create secrets for. "
"Note: this is translated to ecosystems because Vault is run "
"at an ecosystem level. As a result you can only have different "
"secrets per ecosystem. (it is not possible for example to encrypt "
"a different value for norcal-prod vs nova-prod. "
"Defaults to all clusters in which the service runs. "
"For example: --clusters norcal-prod,nova-prod ",
).completer = lazy_choices_completer(list_clusters)
secret_parser.add_argument(
"-p",
"--plain-text",
required=False,
type=str,
help="Optionally specify the secret as a command line argument",
)
secret_parser.add_argument(
"-i",
"--stdin",
required=False,
action="store_true",
default=False,
help="Optionally pass the plaintext from stdin",
)
secret_parser.add_argument(
"--cross-env-motivation",
required=False,
type=str,
help=(
"Provide motivation in case the same value is being duplicated "
"across multiple runtime environments when adding or updating a secret"
),
metavar="MOTIVATION",
)
secret_parser.set_defaults(command=paasta_secret)
def secret_name_for_env(secret_name):
secret_name = secret_name.upper()
valid_parts = re.findall(r"[a-zA-Z0-9_]+", secret_name)
return "_".join(valid_parts)
def print_paasta_helper(secret_path, secret_name, is_shared):
print(
"\nYou have successfully encrypted your new secret and it\n"
"has been stored at {}\n"
"To use the secret in a service you can add it to your PaaSTA service\n"
"as an environment variable.\n"
"You do so by referencing it in the env dict in your yaml config:\n\n"
"main:\n"
" cpus: 1\n"
" env:\n"
" PAASTA_SECRET_{}: {}SECRET({})\n\n"
"Once you have referenced the secret you must commit the newly\n"
"created/updated json file and your changes to your yaml config. When\n"
"you push to master PaaSTA will bounce your service and the new\n"
"secrets plaintext will be in the environment variable you have\n"
"specified. The PAASTA_SECRET_ prefix is optional but necessary\n"
"for the yelp_servlib client library".format(
secret_path,
secret_name_for_env(secret_name),
"SHARED_" if is_shared else "",
secret_name,
)
)
def get_plaintext_input(args):
if args.stdin:
plaintext = sys.stdin.buffer.read()
elif args.plain_text:
plaintext = args.plain_text.encode("utf-8")
else:
print(
"Please enter the plaintext for the secret, then enter a newline and Ctrl-D when done."
)
lines = []
while True:
try:
line = input()
except EOFError:
break
lines.append(line)
plaintext = "\n".join(lines).encode("utf-8")
print("The secret as a Python string is:", repr(plaintext))
print("Please make sure this is correct.")
return plaintext
def is_service_folder(soa_dir, service_name):
return os.path.isfile(os.path.join(soa_dir, service_name, "service.yaml"))
def _get_secret_provider_for_service(service_name, cluster_names=None):
if not is_service_folder(os.getcwd(), service_name):
print(
"{} not found.\n"
"You must run this tool from the root of your local yelpsoa checkout\n"
"The tool modifies files in yelpsoa-configs that you must then commit\n"
"and push back to git.".format(os.path.join(service_name, "service.yaml"))
)
sys.exit(1)
system_paasta_config = load_system_paasta_config()
secret_provider_kwargs = {
"vault_cluster_config": system_paasta_config.get_vault_cluster_config()
}
clusters = (
cluster_names.split(",")
if cluster_names
else list_clusters(service=service_name, soa_dir=os.getcwd())
)
return get_secret_provider(
secret_provider_name=system_paasta_config.get_secret_provider_name(),
soa_dir=os.getcwd(),
service_name=service_name,
cluster_names=clusters,
secret_provider_kwargs=secret_provider_kwargs,
)
def paasta_secret(args):
if args.shared:
service = SHARED_SECRET_SERVICE
if not args.clusters:
print("A list of clusters is required for shared secrets.")
sys.exit(1)
else:
service = args.service
secret_provider = _get_secret_provider_for_service(
service, cluster_names=args.clusters
)
if args.action in ["add", "update"]:
plaintext = get_plaintext_input(args)
if not plaintext:
print("Warning: Given plaintext is an empty string.")
secret_provider.write_secret(
action=args.action,
secret_name=args.secret_name,
plaintext=plaintext,
cross_environment_motivation=args.cross_env_motivation,
)
secret_path = os.path.join(
secret_provider.secret_dir, f"{args.secret_name}.json"
)
_log_audit(
action=f"{args.action}-secret",
action_details={"secret_name": args.secret_name, "clusters": args.clusters},
service=service,
)
print_paasta_helper(secret_path, args.secret_name, args.shared)
elif args.action == "decrypt":
print(
decrypt_secret(
secret_provider=secret_provider, secret_name=args.secret_name
),
end="",
)
else:
print("Unknown action")
sys.exit(1)
def decrypt_secret(secret_provider, secret_name):
if len(secret_provider.cluster_names) > 1:
print(
"Can only decrypt for one cluster at a time!\nFor example, try '-c norcal-devc'"
" to decrypt the secret for this service in norcal-devc."
)
sys.exit(1)
return secret_provider.decrypt_secret(secret_name)
|
from pygal import Line
s1 = [1, 3, 12, 3, 4]
s2 = [7, -4, 10, None, 8, 3, 1]
def test_no_serie_config():
"""Test per serie no configuration"""
chart = Line()
chart.add('1', s1)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 1
assert len(q('.serie-1 .line')) == 1
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
def test_global_config():
"""Test global configuration"""
chart = Line(stroke=False)
chart.add('1', s1)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 0
assert len(q('.serie-1 .line')) == 0
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
def test_serie_config():
"""Test per serie configuration"""
chart = Line()
chart.add('1', s1, stroke=False)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 0
assert len(q('.serie-1 .line')) == 1
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
def test_serie_precedence_over_global_config():
"""Test that per serie configuration overide global configuration"""
chart = Line(stroke=False)
chart.add('1', s1, stroke=True)
chart.add('2', s2)
q = chart.render_pyquery()
assert len(q('.serie-0 .line')) == 1
assert len(q('.serie-1 .line')) == 0
assert len(q('.serie-0 .dot')) == 5
assert len(q('.serie-1 .dot')) == 6
|
from __future__ import unicode_literals
from rules.BaseTrick import *
from lib.data.data import pyoptions
def SDrule(cname, birth):
for cn in cname:
for bd in birth:
for _ in simplejoin(wordshaper(cn), dateshaper(bd)):
yield _
for _ in simplejoin(dateshaper(bd), wordshaper(cn)):
yield _
for mid in pyoptions.sedb_trick_mid:
for _ in middlejoin(wordshaper(cn), dateshaper(bd), mid):
yield _
for _ in middlejoin(dateshaper(bd), wordshaper(cn), mid):
yield _
for suf in pyoptions.sedb_trick_suf:
for _ in tailjoins(wordshaper(cn), dateshaper(bd), suf):
yield _
for _ in tailjoins(dateshaper(bd), wordshaper(cn), suf):
yield _
for pre in pyoptions.sedb_trick_pre:
for _ in headjoins(wordshaper(cn), dateshaper(bd), pre):
yield _
for _ in headjoins(dateshaper(bd), wordshaper(cn), pre):
yield _
|
import ast
import logging
import numpy as np
from bson.binary import Binary
from pandas import DataFrame, Series, Panel
from arctic._util import NP_OBJECT_DTYPE
from arctic.serialization.numpy_records import SeriesSerializer, DataFrameSerializer
from ._ndarray_store import NdarrayStore
from .._compression import compress, decompress
from .._config import FORCE_BYTES_TO_UNICODE
from ..date._util import to_pandas_closed_closed
from ..exceptions import ArcticException
log = logging.getLogger(__name__)
DTN64_DTYPE = 'datetime64[ns]'
INDEX_DTYPE = [('datetime', DTN64_DTYPE), ('index', 'i8')]
class PandasStore(NdarrayStore):
def _segment_index(self, recarr, existing_index, start, new_segments):
"""
Generate index of datetime64 -> item offset.
Parameters
----------
recarr: new data being written (or appended)
existing_index: index field from the versions document of the previous version
start: first (0-based) offset of the new data
new_segments: list of offsets. Each offset is the row index of the
last row of a particular chunk relative to the start of the _original_ item.
array(new_data) - new_segments = array(offsets in item)
Returns
-------
Binary(compress(array([(index, datetime)])))
Where index is the 0-based index of the datetime in the DataFrame
"""
# find the index of the first datetime64 column
idx_col = self._datetime64_index(recarr)
# if one exists let's create the index on it
if idx_col is not None:
new_segments = np.array(new_segments, dtype='i8')
last_rows = recarr[new_segments - start]
# create numpy index
index = np.core.records.fromarrays([last_rows[idx_col]] + [new_segments, ], dtype=INDEX_DTYPE)
# append to existing index if exists
if existing_index:
# existing_index_arr is read-only but it's never written to
existing_index_arr = np.frombuffer(decompress(existing_index), dtype=INDEX_DTYPE)
if start > 0:
existing_index_arr = existing_index_arr[existing_index_arr['index'] < start]
index = np.concatenate((existing_index_arr, index))
return Binary(compress(index.tostring()))
elif existing_index:
raise ArcticException("Could not find datetime64 index in item but existing data contains one")
return None
def _datetime64_index(self, recarr):
""" Given a np.recarray find the first datetime64 column """
# TODO: Handle multi-indexes
names = recarr.dtype.names
for name in names:
if recarr[name].dtype == DTN64_DTYPE:
return name
return None
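# Illustrative example (hypothetical recarray): for a recarray whose dtype is
# [('index', 'datetime64[ns]'), ('price', '<f8')], _datetime64_index returns
# 'index'; when no datetime64[ns] field exists it returns None.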
def read_options(self):
return ['date_range']
def _index_range(self, version, symbol, date_range=None, **kwargs):
""" Given a version, read the segment_index and return the chunks associated
with the date_range. As the segment index is (id -> last datetime)
we need to take care in choosing the correct chunks. """
if date_range and 'segment_index' in version:
# index is read-only but it's never written to
index = np.frombuffer(decompress(version['segment_index']), dtype=INDEX_DTYPE)
dtcol = self._datetime64_index(index)
if dtcol and len(index):
dts = index[dtcol]
start, end = _start_end(date_range, dts)
if start > dts[-1]:
return -1, -1
idxstart = min(np.searchsorted(dts, start), len(dts) - 1)
idxend = min(np.searchsorted(dts, end, side='right'), len(dts) - 1)
return int(index['index'][idxstart]), int(index['index'][idxend] + 1)
return super(PandasStore, self)._index_range(version, symbol, **kwargs)
def _daterange(self, recarr, date_range):
""" Given a recarr, slice out the given artic.date.DateRange if a
datetime64 index exists """
idx = self._datetime64_index(recarr)
if idx and len(recarr):
dts = recarr[idx]
mask = Series(np.zeros(len(dts)), index=dts)
start, end = _start_end(date_range, dts)
mask[start:end] = 1.0
return recarr[mask.values.astype(bool)]
return recarr
def read(self, arctic_lib, version, symbol, read_preference=None, date_range=None, **kwargs):
item = super(PandasStore, self).read(arctic_lib, version, symbol, read_preference,
date_range=date_range, **kwargs)
if date_range:
item = self._daterange(item, date_range)
return item
def get_info(self, version):
"""
Parse out the relevant information in version
and return it to the user in a dictionary.
"""
ret = super(PandasStore, self).get_info(version)
ret['col_names'] = version['dtype_metadata']
ret['handler'] = self.__class__.__name__
ret['dtype'] = ast.literal_eval(version['dtype'])
return ret
def _start_end(date_range, dts):
"""
Return tuple: [start, end] of np.datetime64 dates that are inclusive of the passed
in datetimes.
"""
# FIXME: timezones
assert len(dts)
_assert_no_timezone(date_range)
date_range = to_pandas_closed_closed(date_range, add_tz=False)
start = np.datetime64(date_range.start) if date_range.start else dts[0]
end = np.datetime64(date_range.end) if date_range.end else dts[-1]
return start, end
def _assert_no_timezone(date_range):
for _dt in (date_range.start, date_range.end):
if _dt and _dt.tzinfo is not None:
raise ValueError("DateRange with timezone not supported")
class PandasSeriesStore(PandasStore):
TYPE = 'pandasseries'
SERIALIZER = SeriesSerializer()
@staticmethod
def can_write_type(data):
return isinstance(data, Series)
def can_write(self, version, symbol, data):
if self.can_write_type(data):
# Series has always a single-column
if data.dtype is NP_OBJECT_DTYPE or data.index.dtype is NP_OBJECT_DTYPE:
return self.SERIALIZER.can_convert_to_records_without_objects(data, symbol)
return True
return False
def write(self, arctic_lib, version, symbol, item, previous_version):
item, md = self.SERIALIZER.serialize(item)
super(PandasSeriesStore, self).write(arctic_lib, version, symbol, item, previous_version, dtype=md)
def append(self, arctic_lib, version, symbol, item, previous_version, **kwargs):
item, md = self.SERIALIZER.serialize(item)
super(PandasSeriesStore, self).append(arctic_lib, version, symbol, item, previous_version, dtype=md, **kwargs)
def read_options(self):
return super(PandasSeriesStore, self).read_options()
def read(self, arctic_lib, version, symbol, **kwargs):
item = super(PandasSeriesStore, self).read(arctic_lib, version, symbol, **kwargs)
# Try to check if force_bytes_to_unicode is set in kwargs else use the config value (which defaults to False)
force_bytes_to_unicode = kwargs.get('force_bytes_to_unicode', FORCE_BYTES_TO_UNICODE)
return self.SERIALIZER.deserialize(item, force_bytes_to_unicode=force_bytes_to_unicode)
class PandasDataFrameStore(PandasStore):
TYPE = 'pandasdf'
SERIALIZER = DataFrameSerializer()
@staticmethod
def can_write_type(data):
return isinstance(data, DataFrame)
def can_write(self, version, symbol, data):
if self.can_write_type(data):
if NP_OBJECT_DTYPE in data.dtypes.values or data.index.dtype is NP_OBJECT_DTYPE:
return self.SERIALIZER.can_convert_to_records_without_objects(data, symbol)
return True
return False
def write(self, arctic_lib, version, symbol, item, previous_version):
item, md = self.SERIALIZER.serialize(item)
super(PandasDataFrameStore, self).write(arctic_lib, version, symbol, item, previous_version, dtype=md)
def append(self, arctic_lib, version, symbol, item, previous_version, **kwargs):
item, md = self.SERIALIZER.serialize(item)
super(PandasDataFrameStore, self).append(arctic_lib, version, symbol, item, previous_version, dtype=md, **kwargs)
def read(self, arctic_lib, version, symbol, **kwargs):
item = super(PandasDataFrameStore, self).read(arctic_lib, version, symbol, **kwargs)
# Try to check if force_bytes_to_unicode is set in kwargs else use the config value (which defaults to False)
force_bytes_to_unicode = kwargs.get('force_bytes_to_unicode', FORCE_BYTES_TO_UNICODE)
return self.SERIALIZER.deserialize(item, force_bytes_to_unicode=force_bytes_to_unicode)
def read_options(self):
return super(PandasDataFrameStore, self).read_options()
class PandasPanelStore(PandasDataFrameStore):
TYPE = 'pandaspan'
@staticmethod
def can_write_type(data):
return isinstance(data, Panel)
def can_write(self, version, symbol, data):
if self.can_write_type(data):
frame = data.to_frame(filter_observations=False)
if NP_OBJECT_DTYPE in frame.dtypes.values or (hasattr(data, 'index') and data.index.dtype is NP_OBJECT_DTYPE):
return self.SERIALIZER.can_convert_to_records_without_objects(frame, symbol)
return True
return False
def write(self, arctic_lib, version, symbol, item, previous_version):
if np.product(item.shape) == 0:
# Currently not supporting zero size panels as they drop indices when converting to dataframes
# Plan is to find a better solution in due course.
raise ValueError('Cannot insert a zero size panel into mongo.')
if not all(len(i.names) == 1 for i in item.axes):
raise ValueError('Cannot insert panels with multiindexes')
item = item.to_frame(filter_observations=False)
if len(set(item.dtypes)) == 1:
# If all columns have the same dtype, we support non-string column names.
# We know from above check that columns is not a multiindex.
item = DataFrame(item.stack())
elif item.columns.dtype != np.dtype('object'):
raise ValueError('Cannot support non-object dtypes for columns')
super(PandasPanelStore, self).write(arctic_lib, version, symbol, item, previous_version)
def read(self, arctic_lib, version, symbol, **kwargs):
item = super(PandasPanelStore, self).read(arctic_lib, version, symbol, **kwargs)
if len(item.index.names) == 3:
return item.iloc[:, 0].unstack().to_panel()
return item.to_panel()
def read_options(self):
return super(PandasPanelStore, self).read_options()
def append(self, arctic_lib, version, symbol, item, previous_version, **kwargs):
raise ValueError('Appending not supported for pandas.Panel')
|
import pytest
from voluptuous import MultipleInvalid
import homeassistant.components.dynalite.const as dynalite
from homeassistant.const import CONF_HOST, CONF_NAME, CONF_PORT, CONF_ROOM
from homeassistant.setup import async_setup_component
from tests.async_mock import call, patch
from tests.common import MockConfigEntry
async def test_empty_config(hass):
"""Test with an empty config."""
assert await async_setup_component(hass, dynalite.DOMAIN, {}) is True
assert len(hass.config_entries.flow.async_progress()) == 0
assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 0
async def test_async_setup(hass):
"""Test a successful setup with all of the different options."""
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
assert await async_setup_component(
hass,
dynalite.DOMAIN,
{
dynalite.DOMAIN: {
dynalite.CONF_BRIDGES: [
{
CONF_HOST: "1.2.3.4",
CONF_PORT: 1234,
dynalite.CONF_AUTO_DISCOVER: True,
dynalite.CONF_POLL_TIMER: 5.5,
dynalite.CONF_AREA: {
"1": {
CONF_NAME: "Name1",
dynalite.CONF_CHANNEL: {"4": {}},
dynalite.CONF_PRESET: {"7": {}},
dynalite.CONF_NO_DEFAULT: True,
},
"2": {CONF_NAME: "Name2"},
"3": {
CONF_NAME: "Name3",
dynalite.CONF_TEMPLATE: CONF_ROOM,
},
"4": {
CONF_NAME: "Name4",
dynalite.CONF_TEMPLATE: dynalite.CONF_TIME_COVER,
},
},
dynalite.CONF_DEFAULT: {dynalite.CONF_FADE: 2.3},
dynalite.CONF_ACTIVE: dynalite.ACTIVE_INIT,
dynalite.CONF_PRESET: {
"5": {CONF_NAME: "pres5", dynalite.CONF_FADE: 4.5}
},
dynalite.CONF_TEMPLATE: {
CONF_ROOM: {
dynalite.CONF_ROOM_ON: 6,
dynalite.CONF_ROOM_OFF: 7,
},
dynalite.CONF_TIME_COVER: {
dynalite.CONF_OPEN_PRESET: 8,
dynalite.CONF_CLOSE_PRESET: 9,
dynalite.CONF_STOP_PRESET: 10,
dynalite.CONF_CHANNEL_COVER: 3,
dynalite.CONF_DURATION: 2.2,
dynalite.CONF_TILT_TIME: 3.3,
dynalite.CONF_DEVICE_CLASS: "awning",
},
},
}
]
}
},
)
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 1
async def test_service_request_area_preset(hass):
"""Test requesting and area preset via service call."""
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
), patch(
"dynalite_devices_lib.dynalite.Dynalite.request_area_preset",
return_value=True,
) as mock_req_area_pres:
assert await async_setup_component(
hass,
dynalite.DOMAIN,
{
dynalite.DOMAIN: {
dynalite.CONF_BRIDGES: [
{CONF_HOST: "1.2.3.4"},
{CONF_HOST: "5.6.7.8"},
]
}
},
)
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 2
await hass.services.async_call(
dynalite.DOMAIN,
"request_area_preset",
{"host": "1.2.3.4", "area": 2},
)
await hass.async_block_till_done()
mock_req_area_pres.assert_called_once_with(2, 1)
mock_req_area_pres.reset_mock()
await hass.services.async_call(
dynalite.DOMAIN,
"request_area_preset",
{"area": 3},
)
await hass.async_block_till_done()
assert mock_req_area_pres.mock_calls == [call(3, 1), call(3, 1)]
mock_req_area_pres.reset_mock()
await hass.services.async_call(
dynalite.DOMAIN,
"request_area_preset",
{"host": "5.6.7.8", "area": 4},
)
await hass.async_block_till_done()
mock_req_area_pres.assert_called_once_with(4, 1)
mock_req_area_pres.reset_mock()
await hass.services.async_call(
dynalite.DOMAIN,
"request_area_preset",
{"host": "6.5.4.3", "area": 5},
)
await hass.async_block_till_done()
mock_req_area_pres.assert_not_called()
mock_req_area_pres.reset_mock()
await hass.services.async_call(
dynalite.DOMAIN,
"request_area_preset",
{"host": "1.2.3.4", "area": 6, "channel": 9},
)
await hass.async_block_till_done()
mock_req_area_pres.assert_called_once_with(6, 9)
mock_req_area_pres.reset_mock()
await hass.services.async_call(
dynalite.DOMAIN,
"request_area_preset",
{"host": "1.2.3.4", "area": 7},
)
await hass.async_block_till_done()
mock_req_area_pres.assert_called_once_with(7, 1)
async def test_service_request_channel_level(hass):
"""Test requesting the level of a channel via service call."""
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
), patch(
"dynalite_devices_lib.dynalite.Dynalite.request_channel_level",
return_value=True,
) as mock_req_chan_lvl:
assert await async_setup_component(
hass,
dynalite.DOMAIN,
{
dynalite.DOMAIN: {
dynalite.CONF_BRIDGES: [
{
CONF_HOST: "1.2.3.4",
dynalite.CONF_AREA: {"7": {CONF_NAME: "test"}},
},
{CONF_HOST: "5.6.7.8"},
]
}
},
)
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 2
await hass.services.async_call(
dynalite.DOMAIN,
"request_channel_level",
{"host": "1.2.3.4", "area": 2, "channel": 3},
)
await hass.async_block_till_done()
mock_req_chan_lvl.assert_called_once_with(2, 3)
mock_req_chan_lvl.reset_mock()
with pytest.raises(MultipleInvalid):
await hass.services.async_call(
dynalite.DOMAIN,
"request_channel_level",
{"area": 3},
)
await hass.async_block_till_done()
mock_req_chan_lvl.assert_not_called()
await hass.services.async_call(
dynalite.DOMAIN,
"request_channel_level",
{"area": 4, "channel": 5},
)
await hass.async_block_till_done()
assert mock_req_chan_lvl.mock_calls == [call(4, 5), call(4, 5)]
async def test_async_setup_bad_config1(hass):
"""Test a successful with bad config on templates."""
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
assert not await async_setup_component(
hass,
dynalite.DOMAIN,
{
dynalite.DOMAIN: {
dynalite.CONF_BRIDGES: [
{
CONF_HOST: "1.2.3.4",
dynalite.CONF_AREA: {
"1": {
dynalite.CONF_TEMPLATE: dynalite.CONF_TIME_COVER,
CONF_NAME: "Name",
dynalite.CONF_ROOM_ON: 7,
}
},
}
]
}
},
)
await hass.async_block_till_done()
async def test_async_setup_bad_config2(hass):
"""Test a successful with bad config on numbers."""
host = "1.2.3.4"
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
assert not await async_setup_component(
hass,
dynalite.DOMAIN,
{
dynalite.DOMAIN: {
dynalite.CONF_BRIDGES: [
{
CONF_HOST: host,
dynalite.CONF_AREA: {"WRONG": {CONF_NAME: "Name"}},
}
]
}
},
)
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 0
async def test_unload_entry(hass):
"""Test being able to unload an entry."""
host = "1.2.3.4"
entry = MockConfigEntry(domain=dynalite.DOMAIN, data={CONF_HOST: host})
entry.add_to_hass(hass)
with patch(
"homeassistant.components.dynalite.bridge.DynaliteDevices.async_setup",
return_value=True,
):
assert await hass.config_entries.async_setup(entry.entry_id)
await hass.async_block_till_done()
assert len(hass.config_entries.async_entries(dynalite.DOMAIN)) == 1
with patch.object(
hass.config_entries, "async_forward_entry_unload", return_value=True
) as mock_unload:
assert await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert mock_unload.call_count == len(dynalite.ENTITY_PLATFORMS)
expected_calls = [
call(entry, platform) for platform in dynalite.ENTITY_PLATFORMS
]
for cur_call in mock_unload.mock_calls:
assert cur_call in expected_calls
|
import os.path as op
import numpy as np
from numpy.testing import assert_almost_equal
import pytest
from scipy.linalg import svd, pinv
import scipy.io as sio
from mne.io import read_raw_fif
from mne import pick_types
from mne.preprocessing.infomax_ import infomax
from mne.utils import random_permutation, run_tests_if_main
from mne.datasets import testing
base_dir = op.join(op.dirname(__file__), 'data')
def generate_data_for_comparing_against_eeglab_infomax(ch_type, random_state):
"""Generate data."""
data_dir = op.join(testing.data_path(download=False), 'MEG', 'sample')
raw_fname = op.join(data_dir, 'sample_audvis_trunc_raw.fif')
raw = read_raw_fif(raw_fname, preload=True)
if ch_type == 'eeg':
picks = pick_types(raw.info, meg=False, eeg=True, exclude='bads')
else:
picks = pick_types(raw.info, meg=ch_type,
eeg=False, exclude='bads')
# select a small number of channels for the test
number_of_channels_to_use = 5
idx_perm = random_permutation(picks.shape[0], random_state)
picks = picks[idx_perm[:number_of_channels_to_use]]
raw.filter(1, 45, picks=picks, filter_length='10s',
l_trans_bandwidth=0.5, h_trans_bandwidth=0.5,
phase='zero-double', fir_window='hann',
fir_design='firwin2') # use the old way
X = raw[picks, :][0][:, ::20]
# Subtract the mean
mean_X = X.mean(axis=1)
X -= mean_X[:, None]
# pre_whitening: z-score
X /= np.std(X)
T = X.shape[1]
cov_X = np.dot(X, X.T) / T
# Let's whiten the data
U, D, _ = svd(cov_X)
W = np.dot(U, U.T / np.sqrt(D)[:, None])
Y = np.dot(W, X)
return Y
@pytest.mark.slowtest
@testing.requires_testing_data
def test_mne_python_vs_eeglab():
"""Test eeglab vs mne_python infomax code."""
random_state = 42
methods = ['infomax', 'extended_infomax']
ch_types = ['eeg', 'mag']
for ch_type in ch_types:
Y = generate_data_for_comparing_against_eeglab_infomax(
ch_type, random_state)
N, T = Y.shape
for method in methods:
eeglab_results_file = ('eeglab_%s_results_%s_data.mat'
% (method,
dict(eeg='eeg', mag='meg')[ch_type]))
# For comparison against eeglab, make sure the following
# parameters have the same value in mne_python and eeglab:
#
# - starting point
# - random state
# - learning rate
# - block size
# - blowup parameter
# - blowup_fac parameter
# - tolerance for stopping the algorithm
# - number of iterations
# - anneal_step parameter
#
# Notes:
# * By default, eeglab whitens the data using a "sphering transform"
# instead of pca. The mne_python infomax code does not
# whiten the data. To make sure both mne_python and eeglab start
# from the same point (i.e., the same matrix), we need to make
# sure to whiten the data outside, and pass this whitened data to
# mne_python and eeglab. Finally, we need to tell eeglab that
# the input data is already whitened; this can be done by calling
# eeglab with the following syntax:
#
# % Run infomax
# [unmixing,sphere,meanvar,bias,signs,lrates,sources,y] = ...
# runica( Y, 'sphering', 'none');
#
# % Run extended infomax
# [unmixing,sphere,meanvar,bias,signs,lrates,sources,y] = ...
# runica( Y, 'sphering', 'none', 'extended', 1);
#
# By calling eeglab using the former code, we are using its
# default parameters, which are specified below in the section
# "EEGLAB default parameters".
#
# * eeglab does not expose a parameter for fixing the random state.
# Therefore, to accomplish this, we need to edit the runica.m
# file located at /path_to_eeglab/functions/sigprocfunc/runica.m
#
# i) Comment the line related with the random number generator
# (line 812).
# ii) Then, add the following line just below line 812:
# rng(42); %use 42 as random seed.
#
# * eeglab does not have the parameter "n_small_angle",
# so we need to disable it for making a fair comparison.
#
# * Finally, we need to take the unmixing matrix estimated by the
# mne_python infomax implementation and order the components
# in the same way that eeglab does. This is done below in the
# section "Order the components in the same way that eeglab does"
# EEGLAB default parameters
l_rate_eeglab = 0.00065 / np.log(N)
block_eeglab = int(np.ceil(np.min([5 * np.log(T), 0.3 * T])))
blowup_eeglab = 1e9
blowup_fac_eeglab = 0.8
max_iter_eeglab = 512
if method == 'infomax':
anneal_step_eeglab = 0.9
use_extended = False
elif method == 'extended_infomax':
anneal_step_eeglab = 0.98
use_extended = True
w_change_eeglab = 1e-7 if N > 32 else 1e-6
# Call the mne_python infomax version using the following syntax
# to obtain the same result as the eeglab version
unmixing = infomax(
Y.T, extended=use_extended, random_state=random_state,
max_iter=max_iter_eeglab, l_rate=l_rate_eeglab,
block=block_eeglab, w_change=w_change_eeglab,
blowup=blowup_eeglab, blowup_fac=blowup_fac_eeglab,
n_small_angle=None, anneal_step=anneal_step_eeglab)
# Order the components in the same way that eeglab does
sources = np.dot(unmixing, Y)
mixing = pinv(unmixing)
mvar = np.sum(mixing ** 2, axis=0) * \
np.sum(sources ** 2, axis=1) / (N * T - 1)
windex = np.argsort(mvar)[::-1]
unmixing_ordered = unmixing[windex, :]
# Load the eeglab results, then compare the unmixing matrices
# estimated by mne_python and eeglab. To make the comparison use
# the \ell_inf norm:
# ||unmixing_mne_python - unmixing_eeglab||_inf
eeglab_data = sio.loadmat(op.join(base_dir, eeglab_results_file))
unmixing_eeglab = eeglab_data['unmixing_eeglab']
maximum_difference = np.max(np.abs(unmixing_ordered -
unmixing_eeglab))
assert_almost_equal(maximum_difference, 1e-12, decimal=10)
run_tests_if_main()
|
import pytest
import voluptuous as vol
from homeassistant.components.input_number import (
ATTR_VALUE,
DOMAIN,
SERVICE_DECREMENT,
SERVICE_INCREMENT,
SERVICE_RELOAD,
SERVICE_SET_VALUE,
)
from homeassistant.const import (
ATTR_EDITABLE,
ATTR_ENTITY_ID,
ATTR_FRIENDLY_NAME,
ATTR_NAME,
)
from homeassistant.core import Context, CoreState, State
from homeassistant.exceptions import Unauthorized
from homeassistant.helpers import entity_registry
from homeassistant.setup import async_setup_component
# pylint: disable=protected-access
from tests.async_mock import patch
from tests.common import mock_restore_cache
@pytest.fixture
def storage_setup(hass, hass_storage):
"""Storage setup."""
async def _storage(items=None, config=None):
if items is None:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {
"items": [
{
"id": "from_storage",
"initial": 10,
"name": "from storage",
"max": 100,
"min": 0,
"step": 1,
"mode": "slider",
}
]
},
}
else:
hass_storage[DOMAIN] = {
"key": DOMAIN,
"version": 1,
"data": {"items": items},
}
if config is None:
config = {DOMAIN: {}}
return await async_setup_component(hass, DOMAIN, config)
return _storage
async def set_value(hass, entity_id, value):
"""Set input_number to value.
This is a legacy helper method. Do not use it for new tests.
"""
await hass.services.async_call(
DOMAIN,
SERVICE_SET_VALUE,
{ATTR_ENTITY_ID: entity_id, ATTR_VALUE: value},
blocking=True,
)
async def increment(hass, entity_id):
"""Increment value of entity.
This is a legacy helper method. Do not use it for new tests.
"""
await hass.services.async_call(
DOMAIN, SERVICE_INCREMENT, {ATTR_ENTITY_ID: entity_id}, blocking=True
)
async def decrement(hass, entity_id):
"""Decrement value of entity.
This is a legacy helper method. Do not use it for new tests.
"""
await hass.services.async_call(
DOMAIN, SERVICE_DECREMENT, {ATTR_ENTITY_ID: entity_id}, blocking=True
)
async def test_config(hass):
"""Test config."""
invalid_configs = [
None,
{},
{"name with space": None},
{"test_1": {"min": 50, "max": 50}},
]
for cfg in invalid_configs:
assert not await async_setup_component(hass, DOMAIN, {DOMAIN: cfg})
async def test_set_value(hass, caplog):
"""Test set_value method."""
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_1": {"initial": 50, "min": 0, "max": 100}}}
)
entity_id = "input_number.test_1"
state = hass.states.get(entity_id)
assert 50 == float(state.state)
await set_value(hass, entity_id, "30.4")
state = hass.states.get(entity_id)
assert 30.4 == float(state.state)
await set_value(hass, entity_id, "70")
state = hass.states.get(entity_id)
assert 70 == float(state.state)
with pytest.raises(vol.Invalid) as excinfo:
await set_value(hass, entity_id, "110")
assert "Invalid value for input_number.test_1: 110.0 (range 0.0 - 100.0)" in str(
excinfo.value
)
state = hass.states.get(entity_id)
assert 70 == float(state.state)
async def test_increment(hass):
"""Test increment method."""
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_2": {"initial": 50, "min": 0, "max": 51}}}
)
entity_id = "input_number.test_2"
state = hass.states.get(entity_id)
assert 50 == float(state.state)
await increment(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 51 == float(state.state)
await increment(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 51 == float(state.state)
async def test_decrement(hass):
"""Test decrement method."""
assert await async_setup_component(
hass, DOMAIN, {DOMAIN: {"test_3": {"initial": 50, "min": 49, "max": 100}}}
)
entity_id = "input_number.test_3"
state = hass.states.get(entity_id)
assert 50 == float(state.state)
await decrement(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 49 == float(state.state)
await decrement(hass, entity_id)
await hass.async_block_till_done()
state = hass.states.get(entity_id)
assert 49 == float(state.state)
async def test_mode(hass):
"""Test mode settings."""
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_default_slider": {"min": 0, "max": 100},
"test_explicit_box": {"min": 0, "max": 100, "mode": "box"},
"test_explicit_slider": {"min": 0, "max": 100, "mode": "slider"},
}
},
)
state = hass.states.get("input_number.test_default_slider")
assert state
assert "slider" == state.attributes["mode"]
state = hass.states.get("input_number.test_explicit_box")
assert state
assert "box" == state.attributes["mode"]
state = hass.states.get("input_number.test_explicit_slider")
assert state
assert "slider" == state.attributes["mode"]
async def test_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass, (State("input_number.b1", "70"), State("input_number.b2", "200"))
)
hass.state = CoreState.starting
await async_setup_component(
hass,
DOMAIN,
{DOMAIN: {"b1": {"min": 0, "max": 100}, "b2": {"min": 10, "max": 100}}},
)
state = hass.states.get("input_number.b1")
assert state
assert float(state.state) == 70
state = hass.states.get("input_number.b2")
assert state
assert float(state.state) == 10
async def test_initial_state_overrules_restore_state(hass):
"""Ensure states are restored on startup."""
mock_restore_cache(
hass, (State("input_number.b1", "70"), State("input_number.b2", "200"))
)
hass.state = CoreState.starting
await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"b1": {"initial": 50, "min": 0, "max": 100},
"b2": {"initial": 60, "min": 0, "max": 100},
}
},
)
state = hass.states.get("input_number.b1")
assert state
assert float(state.state) == 50
state = hass.states.get("input_number.b2")
assert state
assert float(state.state) == 60
async def test_no_initial_state_and_no_restore_state(hass):
"""Ensure that entity is create without initial and restore feature."""
hass.state = CoreState.starting
await async_setup_component(hass, DOMAIN, {DOMAIN: {"b1": {"min": 0, "max": 100}}})
state = hass.states.get("input_number.b1")
assert state
assert float(state.state) == 0
async def test_input_number_context(hass, hass_admin_user):
"""Test that input_number context works."""
assert await async_setup_component(
hass, "input_number", {"input_number": {"b1": {"min": 0, "max": 100}}}
)
state = hass.states.get("input_number.b1")
assert state is not None
await hass.services.async_call(
"input_number",
"increment",
{"entity_id": state.entity_id},
True,
Context(user_id=hass_admin_user.id),
)
state2 = hass.states.get("input_number.b1")
assert state2 is not None
assert state.state != state2.state
assert state2.context.user_id == hass_admin_user.id
async def test_reload(hass, hass_admin_user, hass_read_only_user):
"""Test reload service."""
count_start = len(hass.states.async_entity_ids())
ent_reg = await entity_registry.async_get_registry(hass)
assert await async_setup_component(
hass,
DOMAIN,
{
DOMAIN: {
"test_1": {"initial": 50, "min": 0, "max": 51},
"test_3": {"initial": 10, "min": 0, "max": 15},
}
},
)
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_number.test_1")
state_2 = hass.states.get("input_number.test_2")
state_3 = hass.states.get("input_number.test_3")
assert state_1 is not None
assert state_2 is None
assert state_3 is not None
assert 50 == float(state_1.state)
assert 10 == float(state_3.state)
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_1") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_2") is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_3") is not None
with patch(
"homeassistant.config.load_yaml_config_file",
autospec=True,
return_value={
DOMAIN: {
"test_1": {"initial": 40, "min": 0, "max": 51},
"test_2": {"initial": 20, "min": 10, "max": 30},
}
},
):
with pytest.raises(Unauthorized):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_read_only_user.id),
)
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start + 2 == len(hass.states.async_entity_ids())
state_1 = hass.states.get("input_number.test_1")
state_2 = hass.states.get("input_number.test_2")
state_3 = hass.states.get("input_number.test_3")
assert state_1 is not None
assert state_2 is not None
assert state_3 is None
assert 50 == float(state_1.state)
assert 20 == float(state_2.state)
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_1") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_2") is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, "test_3") is None
async def test_load_from_storage(hass, storage_setup):
"""Test set up from storage."""
assert await storage_setup()
state = hass.states.get(f"{DOMAIN}.from_storage")
assert float(state.state) == 10
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "from storage"
assert state.attributes.get(ATTR_EDITABLE)
async def test_editable_state_attribute(hass, storage_setup):
"""Test editable attribute."""
assert await storage_setup(
config={
DOMAIN: {
"from_yaml": {
"min": 1,
"max": 10,
"initial": 5,
"step": 1,
"mode": "slider",
}
}
}
)
state = hass.states.get(f"{DOMAIN}.from_storage")
assert float(state.state) == 10
assert state.attributes.get(ATTR_FRIENDLY_NAME) == "from storage"
assert state.attributes.get(ATTR_EDITABLE)
state = hass.states.get(f"{DOMAIN}.from_yaml")
assert float(state.state) == 5
assert not state.attributes.get(ATTR_EDITABLE)
async def test_ws_list(hass, hass_ws_client, storage_setup):
"""Test listing via WS."""
assert await storage_setup(
config={
DOMAIN: {
"from_yaml": {
"min": 1,
"max": 10,
"initial": 5,
"step": 1,
"mode": "slider",
}
}
}
)
client = await hass_ws_client(hass)
await client.send_json({"id": 6, "type": f"{DOMAIN}/list"})
resp = await client.receive_json()
assert resp["success"]
storage_ent = "from_storage"
yaml_ent = "from_yaml"
result = {item["id"]: item for item in resp["result"]}
assert len(result) == 1
assert storage_ent in result
assert yaml_ent not in result
assert result[storage_ent][ATTR_NAME] == "from storage"
async def test_ws_delete(hass, hass_ws_client, storage_setup):
"""Test WS delete cleans up entity registry."""
assert await storage_setup()
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/delete", f"{DOMAIN}_id": f"{input_id}"}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
async def test_update_min_max(hass, hass_ws_client, storage_setup):
"""Test updating min/max updates the state."""
items = [
{
"id": "from_storage",
"name": "from storage",
"max": 100,
"min": 0,
"step": 1,
"mode": "slider",
}
]
assert await storage_setup(items)
input_id = "from_storage"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is not None
assert state.state
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is not None
client = await hass_ws_client(hass)
await client.send_json(
{"id": 6, "type": f"{DOMAIN}/update", f"{DOMAIN}_id": f"{input_id}", "min": 9}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert float(state.state) == 9
await client.send_json(
{
"id": 7,
"type": f"{DOMAIN}/update",
f"{DOMAIN}_id": f"{input_id}",
"max": 5,
"min": 0,
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert float(state.state) == 5
async def test_ws_create(hass, hass_ws_client, storage_setup):
"""Test create WS."""
assert await storage_setup(items=[])
input_id = "new_input"
input_entity_id = f"{DOMAIN}.{input_id}"
ent_reg = await entity_registry.async_get_registry(hass)
state = hass.states.get(input_entity_id)
assert state is None
assert ent_reg.async_get_entity_id(DOMAIN, DOMAIN, input_id) is None
client = await hass_ws_client(hass)
await client.send_json(
{
"id": 6,
"type": f"{DOMAIN}/create",
"name": "New Input",
"max": 20,
"min": 0,
"initial": 10,
"step": 1,
"mode": "slider",
}
)
resp = await client.receive_json()
assert resp["success"]
state = hass.states.get(input_entity_id)
assert float(state.state) == 10
async def test_setup_no_config(hass, hass_admin_user):
"""Test component setup with no config."""
count_start = len(hass.states.async_entity_ids())
assert await async_setup_component(hass, DOMAIN, {})
with patch(
"homeassistant.config.load_yaml_config_file", autospec=True, return_value={}
):
await hass.services.async_call(
DOMAIN,
SERVICE_RELOAD,
blocking=True,
context=Context(user_id=hass_admin_user.id),
)
await hass.async_block_till_done()
assert count_start == len(hass.states.async_entity_ids())
|
from __future__ import print_function
from datetime import datetime as dt
from bson.binary import Binary
from six.moves import cPickle
from arctic import Arctic, register_library_type
from arctic.decorators import mongo_retry
#
# Arctic maps a library, e.g. 'jblackburn.stuff' to a class instance
# which implements whatever API you like.
#
# Arctic provides a standard switching layer for:
# - Registering custom storage types (e.g. CustomArcticLibType)
# - Mapping data libraries to a storage type (e.g. 'jblackburn.stuff' -> CustomArcticLibType)
# - Handling Authentication
# - Maintaining per-library metadata
# - Quota
#
class Stuff(object):
"""
Some custom class persisted by our CustomArcticLibType Library Type
"""
def __init__(self, field1, date_field, stuff):
# Some string field
self.field1 = field1
# Some date field
self.date_field = date_field
# Arbitrary other stuff
self.stuff = stuff
def __str__(self):
return str(self.field1) + " " + str(self.date_field) + " " + str(self.stuff)
class CustomArcticLibType(object):
"""
Custom Arctic Library for storing 'Stuff' items
"""
# Choose a library type name that's unique; e.g. <sector>.DataType
_LIBRARY_TYPE = 'test.CustomArcticLibType'
def __init__(self, arctic_lib):
self._arctic_lib = arctic_lib
# Arctic_lib gives you a root pymongo.Collection just-for-you:
# You may store all your data in here ...
self._collection = arctic_lib.get_top_level_collection()
# ... or you can create 'sub-collections', e.g.
self._sub_collection = self._collection.sub_collection
# The name of this library
print("My name is %s" % arctic_lib.get_name())
# Fetch some per-library metadata for this library
self.some_metadata = arctic_lib.get_library_metadata('some_metadata')
@classmethod
def initialize_library(cls, arctic_lib, **kwargs):
# Persist some per-library metadata in this arctic_lib
arctic_lib.set_library_metadata('some_metadata', 'some_value')
CustomArcticLibType(arctic_lib)._ensure_index()
def _ensure_index(self):
"""
Index any fields used by your queries.
"""
collection = self._collection
# collection.add_indexes
collection.create_index('field1')
###########################################
# Create your own API below!
###########################################
@mongo_retry
def query(self, *args, **kwargs):
"""
Generic query method.
In reality, your storage class would have its own query methods;
this one simply performs a Mongo find on the library's collection.
See:
http://api.mongodb.org/python/current/api/pymongo/collection.html
"""
for x in self._collection.find(*args, **kwargs):
x['stuff'] = cPickle.loads(x['stuff'])
del x['_id'] # Remove default unique '_id' field from doc
yield Stuff(**x)
@mongo_retry
def stats(self):
"""
Database usage statistics. Used by quota.
"""
res = {}
db = self._collection.database
res['dbstats'] = db.command('dbstats')
res['data'] = db.command('collstats', self._collection.name)
res['totals'] = {'count': res['data']['count'],
'size': res['data']['size']
}
return res
@mongo_retry
def store(self, thing):
"""
Simple persistence method
"""
to_store = {'field1': thing.field1,
'date_field': thing.date_field,
}
to_store['stuff'] = Binary(cPickle.dumps(thing.stuff))
# Respect any soft-quota on write - raises if stats().totals.size > quota
self._arctic_lib.check_quota()
self._collection.insert_one(to_store)
@mongo_retry
def delete(self, query):
"""
Simple delete method
"""
self._collection.delete_one(query)
# Hook the class in for the type string 'CustomArcticLibType'
register_library_type(CustomArcticLibType._LIBRARY_TYPE, CustomArcticLibType)
# Create an Arctic instance pointed at a mongo host
if 'mongo_host' not in globals():
mongo_host = 'localhost'
store = Arctic(mongo_host)
# Initialize the library
# Map username.custom_lib -> CustomArcticLibType
store.initialize_library('username.custom_lib', CustomArcticLibType._LIBRARY_TYPE)
# Now pull our username.custom_lib ; note that it has the:
# - query(...)
# - store(...)
# - delete(...)
# API we defined above
lib = store['username.custom_lib']
# Store some items in the custom library type
lib.store(Stuff('thing', dt(2012, 1, 1), object()))
lib.store(Stuff('thing2', dt(2013, 1, 1), object()))
lib.store(Stuff('thing3', dt(2014, 1, 1), object()))
lib.store(Stuff(['a', 'b', 'c'], dt(2014, 1, 1), object()))
# Do some querying via our library's query method.
# You would have your own methods for querying here... (which use your index(es), of course)
for e in list(lib.query()): # list everything
print(e)
list(lib.query({'field1': 'thing'})) # just get by name
list(lib.query({'field1': 'a'})) # Can query lists
list(lib.query({'field1': 'b'}))
list(lib.query({'date_field': {'$lt': dt(2013, 2, 2)}}))
list(lib.query({'field1': 'thing',
'date_field': {'$lt': dt(2013, 2, 2)} }))
# Remove everything
lib.delete({})
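# Editorial sketch (not in the original example): store() calls
# self._arctic_lib.check_quota() before each insert, so a soft quota set on
# the library will make further writes raise once stats() reports more bytes
# than the quota allows. This assumes the Arctic client exposes set_quota
# (present in recent releases); the names and sizes below are illustrative.
#
#     store.set_quota('username.custom_lib', 1024)  # ~1 KB soft quota
#     lib.store(Stuff('big', dt(2015, 1, 1), 'x' * 4096))  # eventually raises once over quota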
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from tensorflow.python.ops import control_flow_ops
slim = tf.contrib.slim
_R_MEAN = 123.68
_G_MEAN = 116.78
_B_MEAN = 103.94
_RESIZE_SIDE_MIN = 256
_RESIZE_SIDE_MAX = 512
def _crop(image, offset_height, offset_width, crop_height, crop_width):
"""Crops the given image using the provided offsets and sizes.
Note that the method doesn't assume we know the input image size but it does
assume we know the input image rank.
Args:
image: an image of shape [height, width, channels].
offset_height: a scalar tensor indicating the height offset.
offset_width: a scalar tensor indicating the width offset.
crop_height: the height of the cropped image.
crop_width: the width of the cropped image.
Returns:
the cropped image.
Raises:
InvalidArgumentError: if the rank is not 3 or if the image dimensions are
less than the crop size.
"""
original_shape = tf.shape(image)
rank_assertion = tf.Assert(
tf.equal(tf.rank(image), 3),
['Rank of image must be equal to 3.'])
cropped_shape = control_flow_ops.with_dependencies(
[rank_assertion],
tf.stack([crop_height, crop_width, original_shape[2]]))
size_assertion = tf.Assert(
tf.logical_and(
tf.greater_equal(original_shape[0], crop_height),
tf.greater_equal(original_shape[1], crop_width)),
['Crop size greater than the image size.'])
offsets = tf.to_int32(tf.stack([offset_height, offset_width, 0]))
# Use tf.slice instead of crop_to_bounding_box as it accepts tensors to
# define the crop size.
image = control_flow_ops.with_dependencies(
[size_assertion],
tf.slice(image, offsets, cropped_shape))
return tf.reshape(image, cropped_shape)
def _random_crop(image_list, crop_height, crop_width):
"""Crops the given list of images.
The function applies the same crop to each image in the list. This can be
effectively applied when there are multiple image inputs of the same
dimension such as:
image, depths, normals = _random_crop([image, depths, normals], 120, 150)
Args:
image_list: a list of image tensors of the same dimension but possibly
varying channel.
crop_height: the new height.
crop_width: the new width.
Returns:
the image_list with cropped images.
Raises:
ValueError: if there are multiple image inputs provided with different sizes
or the images are smaller than the crop dimensions.
"""
if not image_list:
raise ValueError('Empty image_list.')
# Compute the rank assertions.
rank_assertions = []
for i in range(len(image_list)):
image_rank = tf.rank(image_list[i])
rank_assert = tf.Assert(
tf.equal(image_rank, 3),
['Wrong rank for tensor %s [expected] [actual]',
image_list[i].name, 3, image_rank])
rank_assertions.append(rank_assert)
image_shape = control_flow_ops.with_dependencies(
[rank_assertions[0]],
tf.shape(image_list[0]))
image_height = image_shape[0]
image_width = image_shape[1]
crop_size_assert = tf.Assert(
tf.logical_and(
tf.greater_equal(image_height, crop_height),
tf.greater_equal(image_width, crop_width)),
['Crop size greater than the image size.'])
asserts = [rank_assertions[0], crop_size_assert]
for i in range(1, len(image_list)):
image = image_list[i]
asserts.append(rank_assertions[i])
shape = control_flow_ops.with_dependencies([rank_assertions[i]],
tf.shape(image))
height = shape[0]
width = shape[1]
height_assert = tf.Assert(
tf.equal(height, image_height),
['Wrong height for tensor %s [expected][actual]',
image.name, height, image_height])
width_assert = tf.Assert(
tf.equal(width, image_width),
['Wrong width for tensor %s [expected][actual]',
image.name, width, image_width])
asserts.extend([height_assert, width_assert])
# Create a random bounding box.
#
# Use tf.random_uniform and not numpy.random.rand as doing the former would
# generate random numbers at graph eval time, unlike the latter which
# generates random numbers at graph definition time.
max_offset_height = control_flow_ops.with_dependencies(
asserts, tf.reshape(image_height - crop_height + 1, []))
max_offset_width = control_flow_ops.with_dependencies(
asserts, tf.reshape(image_width - crop_width + 1, []))
offset_height = tf.random_uniform(
[], maxval=max_offset_height, dtype=tf.int32)
offset_width = tf.random_uniform(
[], maxval=max_offset_width, dtype=tf.int32)
return [_crop(image, offset_height, offset_width,
crop_height, crop_width) for image in image_list]
def _central_crop(image_list, crop_height, crop_width):
"""Performs central crops of the given image list.
Args:
image_list: a list of image tensors of the same dimension but possibly
varying channel.
crop_height: the height of the image following the crop.
crop_width: the width of the image following the crop.
Returns:
the list of cropped images.
"""
outputs = []
for image in image_list:
image_height = tf.shape(image)[0]
image_width = tf.shape(image)[1]
offset_height = (image_height - crop_height) / 2
offset_width = (image_width - crop_width) / 2
outputs.append(_crop(image, offset_height, offset_width,
crop_height, crop_width))
return outputs
def _mean_image_subtraction(image, means):
"""Subtracts the given means from each image channel.
For example:
means = [123.68, 116.779, 103.939]
image = _mean_image_subtraction(image, means)
Note that the rank of `image` must be known.
Args:
image: a tensor of size [height, width, C].
means: a C-vector of values to subtract from each channel.
Returns:
the centered image.
Raises:
ValueError: If the rank of `image` is unknown, if `image` has a rank other
than three or if the number of channels in `image` doesn't match the
number of values in `means`.
"""
if image.get_shape().ndims != 3:
raise ValueError('Input must be of size [height, width, C>0]')
num_channels = image.get_shape().as_list()[-1]
if len(means) != num_channels:
raise ValueError('len(means) must match the number of channels')
channels = tf.split(axis=2, num_or_size_splits=num_channels, value=image)
for i in range(num_channels):
channels[i] -= means[i]
return tf.concat(axis=2, values=channels)
def _smallest_size_at_least(height, width, smallest_side):
"""Computes new shape with the smallest side equal to `smallest_side`.
Computes new shape with the smallest side equal to `smallest_side` while
preserving the original aspect ratio.
Args:
height: an int32 scalar tensor indicating the current height.
width: an int32 scalar tensor indicating the current width.
smallest_side: A python integer or scalar `Tensor` indicating the size of
the smallest side after resize.
Returns:
new_height: an int32 scalar tensor indicating the new height.
new_width: an int32 scalar tensor indicating the new width.
"""
smallest_side = tf.convert_to_tensor(smallest_side, dtype=tf.int32)
height = tf.to_float(height)
width = tf.to_float(width)
smallest_side = tf.to_float(smallest_side)
scale = tf.cond(tf.greater(height, width),
lambda: smallest_side / width,
lambda: smallest_side / height)
new_height = tf.to_int32(height * scale)
new_width = tf.to_int32(width * scale)
return new_height, new_width
def _aspect_preserving_resize(image, smallest_side):
"""Resize images preserving the original aspect ratio.
Args:
image: A 3-D image `Tensor`.
smallest_side: A python integer or scalar `Tensor` indicating the size of
the smallest side after resize.
Returns:
resized_image: A 3-D tensor containing the resized image.
"""
smallest_side = tf.convert_to_tensor(smallest_side, dtype=tf.int32)
shape = tf.shape(image)
height = shape[0]
width = shape[1]
new_height, new_width = _smallest_size_at_least(height, width, smallest_side)
image = tf.expand_dims(image, 0)
resized_image = tf.image.resize_bilinear(image, [new_height, new_width],
align_corners=False)
resized_image = tf.squeeze(resized_image)
resized_image.set_shape([None, None, 3])
return resized_image
def preprocess_for_train(image,
output_height,
output_width,
resize_side_min=_RESIZE_SIDE_MIN,
resize_side_max=_RESIZE_SIDE_MAX):
"""Preprocesses the given image for training.
Note that the resize side (the smallest image side after resizing) is
sampled from [`resize_side_min`, `resize_side_max`].
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
resize_side_min: The lower bound for the smallest side of the image for
aspect-preserving resizing.
resize_side_max: The upper bound for the smallest side of the image for
aspect-preserving resizing.
Returns:
A preprocessed image.
"""
resize_side = tf.random_uniform(
[], minval=resize_side_min, maxval=resize_side_max+1, dtype=tf.int32)
image = _aspect_preserving_resize(image, resize_side)
image = _random_crop([image], output_height, output_width)[0]
image.set_shape([output_height, output_width, 3])
image = tf.to_float(image)
image = tf.image.random_flip_left_right(image)
return _mean_image_subtraction(image, [_R_MEAN, _G_MEAN, _B_MEAN])
def preprocess_for_eval(image, output_height, output_width, resize_side):
"""Preprocesses the given image for evaluation.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
resize_side: The smallest side of the image for aspect-preserving resizing.
Returns:
A preprocessed image.
"""
image = _aspect_preserving_resize(image, resize_side)
image = _central_crop([image], output_height, output_width)[0]
image.set_shape([output_height, output_width, 3])
image = tf.to_float(image)
return _mean_image_subtraction(image, [_R_MEAN, _G_MEAN, _B_MEAN])
def preprocess_image(image, output_height, output_width, is_training=False,
resize_side_min=_RESIZE_SIDE_MIN,
resize_side_max=_RESIZE_SIDE_MAX):
"""Preprocesses the given image.
Args:
image: A `Tensor` representing an image of arbitrary size.
output_height: The height of the image after preprocessing.
output_width: The width of the image after preprocessing.
is_training: `True` if we're preprocessing the image for training and
`False` otherwise.
resize_side_min: The lower bound for the smallest side of the image for
aspect-preserving resizing. If `is_training` is `False`, then this value
is used for rescaling.
resize_side_max: The upper bound for the smallest side of the image for
aspect-preserving resizing. If `is_training` is `False`, this value is
ignored. Otherwise, the resize side is sampled from
[resize_side_min, resize_side_max].
Returns:
A preprocessed image.
"""
if is_training:
return preprocess_for_train(image, output_height, output_width,
resize_side_min, resize_side_max)
else:
return preprocess_for_eval(image, output_height, output_width,
resize_side_min)
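# Editorial usage sketch (not part of the original module), assuming a
# TF1-style graph and a decoded uint8 image tensor; the tensor names here
# are illustrative only:
#
#     raw_image = tf.image.decode_jpeg(encoded_jpeg, channels=3)
#     train_image = preprocess_image(raw_image, 224, 224, is_training=True)
#     eval_image = preprocess_image(raw_image, 224, 224, is_training=False)
#
# During training the smallest side is resized to a random value in
# [_RESIZE_SIDE_MIN, _RESIZE_SIDE_MAX] before a random crop and flip; during
# evaluation the resize side is fixed to resize_side_min and a central crop
# is taken.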
|
import json
import os
import re
from collections import abc
from collections import deque
from random import choice
from random import randrange
from threading import Lock
from urllib.parse import quote_from_bytes
from markupsafe import escape
from markupsafe import Markup
_word_split_re = re.compile(r"(\s+)")
_lead_pattern = "|".join(map(re.escape, ("(", "<", "<")))
_trail_pattern = "|".join(map(re.escape, (".", ",", ")", ">", "\n", ">")))
_punctuation_re = re.compile(
fr"^(?P<lead>(?:{_lead_pattern})*)(?P<middle>.*?)(?P<trail>(?:{_trail_pattern})*)$"
)
_simple_email_re = re.compile(r"^\S+@[a-zA-Z0-9._-]+\.[a-zA-Z0-9._-]+$")
_striptags_re = re.compile(r"(<!--.*?-->|<[^>]*>)")
_entity_re = re.compile(r"&([^;]+);")
_letters = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ"
_digits = "0123456789"
# special singleton representing missing values for the runtime
missing = type("MissingType", (), {"__repr__": lambda x: "missing"})()
# internal code
internal_code = set()
concat = "".join
_slash_escape = "\\/" not in json.dumps("/")
def contextfunction(f):
"""This decorator can be used to mark a function or method context callable.
A context callable is passed the active :class:`Context` as first argument when
called from the template. This is useful if a function wants to get access
to the context or functions provided on the context object. For example
a function that returns a sorted list of template variables the current
template exports could look like this::
@contextfunction
def get_exported_names(context):
return sorted(context.exported_vars)
"""
f.contextfunction = True
return f
def evalcontextfunction(f):
"""This decorator can be used to mark a function or method as an eval
context callable. This is similar to the :func:`contextfunction`
but instead of passing the context, an evaluation context object is
passed. For more information about the eval context, see
:ref:`eval-context`.
.. versionadded:: 2.4
"""
f.evalcontextfunction = True
return f
def environmentfunction(f):
"""This decorator can be used to mark a function or method as environment
callable. This decorator works exactly like the :func:`contextfunction`
decorator just that the first argument is the active :class:`Environment`
and not context.
"""
f.environmentfunction = True
return f
def internalcode(f):
"""Marks the function as internally used"""
internal_code.add(f.__code__)
return f
def is_undefined(obj):
"""Check if the object passed is undefined. This does nothing more than
performing an instance check against :class:`Undefined` but looks nicer.
This can be used for custom filters or tests that want to react to
undefined variables. For example a custom default filter can look like
this::
def default(var, default=''):
if is_undefined(var):
return default
return var
"""
from .runtime import Undefined
return isinstance(obj, Undefined)
def consume(iterable):
"""Consumes an iterable without doing anything with it."""
for _ in iterable:
pass
def clear_caches():
"""Jinja keeps internal caches for environments and lexers. These are
used so that Jinja doesn't have to recreate environments and lexers all
the time. Normally you don't have to care about that but if you are
measuring memory consumption you may want to clean the caches.
"""
from .environment import _spontaneous_environments
from .lexer import _lexer_cache
_spontaneous_environments.clear()
_lexer_cache.clear()
def import_string(import_name, silent=False):
"""Imports an object based on a string. This is useful if you want to
use import paths as endpoints or something similar. An import path can
be specified either in dotted notation (``xml.sax.saxutils.escape``)
or with a colon as object delimiter (``xml.sax.saxutils:escape``).
If the `silent` is True the return value will be `None` if the import
fails.
:return: imported object
"""
try:
if ":" in import_name:
module, obj = import_name.split(":", 1)
elif "." in import_name:
module, _, obj = import_name.rpartition(".")
else:
return __import__(import_name)
return getattr(__import__(module, None, None, [obj]), obj)
except (ImportError, AttributeError):
if not silent:
raise
def open_if_exists(filename, mode="rb"):
"""Returns a file descriptor for the filename if that file exists,
otherwise ``None``.
"""
if not os.path.isfile(filename):
return None
return open(filename, mode)
def object_type_repr(obj):
"""Returns the name of the object's type. For some recognized
singletons the name of the object is returned instead. (For
example for `None` and `Ellipsis`).
"""
if obj is None:
return "None"
elif obj is Ellipsis:
return "Ellipsis"
cls = type(obj)
if cls.__module__ == "builtins":
return f"{cls.__name__} object"
return f"{cls.__module__}.{cls.__name__} object"
def pformat(obj):
"""Format an object using :func:`pprint.pformat`.
"""
from pprint import pformat
return pformat(obj)
def urlize(text, trim_url_limit=None, rel=None, target=None):
"""Converts any URLs in text into clickable links. Works on http://,
https:// and www. links. Links can have trailing punctuation (periods,
commas, close-parens) and leading punctuation (opening parens) and
it'll still do the right thing.
If trim_url_limit is not None, the URLs in link text will be limited
to trim_url_limit characters.
If rel is set, the links will get a ``rel`` attribute with that value
(for example ``rel="nofollow"``).
If target is not None, a target attribute will be added to the link.
"""
def trim_url(x, limit=trim_url_limit):
if limit is not None:
return x[:limit] + ("..." if len(x) >= limit else "")
return x
words = _word_split_re.split(str(escape(text)))
rel_attr = f' rel="{escape(rel)}"' if rel else ""
target_attr = f' target="{escape(target)}"' if target else ""
for i, word in enumerate(words):
match = _punctuation_re.match(word)
if match:
lead, middle, trail = match.groups()
if middle.startswith("www.") or (
"@" not in middle
and not middle.startswith("http://")
and not middle.startswith("https://")
and len(middle) > 0
and middle[0] in _letters + _digits
and (
middle.endswith(".org")
or middle.endswith(".net")
or middle.endswith(".com")
)
):
middle = (
f'<a href="http://{middle}"{rel_attr}{target_attr}>'
f"{trim_url(middle)}</a>"
)
if middle.startswith("http://") or middle.startswith("https://"):
middle = (
f'<a href="{middle}"{rel_attr}{target_attr}>{trim_url(middle)}</a>'
)
if (
"@" in middle
and not middle.startswith("www.")
and ":" not in middle
and _simple_email_re.match(middle)
):
middle = f'<a href="mailto:{middle}">{middle}</a>'
if lead + middle + trail != word:
words[i] = lead + middle + trail
return "".join(words)
def generate_lorem_ipsum(n=5, html=True, min=20, max=100):
"""Generate some lorem ipsum for the template."""
from .constants import LOREM_IPSUM_WORDS
words = LOREM_IPSUM_WORDS.split()
result = []
for _ in range(n):
next_capitalized = True
last_comma = last_fullstop = 0
word = None
last = None
p = []
# each paragraph contains 20 to 100 words.
for idx, _ in enumerate(range(randrange(min, max))):
while True:
word = choice(words)
if word != last:
last = word
break
if next_capitalized:
word = word.capitalize()
next_capitalized = False
# add commas
if idx - randrange(3, 8) > last_comma:
last_comma = idx
last_fullstop += 2
word += ","
# add end of sentences
if idx - randrange(10, 20) > last_fullstop:
last_comma = last_fullstop = idx
word += "."
next_capitalized = True
p.append(word)
# ensure that the paragraph ends with a dot.
p = " ".join(p)
if p.endswith(","):
p = p[:-1] + "."
elif not p.endswith("."):
p += "."
result.append(p)
if not html:
return "\n\n".join(result)
return Markup("\n".join(f"<p>{escape(x)}</p>" for x in result))
def url_quote(obj, charset="utf-8", for_qs=False):
"""Quote a string for use in a URL using the given charset.
This function is misnamed; it is a wrapper around
:func:`urllib.parse.quote`.
:param obj: String or bytes to quote. Other types are converted to
string then encoded to bytes using the given charset.
:param charset: Encode text to bytes using this charset.
:param for_qs: Quote "/" and use "+" for spaces.
"""
if not isinstance(obj, bytes):
if not isinstance(obj, str):
obj = str(obj)
obj = obj.encode(charset)
safe = b"" if for_qs else b"/"
rv = quote_from_bytes(obj, safe)
if for_qs:
rv = rv.replace("%20", "+")
return rv
def unicode_urlencode(obj, charset="utf-8", for_qs=False):
import warnings
warnings.warn(
"'unicode_urlencode' has been renamed to 'url_quote'. The old"
" name will be removed in version 3.1.",
DeprecationWarning,
stacklevel=2,
)
return url_quote(obj, charset=charset, for_qs=for_qs)
@abc.MutableMapping.register
class LRUCache:
"""A simple LRU Cache implementation."""
# this is fast for small capacities (something below 1000) but doesn't
# scale. But as long as it's only used as storage for templates this
# won't do any harm.
def __init__(self, capacity):
self.capacity = capacity
self._mapping = {}
self._queue = deque()
self._postinit()
def _postinit(self):
# alias all queue methods for faster lookup
self._popleft = self._queue.popleft
self._pop = self._queue.pop
self._remove = self._queue.remove
self._wlock = Lock()
self._append = self._queue.append
def __getstate__(self):
return {
"capacity": self.capacity,
"_mapping": self._mapping,
"_queue": self._queue,
}
def __setstate__(self, d):
self.__dict__.update(d)
self._postinit()
def __getnewargs__(self):
return (self.capacity,)
def copy(self):
"""Return a shallow copy of the instance."""
rv = self.__class__(self.capacity)
rv._mapping.update(self._mapping)
rv._queue.extend(self._queue)
return rv
def get(self, key, default=None):
"""Return an item from the cache dict or `default`"""
try:
return self[key]
except KeyError:
return default
def setdefault(self, key, default=None):
"""Set `default` if the key is not in the cache otherwise
leave unchanged. Return the value of this key.
"""
try:
return self[key]
except KeyError:
self[key] = default
return default
def clear(self):
"""Clear the cache."""
self._wlock.acquire()
try:
self._mapping.clear()
self._queue.clear()
finally:
self._wlock.release()
def __contains__(self, key):
"""Check if a key exists in this cache."""
return key in self._mapping
def __len__(self):
"""Return the current size of the cache."""
return len(self._mapping)
def __repr__(self):
return f"<{self.__class__.__name__} {self._mapping!r}>"
def __getitem__(self, key):
"""Get an item from the cache. Moves the item up so that it has the
highest priority then.
Raise a `KeyError` if it does not exist.
"""
self._wlock.acquire()
try:
rv = self._mapping[key]
if self._queue[-1] != key:
try:
self._remove(key)
except ValueError:
# if something removed the key from the container
# when we read, ignore the ValueError that we would
# get otherwise.
pass
self._append(key)
return rv
finally:
self._wlock.release()
def __setitem__(self, key, value):
"""Sets the value for an item. Moves the item up so that it
has the highest priority then.
"""
self._wlock.acquire()
try:
if key in self._mapping:
self._remove(key)
elif len(self._mapping) == self.capacity:
del self._mapping[self._popleft()]
self._append(key)
self._mapping[key] = value
finally:
self._wlock.release()
def __delitem__(self, key):
"""Remove an item from the cache dict.
Raise a `KeyError` if it does not exist.
"""
self._wlock.acquire()
try:
del self._mapping[key]
try:
self._remove(key)
except ValueError:
pass
finally:
self._wlock.release()
def items(self):
"""Return a list of items."""
result = [(key, self._mapping[key]) for key in list(self._queue)]
result.reverse()
return result
def values(self):
"""Return a list of all values."""
return [x[1] for x in self.items()]
def keys(self):
"""Return a list of all keys ordered by most recent usage."""
return list(self)
def __iter__(self):
return reversed(tuple(self._queue))
def __reversed__(self):
"""Iterate over the keys in the cache dict, oldest items
coming first.
"""
return iter(tuple(self._queue))
__copy__ = copy
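# Editorial usage sketch (not part of the module): the cache behaves like a
# bounded mapping where reads and writes refresh recency, e.g.
#
#     cache = LRUCache(2)
#     cache["a"] = 1
#     cache["b"] = 2
#     cache["a"]           # refreshes "a"
#     cache["c"] = 3       # evicts "b", the least recently used key
#     assert "a" in cache and "b" not in cache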
def select_autoescape(
enabled_extensions=("html", "htm", "xml"),
disabled_extensions=(),
default_for_string=True,
default=False,
):
"""Intelligently sets the initial value of autoescaping based on the
filename of the template. This is the recommended way to configure
autoescaping if you do not want to write a custom function yourself.
If you want to enable it for all templates created from strings or
for all templates with `.html` and `.xml` extensions::
from jinja2 import Environment, select_autoescape
env = Environment(autoescape=select_autoescape(
enabled_extensions=('html', 'xml'),
default_for_string=True,
))
Example configuration to turn it on at all times except if the template
ends with `.txt`::
from jinja2 import Environment, select_autoescape
env = Environment(autoescape=select_autoescape(
disabled_extensions=('txt',),
default_for_string=True,
default=True,
))
The `enabled_extensions` is an iterable of all the extensions that
autoescaping should be enabled for. Likewise `disabled_extensions` is
a list of all templates it should be disabled for. If a template is
loaded from a string then the default from `default_for_string` is used.
If nothing matches then the initial value of autoescaping is set to the
value of `default`.
For security reasons this function operates case insensitively.
.. versionadded:: 2.9
"""
enabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in enabled_extensions)
disabled_patterns = tuple(f".{x.lstrip('.').lower()}" for x in disabled_extensions)
def autoescape(template_name):
if template_name is None:
return default_for_string
template_name = template_name.lower()
if template_name.endswith(enabled_patterns):
return True
if template_name.endswith(disabled_patterns):
return False
return default
return autoescape
def htmlsafe_json_dumps(obj, dumper=None, **kwargs):
"""Works exactly like :func:`dumps` but is safe for use in ``<script>``
tags. It accepts the same arguments and returns a JSON string. Note that
this is available in templates through the ``|tojson`` filter which will
also mark the result as safe. Due to how this function escapes certain
characters this is safe even if used outside of ``<script>`` tags.
The following characters are escaped in strings:
- ``<``
- ``>``
- ``&``
- ``'``
This makes it safe to embed such strings in any place in HTML with the
notable exception of double quoted attributes. In that case single
quote your attributes or HTML escape it in addition.
"""
if dumper is None:
dumper = json.dumps
rv = (
dumper(obj, **kwargs)
.replace("<", "\\u003c")
.replace(">", "\\u003e")
.replace("&", "\\u0026")
.replace("'", "\\u0027")
)
return Markup(rv)
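# Editorial example (illustrative only): the escaping above keeps the JSON
# safe to drop inside a <script> block, e.g.
#
#     htmlsafe_json_dumps({"msg": "<b>&'"})
#     # -> Markup('{"msg": "\\u003cb\\u003e\\u0026\\u0027"}')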
class Cycler:
"""Cycle through values by yield them one at a time, then restarting
once the end is reached. Available as ``cycler`` in templates.
Similar to ``loop.cycle``, but can be used outside loops or across
multiple loops. For example, render a list of folders and files in a
list, alternately giving them "odd" and "even" classes.
.. code-block:: html+jinja
{% set row_class = cycler("odd", "even") %}
<ul class="browser">
{% for folder in folders %}
<li class="folder {{ row_class.next() }}">{{ folder }}
{% endfor %}
{% for file in files %}
<li class="file {{ row_class.next() }}">{{ file }}
{% endfor %}
</ul>
:param items: Each positional argument will be yielded in the order
given for each cycle.
.. versionadded:: 2.1
"""
def __init__(self, *items):
if not items:
raise RuntimeError("at least one item has to be provided")
self.items = items
self.pos = 0
def reset(self):
"""Resets the current item to the first item."""
self.pos = 0
@property
def current(self):
"""Return the current item. Equivalent to the item that will be
returned next time :meth:`next` is called.
"""
return self.items[self.pos]
def next(self):
"""Return the current item, then advance :attr:`current` to the
next item.
"""
rv = self.current
self.pos = (self.pos + 1) % len(self.items)
return rv
__next__ = next
class Joiner:
"""A joining helper for templates."""
def __init__(self, sep=", "):
self.sep = sep
self.used = False
def __call__(self):
if not self.used:
self.used = True
return ""
return self.sep
class Namespace:
"""A namespace object that can hold arbitrary attributes. It may be
initialized from a dictionary or with keyword arguments."""
def __init__(*args, **kwargs): # noqa: B902
self, args = args[0], args[1:]
self.__attrs = dict(*args, **kwargs)
def __getattribute__(self, name):
# __class__ is needed for the awaitable check in async mode
if name in {"_Namespace__attrs", "__class__"}:
return object.__getattribute__(self, name)
try:
return self.__attrs[name]
except KeyError:
raise AttributeError(name)
def __setitem__(self, name, value):
self.__attrs[name] = value
def __repr__(self):
return f"<Namespace {self.__attrs!r}>"
# does this python version support async for in and async generators?
try:
exec("async def _():\n async for _ in ():\n yield _")
have_async_gen = True
except SyntaxError:
have_async_gen = False
|
from __future__ import absolute_import
from __future__ import print_function
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation
from keras.utils import np_utils
from keras import optimizers
from elephas.ml_model import ElephasEstimator
from elephas.ml.adapter import to_data_frame
from pyspark import SparkContext, SparkConf
from pyspark.mllib.evaluation import MulticlassMetrics
from pyspark.ml import Pipeline
# Define basic parameters
batch_size = 64
nb_classes = 10
epochs = 1
# Load data
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape(60000, 784)
x_test = x_test.reshape(10000, 784)
x_train = x_train.astype("float32")
x_test = x_test.astype("float32")
x_train /= 255
x_test /= 255
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
# Convert class vectors to binary class matrices
y_train = np_utils.to_categorical(y_train, nb_classes)
y_test = np_utils.to_categorical(y_test, nb_classes)
model = Sequential()
model.add(Dense(128, input_dim=784))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(128))
model.add(Activation('relu'))
model.add(Dropout(0.2))
model.add(Dense(10))
model.add(Activation('softmax'))
# Create Spark context
conf = SparkConf().setAppName('Mnist_Spark_MLP').setMaster('local[8]')
sc = SparkContext(conf=conf)
# Build Spark DataFrames from numpy features and labels
df = to_data_frame(sc, x_train, y_train, categorical=True)
test_df = to_data_frame(sc, x_test, y_test, categorical=True)
sgd = optimizers.SGD(lr=0.01, decay=1e-6, momentum=0.9, nesterov=True)
sgd_conf = optimizers.serialize(sgd)
# Initialize Spark ML Estimator
estimator = ElephasEstimator()
estimator.set_keras_model_config(model.to_yaml())
estimator.set_optimizer_config(sgd_conf)
estimator.set_mode("synchronous")
estimator.set_loss("categorical_crossentropy")
estimator.set_metrics(['acc'])
estimator.set_epochs(epochs)
estimator.set_batch_size(batch_size)
estimator.set_validation_split(0.1)
estimator.set_categorical_labels(True)
estimator.set_nb_classes(nb_classes)
# Fitting a model returns a Transformer
pipeline = Pipeline(stages=[estimator])
fitted_pipeline = pipeline.fit(df)
# Evaluate Spark model by evaluating the underlying model
prediction = fitted_pipeline.transform(test_df)
pnl = prediction.select("label", "prediction")
pnl.show(100)
prediction_and_label = pnl.rdd.map(lambda row: (row.label, row.prediction))
metrics = MulticlassMetrics(prediction_and_label)
print(metrics.precision())
print(metrics.recall())
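# Editorial note (not in the original script): in newer PySpark releases,
# MulticlassMetrics.precision() / recall() without a label argument are
# deprecated; metrics.accuracy reports the equivalent overall value.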
|
import logging
import socket
from gps3.agps3threaded import AGPS3mechanism
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
ATTR_MODE,
CONF_HOST,
CONF_NAME,
CONF_PORT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
ATTR_CLIMB = "climb"
ATTR_ELEVATION = "elevation"
ATTR_GPS_TIME = "gps_time"
ATTR_SPEED = "speed"
DEFAULT_HOST = "localhost"
DEFAULT_NAME = "GPS"
DEFAULT_PORT = 2947
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the GPSD component."""
name = config.get(CONF_NAME)
host = config.get(CONF_HOST)
port = config.get(CONF_PORT)
# Will hopefully be possible with the next gps3 update
# https://github.com/wadda/gps3/issues/11
# from gps3 import gps3
# try:
# gpsd_socket = gps3.GPSDSocket()
# gpsd_socket.connect(host=host, port=port)
# except GPSError:
# _LOGGER.warning('Not able to connect to GPSD')
# return False
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
try:
sock.connect((host, port))
sock.shutdown(2)
_LOGGER.debug("Connection to GPSD possible")
except OSError:
_LOGGER.error("Not able to connect to GPSD")
return False
add_entities([GpsdSensor(hass, name, host, port)])
class GpsdSensor(Entity):
"""Representation of a GPS receiver available via GPSD."""
def __init__(self, hass, name, host, port):
"""Initialize the GPSD sensor."""
self.hass = hass
self._name = name
self._host = host
self._port = port
self.agps_thread = AGPS3mechanism()
self.agps_thread.stream_data(host=self._host, port=self._port)
self.agps_thread.run_thread()
@property
def name(self):
"""Return the name."""
return self._name
@property
def state(self):
"""Return the state of GPSD."""
if self.agps_thread.data_stream.mode == 3:
return "3D Fix"
if self.agps_thread.data_stream.mode == 2:
return "2D Fix"
return None
@property
def device_state_attributes(self):
"""Return the state attributes of the GPS."""
return {
ATTR_LATITUDE: self.agps_thread.data_stream.lat,
ATTR_LONGITUDE: self.agps_thread.data_stream.lon,
ATTR_ELEVATION: self.agps_thread.data_stream.alt,
ATTR_GPS_TIME: self.agps_thread.data_stream.time,
ATTR_SPEED: self.agps_thread.data_stream.speed,
ATTR_CLIMB: self.agps_thread.data_stream.climb,
ATTR_MODE: self.agps_thread.data_stream.mode,
}
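# Editorial configuration sketch (illustrative, not from the source): the
# PLATFORM_SCHEMA above corresponds to YAML along these lines in
# configuration.yaml, with host/port defaulting to localhost:2947:
#
#     sensor:
#       - platform: gpsd
#         name: GPS
#         host: localhost
#         port: 2947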
|
import numpy as np
import pandas as pd
from scattertext.termranking import AbsoluteFrequencyRanker
from scattertext.termscoring.RankDifference import RankDifference
class GanttChart(object):
'''
Note: the Gantt charts listed here are inspired by
Dustin Arendt and Svitlana Volkova. ESTEEM: A Novel Framework for Qualitatively Evaluating and
Visualizing Spatiotemporal Embeddings in Social Media. ACL System Demonstrations. 2017.
http://www.aclweb.org/anthology/P/P17/P17-4005.pdf
In order to use the make chart function, Altair must be installed.
'''
def __init__(self,
term_doc_matrix,
category_to_timestep_func,
is_gap_between_sequences_func,
timesteps_to_lag=4,
num_top_terms_each_timestep=10,
num_terms_to_include=40,
starting_time_step=None,
term_ranker=AbsoluteFrequencyRanker,
term_scorer=RankDifference()):
'''
Parameters
----------
term_doc_matrix : TermDocMatrix
category_to_timestep_func : lambda
is_gap_between_sequences_func : lambda
timesteps_to_lag : int
num_top_terms_each_timestep : int
num_terms_to_include : int
starting_time_step : object
term_ranker : TermRanker
term_scorer : TermScorer
'''
self.corpus = term_doc_matrix
self.timesteps_to_lag = timesteps_to_lag
self.num_top_terms_each_timestep = num_top_terms_each_timestep
self.num_terms_to_include = num_terms_to_include
self.is_gap_between_sequences_func = is_gap_between_sequences_func
self.category_to_timestep_func = category_to_timestep_func
self.term_ranker = term_ranker
self.term_scorer = term_scorer
categories = list(sorted(self.corpus.get_categories()))
if len(categories) <= timesteps_to_lag:
raise Exception("The number of categories in the term doc matrix is <= "
+ str(timesteps_to_lag))
if starting_time_step is None:
starting_time_step = categories[timesteps_to_lag + 1]
self.starting_time_step = starting_time_step
def make_chart(self):
'''
Returns
-------
altair.Chart
'''
task_df = self.get_task_df()
import altair as alt
chart = alt.Chart(task_df).mark_bar().encode(
x='start',
x2='end',
y='term',
)
return chart
def get_temporal_score_df(self):
'''
Returns
-------
'''
scoredf = {}
tdf = self.term_ranker(self.corpus).get_ranks()
for cat in sorted(self.corpus.get_categories()):
if cat >= self.starting_time_step:
negative_categories = self._get_negative_categories(cat, tdf)
scores = self.term_scorer.get_scores(
tdf[cat + ' freq'].astype(int),
tdf[negative_categories].sum(axis=1)
)
scoredf[cat + ' score'] = scores
scoredf[cat + ' freq'] = tdf[cat + ' freq'].astype(int)
return pd.DataFrame(scoredf)
def _get_negative_categories(self, cat, tdf):
return sorted([x for x in tdf.columns if x < cat])[-self.timesteps_to_lag:]
def _get_term_time_df(self):
data = []
tdf = self.term_ranker(self.corpus).get_ranks()
for cat in sorted(self.corpus.get_categories()):
if cat >= self.starting_time_step:
negative_categories = self._get_negative_categories(cat, tdf)
scores = self.term_scorer.get_scores(
tdf[cat + ' freq'].astype(int),
tdf[negative_categories].sum(axis=1)
)
top_term_indices = np.argsort(-scores)[:self.num_top_terms_each_timestep]
for term in tdf.index[top_term_indices]:
data.append({'time': self.category_to_timestep_func(cat),
'term': term,
'top': 1})
return pd.DataFrame(data)
def get_task_df(self):
'''
Returns
-------
'''
term_time_df = self._get_term_time_df()
terms_to_include = (
term_time_df
.groupby('term')['top']
.sum()
.sort_values(ascending=False)
.iloc[:self.num_terms_to_include].index
)
task_df = (
term_time_df[term_time_df.term.isin(terms_to_include)][['time', 'term']]
.groupby('term')
.apply(lambda x: pd.Series(self._find_sequences(x['time'])))
.reset_index()
.rename({0: 'sequence'}, axis=1)
.reset_index()
.assign(start=lambda x: x['sequence'].apply(lambda x: x[0]))
.assign(end=lambda x: x['sequence'].apply(lambda x: x[1]))
[['term', 'start', 'end']]
)
return task_df
def _find_sequences(self, time_steps):
min_timestep = None
last_timestep = None
sequences = []
cur_sequence = []
for cur_timestep in sorted(time_steps):
if min_timestep is None:
cur_sequence = [cur_timestep]
min_timestep = cur_timestep
elif not self.is_gap_between_sequences_func(last_timestep, cur_timestep):
cur_sequence.append(cur_timestep)
min_timestep = cur_timestep
else:
sequences.append([cur_sequence[0], cur_sequence[-1]])
cur_sequence = [cur_timestep]
last_timestep = cur_timestep
if cur_sequence:
sequences.append([cur_sequence[0], cur_sequence[-1]])
return sequences
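# Editorial usage sketch (not part of the module); assumes a TermDocMatrix
# whose categories sort as time steps (e.g. 'YYYY-MM' strings) and that
# Altair is installed for make_chart. The lambdas below are placeholders:
#
#     gantt = GanttChart(
#         term_doc_matrix=corpus,
#         category_to_timestep_func=lambda cat: cat,             # categories already are timesteps
#         is_gap_between_sequences_func=lambda last, cur: False,  # treat all steps as contiguous
#     )
#     chart = gantt.make_chart()  # altair.Chart with one bar per term sequence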
|
import math
import numpy as np
import tensorflow as tf
from tensornetwork.backends.tensorflow import decompositions
class DecompositionsTest(tf.test.TestCase):
def test_expected_shapes(self):
val = tf.zeros((2, 3, 4, 5))
u, s, vh, _ = decompositions.svd(tf, val, 2)
self.assertEqual(u.shape, (2, 3, 6))
self.assertEqual(s.shape, (6,))
self.assertAllClose(s, np.zeros(6))
self.assertEqual(vh.shape, (6, 4, 5))
def test_expected_shapes_qr(self):
val = tf.zeros((2, 3, 4, 5))
for non_negative_diagonal in [True, False]:
q, r = decompositions.qr(tf, val, 2, non_negative_diagonal)
self.assertEqual(q.shape, (2, 3, 6))
self.assertEqual(r.shape, (6, 4, 5))
def test_expected_shapes_rq(self):
val = tf.zeros((2, 3, 4, 5))
for non_negative_diagonal in [True, False]:
r, q = decompositions.rq(tf, val, 2, non_negative_diagonal)
self.assertEqual(r.shape, (2, 3, 6))
self.assertEqual(q.shape, (6, 4, 5))
def test_rq(self):
random_matrix = np.random.rand(10, 10)
for non_negative_diagonal in [True, False]:
r, q = decompositions.rq(tf, random_matrix, 1, non_negative_diagonal)
self.assertAllClose(tf.tensordot(r, q, ([1], [0])), random_matrix)
def test_qr(self):
random_matrix = np.random.rand(10, 10)
for non_negative_diagonal in [True, False]:
q, r = decompositions.qr(tf, random_matrix, 1, non_negative_diagonal)
self.assertAllClose(tf.tensordot(q, r, ([1], [0])), random_matrix)
def test_rq_defun(self):
random_matrix = np.random.rand(10, 10)
for non_negative_diagonal in [True, False]:
rq = tf.function(decompositions.rq)
r, q = rq(tf, random_matrix, 1, non_negative_diagonal)
self.assertAllClose(tf.tensordot(r, q, ([1], [0])), random_matrix)
def test_qr_defun(self):
random_matrix = np.random.rand(10, 10)
for non_negative_diagonal in [True, False]:
qr = tf.function(decompositions.qr)
q, r = qr(tf, random_matrix, 1, non_negative_diagonal)
self.assertAllClose(tf.tensordot(q, r, ([1], [0])), random_matrix)
def test_max_singular_values(self):
random_matrix = np.random.rand(10, 10)
unitary1, _, unitary2 = np.linalg.svd(random_matrix)
singular_values = np.array(range(10))
val = unitary1.dot(np.diag(singular_values).dot(unitary2.T))
u, s, vh, trun = decompositions.svd(
tf, val, 1, max_singular_values=7)
self.assertEqual(u.shape, (10, 7))
self.assertEqual(s.shape, (7,))
self.assertAllClose(s, np.arange(9, 2, -1))
self.assertEqual(vh.shape, (7, 10))
self.assertAllClose(trun, np.arange(2, -1, -1))
def test_max_singular_values_defun(self):
random_matrix = np.random.rand(10, 10)
unitary1, _, unitary2 = np.linalg.svd(random_matrix)
singular_values = np.array(range(10))
val = unitary1.dot(np.diag(singular_values).dot(unitary2.T))
svd_decomposition = tf.function(decompositions.svd)
u, s, vh, trun = svd_decomposition(tf, val, 1, max_singular_values=7)
self.assertEqual(u.shape, (10, 7))
self.assertEqual(s.shape, (7,))
self.assertAllClose(s, np.arange(9, 2, -1))
self.assertEqual(vh.shape, (7, 10))
self.assertAllClose(trun, np.arange(2, -1, -1))
def test_max_truncation_error(self):
random_matrix = np.random.rand(10, 10)
unitary1, _, unitary2 = np.linalg.svd(random_matrix)
singular_values = np.array(range(10))
val = unitary1.dot(np.diag(singular_values).dot(unitary2.T))
u, s, vh, trun = decompositions.svd(
tf, val, 1, max_truncation_error=math.sqrt(5.1))
self.assertEqual(u.shape, (10, 7))
self.assertEqual(s.shape, (7,))
self.assertAllClose(s, np.arange(9, 2, -1))
self.assertEqual(vh.shape, (7, 10))
self.assertAllClose(trun, np.arange(2, -1, -1))
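# Sanity check on the numbers above: the discarded singular values are
# [2, 1, 0], and sqrt(2**2 + 1**2 + 0**2) = sqrt(5) <= sqrt(5.1), while also
# discarding 3 would give sqrt(14) > sqrt(5.1), so exactly 7 values are kept.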
def test_max_truncation_error_relative(self):
absolute = np.diag([2.0, 1.0, 0.2, 0.1])
relative = np.diag([2.0, 1.0, 0.2, 0.1])
max_truncation_err = 0.2
_, _, _, trunc_sv_absolute = decompositions.svd(
tf,
absolute,
1,
max_truncation_error=max_truncation_err,
relative=False)
_, _, _, trunc_sv_relative = decompositions.svd(
tf, relative, 1, max_truncation_error=max_truncation_err, relative=True)
np.testing.assert_almost_equal(trunc_sv_absolute, [0.1])
np.testing.assert_almost_equal(trunc_sv_relative, [0.2, 0.1])
if __name__ == '__main__':
tf.test.main()
|
revision = "8ae67285ff14"
down_revision = "5e680529b666"
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_index("certificate_replacement_associations_ix")
op.create_index(
"certificate_replacement_associations_ix",
"certificate_replacement_associations",
["replaced_certificate_id", "certificate_id"],
unique=True,
)
def downgrade():
op.drop_index("certificate_replacement_associations_ix")
op.create_index(
"certificate_replacement_associations_ix",
"certificate_replacement_associations",
["certificate_id", "certificate_id"],
unique=True,
)
|
import pytest
from homeassistant.components import http, hue
from homeassistant.components.hue import light as hue_light
import homeassistant.loader as loader
from tests.async_mock import ANY, patch
from tests.common import MockModule, async_mock_service, mock_integration
async def test_component_dependencies(hass):
"""Test if we can get the proper load order of components."""
mock_integration(hass, MockModule("mod1"))
mock_integration(hass, MockModule("mod2", ["mod1"]))
mod_3 = mock_integration(hass, MockModule("mod3", ["mod2"]))
assert {"mod1", "mod2", "mod3"} == await loader._async_component_dependencies(
hass, "mod_3", mod_3, set(), set()
)
# Create circular dependency
mock_integration(hass, MockModule("mod1", ["mod3"]))
with pytest.raises(loader.CircularDependency):
print(
await loader._async_component_dependencies(
hass, "mod_3", mod_3, set(), set()
)
)
# Depend on non-existing component
mod_1 = mock_integration(hass, MockModule("mod1", ["nonexisting"]))
with pytest.raises(loader.IntegrationNotFound):
print(
await loader._async_component_dependencies(
hass, "mod_1", mod_1, set(), set()
)
)
# Having an after dependency 2 deps down that is circular
mod_1 = mock_integration(
hass, MockModule("mod1", partial_manifest={"after_dependencies": ["mod_3"]})
)
with pytest.raises(loader.CircularDependency):
print(
await loader._async_component_dependencies(
hass, "mod_3", mod_3, set(), set()
)
)
def test_component_loader(hass):
"""Test loading components."""
components = loader.Components(hass)
assert components.http.CONFIG_SCHEMA is http.CONFIG_SCHEMA
assert hass.components.http.CONFIG_SCHEMA is http.CONFIG_SCHEMA
def test_component_loader_non_existing(hass):
"""Test loading components."""
components = loader.Components(hass)
with pytest.raises(ImportError):
components.non_existing
async def test_component_wrapper(hass):
"""Test component wrapper."""
calls = async_mock_service(hass, "persistent_notification", "create")
components = loader.Components(hass)
components.persistent_notification.async_create("message")
await hass.async_block_till_done()
assert len(calls) == 1
async def test_helpers_wrapper(hass):
"""Test helpers wrapper."""
helpers = loader.Helpers(hass)
result = []
def discovery_callback(service, discovered):
"""Handle discovery callback."""
result.append(discovered)
helpers.discovery.async_listen("service_name", discovery_callback)
await helpers.discovery.async_discover("service_name", "hello", None, {})
await hass.async_block_till_done()
assert result == ["hello"]
async def test_custom_component_name(hass):
"""Test the name attribte of custom components."""
integration = await loader.async_get_integration(hass, "test_standalone")
int_comp = integration.get_component()
assert int_comp.__name__ == "custom_components.test_standalone"
assert int_comp.__package__ == "custom_components"
comp = hass.components.test_standalone
assert comp.__name__ == "custom_components.test_standalone"
assert comp.__package__ == "custom_components"
integration = await loader.async_get_integration(hass, "test_package")
int_comp = integration.get_component()
assert int_comp.__name__ == "custom_components.test_package"
assert int_comp.__package__ == "custom_components.test_package"
comp = hass.components.test_package
assert comp.__name__ == "custom_components.test_package"
assert comp.__package__ == "custom_components.test_package"
integration = await loader.async_get_integration(hass, "test")
platform = integration.get_platform("light")
assert platform.__name__ == "custom_components.test.light"
assert platform.__package__ == "custom_components.test"
# Test custom components is mounted
from custom_components.test_package import TEST
assert TEST == 5
async def test_log_warning_custom_component(hass, caplog):
"""Test that we log a warning when loading a custom component."""
hass.components.test_standalone
assert "You are using a custom integration for test_standalone" in caplog.text
await loader.async_get_integration(hass, "test")
assert "You are using a custom integration for test " in caplog.text
async def test_get_integration(hass):
"""Test resolving integration."""
integration = await loader.async_get_integration(hass, "hue")
assert hue == integration.get_component()
assert hue_light == integration.get_platform("light")
async def test_get_integration_legacy(hass):
"""Test resolving integration."""
integration = await loader.async_get_integration(hass, "test_embedded")
assert integration.get_component().DOMAIN == "test_embedded"
assert integration.get_platform("switch") is not None
async def test_get_integration_custom_component(hass):
"""Test resolving integration."""
integration = await loader.async_get_integration(hass, "test_package")
print(integration)
assert integration.get_component().DOMAIN == "test_package"
assert integration.name == "Test Package"
def test_integration_properties(hass):
"""Test integration properties."""
integration = loader.Integration(
hass,
"homeassistant.components.hue",
None,
{
"name": "Philips Hue",
"domain": "hue",
"dependencies": ["test-dep"],
"requirements": ["test-req==1.0.0"],
"zeroconf": ["_hue._tcp.local."],
"homekit": {"models": ["BSB002"]},
"ssdp": [
{
"manufacturer": "Royal Philips Electronics",
"modelName": "Philips hue bridge 2012",
},
{
"manufacturer": "Royal Philips Electronics",
"modelName": "Philips hue bridge 2015",
},
{"manufacturer": "Signify", "modelName": "Philips hue bridge 2015"},
],
"mqtt": ["hue/discovery"],
},
)
assert integration.name == "Philips Hue"
assert integration.domain == "hue"
assert integration.homekit == {"models": ["BSB002"]}
assert integration.zeroconf == ["_hue._tcp.local."]
assert integration.ssdp == [
{
"manufacturer": "Royal Philips Electronics",
"modelName": "Philips hue bridge 2012",
},
{
"manufacturer": "Royal Philips Electronics",
"modelName": "Philips hue bridge 2015",
},
{"manufacturer": "Signify", "modelName": "Philips hue bridge 2015"},
]
assert integration.mqtt == ["hue/discovery"]
assert integration.dependencies == ["test-dep"]
assert integration.requirements == ["test-req==1.0.0"]
assert integration.is_built_in is True
integration = loader.Integration(
hass,
"custom_components.hue",
None,
{
"name": "Philips Hue",
"domain": "hue",
"dependencies": ["test-dep"],
"requirements": ["test-req==1.0.0"],
},
)
assert integration.is_built_in is False
assert integration.homekit is None
assert integration.zeroconf is None
assert integration.ssdp is None
assert integration.mqtt is None
integration = loader.Integration(
hass,
"custom_components.hue",
None,
{
"name": "Philips Hue",
"domain": "hue",
"dependencies": ["test-dep"],
"zeroconf": [{"type": "_hue._tcp.local.", "name": "hue*"}],
"requirements": ["test-req==1.0.0"],
},
)
assert integration.is_built_in is False
assert integration.homekit is None
assert integration.zeroconf == [{"type": "_hue._tcp.local.", "name": "hue*"}]
assert integration.ssdp is None
async def test_integrations_only_once(hass):
"""Test that we load integrations only once."""
int_1 = hass.async_create_task(loader.async_get_integration(hass, "hue"))
int_2 = hass.async_create_task(loader.async_get_integration(hass, "hue"))
assert await int_1 is await int_2
async def test_get_custom_components_internal(hass):
"""Test that we can a list of custom components."""
# pylint: disable=protected-access
integrations = await loader._async_get_custom_components(hass)
assert integrations == {"test": ANY, "test_package": ANY}
def _get_test_integration(hass, name, config_flow):
"""Return a generated test integration."""
return loader.Integration(
hass,
f"homeassistant.components.{name}",
None,
{
"name": name,
"domain": name,
"config_flow": config_flow,
"dependencies": [],
"requirements": [],
"zeroconf": [f"_{name}._tcp.local."],
"homekit": {"models": [name]},
"ssdp": [{"manufacturer": name, "modelName": name}],
"mqtt": [f"{name}/discovery"],
},
)
def _get_test_integration_with_zeroconf_matcher(hass, name, config_flow):
"""Return a generated test integration with a zeroconf matcher."""
return loader.Integration(
hass,
f"homeassistant.components.{name}",
None,
{
"name": name,
"domain": name,
"config_flow": config_flow,
"dependencies": [],
"requirements": [],
"zeroconf": [{"type": f"_{name}._tcp.local.", "name": f"{name}*"}],
"homekit": {"models": [name]},
"ssdp": [{"manufacturer": name, "modelName": name}],
},
)
async def test_get_custom_components(hass):
"""Verify that custom components are cached."""
test_1_integration = _get_test_integration(hass, "test_1", False)
test_2_integration = _get_test_integration(hass, "test_2", True)
name = "homeassistant.loader._async_get_custom_components"
with patch(name) as mock_get:
mock_get.return_value = {
"test_1": test_1_integration,
"test_2": test_2_integration,
}
integrations = await loader.async_get_custom_components(hass)
assert integrations == mock_get.return_value
integrations = await loader.async_get_custom_components(hass)
assert integrations == mock_get.return_value
mock_get.assert_called_once_with(hass)
async def test_get_config_flows(hass):
"""Verify that custom components with config_flow are available."""
test_1_integration = _get_test_integration(hass, "test_1", False)
test_2_integration = _get_test_integration(hass, "test_2", True)
with patch("homeassistant.loader.async_get_custom_components") as mock_get:
mock_get.return_value = {
"test_1": test_1_integration,
"test_2": test_2_integration,
}
flows = await loader.async_get_config_flows(hass)
assert "test_2" in flows
assert "test_1" not in flows
async def test_get_zeroconf(hass):
"""Verify that custom components with zeroconf are found."""
test_1_integration = _get_test_integration(hass, "test_1", True)
test_2_integration = _get_test_integration_with_zeroconf_matcher(
hass, "test_2", True
)
with patch("homeassistant.loader.async_get_custom_components") as mock_get:
mock_get.return_value = {
"test_1": test_1_integration,
"test_2": test_2_integration,
}
zeroconf = await loader.async_get_zeroconf(hass)
assert zeroconf["_test_1._tcp.local."] == [{"domain": "test_1"}]
assert zeroconf["_test_2._tcp.local."] == [
{"domain": "test_2", "name": "test_2*"}
]
async def test_get_homekit(hass):
"""Verify that custom components with homekit are found."""
test_1_integration = _get_test_integration(hass, "test_1", True)
test_2_integration = _get_test_integration(hass, "test_2", True)
with patch("homeassistant.loader.async_get_custom_components") as mock_get:
mock_get.return_value = {
"test_1": test_1_integration,
"test_2": test_2_integration,
}
homekit = await loader.async_get_homekit(hass)
assert homekit["test_1"] == "test_1"
assert homekit["test_2"] == "test_2"
async def test_get_ssdp(hass):
"""Verify that custom components with ssdp are found."""
test_1_integration = _get_test_integration(hass, "test_1", True)
test_2_integration = _get_test_integration(hass, "test_2", True)
with patch("homeassistant.loader.async_get_custom_components") as mock_get:
mock_get.return_value = {
"test_1": test_1_integration,
"test_2": test_2_integration,
}
ssdp = await loader.async_get_ssdp(hass)
assert ssdp["test_1"] == [{"manufacturer": "test_1", "modelName": "test_1"}]
assert ssdp["test_2"] == [{"manufacturer": "test_2", "modelName": "test_2"}]
async def test_get_mqtt(hass):
"""Verify that custom components with MQTT are found."""
test_1_integration = _get_test_integration(hass, "test_1", True)
test_2_integration = _get_test_integration(hass, "test_2", True)
with patch("homeassistant.loader.async_get_custom_components") as mock_get:
mock_get.return_value = {
"test_1": test_1_integration,
"test_2": test_2_integration,
}
mqtt = await loader.async_get_mqtt(hass)
assert mqtt["test_1"] == ["test_1/discovery"]
assert mqtt["test_2"] == ["test_2/discovery"]
async def test_get_custom_components_safe_mode(hass):
"""Test that we get empty custom components in safe mode."""
hass.config.safe_mode = True
assert await loader.async_get_custom_components(hass) == {}
|
from homeassistant.const import STATE_OFF, STATE_ON
from tests.components.advantage_air import (
TEST_SET_RESPONSE,
TEST_SET_URL,
TEST_SYSTEM_DATA,
TEST_SYSTEM_URL,
add_mock_config,
)
async def test_binary_sensor_async_setup_entry(hass, aioclient_mock):
"""Test binary sensor setup."""
aioclient_mock.get(
TEST_SYSTEM_URL,
text=TEST_SYSTEM_DATA,
)
aioclient_mock.get(
TEST_SET_URL,
text=TEST_SET_RESPONSE,
)
await add_mock_config(hass)
registry = await hass.helpers.entity_registry.async_get_registry()
assert len(aioclient_mock.mock_calls) == 1
# Test First Air Filter
entity_id = "binary_sensor.ac_one_filter"
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_OFF
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-filter"
# Test Second Air Filter
entity_id = "binary_sensor.ac_two_filter"
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_ON
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac2-filter"
# Test First Motion Sensor
entity_id = "binary_sensor.zone_open_with_sensor_motion"
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_ON
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-z01-motion"
# Test Second Motion Sensor
entity_id = "binary_sensor.zone_closed_with_sensor_motion"
state = hass.states.get(entity_id)
assert state
assert state.state == STATE_OFF
entry = registry.async_get(entity_id)
assert entry
assert entry.unique_id == "uniqueid-ac1-z02-motion"
|
import asyncio
from datetime import datetime, timedelta
import logging
import sys
from pycarwings2 import CarwingsError, Session
import voluptuous as vol
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME, HTTP_OK
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import async_track_point_in_utc_time
from homeassistant.util.dt import utcnow
_LOGGER = logging.getLogger(__name__)
DOMAIN = "nissan_leaf"
DATA_LEAF = "nissan_leaf_data"
DATA_BATTERY = "battery"
DATA_CHARGING = "charging"
DATA_PLUGGED_IN = "plugged_in"
DATA_CLIMATE = "climate"
DATA_RANGE_AC = "range_ac_on"
DATA_RANGE_AC_OFF = "range_ac_off"
CONF_INTERVAL = "update_interval"
CONF_CHARGING_INTERVAL = "update_interval_charging"
CONF_CLIMATE_INTERVAL = "update_interval_climate"
CONF_REGION = "region"
CONF_VALID_REGIONS = ["NNA", "NE", "NCI", "NMA", "NML"]
CONF_FORCE_MILES = "force_miles"
INITIAL_UPDATE = timedelta(seconds=15)
MIN_UPDATE_INTERVAL = timedelta(minutes=2)
DEFAULT_INTERVAL = timedelta(hours=1)
DEFAULT_CHARGING_INTERVAL = timedelta(minutes=15)
DEFAULT_CLIMATE_INTERVAL = timedelta(minutes=5)
RESTRICTED_BATTERY = 2
RESTRICTED_INTERVAL = timedelta(hours=12)
MAX_RESPONSE_ATTEMPTS = 10
PYCARWINGS2_SLEEP = 30
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_REGION): vol.In(CONF_VALID_REGIONS),
vol.Optional(CONF_INTERVAL, default=DEFAULT_INTERVAL): (
vol.All(cv.time_period, vol.Clamp(min=MIN_UPDATE_INTERVAL))
),
vol.Optional(
CONF_CHARGING_INTERVAL, default=DEFAULT_CHARGING_INTERVAL
): (
vol.All(cv.time_period, vol.Clamp(min=MIN_UPDATE_INTERVAL))
),
vol.Optional(
CONF_CLIMATE_INTERVAL, default=DEFAULT_CLIMATE_INTERVAL
): (
vol.All(cv.time_period, vol.Clamp(min=MIN_UPDATE_INTERVAL))
),
vol.Optional(CONF_FORCE_MILES, default=False): cv.boolean,
}
)
],
)
},
extra=vol.ALLOW_EXTRA,
)
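# Illustrative configuration.yaml entry matching CONFIG_SCHEMA above
# (credentials and values are hypothetical):
#
#   nissan_leaf:
#     - username: you@example.com
#       password: your_password
#       region: NE
#       update_interval:
#         hours: 1
#       update_interval_charging:
#         minutes: 15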
LEAF_COMPONENTS = ["sensor", "switch", "binary_sensor"]
SIGNAL_UPDATE_LEAF = "nissan_leaf_update"
SERVICE_UPDATE_LEAF = "update"
SERVICE_START_CHARGE_LEAF = "start_charge"
ATTR_VIN = "vin"
UPDATE_LEAF_SCHEMA = vol.Schema({vol.Required(ATTR_VIN): cv.string})
START_CHARGE_LEAF_SCHEMA = vol.Schema({vol.Required(ATTR_VIN): cv.string})
def setup(hass, config):
"""Set up the Nissan Leaf component."""
async def async_handle_update(service):
"""Handle service to update leaf data from Nissan servers."""
# It would be better if this was changed to use nickname, or
# an entity name rather than a vin.
vin = service.data[ATTR_VIN]
if vin in hass.data[DATA_LEAF]:
data_store = hass.data[DATA_LEAF][vin]
await data_store.async_update_data(utcnow())
else:
_LOGGER.debug("Vin %s not recognised for update", vin)
async def async_handle_start_charge(service):
"""Handle service to start charging."""
# It would be better if this was changed to use nickname, or
# an entity name rather than a vin.
vin = service.data[ATTR_VIN]
if vin in hass.data[DATA_LEAF]:
data_store = hass.data[DATA_LEAF][vin]
# Send the command to request charging is started to Nissan
# servers. If that completes OK then trigger a fresh update to
# pull the charging status from the car after waiting a minute
# for the charging request to reach the car.
result = await hass.async_add_executor_job(data_store.leaf.start_charging)
if result:
_LOGGER.debug("Start charging sent, request updated data in 1 minute")
check_charge_at = utcnow() + timedelta(minutes=1)
data_store.next_update = check_charge_at
async_track_point_in_utc_time(
hass, data_store.async_update_data, check_charge_at
)
else:
_LOGGER.debug("Vin %s not recognised for update", vin)
def setup_leaf(car_config):
"""Set up a car."""
_LOGGER.debug("Logging into You+Nissan...")
username = car_config[CONF_USERNAME]
password = car_config[CONF_PASSWORD]
region = car_config[CONF_REGION]
leaf = None
try:
# This might need to be made async (somehow); it causes
# Home Assistant to be slow to start
sess = Session(username, password, region)
leaf = sess.get_leaf()
except KeyError:
_LOGGER.error(
"Unable to fetch car details..."
" do you actually have a Leaf connected to your account?"
)
return False
except CarwingsError:
_LOGGER.error(
"An unknown error occurred while connecting to Nissan: %s",
sys.exc_info()[0],
)
return False
_LOGGER.warning(
"WARNING: This may poll your Leaf too often, and drain the 12V"
" battery. If you drain your cars 12V battery it WILL NOT START"
" as the drive train battery won't connect."
" Don't set the intervals too low"
)
data_store = LeafDataStore(hass, leaf, car_config)
hass.data[DATA_LEAF][leaf.vin] = data_store
for component in LEAF_COMPONENTS:
load_platform(hass, component, DOMAIN, {}, car_config)
async_track_point_in_utc_time(
hass, data_store.async_update_data, utcnow() + INITIAL_UPDATE
)
hass.data[DATA_LEAF] = {}
for car in config[DOMAIN]:
setup_leaf(car)
hass.services.register(
DOMAIN, SERVICE_UPDATE_LEAF, async_handle_update, schema=UPDATE_LEAF_SCHEMA
)
hass.services.register(
DOMAIN,
SERVICE_START_CHARGE_LEAF,
async_handle_start_charge,
schema=START_CHARGE_LEAF_SCHEMA,
)
return True
class LeafDataStore:
"""Nissan Leaf Data Store."""
def __init__(self, hass, leaf, car_config):
"""Initialise the data store."""
self.hass = hass
self.leaf = leaf
self.car_config = car_config
self.force_miles = car_config[CONF_FORCE_MILES]
self.data = {}
self.data[DATA_CLIMATE] = False
self.data[DATA_BATTERY] = 0
self.data[DATA_CHARGING] = False
self.data[DATA_RANGE_AC] = 0
self.data[DATA_RANGE_AC_OFF] = 0
self.data[DATA_PLUGGED_IN] = False
self.next_update = None
self.last_check = None
self.request_in_progress = False
# Timestamp of last successful response from battery or climate.
self.last_battery_response = None
self.last_climate_response = None
self._remove_listener = None
async def async_update_data(self, now):
"""Update data from nissan leaf."""
# Prevent a previously scheduled update and an ad-hoc update
# from being triggered at the same time.
if self._remove_listener:
self._remove_listener()
self._remove_listener = None
# Clear next update whilst this update is underway
self.next_update = None
await self.async_refresh_data(now)
self.next_update = self.get_next_interval()
_LOGGER.debug("Next update=%s", self.next_update)
self._remove_listener = async_track_point_in_utc_time(
self.hass, self.async_update_data, self.next_update
)
def get_next_interval(self):
"""Calculate when the next update should occur."""
base_interval = self.car_config[CONF_INTERVAL]
climate_interval = self.car_config[CONF_CLIMATE_INTERVAL]
charging_interval = self.car_config[CONF_CHARGING_INTERVAL]
# The 12V battery is used when communicating with Nissan servers.
# The 12V battery is charged from the traction battery when not
# connected and when the traction battery has enough charge. To
# avoid draining the 12V battery we shall restrict the update
# frequency if low battery detected.
if (
self.last_battery_response is not None
and self.data[DATA_CHARGING] is False
and self.data[DATA_BATTERY] <= RESTRICTED_BATTERY
):
_LOGGER.debug(
"Low battery so restricting refresh frequency (%s)", self.leaf.nickname
)
interval = RESTRICTED_INTERVAL
else:
intervals = [base_interval]
if self.data[DATA_CHARGING]:
intervals.append(charging_interval)
if self.data[DATA_CLIMATE]:
intervals.append(climate_interval)
interval = min(intervals)
return utcnow() + interval
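# Example using the constants defined in this module: while charging with the
# climate off, the next poll is min(1 hour, 15 minutes) = 15 minutes away;
# with a battery reading at or below RESTRICTED_BATTERY (2) and not charging,
# it is pushed out by RESTRICTED_INTERVAL (12 hours) to protect the 12V
# battery.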
async def async_refresh_data(self, now):
"""Refresh the leaf data and update the datastore."""
if self.request_in_progress:
_LOGGER.debug("Refresh currently in progress for %s", self.leaf.nickname)
return
_LOGGER.debug("Updating Nissan Leaf Data")
self.last_check = datetime.today()
self.request_in_progress = True
server_response = await self.async_get_battery()
if server_response is not None:
_LOGGER.debug("Server Response: %s", server_response.__dict__)
if server_response.answer["status"] == HTTP_OK:
self.data[DATA_BATTERY] = server_response.battery_percent
# The pycarwings2 library doesn't always provide cruising ranges,
# so check that they exist before using them.
# Root cause: the Nissan servers don't always send the data.
if hasattr(server_response, "cruising_range_ac_on_km"):
self.data[DATA_RANGE_AC] = server_response.cruising_range_ac_on_km
else:
self.data[DATA_RANGE_AC] = None
if hasattr(server_response, "cruising_range_ac_off_km"):
self.data[
DATA_RANGE_AC_OFF
] = server_response.cruising_range_ac_off_km
else:
self.data[DATA_RANGE_AC_OFF] = None
self.data[DATA_PLUGGED_IN] = server_response.is_connected
self.data[DATA_CHARGING] = server_response.is_charging
async_dispatcher_send(self.hass, SIGNAL_UPDATE_LEAF)
self.last_battery_response = utcnow()
# Climate response only updated if battery data updated first.
if server_response is not None:
try:
climate_response = await self.async_get_climate()
if climate_response is not None:
_LOGGER.debug(
"Got climate data for Leaf: %s", climate_response.__dict__
)
self.data[DATA_CLIMATE] = climate_response.is_hvac_running
self.last_climate_response = utcnow()
except CarwingsError:
_LOGGER.error("Error fetching climate info")
self.request_in_progress = False
async_dispatcher_send(self.hass, SIGNAL_UPDATE_LEAF)
@staticmethod
def _extract_start_date(battery_info):
"""Extract the server date from the battery response."""
try:
return battery_info.answer["BatteryStatusRecords"]["OperationDateAndTime"]
except KeyError:
return None
async def async_get_battery(self):
"""Request battery update from Nissan servers."""
try:
# Request battery update from the car
_LOGGER.debug("Requesting battery update, %s", self.leaf.vin)
request = await self.hass.async_add_executor_job(self.leaf.request_update)
if not request:
_LOGGER.error("Battery update request failed")
return None
for attempt in range(MAX_RESPONSE_ATTEMPTS):
_LOGGER.debug(
"Waiting %s seconds for battery update (%s) (%s)",
PYCARWINGS2_SLEEP,
self.leaf.vin,
attempt,
)
await asyncio.sleep(PYCARWINGS2_SLEEP)
# We don't use the response from get_status_from_update
# apart from knowing that the car has responded saying it
# has given the latest battery status to Nissan.
check_result_info = await self.hass.async_add_executor_job(
self.leaf.get_status_from_update, request
)
if check_result_info is not None:
# Get the latest battery status from Nissan servers.
# This has the SOC in it.
server_info = await self.hass.async_add_executor_job(
self.leaf.get_latest_battery_status
)
return server_info
_LOGGER.debug(
"%s attempts exceeded return latest data from server",
MAX_RESPONSE_ATTEMPTS,
)
# Get the latest data from the Nissan servers; even though
# it may be out of date, it's better than nothing.
server_info = await self.hass.async_add_executor_job(
self.leaf.get_latest_battery_status
)
return server_info
except CarwingsError:
_LOGGER.error("An error occurred getting battery status")
return None
except KeyError:
_LOGGER.error("An error occurred parsing response from server")
return None
async def async_get_climate(self):
"""Request climate data from Nissan servers."""
try:
return await self.hass.async_add_executor_job(
self.leaf.get_latest_hvac_status
)
except CarwingsError:
_LOGGER.error(
"An error occurred communicating with the car %s", self.leaf.vin
)
return None
async def async_set_climate(self, toggle):
"""Set climate control mode via Nissan servers."""
climate_result = None
if toggle:
_LOGGER.debug("Requesting climate turn on for %s", self.leaf.vin)
set_function = self.leaf.start_climate_control
result_function = self.leaf.get_start_climate_control_result
else:
_LOGGER.debug("Requesting climate turn off for %s", self.leaf.vin)
set_function = self.leaf.stop_climate_control
result_function = self.leaf.get_stop_climate_control_result
request = await self.hass.async_add_executor_job(set_function)
for attempt in range(MAX_RESPONSE_ATTEMPTS):
if attempt > 0:
_LOGGER.debug(
"Climate data not in yet (%s) (%s). Waiting (%s) seconds",
self.leaf.vin,
attempt,
PYCARWINGS2_SLEEP,
)
await asyncio.sleep(PYCARWINGS2_SLEEP)
climate_result = await self.hass.async_add_executor_job(
result_function, request
)
if climate_result is not None:
break
if climate_result is not None:
_LOGGER.debug("Climate result: %s", climate_result.__dict__)
async_dispatcher_send(self.hass, SIGNAL_UPDATE_LEAF)
return climate_result.is_hvac_running == toggle
_LOGGER.debug("Climate result not returned by Nissan servers")
return False
class LeafEntity(Entity):
"""Base class for Nissan Leaf entity."""
def __init__(self, car):
"""Store LeafDataStore upon init."""
self.car = car
def log_registration(self):
"""Log registration."""
_LOGGER.debug(
"Registered %s integration for VIN %s",
self.__class__.__name__,
self.car.leaf.vin,
)
@property
def device_state_attributes(self):
"""Return default attributes for Nissan leaf entities."""
return {
"next_update": self.car.next_update,
"last_attempt": self.car.last_check,
"updated_on": self.car.last_battery_response,
"update_in_progress": self.car.request_in_progress,
"vin": self.car.leaf.vin,
}
async def async_added_to_hass(self):
"""Register callbacks."""
self.log_registration()
self.async_on_remove(
async_dispatcher_connect(
self.car.hass, SIGNAL_UPDATE_LEAF, self._update_callback
)
)
@callback
def _update_callback(self):
"""Update the state."""
self.async_schedule_update_ha_state(True)
|
import homeassistant.components.sensor as sensor
from homeassistant.const import PERCENTAGE, PRESSURE_HPA, SIGNAL_STRENGTH_DECIBELS
from tests.common import MockEntity
DEVICE_CLASSES = list(sensor.DEVICE_CLASSES)
DEVICE_CLASSES.append("none")
UNITS_OF_MEASUREMENT = {
sensor.DEVICE_CLASS_BATTERY: PERCENTAGE, # % of battery that is left
sensor.DEVICE_CLASS_HUMIDITY: PERCENTAGE, # % of humidity in the air
sensor.DEVICE_CLASS_ILLUMINANCE: "lm", # current light level (lx/lm)
sensor.DEVICE_CLASS_SIGNAL_STRENGTH: SIGNAL_STRENGTH_DECIBELS, # signal strength (dB/dBm)
sensor.DEVICE_CLASS_TEMPERATURE: "C", # temperature (C/F)
sensor.DEVICE_CLASS_TIMESTAMP: "hh:mm:ss", # timestamp (ISO8601)
sensor.DEVICE_CLASS_PRESSURE: PRESSURE_HPA, # pressure (hPa/mbar)
sensor.DEVICE_CLASS_POWER: "kW", # power (W/kW)
sensor.DEVICE_CLASS_CURRENT: "A", # current (A)
sensor.DEVICE_CLASS_ENERGY: "kWh", # energy (Wh/kWh)
sensor.DEVICE_CLASS_POWER_FACTOR: PERCENTAGE, # power factor (no unit, min: -1.0, max: 1.0)
sensor.DEVICE_CLASS_VOLTAGE: "V", # voltage (V)
}
ENTITIES = {}
def init(empty=False):
"""Initialize the platform with entities."""
global ENTITIES
ENTITIES = (
{}
if empty
else {
device_class: MockSensor(
name=f"{device_class} sensor",
unique_id=f"unique_{device_class}",
device_class=device_class,
unit_of_measurement=UNITS_OF_MEASUREMENT.get(device_class),
)
for device_class in DEVICE_CLASSES
}
)
async def async_setup_platform(
hass, config, async_add_entities_callback, discovery_info=None
):
"""Return mock entities."""
async_add_entities_callback(list(ENTITIES.values()))
class MockSensor(MockEntity):
"""Mock Sensor class."""
@property
def device_class(self):
"""Return the class of this sensor."""
return self._handle("device_class")
@property
def unit_of_measurement(self):
"""Return the unit_of_measurement of this sensor."""
return self._handle("unit_of_measurement")
|
import re
from django.utils.html import strip_tags
from django.utils.translation import gettext_lazy as _
from weblate.checks.base import TargetCheck
from weblate.checks.data import IGNORE_WORDS
from weblate.checks.format import (
C_PRINTF_MATCH,
PHP_PRINTF_MATCH,
PYTHON_BRACE_MATCH,
PYTHON_PRINTF_MATCH,
)
from weblate.checks.languages import LANGUAGES
from weblate.checks.qt import QT_FORMAT_MATCH, QT_PLURAL_MATCH
from weblate.checks.ruby import RUBY_FORMAT_MATCH
# Email address to ignore
EMAIL_RE = re.compile(r"[a-z0-9_.-]+@[a-z0-9_.-]+\.[a-z0-9-]{2,}", re.IGNORECASE)
URL_RE = re.compile(
r"(?:http|ftp)s?://" # http:// or https://
r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+"
r"(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|" # domain...
r"localhost|" # localhost...
r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip
r"(?::\d+)?" # optional port
r"(?:/?|[/?]\S+)$",
re.IGNORECASE,
)
HASH_RE = re.compile(r"#[A-Za-z0-9_-]*")
DOMAIN_RE = re.compile(
r"(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+"
r"(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)",
re.IGNORECASE,
)
PATH_RE = re.compile(r"(^|[ ])(/[a-zA-Z0-9=:?._-]+)+")
TEMPLATE_RE = re.compile(r"{[a-z_-]+}|@[A-Z_]@", re.IGNORECASE)
RST_MATCH = re.compile(r"(:[a-z:]+:`[^`]+`|``[^`]+``)")
SPLIT_RE = re.compile(
r"(?:\&(?:nbsp|rsaquo|lt|gt|amp|ldquo|rdquo|times|quot);|"
+ r'[() ,.^`"\'\\/_<>!?;:|{}*^@%#&~=+\r\n✓—‑…\[\]0-9-])+',
re.IGNORECASE,
)
EMOJI_RE = re.compile("[\U00002600-\U000027BF]|[\U0001f000-\U0001fffd]")
# Docbook tags to ignore
DB_TAGS = ("screen", "indexterm", "programlisting")
def strip_format(msg, flags):
"""Remove format strings from the strings.
These are quite often not changed by translators.
"""
if "python-format" in flags:
regex = PYTHON_PRINTF_MATCH
elif "python-brace-format" in flags:
regex = PYTHON_BRACE_MATCH
elif "php-format" in flags:
regex = PHP_PRINTF_MATCH
elif "c-format" in flags:
regex = C_PRINTF_MATCH
elif "qt-format" in flags:
regex = QT_FORMAT_MATCH
elif "qt-plural-format" in flags:
regex = QT_PLURAL_MATCH
elif "ruby-format" in flags:
regex = RUBY_FORMAT_MATCH
elif "rst-text" in flags:
regex = RST_MATCH
else:
return msg
stripped = regex.sub("", msg)
return stripped
def strip_string(msg, flags):
"""Strip (usually) not translated parts from the string."""
# Strip HTML markup
stripped = strip_tags(msg)
# Strip format strings
stripped = strip_format(stripped, flags)
# Remove emojis
stripped = EMOJI_RE.sub(" ", stripped)
# Remove email addresses
stripped = EMAIL_RE.sub("", stripped)
# Strip full URLs
stripped = URL_RE.sub("", stripped)
# Strip hash tags / IRC channels
stripped = HASH_RE.sub("", stripped)
# Strip domain names/URLs
stripped = DOMAIN_RE.sub("", stripped)
# Strip file/URL paths
stripped = PATH_RE.sub("", stripped)
# Strip template markup
stripped = TEMPLATE_RE.sub("", stripped)
# Cleanup trailing/leading chars
return stripped
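# Illustrative sketch (hypothetical source string and flags): with
# "python-format" in the flags, a source such as
#   'Contact %(name)s at support@example.com or https://example.com/'
# comes out as roughly 'Contact  at  or ', leaving only the words that
# actually need translating for the checks below.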
def test_word(word, extra_ignore):
"""Test whether word should be ignored."""
return (
len(word) <= 2
or word in IGNORE_WORDS
or word in LANGUAGES
or word in extra_ignore
)
def strip_placeholders(msg, unit):
return re.sub(
"|".join(
re.escape(param) if isinstance(param, str) else param.pattern
for param in unit.all_flags.get_value("placeholders")
),
"",
msg,
)
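# Sketch (hypothetical placeholder flag): with placeholders set to
# ["%product%"], a source like 'Open %product% settings' is reduced to
# 'Open  settings' before the unchanged-translation check runs.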
class SameCheck(TargetCheck):
"""Check for not translated entries."""
check_id = "same"
name = _("Unchanged translation")
description = _("Source and translation are identical")
def should_ignore(self, source, unit):
"""Check whether given unit should be ignored."""
if "strict-same" in unit.all_flags:
return False
# Ignore some docbook tags
if unit.note.startswith("Tag: ") and unit.note[5:] in DB_TAGS:
return True
# Ignore name of the project
extra_ignore = set(
unit.translation.component.project.name.lower().split()
+ unit.translation.component.name.lower().split()
)
# Lower case source
lower_source = source.lower()
# Check special things like 1:4 1/2 or copyright
if (
len(source.strip("0123456789:/,.")) <= 1
or "(c) copyright" in lower_source
or "©" in source
):
return True
# Strip format strings
stripped = strip_string(source, unit.all_flags)
# Strip placeholder strings
if "placeholders" in unit.all_flags:
stripped = strip_placeholders(stripped, unit)
# Ignore strings which don't contain any string to translate
# or just single letter (usually unit or something like that)
# or are whole uppercase (abbreviations)
if len(stripped) <= 1 or stripped.isupper():
return True
# Check if we have any word which is not in blacklist
# (words which are often same in foreign language)
for word in SPLIT_RE.split(stripped.lower()):
if not test_word(word, extra_ignore):
return False
return True
def should_skip(self, unit):
# Skip read-only units and ignored check
if unit.readonly or super().should_skip(unit):
return True
source_language = unit.translation.component.source_language.base_code
# Ignore the check for source language,
# English variants will have most things not translated
# Interlingua is also quite often similar to English
if self.is_language(unit, source_language) or (
source_language == "en" and self.is_language(unit, ("en", "ia"))
):
return True
return False
def check_single(self, source, target, unit):
# One letter things are usually labels or decimal/thousand separators
if len(source) <= 1 and len(target) <= 1:
return False
# Check for ignoring
if self.should_ignore(source, unit):
return False
return source == target
|
import logging
import socket
import ebusdpy
import voluptuous as vol
from homeassistant.const import (
CONF_HOST,
CONF_MONITORED_CONDITIONS,
CONF_NAME,
CONF_PORT,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from .const import DOMAIN, SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "ebusd"
DEFAULT_PORT = 8888
CONF_CIRCUIT = "circuit"
CACHE_TTL = 900
SERVICE_EBUSD_WRITE = "ebusd_write"
def verify_ebusd_config(config):
"""Verify eBusd config."""
circuit = config[CONF_CIRCUIT]
for condition in config[CONF_MONITORED_CONDITIONS]:
if condition not in SENSOR_TYPES[circuit]:
raise vol.Invalid(f"Condition '{condition}' not in '{circuit}'.")
return config
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
vol.All(
{
vol.Required(CONF_CIRCUIT): cv.string,
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_MONITORED_CONDITIONS, default=[]): cv.ensure_list,
},
verify_ebusd_config,
)
)
},
extra=vol.ALLOW_EXTRA,
)
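# Illustrative configuration.yaml entry matching CONFIG_SCHEMA above (host
# and circuit are hypothetical; monitored_conditions must exist in
# SENSOR_TYPES for the chosen circuit):
#
#   ebusd:
#     host: 192.168.1.100
#     circuit: "700"
#     monitored_conditions:
#       - SomeSensorName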
def setup(hass, config):
"""Set up the eBusd component."""
_LOGGER.debug("Integration setup started")
conf = config[DOMAIN]
name = conf[CONF_NAME]
circuit = conf[CONF_CIRCUIT]
monitored_conditions = conf.get(CONF_MONITORED_CONDITIONS)
server_address = (conf.get(CONF_HOST), conf.get(CONF_PORT))
try:
ebusdpy.init(server_address)
hass.data[DOMAIN] = EbusdData(server_address, circuit)
sensor_config = {
CONF_MONITORED_CONDITIONS: monitored_conditions,
"client_name": name,
"sensor_types": SENSOR_TYPES[circuit],
}
load_platform(hass, "sensor", DOMAIN, sensor_config, config)
hass.services.register(DOMAIN, SERVICE_EBUSD_WRITE, hass.data[DOMAIN].write)
_LOGGER.debug("Ebusd integration setup completed")
return True
except (socket.timeout, OSError):
return False
class EbusdData:
"""Get the latest data from Ebusd."""
def __init__(self, address, circuit):
"""Initialize the data object."""
self._circuit = circuit
self._address = address
self.value = {}
def update(self, name, stype):
"""Call the Ebusd API to update the data."""
try:
_LOGGER.debug("Opening socket to ebusd %s", name)
command_result = ebusdpy.read(
self._address, self._circuit, name, stype, CACHE_TTL
)
if command_result is not None:
if "ERR:" in command_result:
_LOGGER.warning(command_result)
else:
self.value[name] = command_result
except RuntimeError as err:
_LOGGER.error(err)
raise RuntimeError(err) from err
def write(self, call):
"""Call write methon on ebusd."""
name = call.data.get("name")
value = call.data.get("value")
try:
_LOGGER.debug("Opening socket to ebusd %s", name)
command_result = ebusdpy.write(self._address, self._circuit, name, value)
if command_result is not None:
if "done" not in command_result:
_LOGGER.warning("Write command failed: %s", name)
except RuntimeError as err:
_LOGGER.error(err)
|
import logging
from Plugwise_Smile.Smile import Smile
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
CURRENT_HVAC_COOL,
CURRENT_HVAC_HEAT,
CURRENT_HVAC_IDLE,
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_HEAT_COOL,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from homeassistant.core import callback
from .const import (
COORDINATOR,
DEFAULT_MAX_TEMP,
DEFAULT_MIN_TEMP,
DOMAIN,
SCHEDULE_OFF,
SCHEDULE_ON,
)
from .gateway import SmileGateway
HVAC_MODES_HEAT_ONLY = [HVAC_MODE_HEAT, HVAC_MODE_AUTO]
HVAC_MODES_HEAT_COOL = [HVAC_MODE_HEAT_COOL, HVAC_MODE_AUTO]
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Smile Thermostats from a config entry."""
api = hass.data[DOMAIN][config_entry.entry_id]["api"]
coordinator = hass.data[DOMAIN][config_entry.entry_id][COORDINATOR]
entities = []
thermostat_classes = [
"thermostat",
"zone_thermostat",
"thermostatic_radiator_valve",
]
all_devices = api.get_all_devices()
for dev_id, device_properties in all_devices.items():
if device_properties["class"] not in thermostat_classes:
continue
thermostat = PwThermostat(
api,
coordinator,
device_properties["name"],
dev_id,
device_properties["location"],
device_properties["class"],
DEFAULT_MIN_TEMP,
DEFAULT_MAX_TEMP,
)
entities.append(thermostat)
async_add_entities(entities, True)
class PwThermostat(SmileGateway, ClimateEntity):
"""Representation of an Plugwise thermostat."""
def __init__(
self, api, coordinator, name, dev_id, loc_id, model, min_temp, max_temp
):
"""Set up the Plugwise API."""
super().__init__(api, coordinator, name, dev_id)
self._api = api
self._loc_id = loc_id
self._model = model
self._min_temp = min_temp
self._max_temp = max_temp
self._selected_schema = None
self._last_active_schema = None
self._preset_mode = None
self._presets = None
self._presets_list = None
self._heating_state = None
self._cooling_state = None
self._compressor_state = None
self._dhw_state = None
self._hvac_mode = None
self._schema_names = None
self._schema_status = None
self._temperature = None
self._setpoint = None
self._water_pressure = None
self._schedule_temp = None
self._hvac_mode = None
self._single_thermostat = self._api.single_master_thermostat()
self._unique_id = f"{dev_id}-climate"
@property
def hvac_action(self):
"""Return the current action."""
if self._single_thermostat:
if self._heating_state:
return CURRENT_HVAC_HEAT
if self._cooling_state:
return CURRENT_HVAC_COOL
return CURRENT_HVAC_IDLE
if self._setpoint > self._temperature:
return CURRENT_HVAC_HEAT
return CURRENT_HVAC_IDLE
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def device_state_attributes(self):
"""Return the device specific state attributes."""
attributes = {}
if self._schema_names:
attributes["available_schemas"] = self._schema_names
if self._selected_schema:
attributes["selected_schema"] = self._selected_schema
return attributes
@property
def preset_modes(self):
"""Return the available preset modes list."""
return self._presets_list
@property
def hvac_modes(self):
"""Return the available hvac modes list."""
if self._compressor_state is not None:
return HVAC_MODES_HEAT_COOL
return HVAC_MODES_HEAT_ONLY
@property
def hvac_mode(self):
"""Return current active hvac state."""
return self._hvac_mode
@property
def target_temperature(self):
"""Return the target_temperature."""
return self._setpoint
@property
def preset_mode(self):
"""Return the active preset."""
if self._presets:
return self._preset_mode
return None
@property
def current_temperature(self):
"""Return the current room temperature."""
return self._temperature
@property
def min_temp(self):
"""Return the minimal temperature possible to set."""
return self._min_temp
@property
def max_temp(self):
"""Return the maximum temperature possible to set."""
return self._max_temp
@property
def temperature_unit(self):
"""Return the unit of measured temperature."""
return TEMP_CELSIUS
async def async_set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if (temperature is not None) and (
self._min_temp < temperature < self._max_temp
):
try:
await self._api.set_temperature(self._loc_id, temperature)
self._setpoint = temperature
self.async_write_ha_state()
except Smile.PlugwiseError:
_LOGGER.error("Error while communicating to device")
else:
_LOGGER.error("Invalid temperature requested")
async def async_set_hvac_mode(self, hvac_mode):
"""Set the hvac mode."""
state = SCHEDULE_OFF
if hvac_mode == HVAC_MODE_AUTO:
state = SCHEDULE_ON
try:
await self._api.set_temperature(self._loc_id, self._schedule_temp)
self._setpoint = self._schedule_temp
except Smile.PlugwiseError:
_LOGGER.error("Error while communicating to device")
try:
await self._api.set_schedule_state(
self._loc_id, self._last_active_schema, state
)
self._hvac_mode = hvac_mode
self.async_write_ha_state()
except Smile.PlugwiseError:
_LOGGER.error("Error while communicating to device")
async def async_set_preset_mode(self, preset_mode):
"""Set the preset mode."""
try:
await self._api.set_preset(self._loc_id, preset_mode)
self._preset_mode = preset_mode
self._setpoint = self._presets.get(self._preset_mode, "none")[0]
self.async_write_ha_state()
except Smile.PlugwiseError:
_LOGGER.error("Error while communicating to device")
@callback
def _async_process_data(self):
"""Update the data for this climate device."""
climate_data = self._api.get_device_data(self._dev_id)
heater_central_data = self._api.get_device_data(self._api.heater_id)
if "setpoint" in climate_data:
self._setpoint = climate_data["setpoint"]
if "temperature" in climate_data:
self._temperature = climate_data["temperature"]
if "schedule_temperature" in climate_data:
self._schedule_temp = climate_data["schedule_temperature"]
if "available_schedules" in climate_data:
self._schema_names = climate_data["available_schedules"]
if "selected_schedule" in climate_data:
self._selected_schema = climate_data["selected_schedule"]
self._schema_status = False
if self._selected_schema is not None:
self._schema_status = True
if "last_used" in climate_data:
self._last_active_schema = climate_data["last_used"]
if "presets" in climate_data:
self._presets = climate_data["presets"]
if self._presets:
self._presets_list = list(self._presets)
if "active_preset" in climate_data:
self._preset_mode = climate_data["active_preset"]
if heater_central_data.get("heating_state") is not None:
self._heating_state = heater_central_data["heating_state"]
if heater_central_data.get("cooling_state") is not None:
self._cooling_state = heater_central_data["cooling_state"]
if heater_central_data.get("compressor_state") is not None:
self._compressor_state = heater_central_data["compressor_state"]
self._hvac_mode = HVAC_MODE_HEAT
if self._compressor_state is not None:
self._hvac_mode = HVAC_MODE_HEAT_COOL
if self._schema_status:
self._hvac_mode = HVAC_MODE_AUTO
self.async_write_ha_state()
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime
import hashlib
import operator
import os
import re
import typing
import six
from ._impl import json
if typing.TYPE_CHECKING:
from typing import Any, Dict, Mapping, Optional, Text
class NameGenerator(object):
"""Generator for filenames using a template.
"""
@classmethod
def _get_info(cls, media):
# type: (Mapping[Text, Any]) -> Mapping[Text, Any]
info = {
'id': media['id'],
'code': media['shortcode'],
'ownerid': media['owner']['id'],
'username': media['owner'].get('username'),
'fullname': media['owner'].get('full_name'),
'commentscount': media.get('edge_media_to_comment', {}).get('count'),
'likescount': media.get('edge_media_preview_like', {}).get('count'),
'width': media.get('dimensions', {}).get('width'),
'height': media.get('dimensions', {}).get('height'),
} # type: Dict[Text, Any]
timestamp = media.get('date') or media.get('taken_at_timestamp')
if timestamp is not None:
dt = datetime.datetime.fromtimestamp(timestamp)
info['datetime'] = ("{0.year}-{0.month:02d}-{0.day:02d} {0.hour:02d}"
"h{0.minute:02d}m{0.second:02d}s{0.microsecond}").format(dt)
info['date'] = datetime.date.fromtimestamp(timestamp)
return dict(six.moves.filter(
operator.itemgetter(1), six.iteritems(info)))
def __init__(self, template="{id}"):
# type: (Text) -> None
self.template = template
def base(self, media):
# type: (Mapping[Text, Any]) -> Text
info = self._get_info(media)
return self.template.format(**info)
def file(self, media, ext=None):
# type: (Mapping[Text, Any], Optional[Text]) -> Text
ext = ext or ("mp4" if media['is_video'] else "jpg")
return os.path.extsep.join([self.base(media), ext])
def needs_extended(self, media):
# type: (Mapping[Text, Any]) -> bool
try:
self.base(media)
return False
except KeyError:
return True
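# Usage sketch (hypothetical media mapping with only the keys _get_info reads):
#
#   gen = NameGenerator("{username}.{id}")
#   gen.file({'id': '42', 'shortcode': 'abc', 'is_video': False,
#             'owner': {'id': '7', 'username': 'someone'}})
#   # -> 'someone.42.jpg'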
def get_shared_data(html):
match = re.search(r'window._sharedData = ({[^\n]*});', html)
return json.loads(match.group(1))
def get_additional_data(html):
match = re.search(r"window.__additionalDataLoaded\('/p/.*/',({[^\n]*})\);", html)
return json.loads(match.group(1))
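# Note: both helpers assume the marker is present in the fetched page; if the
# regex does not match, `match` is None and `match.group(1)` raises an
# AttributeError instead of returning JSON to parse.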
|
import aioharmony.exceptions as harmony_exceptions
from aioharmony.harmonyapi import HarmonyAPI
from homeassistant.const import CONF_HOST, CONF_NAME
from .const import DOMAIN
def find_unique_id_for_remote(harmony: HarmonyAPI):
"""Find the unique id for both websocket and xmpp clients."""
if harmony.hub_id is not None:
return str(harmony.hub_id)
# Fall back to the timeStampHash if the hub ID is not available
return harmony.config["global"]["timeStampHash"].split(";")[-1]
def find_best_name_for_remote(data: dict, harmony: HarmonyAPI):
"""Find the best name from config or fallback to the remote."""
# As a last resort we get the name from the harmony client
# in the event a name was not provided. harmony.name is
# usually the ip address but it can be an empty string.
if CONF_NAME not in data or data[CONF_NAME] is None or data[CONF_NAME] == "":
return harmony.name
return data[CONF_NAME]
async def get_harmony_client_if_available(ip_address: str):
"""Connect to a harmony hub and fetch info."""
harmony = HarmonyAPI(ip_address=ip_address)
try:
if not await harmony.connect():
await harmony.close()
return None
except harmony_exceptions.TimeOut:
return None
await harmony.close()
return harmony
def find_matching_config_entries_for_host(hass, host):
"""Search existing config entries for one matching the host."""
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.data[CONF_HOST] == host:
return entry
return None
def list_names_from_hublist(hub_list):
"""Extract the name key value from a hub list of names."""
if not hub_list:
return []
return [
element["name"]
for element in hub_list
if element.get("name") and element.get("id") != -1
]
|
import builtins
import logging
import unittest.mock
import pytest
from qutebrowser.config import (config, configexc, configfiles, configinit,
configdata, configtypes)
from qutebrowser.utils import objreg, usertypes
@pytest.fixture
def init_patch(qapp, fake_save_manager, monkeypatch, config_tmpdir,
data_tmpdir):
monkeypatch.setattr(configfiles, 'state', None)
monkeypatch.setattr(config, 'instance', None)
monkeypatch.setattr(config, 'key_instance', None)
monkeypatch.setattr(config, 'change_filters', [])
monkeypatch.setattr(configinit, '_init_errors', None)
monkeypatch.setattr(configtypes.FontBase, 'default_family', None)
monkeypatch.setattr(configtypes.FontBase, 'default_size', None)
yield
try:
objreg.delete('config-commands')
except KeyError:
pass
@pytest.fixture
def args(fake_args):
"""Arguments needed for the config to init."""
fake_args.temp_settings = []
return fake_args
@pytest.fixture(autouse=True)
def configdata_init(monkeypatch):
"""Make sure configdata is init'ed and no test re-init's it."""
if not configdata.DATA:
configdata.init()
monkeypatch.setattr(configdata, 'init', lambda: None)
class TestEarlyInit:
def test_config_py_path(self, args, init_patch, config_py_arg):
config_py_arg.write('\n'.join(['config.load_autoconfig()',
'c.colors.hints.bg = "red"']))
configinit.early_init(args)
expected = 'colors.hints.bg = red'
assert config.instance.dump_userconfig() == expected
@pytest.mark.parametrize('config_py', [True, 'error', False])
def test_config_py(self, init_patch, config_tmpdir, caplog, args,
config_py):
"""Test loading with only a config.py."""
config_py_file = config_tmpdir / 'config.py'
if config_py:
config_py_lines = ['c.colors.hints.bg = "red"',
'config.load_autoconfig(False)']
if config_py == 'error':
config_py_lines.append('c.foo = 42')
config_py_file.write_text('\n'.join(config_py_lines),
'utf-8', ensure=True)
with caplog.at_level(logging.ERROR):
configinit.early_init(args)
# Check error messages
expected_errors = []
if config_py == 'error':
expected_errors.append("While setting 'foo': No option 'foo'")
if configinit._init_errors is None:
actual_errors = []
else:
actual_errors = [str(err)
for err in configinit._init_errors.errors]
assert actual_errors == expected_errors
# Make sure things have been init'ed
assert isinstance(config.instance, config.Config)
assert isinstance(config.key_instance, config.KeyConfig)
# Check config values
if config_py:
expected = 'colors.hints.bg = red'
else:
expected = '<Default configuration>'
assert config.instance.dump_userconfig() == expected
@pytest.mark.parametrize('load_autoconfig', [True, False])
@pytest.mark.parametrize('config_py', [True, 'error', False])
@pytest.mark.parametrize('invalid_yaml', ['42', 'list', 'unknown',
'wrong-type', False])
def test_autoconfig_yml(self, init_patch, config_tmpdir, # noqa: C901
caplog, args,
load_autoconfig, config_py, invalid_yaml):
"""Test interaction between config.py and autoconfig.yml."""
# Prepare files
autoconfig_file = config_tmpdir / 'autoconfig.yml'
config_py_file = config_tmpdir / 'config.py'
yaml_lines = {
'42': '42',
'list': '[1, 2]',
'unknown': [
'settings:',
' colors.foobar:',
' global: magenta',
'config_version: 2',
],
'wrong-type': [
'settings:',
' tabs.position:',
' global: true',
'config_version: 2',
],
False: [
'settings:',
' colors.hints.fg:',
' global: magenta',
'config_version: 2',
],
}
text = '\n'.join(yaml_lines[invalid_yaml])
autoconfig_file.write_text(text, 'utf-8', ensure=True)
if config_py:
config_py_lines = ['c.colors.hints.bg = "red"']
config_py_lines.append('config.load_autoconfig({})'.format(load_autoconfig))
if config_py == 'error':
config_py_lines.append('c.foo = 42')
config_py_file.write_text('\n'.join(config_py_lines),
'utf-8', ensure=True)
with caplog.at_level(logging.ERROR):
configinit.early_init(args)
# Check error messages
expected_errors = []
if load_autoconfig or not config_py:
suffix = ' (autoconfig.yml)' if config_py else ''
if invalid_yaml in ['42', 'list']:
error = ("While loading data{}: Toplevel object is not a dict"
.format(suffix))
expected_errors.append(error)
elif invalid_yaml == 'wrong-type':
error = ("Error{}: Invalid value 'True' - expected a value of "
"type str but got bool.".format(suffix))
expected_errors.append(error)
elif invalid_yaml == 'unknown':
error = ("While loading options{}: Unknown option "
"colors.foobar".format(suffix))
expected_errors.append(error)
if config_py == 'error':
expected_errors.append("While setting 'foo': No option 'foo'")
if configinit._init_errors is None:
actual_errors = []
else:
actual_errors = [str(err)
for err in configinit._init_errors.errors]
assert actual_errors == expected_errors
# Check config values
dump = config.instance.dump_userconfig()
if config_py and load_autoconfig and not invalid_yaml:
expected = [
'colors.hints.bg = red',
'colors.hints.fg = magenta',
]
elif config_py:
expected = ['colors.hints.bg = red']
elif invalid_yaml:
expected = ['<Default configuration>']
else:
expected = ['colors.hints.fg = magenta']
assert dump == '\n'.join(expected)
@pytest.mark.parametrize('byte', [
b'\x00', # configparser.Error
b'\xda', # UnicodeDecodeError
])
def test_state_init_errors(self, init_patch, args, data_tmpdir, byte):
state_file = data_tmpdir / 'state'
state_file.write_binary(byte)
configinit.early_init(args)
assert configinit._init_errors.errors
def test_invalid_change_filter(self, init_patch, args):
config.change_filter('foobar')
with pytest.raises(configexc.NoOptionError):
configinit.early_init(args)
def test_temp_settings_valid(self, init_patch, args):
args.temp_settings = [('colors.completion.fg', 'magenta')]
configinit.early_init(args)
assert config.instance.get_obj('colors.completion.fg') == 'magenta'
def test_temp_settings_invalid(self, caplog, init_patch, message_mock,
args):
"""Invalid temp settings should show an error."""
args.temp_settings = [('foo', 'bar')]
with caplog.at_level(logging.ERROR):
configinit.early_init(args)
msg = message_mock.getmsg()
assert msg.level == usertypes.MessageLevel.error
assert msg.text == "set: NoOptionError - No option 'foo'"
class TestLateInit:
@pytest.mark.parametrize('errors', [True, 'fatal', False])
def test_late_init(self, init_patch, monkeypatch, fake_save_manager, args,
mocker, errors):
configinit.early_init(args)
if errors:
err = configexc.ConfigErrorDesc("Error text",
Exception("Exception"))
errs = configexc.ConfigFileErrors("config.py", [err])
if errors == 'fatal':
errs.fatal = True
monkeypatch.setattr(configinit, '_init_errors', errs)
msgbox_mock = mocker.patch(
'qutebrowser.config.configinit.msgbox.msgbox', autospec=True)
exit_mock = mocker.patch(
'qutebrowser.config.configinit.sys.exit', autospec=True)
configinit.late_init(fake_save_manager)
fake_save_manager.add_saveable.assert_any_call(
'state-config', unittest.mock.ANY)
fake_save_manager.add_saveable.assert_any_call(
'yaml-config', unittest.mock.ANY, unittest.mock.ANY)
if errors:
assert len(msgbox_mock.call_args_list) == 1
_call_posargs, call_kwargs = msgbox_mock.call_args_list[0]
text = call_kwargs['text'].strip()
assert text.startswith('Errors occurred while reading config.py:')
assert '<b>Error text</b>: Exception' in text
assert exit_mock.called == (errors == 'fatal')
else:
assert not msgbox_mock.called
@pytest.mark.parametrize('settings, size, family', [
# Only fonts.default_family customized
([('fonts.default_family', 'Comic Sans MS')], 10, 'Comic Sans MS'),
# default_family and default_size
([('fonts.default_family', 'Comic Sans MS'),
('fonts.default_size', '23pt')], 23, 'Comic Sans MS'),
# fonts.default_family and font settings customized
# https://github.com/qutebrowser/qutebrowser/issues/3096
([('fonts.default_family', 'Comic Sans MS'),
('fonts.keyhint', '12pt default_family')], 12, 'Comic Sans MS'),
# as above, but with default_size
([('fonts.default_family', 'Comic Sans MS'),
('fonts.default_size', '23pt'),
('fonts.keyhint', 'default_size default_family')],
23, 'Comic Sans MS'),
])
@pytest.mark.parametrize('method', ['temp', 'auto', 'py'])
def test_fonts_defaults_init(self, init_patch, args, config_tmpdir,
fake_save_manager, method,
settings, size, family):
"""Ensure setting fonts.default_family at init works properly.
See https://github.com/qutebrowser/qutebrowser/issues/2973
and https://github.com/qutebrowser/qutebrowser/issues/5223
"""
if method == 'temp':
args.temp_settings = settings
elif method == 'auto':
autoconfig_file = config_tmpdir / 'autoconfig.yml'
lines = (["config_version: 2", "settings:"] +
[" {}:\n global:\n '{}'".format(k, v)
for k, v in settings])
autoconfig_file.write_text('\n'.join(lines), 'utf-8', ensure=True)
elif method == 'py':
config_py_file = config_tmpdir / 'config.py'
lines = ["c.{} = '{}'".format(k, v) for k, v in settings]
lines.append("config.load_autoconfig(False)")
config_py_file.write_text('\n'.join(lines), 'utf-8', ensure=True)
configinit.early_init(args)
configinit.late_init(fake_save_manager)
# Font
expected = '{}pt "{}"'.format(size, family)
assert config.instance.get('fonts.keyhint') == expected
@pytest.fixture
def run_configinit(self, init_patch, fake_save_manager, args):
"""Run configinit.early_init() and .late_init()."""
configinit.early_init(args)
configinit.late_init(fake_save_manager)
def test_fonts_defaults_later(self, run_configinit):
"""Ensure setting fonts.default_family/size after init works properly.
See https://github.com/qutebrowser/qutebrowser/issues/2973
"""
changed_options = []
config.instance.changed.connect(changed_options.append)
config.instance.set_obj('fonts.default_family', 'Comic Sans MS')
config.instance.set_obj('fonts.default_size', '23pt')
assert 'fonts.keyhint' in changed_options # Font
assert config.instance.get('fonts.keyhint') == '23pt "Comic Sans MS"'
# Font subclass, but doesn't end with "default_family"
assert 'fonts.web.family.standard' not in changed_options
def test_setting_fonts_defaults_family(self, run_configinit):
"""Make sure setting fonts.default_family/size after a family works.
See https://github.com/qutebrowser/qutebrowser/issues/3130
"""
config.instance.set_str('fonts.web.family.standard', '')
config.instance.set_str('fonts.default_family', 'Terminus')
config.instance.set_str('fonts.default_size', '10pt')
def test_default_size_hints(self, run_configinit):
"""Make sure default_size applies to the hints font.
See https://github.com/qutebrowser/qutebrowser/issues/5214
"""
config.instance.set_obj('fonts.default_family', 'SomeFamily')
config.instance.set_obj('fonts.default_size', '23pt')
assert config.instance.get('fonts.hints') == 'bold 23pt SomeFamily'
def test_default_size_hints_changed(self, run_configinit):
config.instance.set_obj('fonts.hints', 'bold default_size SomeFamily')
changed_options = []
config.instance.changed.connect(changed_options.append)
config.instance.set_obj('fonts.default_size', '23pt')
assert config.instance.get('fonts.hints') == 'bold 23pt SomeFamily'
assert 'fonts.hints' in changed_options
@pytest.mark.parametrize('arg, confval, used', [
# overridden by commandline arg
('webkit', 'webengine', usertypes.Backend.QtWebKit),
# set in config
(None, 'webkit', usertypes.Backend.QtWebKit),
])
def test_get_backend(monkeypatch, args, config_stub,
arg, confval, used):
real_import = __import__
def fake_import(name, *args, **kwargs):
if name != 'PyQt5.QtWebKit':
return real_import(name, *args, **kwargs)
raise ImportError
args.backend = arg
config_stub.val.backend = confval
monkeypatch.setattr(builtins, '__import__', fake_import)
assert configinit.get_backend(args) == used
|
from molecule import logger
from molecule.driver import base
from molecule import util
LOG = logger.get_logger(__name__)
class GCE(base.Base):
"""
The class responsible for managing `GCE`_ instances. `GCE`_
is `not` the default driver used in Molecule.
    GCE is somewhat different from other cloud providers. There is no
    Ansible module for managing ssh keys, so this driver assumes the developer
    has deployed a project-wide ssh key.
    Molecule leverages Ansible's `gce_module`_ by mapping variables from
    ``molecule.yml`` into ``create.yml`` and ``destroy.yml``.
.. _`gce_module`: https://docs.ansible.com/ansible/latest/gce_module.html
.. code-block:: yaml
driver:
name: gce
platforms:
- name: instance
.. code-block:: bash
$ pip install molecule[gce]
Change the options passed to the ssh client.
.. code-block:: yaml
driver:
name: gce
ssh_connection_options:
-o ControlPath=~/.ansible/cp/%r@%h-%p
.. important::
        Molecule does not merge lists; when overriding, the developer must
        provide all options.
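    For example, to change only the control path the whole list must be
    restated (the option values below are illustrative, not guaranteed
    defaults):
    .. code-block:: yaml
        driver:
          name: gce
          ssh_connection_options:
            -o UserKnownHostsFile=/dev/null
            -o ControlMaster=auto
            -o ControlPath=~/.ansible/cp/%r@%h-%p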
Provide a list of files Molecule will preserve, relative to the scenario
ephemeral directory, after any ``destroy`` subcommand execution.
.. code-block:: yaml
driver:
name: gce
safe_files:
- foo
.. _`GCE`: https://cloud.google.com/compute/docs/
""" # noqa
def __init__(self, config):
super(GCE, self).__init__(config)
self._name = 'gce'
@property
def name(self):
return self._name
@name.setter
def name(self, value):
self._name = value
@property
def login_cmd_template(self):
connection_options = ' '.join(self.ssh_connection_options)
return ('ssh {{address}} '
'-l {{user}} '
'-p {{port}} '
'-i {{identity_file}} '
'{}').format(connection_options)
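    # For illustration only: the template above renders roughly to
    #   'ssh {address} -l {user} -p {port} -i {identity_file} <ssh options>'
    # and the curly-brace fields are filled in later from login_options().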
@property
def default_safe_files(self):
return [
self.instance_config,
]
@property
def default_ssh_connection_options(self):
return self._get_ssh_connection_options()
def login_options(self, instance_name):
d = {'instance': instance_name}
return util.merge_dicts(d, self._get_instance_config(instance_name))
def ansible_connection_options(self, instance_name):
try:
d = self._get_instance_config(instance_name)
return {
'ansible_user': d['user'],
'ansible_host': d['address'],
'ansible_port': d['port'],
'ansible_private_key_file': d['identity_file'],
'connection': 'ssh',
'ansible_ssh_common_args':
' '.join(self.ssh_connection_options),
}
except StopIteration:
return {}
except IOError:
            # Instance has yet to be provisioned, therefore the
# instance_config is not on disk.
return {}
def _get_instance_config(self, instance_name):
instance_config_dict = util.safe_load_file(
self._config.driver.instance_config)
return next(item for item in instance_config_dict
if item['instance'] == instance_name)
def sanity_checks(self):
# FIXME(decentral1se): Implement sanity checks
pass
|
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
ATTR_COLOR_TEMP,
ATTR_EFFECT,
ATTR_FLASH,
ATTR_HS_COLOR,
ATTR_TRANSITION,
DOMAIN,
EFFECT_COLORLOOP,
FLASH_LONG,
FLASH_SHORT,
SUPPORT_BRIGHTNESS,
SUPPORT_COLOR,
SUPPORT_COLOR_TEMP,
SUPPORT_EFFECT,
SUPPORT_FLASH,
SUPPORT_TRANSITION,
LightEntity,
)
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
import homeassistant.util.color as color_util
from .const import (
CONF_GROUP_ID_BASE,
COVER_TYPES,
DOMAIN as DECONZ_DOMAIN,
LOCK_TYPES,
NEW_GROUP,
NEW_LIGHT,
SWITCH_TYPES,
)
from .deconz_device import DeconzDevice
from .gateway import get_gateway_from_config_entry
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the deCONZ lights and groups from a config entry."""
gateway = get_gateway_from_config_entry(hass, config_entry)
gateway.entities[DOMAIN] = set()
@callback
def async_add_light(lights):
"""Add light from deCONZ."""
entities = []
for light in lights:
if (
light.type not in COVER_TYPES + LOCK_TYPES + SWITCH_TYPES
and light.uniqueid not in gateway.entities[DOMAIN]
):
entities.append(DeconzLight(light, gateway))
if entities:
async_add_entities(entities)
gateway.listeners.append(
async_dispatcher_connect(
hass, gateway.async_signal_new_device(NEW_LIGHT), async_add_light
)
)
@callback
def async_add_group(groups):
"""Add group from deCONZ."""
if not gateway.option_allow_deconz_groups:
return
entities = []
for group in groups:
if not group.lights:
continue
known_groups = set(gateway.entities[DOMAIN])
new_group = DeconzGroup(group, gateway)
if new_group.unique_id not in known_groups:
entities.append(new_group)
if entities:
async_add_entities(entities)
gateway.listeners.append(
async_dispatcher_connect(
hass, gateway.async_signal_new_device(NEW_GROUP), async_add_group
)
)
async_add_light(gateway.api.lights.values())
async_add_group(gateway.api.groups.values())
class DeconzBaseLight(DeconzDevice, LightEntity):
"""Representation of a deCONZ light."""
TYPE = DOMAIN
def __init__(self, device, gateway):
"""Set up light."""
super().__init__(device, gateway)
self._features = 0
if self._device.brightness is not None:
self._features |= SUPPORT_BRIGHTNESS
self._features |= SUPPORT_FLASH
self._features |= SUPPORT_TRANSITION
if self._device.ct is not None:
self._features |= SUPPORT_COLOR_TEMP
if self._device.xy is not None:
self._features |= SUPPORT_COLOR
if self._device.effect is not None:
self._features |= SUPPORT_EFFECT
@property
def brightness(self):
"""Return the brightness of this light between 0..255."""
return self._device.brightness
@property
def effect_list(self):
"""Return the list of supported effects."""
return [EFFECT_COLORLOOP]
@property
def color_temp(self):
"""Return the CT color value."""
if self._device.colormode != "ct":
return None
return self._device.ct
@property
def hs_color(self):
"""Return the hs color value."""
if self._device.colormode in ("xy", "hs") and self._device.xy:
return color_util.color_xy_to_hs(*self._device.xy)
return None
@property
def is_on(self):
"""Return true if light is on."""
return self._device.state
@property
def supported_features(self):
"""Flag supported features."""
return self._features
async def async_turn_on(self, **kwargs):
"""Turn on light."""
data = {"on": True}
if ATTR_COLOR_TEMP in kwargs:
data["ct"] = kwargs[ATTR_COLOR_TEMP]
if ATTR_HS_COLOR in kwargs:
data["xy"] = color_util.color_hs_to_xy(*kwargs[ATTR_HS_COLOR])
if ATTR_BRIGHTNESS in kwargs:
data["bri"] = kwargs[ATTR_BRIGHTNESS]
if ATTR_TRANSITION in kwargs:
data["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)
elif "IKEA" in self._device.manufacturer:
data["transitiontime"] = 0
if ATTR_FLASH in kwargs:
if kwargs[ATTR_FLASH] == FLASH_SHORT:
data["alert"] = "select"
del data["on"]
elif kwargs[ATTR_FLASH] == FLASH_LONG:
data["alert"] = "lselect"
del data["on"]
if ATTR_EFFECT in kwargs:
if kwargs[ATTR_EFFECT] == EFFECT_COLORLOOP:
data["effect"] = "colorloop"
else:
data["effect"] = "none"
await self._device.async_set_state(data)
async def async_turn_off(self, **kwargs):
"""Turn off light."""
if not self._device.state:
return
data = {"on": False}
if ATTR_TRANSITION in kwargs:
data["bri"] = 0
data["transitiontime"] = int(kwargs[ATTR_TRANSITION] * 10)
if ATTR_FLASH in kwargs:
if kwargs[ATTR_FLASH] == FLASH_SHORT:
data["alert"] = "select"
del data["on"]
elif kwargs[ATTR_FLASH] == FLASH_LONG:
data["alert"] = "lselect"
del data["on"]
await self._device.async_set_state(data)
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return {"is_deconz_group": self._device.type == "LightGroup"}
class DeconzLight(DeconzBaseLight):
"""Representation of a deCONZ light."""
@property
def max_mireds(self):
"""Return the warmest color_temp that this light supports."""
return self._device.ctmax or super().max_mireds
@property
def min_mireds(self):
"""Return the coldest color_temp that this light supports."""
return self._device.ctmin or super().min_mireds
class DeconzGroup(DeconzBaseLight):
"""Representation of a deCONZ group."""
def __init__(self, device, gateway):
"""Set up group and create an unique id."""
group_id_base = gateway.config_entry.unique_id
if CONF_GROUP_ID_BASE in gateway.config_entry.data:
group_id_base = gateway.config_entry.data[CONF_GROUP_ID_BASE]
self._unique_id = f"{group_id_base}-{device.deconz_id}"
super().__init__(device, gateway)
@property
def unique_id(self):
"""Return a unique identifier for this device."""
return self._unique_id
@property
def device_info(self):
"""Return a device description for device registry."""
bridgeid = self.gateway.api.config.bridgeid
return {
"identifiers": {(DECONZ_DOMAIN, self.unique_id)},
"manufacturer": "Dresden Elektronik",
"model": "deCONZ group",
"name": self._device.name,
"via_device": (DECONZ_DOMAIN, bridgeid),
}
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attributes = dict(super().device_state_attributes)
attributes["all_on"] = self._device.all_on
return attributes
|
from jolokia import JolokiaCollector
import math
import re
class CassandraJolokiaCollector(JolokiaCollector):
# override to allow setting which percentiles will be collected
def get_default_config_help(self):
config_help = super(CassandraJolokiaCollector,
self).get_default_config_help()
config_help.update({
'percentiles':
'Comma separated list of percentiles to be collected '
'(e.g., "50,95,99").',
'histogram_regex':
'Filter to only process attributes that match this regex'
})
return config_help
# override to allow setting which percentiles will be collected
def get_default_config(self):
config = super(CassandraJolokiaCollector, self).get_default_config()
config.update({
'percentiles': ['50', '95', '99'],
'histogram_regex': '.*HistogramMicros$'
})
return config
def __init__(self, *args, **kwargs):
super(CassandraJolokiaCollector, self).__init__(*args, **kwargs)
self.offsets = self.create_offsets(91)
self.update_config(self.config)
def update_config(self, config):
if 'percentiles' in config:
            # Materialize the values so they can be iterated once per bean
            self.percentiles = [int(p) for p in config['percentiles']]
if 'histogram_regex' in config:
self.histogram_regex = re.compile(config['histogram_regex'])
# override: Interpret beans that match the `histogram_regex` as histograms,
# and collect percentiles from them.
def interpret_bean_with_list(self, prefix, values):
if not self.histogram_regex.match(prefix):
return
buckets = values
offsets = self.offsets
for percentile in self.percentiles:
value = self.compute_percentile(offsets, buckets, percentile)
cleaned_key = self.clean_up("%s.p%s" % (prefix, percentile))
self.publish(cleaned_key, value)
# Adapted from Cassandra docs:
# https://bit.ly/13M5JPE
# The index corresponds to the x-axis in a histogram. It represents buckets
# of values, which are a series of ranges. Each offset includes the range of
# values greater than the previous offset and less than or equal to the
# current offset. The offsets start at 1 and each subsequent offset is
# calculated by multiplying the previous offset by 1.2, rounding up, and
# removing duplicates. The offsets can range from 1 to approximately 25
# million, with less precision as the offsets get larger.
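    # Illustration (not part of the collector's behaviour): with the rule
    # above the first offsets come out as
    #   1, 2, 3, 4, 5, 6, 7, 8, 10, 12, 14, 17, 20, 24, 29, 35, 42, 50, ...
    # which matches Cassandra's documented EstimatedHistogram bucket bounds.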
def compute_percentile(self, offsets, buckets, percentile_int):
non_zero_points_sum = sum(buckets)
        if non_zero_points_sum == 0:
return 0
middle_point_index = math.floor(
non_zero_points_sum * (percentile_int / float(100)))
points_seen = 0
for index, bucket in enumerate(buckets):
points_seen += bucket
if points_seen >= middle_point_index:
return round((offsets[index] - offsets[index - 1]) / 2)
    # Returns a list of offsets for `bucket_count` buckets.
def create_offsets(self, bucket_count):
last_num = 1
offsets = [last_num]
for index in range(bucket_count):
next_num = round(last_num * 1.2)
if next_num == last_num:
next_num += 1
offsets.append(next_num)
last_num = next_num
return offsets
|
from yeelight import BulbException, BulbType
from yeelight.main import _MODEL_SPECS
from homeassistant.components.yeelight import (
CONF_MODE_MUSIC,
CONF_NIGHTLIGHT_SWITCH_TYPE,
CONF_SAVE_ON_CHANGE,
DOMAIN,
NIGHTLIGHT_SWITCH_TYPE_LIGHT,
YeelightScanner,
)
from homeassistant.const import CONF_DEVICES, CONF_ID, CONF_NAME
from tests.async_mock import MagicMock, patch
IP_ADDRESS = "192.168.1.239"
MODEL = "color"
ID = "0x000000000015243f"
FW_VER = "18"
CAPABILITIES = {
"id": ID,
"model": MODEL,
"fw_ver": FW_VER,
"support": "get_prop set_default set_power toggle set_bright start_cf stop_cf"
" set_scene cron_add cron_get cron_del set_ct_abx set_rgb",
"name": "",
}
NAME = "name"
UNIQUE_NAME = f"yeelight_{MODEL}_{ID}"
MODULE = "homeassistant.components.yeelight"
MODULE_CONFIG_FLOW = f"{MODULE}.config_flow"
PROPERTIES = {
"power": "on",
"main_power": "on",
"bright": "50",
"ct": "4000",
"rgb": "16711680",
"hue": "100",
"sat": "35",
"color_mode": "1",
"flowing": "0",
"bg_power": "on",
"bg_lmode": "1",
"bg_flowing": "0",
"bg_ct": "5000",
"bg_bright": "80",
"bg_rgb": "16711680",
"nl_br": "23",
"active_mode": "0",
"current_brightness": "30",
}
ENTITY_BINARY_SENSOR = f"binary_sensor.{UNIQUE_NAME}_nightlight"
ENTITY_LIGHT = f"light.{UNIQUE_NAME}"
ENTITY_NIGHTLIGHT = f"light.{UNIQUE_NAME}_nightlight"
ENTITY_AMBILIGHT = f"light.{UNIQUE_NAME}_ambilight"
YAML_CONFIGURATION = {
DOMAIN: {
CONF_DEVICES: {
IP_ADDRESS: {
CONF_NAME: NAME,
CONF_NIGHTLIGHT_SWITCH_TYPE: NIGHTLIGHT_SWITCH_TYPE_LIGHT,
CONF_MODE_MUSIC: True,
CONF_SAVE_ON_CHANGE: True,
}
}
}
}
CONFIG_ENTRY_DATA = {
CONF_ID: ID,
}
def _mocked_bulb(cannot_connect=False):
bulb = MagicMock()
type(bulb).get_capabilities = MagicMock(
return_value=None if cannot_connect else CAPABILITIES
)
type(bulb).get_properties = MagicMock(
side_effect=BulbException if cannot_connect else None
)
type(bulb).get_model_specs = MagicMock(return_value=_MODEL_SPECS[MODEL])
bulb.capabilities = CAPABILITIES
bulb.model = MODEL
bulb.bulb_type = BulbType.Color
bulb.last_properties = PROPERTIES
bulb.music_mode = False
return bulb
def _patch_discovery(prefix, no_device=False):
YeelightScanner._scanner = None # Clear class scanner to reset hass
def _mocked_discovery(timeout=2, interface=False):
if no_device:
return []
return [{"ip": IP_ADDRESS, "port": 55443, "capabilities": CAPABILITIES}]
return patch(f"{prefix}.discover_bulbs", side_effect=_mocked_discovery)
|
from datetime import timedelta
import logging
from coinbase.wallet.client import Client
from coinbase.wallet.error import AuthenticationError
import voluptuous as vol
from homeassistant.const import CONF_API_KEY
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import load_platform
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
DOMAIN = "coinbase"
CONF_API_SECRET = "api_secret"
CONF_ACCOUNT_CURRENCIES = "account_balance_currencies"
CONF_EXCHANGE_CURRENCIES = "exchange_rate_currencies"
MIN_TIME_BETWEEN_UPDATES = timedelta(minutes=1)
DATA_COINBASE = "coinbase_cache"
CONFIG_SCHEMA = vol.Schema(
{
DOMAIN: vol.Schema(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_API_SECRET): cv.string,
vol.Optional(CONF_ACCOUNT_CURRENCIES): vol.All(
cv.ensure_list, [cv.string]
),
vol.Optional(CONF_EXCHANGE_CURRENCIES, default=[]): vol.All(
cv.ensure_list, [cv.string]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
def setup(hass, config):
"""Set up the Coinbase component.
    Will automatically set up sensors to support
wallets discovered on the network.
"""
api_key = config[DOMAIN][CONF_API_KEY]
api_secret = config[DOMAIN][CONF_API_SECRET]
account_currencies = config[DOMAIN].get(CONF_ACCOUNT_CURRENCIES)
exchange_currencies = config[DOMAIN][CONF_EXCHANGE_CURRENCIES]
hass.data[DATA_COINBASE] = coinbase_data = CoinbaseData(api_key, api_secret)
if not hasattr(coinbase_data, "accounts"):
return False
for account in coinbase_data.accounts.data:
if account_currencies is None or account.currency in account_currencies:
load_platform(hass, "sensor", DOMAIN, {"account": account}, config)
for currency in exchange_currencies:
if currency not in coinbase_data.exchange_rates.rates:
_LOGGER.warning("Currency %s not found", currency)
continue
native = coinbase_data.exchange_rates.currency
load_platform(
hass,
"sensor",
DOMAIN,
{"native_currency": native, "exchange_currency": currency},
config,
)
return True
class CoinbaseData:
"""Get the latest data and update the states."""
def __init__(self, api_key, api_secret):
"""Init the coinbase data object."""
self.client = Client(api_key, api_secret)
self.update()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Get the latest data from coinbase."""
try:
self.accounts = self.client.get_accounts()
self.exchange_rates = self.client.get_exchange_rates()
except AuthenticationError as coinbase_error:
_LOGGER.error(
"Authentication error connecting to coinbase: %s", coinbase_error
)
|
from homeassistant.components.climate import ClimateEntity
from homeassistant.components.climate.const import (
HVAC_MODE_AUTO,
HVAC_MODE_HEAT,
HVAC_MODE_OFF,
PRESET_BOOST,
PRESET_COMFORT,
PRESET_ECO,
SUPPORT_PRESET_MODE,
SUPPORT_TARGET_TEMPERATURE,
)
from homeassistant.const import ATTR_TEMPERATURE, TEMP_CELSIUS
from .const import ATTR_DISCOVER_DEVICES, HM_ATTRIBUTE_SUPPORT
from .entity import HMDevice
HM_TEMP_MAP = ["ACTUAL_TEMPERATURE", "TEMPERATURE"]
HM_HUMI_MAP = ["ACTUAL_HUMIDITY", "HUMIDITY"]
HM_PRESET_MAP = {
"BOOST_MODE": PRESET_BOOST,
"COMFORT_MODE": PRESET_COMFORT,
"LOWERING_MODE": PRESET_ECO,
}
HM_CONTROL_MODE = "CONTROL_MODE"
HMIP_CONTROL_MODE = "SET_POINT_MODE"
SUPPORT_FLAGS = SUPPORT_TARGET_TEMPERATURE | SUPPORT_PRESET_MODE
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Homematic thermostat platform."""
if discovery_info is None:
return
devices = []
for conf in discovery_info[ATTR_DISCOVER_DEVICES]:
new_device = HMThermostat(conf)
devices.append(new_device)
add_entities(devices, True)
class HMThermostat(HMDevice, ClimateEntity):
"""Representation of a Homematic thermostat."""
@property
def supported_features(self):
"""Return the list of supported features."""
return SUPPORT_FLAGS
@property
def temperature_unit(self):
"""Return the unit of measurement that is used."""
return TEMP_CELSIUS
@property
def hvac_mode(self):
"""Return hvac operation ie. heat, cool mode.
Need to be one of HVAC_MODE_*.
"""
if self.target_temperature <= self._hmdevice.OFF_VALUE + 0.5:
return HVAC_MODE_OFF
if "MANU_MODE" in self._hmdevice.ACTIONNODE:
if self._hm_control_mode == self._hmdevice.MANU_MODE:
return HVAC_MODE_HEAT
return HVAC_MODE_AUTO
# Simple devices
if self._data.get("BOOST_MODE"):
return HVAC_MODE_AUTO
return HVAC_MODE_HEAT
@property
def hvac_modes(self):
"""Return the list of available hvac operation modes.
        Needs to be a subset of HVAC_MODES.
"""
if "AUTO_MODE" in self._hmdevice.ACTIONNODE:
return [HVAC_MODE_AUTO, HVAC_MODE_HEAT, HVAC_MODE_OFF]
return [HVAC_MODE_HEAT, HVAC_MODE_OFF]
@property
def preset_mode(self):
"""Return the current preset mode, e.g., home, away, temp."""
if self._data.get("BOOST_MODE", False):
return "boost"
if not self._hm_control_mode:
return None
mode = HM_ATTRIBUTE_SUPPORT[HM_CONTROL_MODE][1][self._hm_control_mode]
mode = mode.lower()
# Filter HVAC states
if mode not in (HVAC_MODE_AUTO, HVAC_MODE_HEAT):
return None
return mode
@property
def preset_modes(self):
"""Return a list of available preset modes."""
preset_modes = []
for mode in self._hmdevice.ACTIONNODE:
if mode in HM_PRESET_MAP:
preset_modes.append(HM_PRESET_MAP[mode])
return preset_modes
@property
def current_humidity(self):
"""Return the current humidity."""
for node in HM_HUMI_MAP:
if node in self._data:
return self._data[node]
@property
def current_temperature(self):
"""Return the current temperature."""
for node in HM_TEMP_MAP:
if node in self._data:
return self._data[node]
@property
def target_temperature(self):
"""Return the target temperature."""
return self._data.get(self._state)
def set_temperature(self, **kwargs):
"""Set new target temperature."""
temperature = kwargs.get(ATTR_TEMPERATURE)
if temperature is None:
return None
self._hmdevice.writeNodeData(self._state, float(temperature))
def set_hvac_mode(self, hvac_mode):
"""Set new target hvac mode."""
if hvac_mode == HVAC_MODE_AUTO:
self._hmdevice.MODE = self._hmdevice.AUTO_MODE
elif hvac_mode == HVAC_MODE_HEAT:
self._hmdevice.MODE = self._hmdevice.MANU_MODE
elif hvac_mode == HVAC_MODE_OFF:
self._hmdevice.turnoff()
def set_preset_mode(self, preset_mode: str) -> None:
"""Set new preset mode."""
if preset_mode == PRESET_BOOST:
self._hmdevice.MODE = self._hmdevice.BOOST_MODE
elif preset_mode == PRESET_COMFORT:
self._hmdevice.MODE = self._hmdevice.COMFORT_MODE
elif preset_mode == PRESET_ECO:
self._hmdevice.MODE = self._hmdevice.LOWERING_MODE
@property
def min_temp(self):
"""Return the minimum temperature."""
return 4.5
@property
def max_temp(self):
"""Return the maximum temperature."""
return 30.5
@property
def target_temperature_step(self):
"""Return the supported step of target temperature."""
return 0.5
@property
def _hm_control_mode(self):
"""Return Control mode."""
if HMIP_CONTROL_MODE in self._data:
return self._data[HMIP_CONTROL_MODE]
# Homematic
return self._data.get("CONTROL_MODE")
def _init_data_struct(self):
"""Generate a data dict (self._data) from the Homematic metadata."""
self._state = next(iter(self._hmdevice.WRITENODE.keys()))
self._data[self._state] = None
if (
HM_CONTROL_MODE in self._hmdevice.ATTRIBUTENODE
or HMIP_CONTROL_MODE in self._hmdevice.ATTRIBUTENODE
):
self._data[HM_CONTROL_MODE] = None
for node in self._hmdevice.SENSORNODE.keys():
self._data[node] = None
|
from weblate.trans.management.commands import WeblateLangCommand
from weblate.trans.tasks import commit_pending
class Command(WeblateLangCommand):
help = "commits pending changes older than given age"
def add_arguments(self, parser):
super().add_arguments(parser)
parser.add_argument(
"--age",
action="store",
type=int,
dest="age",
default=None,
help="Age of changes to commit in hours",
)
def handle(self, *args, **options):
commit_pending(
options["age"],
set(self.get_translations(**options).values_list("id", flat=True)),
self.stdout.write if int(options["verbosity"]) >= 1 else None,
)
|
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from numpy import array
import matplotlib.pyplot as plt
from filterpy.hinfinity import HInfinityFilter
def test_Hinfinity():
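    # Smoke test without assertions: feeds a constant measurement of 5 into a
    # two-state (position/velocity) H-infinity filter for 39 steps and plots
    # both state estimates.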
dt = 0.1
f = HInfinityFilter(2, 1, 0, gamma=.4)
f.F = array([[1., dt],
[0., 1.]])
f.H = array([[0., 1.]])
f.x = array([[0., 0.]]).T
#f.G = array([[dt**2 / 2, dt]]).T
f.P = 0.01
f.W = array([[0.0003, 0.005],
[0.0050, 0.100]])/ 1000
f.V = 0.01
f.Q = 0.01
xs = []
vs = []
    for i in range(1, 40):
        f.update(5)
print(f.x.T)
xs.append(f.x[0,0])
vs.append(f.x[1,0])
f.predict()
plt.subplot(211)
plt.plot(xs)
plt.subplot(212)
plt.plot(vs)
if __name__ == "__main__":
test_Hinfinity()
|
import logging
from unifiled import unifiled
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
# Validation of the user's configuration
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Optional(CONF_PORT, default=20443): vol.All(cv.port, cv.string),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Unifi LED platform."""
# Assign configuration variables.
# The configuration check takes care they are present.
host = config[CONF_HOST]
port = config[CONF_PORT]
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
api = unifiled(host, port, username=username, password=password)
# Verify that passed in configuration works
if not api.getloginstate():
_LOGGER.error("Could not connect to unifiled controller")
return
add_entities(UnifiLedLight(light, api) for light in api.getlights())
class UnifiLedLight(LightEntity):
"""Representation of an unifiled Light."""
def __init__(self, light, api):
"""Init Unifi LED Light."""
self._api = api
self._light = light
self._name = light["name"]
self._unique_id = light["id"]
self._state = light["status"]["output"]
self._available = light["isOnline"]
self._brightness = self._api.convertfrom100to255(light["status"]["led"])
self._features = SUPPORT_BRIGHTNESS
@property
def name(self):
"""Return the display name of this light."""
return self._name
@property
def available(self):
"""Return the available state of this light."""
return self._available
@property
def brightness(self):
"""Return the brightness name of this light."""
return self._brightness
@property
def unique_id(self):
"""Return the unique id of this light."""
return self._unique_id
@property
def is_on(self):
"""Return true if light is on."""
return self._state
@property
def supported_features(self):
"""Return the supported features of this light."""
return self._features
def turn_on(self, **kwargs):
"""Instruct the light to turn on."""
self._api.setdevicebrightness(
self._unique_id,
str(self._api.convertfrom255to100(kwargs.get(ATTR_BRIGHTNESS, 255))),
)
self._api.setdeviceoutput(self._unique_id, 1)
def turn_off(self, **kwargs):
"""Instruct the light to turn off."""
self._api.setdeviceoutput(self._unique_id, 0)
def update(self):
"""Update the light states."""
self._state = self._api.getlightstate(self._unique_id)
self._brightness = self._api.convertfrom100to255(
self._api.getlightbrightness(self._unique_id)
)
self._available = self._api.getlightavailable(self._unique_id)
|
import posixpath
import socket
from http import client
from radicale import httputils
from radicale import item as radicale_item
from radicale import pathutils, storage, xmlutils
from radicale.log import logger
class ApplicationMkcalendarMixin:
def do_MKCALENDAR(self, environ, base_prefix, path, user):
"""Manage MKCALENDAR request."""
if "w" not in self._rights.authorization(user, path):
return httputils.NOT_ALLOWED
try:
xml_content = self._read_xml_request_body(environ)
except RuntimeError as e:
logger.warning(
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
except socket.timeout:
logger.debug("Client timed out", exc_info=True)
return httputils.REQUEST_TIMEOUT
# Prepare before locking
props = xmlutils.props_from_request(xml_content)
props = {k: v for k, v in props.items() if v is not None}
props["tag"] = "VCALENDAR"
# TODO: use this?
# timezone = props.get("C:calendar-timezone")
try:
radicale_item.check_and_sanitize_props(props)
except ValueError as e:
logger.warning(
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
with self._storage.acquire_lock("w", user):
item = next(self._storage.discover(path), None)
if item:
return self._webdav_error_response(
client.CONFLICT, "D:resource-must-be-null")
parent_path = pathutils.unstrip_path(
posixpath.dirname(pathutils.strip_path(path)), True)
parent_item = next(self._storage.discover(parent_path), None)
if not parent_item:
return httputils.CONFLICT
if (not isinstance(parent_item, storage.BaseCollection) or
parent_item.get_meta("tag")):
return httputils.FORBIDDEN
try:
self._storage.create_collection(path, props=props)
except ValueError as e:
logger.warning(
"Bad MKCALENDAR request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
return client.CREATED, {}, None
|
import copy
import json
import os
from typing import Dict, List, Text, Tuple
from absl import flags
from perfkitbenchmarker import data
from perfkitbenchmarker import edw_service
from perfkitbenchmarker import vm_util
from perfkitbenchmarker.providers import azure
FLAGS = flags.FLAGS
VALID_EXIST_STATUSES = ['Resuming', 'Online']
READY_STATUSES = ['Online']
PAUSING_STATUSES = ['Pausing']
SYNAPSE_JDBC_JAR = 'synapse-jdbc-client-1.0.jar'
def GetSqlDataWarehouseClientInterface(
server_name: str, database: str, user: str, password: str,
resource_group: str) -> edw_service.EdwClientInterface:
"""Builds and Returns the requested SqlDataWarehouse client Interface.
Args:
server_name: Name of the SqlDataWarehouse server to use.
database: Name of the database to run queries against.
user: SqlDataWarehouse username for authentication.
password: SqlDataWarehouse password for authentication.
resource_group: Azure resource group used to whitelist the VM's IP address.
Returns:
A concrete Client Interface object.
Raises:
RuntimeError: if an unsupported sqldatawarehouse_client_interface is
requested.
"""
if FLAGS.sqldatawarehouse_client_interface == 'CLI':
return CliClientInterface(server_name, database, user, password,
resource_group)
if FLAGS.sqldatawarehouse_client_interface == 'JDBC':
return JdbcClientInterface(server_name, database, user, password,
resource_group)
raise RuntimeError('Unknown SqlDataWarehouse Client Interface requested.')
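# Illustrative usage (argument values are placeholders): with
# --sqldatawarehouse_client_interface=JDBC the factory returns a
# JdbcClientInterface, e.g.
#   GetSqlDataWarehouseClientInterface('my-server', 'tpch', 'admin',
#                                      'password', 'my-resource-group')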
class CliClientInterface(edw_service.EdwClientInterface):
"""Command Line Client Interface class for Azure SqlDataWarehouse.
Uses the native SqlDataWarehouse client that ships with the Azure CLI.
https://docs.microsoft.com/en-us/cli/azure/sql/server?view=azure-cli-latest
Attributes:
server_name: Name of the SqlDataWarehouse server to use.
database: Name of the database to run queries against.
    user: SqlDataWarehouse username for authentication.
    password: SqlDataWarehouse password for authentication.
resource_group: Azure resource group used to whitelist the VM's IP address.
"""
def __init__(self, server_name: str, database: str, user: str, password: str,
resource_group: str):
self.server_name = server_name
self.database = database
self.user = user
self.password = password
self.resource_group = resource_group
def Prepare(self, package_name: str) -> None:
"""Prepares the client vm to execute query.
Installs the sql server tool dependencies.
Args:
package_name: String name of the package defining the preprovisioned data
(certificates, etc.) to extract and use during client vm preparation.
"""
self.client_vm.Install('pip')
self.client_vm.RemoteCommand('sudo pip install absl-py')
self.client_vm.Install('mssql_tools')
self.whitelist_ip = self.client_vm.ip_address
cmd = [
azure.AZURE_PATH, 'sql', 'server', 'firewall-rule', 'create', '--name',
self.whitelist_ip, '--resource-group', self.resource_group, '--server',
self.server_name, '--end-ip-address', self.whitelist_ip,
'--start-ip-address', self.whitelist_ip
]
vm_util.IssueCommand(cmd)
# Push the framework to execute a sql query and gather performance details
service_specific_dir = os.path.join('edw',
Azuresqldatawarehouse.SERVICE_TYPE)
self.client_vm.PushFile(
data.ResourcePath(
os.path.join(service_specific_dir, 'script_runner.sh')))
runner_permission_update_cmd = 'chmod 755 {}'.format('script_runner.sh')
self.client_vm.RemoteCommand(runner_permission_update_cmd)
self.client_vm.PushFile(
data.ResourcePath(os.path.join('edw', 'script_driver.py')))
self.client_vm.PushFile(
data.ResourcePath(
os.path.join(service_specific_dir,
'provider_specific_script_driver.py')))
  def ExecuteQuery(self, query_name: Text) -> Tuple[float, Dict[str, str]]:
"""Executes a query and returns performance details.
Args:
query_name: String name of the query to execute
Returns:
A tuple of (execution_time, execution details)
execution_time: A Float variable set to the query's completion time in
secs. -1.0 is used as a sentinel value implying the query failed. For a
successful query the value is expected to be positive.
      performance_details: A dictionary of query execution attributes, e.g. job_id
"""
query_command = (
'python script_driver.py --script={} --server={} --database={} '
'--user={} --password={} --query_timeout={}').format(
query_name, self.server_name, self.database, self.user,
self.password, FLAGS.query_timeout)
stdout, _ = self.client_vm.RemoteCommand(query_command)
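    # The driver script is expected to print JSON shaped roughly like
    #   {"<query_name>": {"execution_time": <secs>, "job_id": "<id>"}}
    # (shape inferred from the parsing below; values are illustrative).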
performance = json.loads(stdout)
details = copy.copy(self.GetMetadata())
details['job_id'] = performance[query_name]['job_id']
return float(performance[query_name]['execution_time']), details
def GetMetadata(self) -> Dict[str, str]:
"""Gets the Metadata attributes for the Client Interface."""
return {'client': FLAGS.sqldatawarehouse_client_interface}
class JdbcClientInterface(edw_service.EdwClientInterface):
"""JDBC Client Interface class for Azure SqlDataWarehouse.
Attributes:
server_name: Name of the SqlDataWarehouse server to use.
database: Name of the database to run queries against.
    user: SqlDataWarehouse username for authentication.
    password: SqlDataWarehouse password for authentication.
resource_group: Azure resource group used to whitelist the VM's IP address.
"""
def __init__(self, server_name: str, database: str, user: str, password: str,
resource_group: str):
self.server_name = server_name
self.database = database
self.user = user
self.password = password
self.resource_group = resource_group
def Prepare(self, package_name: str) -> None:
"""Prepares the client vm to execute query.
Installs the sql server tool dependencies.
Args:
package_name: String name of the package defining the preprovisioned data
(certificates, etc.) to extract and use during client vm preparation.
"""
self.client_vm.Install('openjdk')
self.client_vm.Install('mssql_tools')
self.client_vm.Install('azure_cli')
self.whitelist_ip = self.client_vm.ip_address
cmd = [
azure.AZURE_PATH, 'sql', 'server', 'firewall-rule', 'create', '--name',
self.whitelist_ip, '--resource-group', self.resource_group, '--server',
self.server_name, '--end-ip-address', self.whitelist_ip,
'--start-ip-address', self.whitelist_ip
]
vm_util.IssueCommand(cmd)
# Push the executable jar to the working directory on client vm
self.client_vm.InstallPreprovisionedPackageData(package_name,
[SYNAPSE_JDBC_JAR], '')
  def ExecuteQuery(self, query_name: Text) -> Tuple[float, Dict[str, str]]:
"""Executes a query and returns performance details.
Args:
query_name: String name of the query to execute
Returns:
A tuple of (execution_time, execution details)
execution_time: A Float variable set to the query's completion time in
secs. -1.0 is used as a sentinel value implying the query failed. For a
successful query the value is expected to be positive.
      performance_details: A dictionary of query execution attributes, e.g. job_id
"""
query_command = (f'java -cp {SYNAPSE_JDBC_JAR} '
f'com.google.cloud.performance.edw.Single '
f'--server {self.server_name} --database {self.database} '
f'--query_timeout {FLAGS.query_timeout} '
f'--query_file {query_name}')
stdout, _ = self.client_vm.RemoteCommand(query_command)
performance = json.loads(stdout)
details = copy.copy(self.GetMetadata())
if 'failure_reason' in performance:
details.update({'failure_reason': performance['failure_reason']})
else:
details.update(performance['details'])
return performance['query_wall_time_in_secs'], details
def ExecuteSimultaneous(self, submission_interval: int,
queries: List[str]) -> str:
"""Executes queries simultaneously on client and return performance details.
Simultaneous app expects queries as white space separated query file names.
Args:
submission_interval: Simultaneous query submission interval in
milliseconds.
queries: List of strings (names) of queries to execute.
Returns:
A serialized dictionary of execution details.
"""
query_list = ' '.join(queries)
cmd = (f'java -cp {SYNAPSE_JDBC_JAR} '
f'com.google.cloud.performance.edw.Simultaneous '
f'--server {self.server_name} --database {self.database} '
f'--submission_interval {submission_interval} --query_timeout '
f'{FLAGS.query_timeout} --query_files {query_list}')
stdout, _ = self.client_vm.RemoteCommand(cmd)
return stdout
def ExecuteThroughput(self, concurrency_streams: List[List[str]]) -> str:
"""Executes a throughput test and returns performance details.
Args:
concurrency_streams: List of streams to execute simultaneously, each of
which is a list of string names of queries.
Returns:
A serialized dictionary of execution details.
"""
query_list = ' '.join([','.join(stream) for stream in concurrency_streams])
cmd = (
f'java -cp {SYNAPSE_JDBC_JAR} '
f'com.google.cloud.performance.edw.Throughput '
f'--server {self.server_name} --database {self.database} '
f'--query_timeout {FLAGS.query_timeout} --query_streams {query_list}')
stdout, _ = self.client_vm.RemoteCommand(cmd)
return stdout
def GetMetadata(self) -> Dict[str, str]:
"""Gets the Metadata attributes for the Client Interface."""
return {'client': FLAGS.sqldatawarehouse_client_interface}
class Azuresqldatawarehouse(edw_service.EdwService):
"""Object representing an Azure SQL data warehouse."""
CLOUD = azure.CLOUD
SERVICE_TYPE = 'azuresqldatawarehouse'
def __init__(self, edw_service_spec):
super(Azuresqldatawarehouse, self).__init__(edw_service_spec)
self.whitelist_ip = None
self.resource_group = edw_service_spec.resource_group
self.server_name = edw_service_spec.server_name
self.client_interface = GetSqlDataWarehouseClientInterface(
self.server_name, self.db, self.user, self.password,
self.resource_group)
def WhitelistIPAddress(self, ip_address):
"""To whitelist the IP address on the cluster."""
self.whitelist_ip = ip_address
cmd = [azure.AZURE_PATH,
'sql',
'server',
'firewall-rule',
'create',
'--name',
self.whitelist_ip,
'--resource-group',
self.resource_group,
'--server',
self.server_name,
'--end-ip-address',
self.whitelist_ip,
'--start-ip-address',
self.whitelist_ip]
vm_util.IssueCommand(cmd)
def __DescribeCluster(self):
"""Describe cluster."""
cmd = [azure.AZURE_PATH,
'sql',
'dw',
'show',
'--name',
self.db,
'--resource-group',
self.resource_group,
'--server',
self.server_name]
return vm_util.IssueCommand(cmd, raise_on_failure=False)
def _Exists(self):
"""Method to validate the existence of cluster.
Returns:
Boolean value indicating the existence of a cluster.
"""
stdout, _, _ = self.__DescribeCluster()
if not stdout or (json.loads(stdout)['status'] not in VALID_EXIST_STATUSES):
return False
else:
return True
def _IsReady(self):
"""Method to return if the cluster is ready to handle queries."""
stdout, _, _ = self.__DescribeCluster()
return json.loads(stdout)['status'] in READY_STATUSES
def _Create(self):
"""Resuming the cluster."""
cmd = [azure.AZURE_PATH,
'sql',
'dw',
'resume',
'--name',
self.db,
'--resource-group',
self.resource_group,
'--server',
self.server_name]
vm_util.IssueCommand(cmd, timeout=420)
def _IsDeleting(self):
"""Method to check if the cluster is pausing."""
stdout, _, _ = self.__DescribeCluster()
if not stdout:
return False
else:
return json.loads(stdout)['status'] in PAUSING_STATUSES
def _Delete(self):
"""Pausing cluster."""
cmd = [azure.AZURE_PATH,
'sql',
'dw',
'pause',
'--name',
self.db,
'--resource-group',
self.resource_group,
'--server',
self.server_name]
vm_util.IssueCommand(cmd, raise_on_failure=False)
def _DeleteDependencies(self):
"""Delete dependencies of the cluster."""
if self.client_interface.whitelist_ip is not None:
cmd = [
azure.AZURE_PATH, 'sql', 'server', 'firewall-rule', 'delete',
'--name', self.client_interface.whitelist_ip, '--resource-group',
self.resource_group, '--server', self.server_name
]
vm_util.IssueCommand(cmd, raise_on_failure=False)
def GetMetadata(self):
"""Return a dictionary of the metadata for this cluster."""
basic_data = super(Azuresqldatawarehouse, self).GetMetadata()
basic_data['resource_group'] = self.resource_group
basic_data['server_name'] = self.server_name
basic_data.update(self.client_interface.GetMetadata())
return basic_data
|
import logging
from pymailgunner import (
Client,
MailgunCredentialsError,
MailgunDomainError,
MailgunError,
)
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_DATA,
ATTR_TITLE,
ATTR_TITLE_DEFAULT,
PLATFORM_SCHEMA,
BaseNotificationService,
)
from homeassistant.const import CONF_API_KEY, CONF_DOMAIN, CONF_RECIPIENT, CONF_SENDER
from . import CONF_SANDBOX, DOMAIN as MAILGUN_DOMAIN
_LOGGER = logging.getLogger(__name__)
# Images to attach to notification
ATTR_IMAGES = "images"
DEFAULT_SANDBOX = False
# pylint: disable=no-value-for-parameter
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_RECIPIENT): vol.Email(), vol.Optional(CONF_SENDER): vol.Email()}
)
def get_service(hass, config, discovery_info=None):
"""Get the Mailgun notification service."""
data = hass.data[MAILGUN_DOMAIN]
mailgun_service = MailgunNotificationService(
data.get(CONF_DOMAIN),
data.get(CONF_SANDBOX),
data.get(CONF_API_KEY),
config.get(CONF_SENDER),
config.get(CONF_RECIPIENT),
)
if mailgun_service.connection_is_valid():
return mailgun_service
return None
class MailgunNotificationService(BaseNotificationService):
"""Implement a notification service for the Mailgun mail service."""
def __init__(self, domain, sandbox, api_key, sender, recipient):
"""Initialize the service."""
self._client = None # Mailgun API client
self._domain = domain
self._sandbox = sandbox
self._api_key = api_key
self._sender = sender
self._recipient = recipient
def initialize_client(self):
"""Initialize the connection to Mailgun."""
self._client = Client(self._api_key, self._domain, self._sandbox)
_LOGGER.debug("Mailgun domain: %s", self._client.domain)
self._domain = self._client.domain
if not self._sender:
self._sender = f"hass@{self._domain}"
def connection_is_valid(self):
"""Check whether the provided credentials are valid."""
try:
self.initialize_client()
except MailgunCredentialsError:
_LOGGER.exception("Invalid credentials")
return False
except MailgunDomainError as mailgun_error:
_LOGGER.exception(mailgun_error)
return False
return True
def send_message(self, message="", **kwargs):
"""Send a mail to the recipient."""
subject = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
data = kwargs.get(ATTR_DATA)
files = data.get(ATTR_IMAGES) if data else None
try:
# Initialize the client in case it was not.
if self._client is None:
self.initialize_client()
resp = self._client.send_mail(
sender=self._sender,
to=self._recipient,
subject=subject,
text=message,
files=files,
)
_LOGGER.debug("Message sent: %s", resp)
except MailgunError as mailgun_error:
_LOGGER.exception("Failed to send message: %s", mailgun_error)
|
import datetime
import logging
from concord232 import client as concord232_client
import requests
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOTION,
DEVICE_CLASS_OPENING,
DEVICE_CLASS_SAFETY,
DEVICE_CLASS_SMOKE,
DEVICE_CLASSES,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
from homeassistant.const import CONF_HOST, CONF_PORT
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
CONF_EXCLUDE_ZONES = "exclude_zones"
CONF_ZONE_TYPES = "zone_types"
DEFAULT_HOST = "localhost"
DEFAULT_NAME = "Alarm"
DEFAULT_PORT = "5007"
DEFAULT_SSL = False
SCAN_INTERVAL = datetime.timedelta(seconds=10)
ZONE_TYPES_SCHEMA = vol.Schema({cv.positive_int: vol.In(DEVICE_CLASSES)})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_EXCLUDE_ZONES, default=[]): vol.All(
cv.ensure_list, [cv.positive_int]
),
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT, default=DEFAULT_PORT): cv.port,
vol.Optional(CONF_ZONE_TYPES, default={}): ZONE_TYPES_SCHEMA,
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Concord232 binary sensor platform."""
host = config[CONF_HOST]
port = config[CONF_PORT]
exclude = config[CONF_EXCLUDE_ZONES]
zone_types = config[CONF_ZONE_TYPES]
sensors = []
try:
_LOGGER.debug("Initializing client")
client = concord232_client.Client(f"http://{host}:{port}")
client.zones = client.list_zones()
client.last_zone_update = dt_util.utcnow()
except requests.exceptions.ConnectionError as ex:
_LOGGER.error("Unable to connect to Concord232: %s", str(ex))
return False
# The order of zones returned by client.list_zones() can vary.
# When the zones are not named, this can result in the same entity
# name mapping to different sensors in an unpredictable way. Sort
# the zones by zone number to prevent this.
client.zones.sort(key=lambda zone: zone["number"])
for zone in client.zones:
_LOGGER.info("Loading Zone found: %s", zone["name"])
if zone["number"] not in exclude:
sensors.append(
Concord232ZoneSensor(
hass,
client,
zone,
zone_types.get(zone["number"], get_opening_type(zone)),
)
)
add_entities(sensors, True)
def get_opening_type(zone):
"""Return the result of the type guessing from name."""
if "MOTION" in zone["name"]:
return DEVICE_CLASS_MOTION
if "KEY" in zone["name"]:
return DEVICE_CLASS_SAFETY
if "SMOKE" in zone["name"]:
return DEVICE_CLASS_SMOKE
if "WATER" in zone["name"]:
return "water"
return DEVICE_CLASS_OPENING
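# Illustration of the guessing above: a zone named "GARAGE MOTION" maps to
# DEVICE_CLASS_MOTION, while an unmatched name falls back to
# DEVICE_CLASS_OPENING.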
class Concord232ZoneSensor(BinarySensorEntity):
"""Representation of a Concord232 zone as a sensor."""
def __init__(self, hass, client, zone, zone_type):
"""Initialize the Concord232 binary sensor."""
self._hass = hass
self._client = client
self._zone = zone
self._number = zone["number"]
self._zone_type = zone_type
@property
def device_class(self):
"""Return the class of this sensor, from DEVICE_CLASSES."""
return self._zone_type
@property
def name(self):
"""Return the name of the binary sensor."""
return self._zone["name"]
@property
def is_on(self):
"""Return true if the binary sensor is on."""
# True means "faulted" or "open" or "abnormal state"
return bool(self._zone["state"] != "Normal")
def update(self):
"""Get updated stats from API."""
last_update = dt_util.utcnow() - self._client.last_zone_update
_LOGGER.debug("Zone: %s ", self._zone)
if last_update > datetime.timedelta(seconds=1):
self._client.zones = self._client.list_zones()
self._client.last_zone_update = dt_util.utcnow()
_LOGGER.debug("Updated from zone: %s", self._zone["name"])
if hasattr(self._client, "zones"):
self._zone = next(
(x for x in self._client.zones if x["number"] == self._number), None
)
|
import re
from django.core.exceptions import ValidationError
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from weblate.checks.models import CHECKS
from weblate.checks.parser import (
SYNTAXCHARS,
FlagsParser,
multi_value_flag,
single_value_flag,
)
from weblate.fonts.utils import get_font_weight
PLAIN_FLAGS = {
v.enable_string: v.name
for k, v in CHECKS.items()
if v.default_disabled and not v.param_type
}
TYPED_FLAGS = {v.enable_string: v.name for k, v in CHECKS.items() if v.param_type}
TYPED_FLAGS_ARGS = {
v.enable_string: v.param_type for k, v in CHECKS.items() if v.param_type
}
PLAIN_FLAGS["rst-text"] = gettext_lazy("RST text")
PLAIN_FLAGS["md-text"] = gettext_lazy("Markdown text")
PLAIN_FLAGS["xml-text"] = gettext_lazy("XML text")
PLAIN_FLAGS["dos-eol"] = gettext_lazy("DOS line endings")
PLAIN_FLAGS["url"] = gettext_lazy("URL")
PLAIN_FLAGS["auto-java-messageformat"] = gettext_lazy(
"Automatically detect Java MessageFormat"
)
PLAIN_FLAGS["read-only"] = gettext_lazy("Read only")
PLAIN_FLAGS["strict-same"] = gettext_lazy("Strict unchanged check")
TYPED_FLAGS["font-family"] = gettext_lazy("Font family")
TYPED_FLAGS_ARGS["font-family"] = single_value_flag(str)
TYPED_FLAGS["font-size"] = gettext_lazy("Font size")
TYPED_FLAGS_ARGS["font-size"] = single_value_flag(int)
TYPED_FLAGS["font-weight"] = gettext_lazy("Font weight")
TYPED_FLAGS_ARGS["font-weight"] = single_value_flag(get_font_weight)
TYPED_FLAGS["font-spacing"] = gettext_lazy("Font spacing")
TYPED_FLAGS_ARGS["font-spacing"] = single_value_flag(int)
TYPED_FLAGS["priority"] = gettext_lazy("Priority")
TYPED_FLAGS_ARGS["priority"] = single_value_flag(int)
TYPED_FLAGS["max-length"] = gettext_lazy("Maximum length of translation")
TYPED_FLAGS_ARGS["max-length"] = single_value_flag(int)
TYPED_FLAGS["replacements"] = gettext_lazy("Replacements while rendering")
TYPED_FLAGS_ARGS["replacements"] = multi_value_flag(str, modulo=2)
IGNORE_CHECK_FLAGS = {CHECKS[x].ignore_string for x in CHECKS}
FLAG_ALIASES = {"markdown-text": "md-text"}
class Flags:
def __init__(self, *args):
self._items = {}
self._values = {}
for flags in args:
self.merge(flags)
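    # Illustrative construction (not exhaustive): Flags('max-length:100',
    # 'ignore-same') merges both arguments; re-adding an existing flag simply
    # overwrites the stored value.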
def get_items(self, flags):
if isinstance(flags, str):
return self.parse(flags)
if hasattr(flags, "tag"):
return self.parse_xml(flags)
if isinstance(flags, Flags):
return flags.items()
return flags
def merge(self, flags):
for flag in self.get_items(flags):
if isinstance(flag, tuple):
self._values[flag[0]] = flag[1:]
self._items[flag[0]] = flag
elif flag and flag not in ("fuzzy", "#"):
# Ignore some flags
self._items[flag] = flag
def remove(self, flags):
for flag in self.get_items(flags):
if isinstance(flag, tuple):
key = flag[0]
value = flag[1:]
if key in self._values and self._values[key] == value:
del self._values[key]
del self._items[key]
else:
self._items.pop(flag, None)
@staticmethod
def parse(flags):
"""Parse comma separated list of flags."""
state = 0
name = None
value = []
tokens = list(FlagsParser.parseString(flags, parseAll=True))
for pos, token in enumerate(tokens):
token = token.strip()
if state == 0 and token == ",":
pass
elif state == 0:
# Handle aliases
name = FLAG_ALIASES.get(token, token)
value = [name]
state = 1
elif state == 1 and token == ",":
# End of flag
state = 0
yield name
elif state in (1, 3) and token == ":":
# Value separator
state = 2
elif state == 2 and token == ",":
# Flag with empty parameter
state = 0
value.append("")
yield tuple(value)
elif state == 2 and token == ":":
# Empty param
value.append("")
elif state == 2:
if (
token == "r"
and pos + 1 < len(tokens)
and tokens[pos + 1] not in (",", ":")
):
# Regex prefix, value follows
state = 4
else:
# Value
value.append(token)
state = 3
elif state == 4:
# Regex value
value.append(re.compile(token))
state = 3
elif state == 3 and token == ",":
# Last value
yield tuple(value)
state = 0
else:
raise ValueError(f"Unexpected token: {token}, state={state}")
# With state 0 there was nothing parsed yet
if state > 0:
if state == 2:
# There was empty value
value.append("")
# Is this flag or flag with value
if len(value) > 1:
yield tuple(value)
else:
yield name
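    # Illustrative output of the state machine above, assuming typical input:
    #   list(Flags.parse('max-length:100, ignore-same'))
    #   -> [('max-length', '100'), 'ignore-same']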
@classmethod
def parse_xml(cls, flags):
"""Parse comma separated list of flags."""
maxwidth = flags.get("maxwidth")
sizeunit = flags.get("size-unit")
if maxwidth:
if sizeunit in (None, "pixel", "point"):
yield "max-size", maxwidth
elif sizeunit in ("byte", "char"):
yield "max-length", maxwidth
font = flags.get("font")
if font:
font = font.split(";")
yield "font-family", font[0].strip().replace(" ", "_")
if len(font) > 1:
yield "font-size", font[1].strip()
if len(font) > 2:
yield "font-weight", font[2].strip()
text = flags.get("weblate-flags")
if text:
yield from cls.parse(text)
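    # Illustration of parse_xml above: maxwidth="100" with size-unit="char"
    # yields ('max-length', '100'), while a pixel/point (or missing) size-unit
    # yields ('max-size', '100') instead.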
def has_value(self, key):
return key in self._values
def get_value(self, key):
return TYPED_FLAGS_ARGS[key](self._values[key])
def items(self):
return set(self._items.values())
def __iter__(self):
return self._items.__iter__()
def __contains__(self, key):
return key in self._items
def __bool__(self):
return bool(self._items)
@staticmethod
def format_value(value):
# Regexp objects
if hasattr(value, "pattern"):
value = value.pattern
if any(c in value for c in SYNTAXCHARS):
return '"{}"'.format(value.replace('"', r"\""))
return value
@classmethod
def format_flag(cls, flag):
if isinstance(flag, tuple):
return ":".join(cls.format_value(val) for val in flag)
return cls.format_value(flag)
def _format_values(self):
return (self.format_flag(item) for item in self._items.values())
def format(self):
return ", ".join(sorted(self._format_values()))
def validate(self):
for name in self._items:
if isinstance(name, tuple):
name = name[0]
is_typed = name in TYPED_FLAGS
is_plain = name in PLAIN_FLAGS or name in IGNORE_CHECK_FLAGS
if not is_typed and not is_plain:
raise ValidationError(_('Invalid translation flag: "%s"') % name)
if name in self._values:
if is_plain:
raise ValidationError(
_('Translation flag has no parameters: "%s"') % name
)
try:
self.get_value(name)
except Exception:
raise ValidationError(
_('Wrong parameters for translation flag: "%s"') % name
)
elif is_typed:
raise ValidationError(
_('Missing parameters for translation flag: "%s"') % name
)
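# Illustrative usage (a sketch only; assumes the flag definitions earlier in
# this module and that single_value_flag(int) casts the single argument):
#
#   flags = Flags("max-length:100, python-format")
#   "python-format" in flags        # -> True
#   flags.get_value("max-length")   # -> 100
#   flags.format()                  # -> 'max-length:100, python-format'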
|
from datetime import timedelta
from TransportNSW import TransportNSW
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_MODE,
CONF_API_KEY,
CONF_NAME,
TIME_MINUTES,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
ATTR_STOP_ID = "stop_id"
ATTR_ROUTE = "route"
ATTR_DUE_IN = "due"
ATTR_DELAY = "delay"
ATTR_REAL_TIME = "real_time"
ATTR_DESTINATION = "destination"
ATTRIBUTION = "Data provided by Transport NSW"
CONF_STOP_ID = "stop_id"
CONF_ROUTE = "route"
CONF_DESTINATION = "destination"
DEFAULT_NAME = "Next Bus"
ICONS = {
"Train": "mdi:train",
"Lightrail": "mdi:tram",
"Bus": "mdi:bus",
"Coach": "mdi:bus",
"Ferry": "mdi:ferry",
"Schoolbus": "mdi:bus",
"n/a": "mdi:clock",
None: "mdi:clock",
}
SCAN_INTERVAL = timedelta(seconds=60)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STOP_ID): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_ROUTE, default=""): cv.string,
vol.Optional(CONF_DESTINATION, default=""): cv.string,
}
)
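# Example configuration.yaml entry (illustrative only: the stop_id and api_key
# values are placeholders and the platform name is assumed to match this
# integration, e.g. "transport_nsw"):
#
#   sensor:
#     - platform: transport_nsw
#       stop_id: "209516"
#       api_key: "YOUR_API_KEY"
#       route: "199"
#       name: "Bus to the city"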
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Transport NSW sensor."""
stop_id = config[CONF_STOP_ID]
api_key = config[CONF_API_KEY]
route = config.get(CONF_ROUTE)
destination = config.get(CONF_DESTINATION)
name = config.get(CONF_NAME)
data = PublicTransportData(stop_id, route, destination, api_key)
add_entities([TransportNSWSensor(data, stop_id, name)], True)
class TransportNSWSensor(Entity):
"""Implementation of an Transport NSW sensor."""
def __init__(self, data, stop_id, name):
"""Initialize the sensor."""
self.data = data
self._name = name
self._stop_id = stop_id
self._times = self._state = None
self._icon = ICONS[None]
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._times is not None:
return {
ATTR_DUE_IN: self._times[ATTR_DUE_IN],
ATTR_STOP_ID: self._stop_id,
ATTR_ROUTE: self._times[ATTR_ROUTE],
ATTR_DELAY: self._times[ATTR_DELAY],
ATTR_REAL_TIME: self._times[ATTR_REAL_TIME],
ATTR_DESTINATION: self._times[ATTR_DESTINATION],
ATTR_MODE: self._times[ATTR_MODE],
ATTR_ATTRIBUTION: ATTRIBUTION,
}
@property
def unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return TIME_MINUTES
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return self._icon
def update(self):
"""Get the latest data from Transport NSW and update the states."""
self.data.update()
self._times = self.data.info
self._state = self._times[ATTR_DUE_IN]
self._icon = ICONS[self._times[ATTR_MODE]]
class PublicTransportData:
"""The Class for handling the data retrieval."""
def __init__(self, stop_id, route, destination, api_key):
"""Initialize the data object."""
self._stop_id = stop_id
self._route = route
self._destination = destination
self._api_key = api_key
self.info = {
ATTR_ROUTE: self._route,
ATTR_DUE_IN: "n/a",
ATTR_DELAY: "n/a",
ATTR_REAL_TIME: "n/a",
ATTR_DESTINATION: "n/a",
ATTR_MODE: None,
}
self.tnsw = TransportNSW()
def update(self):
"""Get the next leave time."""
_data = self.tnsw.get_departures(
self._stop_id, self._route, self._destination, self._api_key
)
self.info = {
ATTR_ROUTE: _data["route"],
ATTR_DUE_IN: _data["due"],
ATTR_DELAY: _data["delay"],
ATTR_REAL_TIME: _data["real_time"],
ATTR_DESTINATION: _data["destination"],
ATTR_MODE: _data["mode"],
}
|
import logging
from velbus.util import VelbusException
from homeassistant.components.switch import SwitchEntity
from . import VelbusEntity
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(hass, entry, async_add_entities):
"""Set up Velbus switch based on config_entry."""
cntrl = hass.data[DOMAIN][entry.entry_id]["cntrl"]
modules_data = hass.data[DOMAIN][entry.entry_id]["switch"]
entities = []
for address, channel in modules_data:
module = cntrl.get_module(address)
entities.append(VelbusSwitch(module, channel))
async_add_entities(entities)
class VelbusSwitch(VelbusEntity, SwitchEntity):
"""Representation of a switch."""
@property
def is_on(self):
"""Return true if the switch is on."""
return self._module.is_on(self._channel)
def turn_on(self, **kwargs):
"""Instruct the switch to turn on."""
try:
self._module.turn_on(self._channel)
except VelbusException as err:
_LOGGER.error("A Velbus error occurred: %s", err)
def turn_off(self, **kwargs):
"""Instruct the switch to turn off."""
try:
self._module.turn_off(self._channel)
except VelbusException as err:
_LOGGER.error("A Velbus error occurred: %s", err)
|
import asyncio
import functools
import logging
import secrets
from urllib.parse import urlparse
from uuid import uuid4
from aiohttp import web
from pysmartapp import Dispatcher, SmartAppManager
from pysmartapp.const import SETTINGS_APP_ID
from pysmartthings import (
APP_TYPE_WEBHOOK,
CAPABILITIES,
CLASSIFICATION_AUTOMATION,
App,
AppOAuth,
AppSettings,
InstalledAppStatus,
SmartThings,
SourceType,
Subscription,
SubscriptionEntity,
)
from homeassistant.components import webhook
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.dispatcher import (
async_dispatcher_connect,
async_dispatcher_send,
)
from homeassistant.helpers.network import NoURLAvailableError, get_url
from homeassistant.helpers.typing import HomeAssistantType
from .const import (
APP_NAME_PREFIX,
APP_OAUTH_CLIENT_NAME,
APP_OAUTH_SCOPES,
CONF_CLOUDHOOK_URL,
CONF_INSTALLED_APP_ID,
CONF_INSTANCE_ID,
CONF_REFRESH_TOKEN,
DATA_BROKERS,
DATA_MANAGER,
DOMAIN,
IGNORED_CAPABILITIES,
SETTINGS_INSTANCE_ID,
SIGNAL_SMARTAPP_PREFIX,
STORAGE_KEY,
STORAGE_VERSION,
SUBSCRIPTION_WARNING_LIMIT,
)
_LOGGER = logging.getLogger(__name__)
def format_unique_id(app_id: str, location_id: str) -> str:
"""Format the unique id for a config entry."""
return f"{app_id}_{location_id}"
async def find_app(hass: HomeAssistantType, api):
"""Find an existing SmartApp for this installation of hass."""
apps = await api.apps()
for app in [app for app in apps if app.app_name.startswith(APP_NAME_PREFIX)]:
# Load settings to compare instance id
settings = await app.settings()
if (
settings.settings.get(SETTINGS_INSTANCE_ID)
== hass.data[DOMAIN][CONF_INSTANCE_ID]
):
return app
async def validate_installed_app(api, installed_app_id: str):
"""
Ensure the specified installed SmartApp is valid and functioning.
Query the API for the installed SmartApp and validate that it is tied to
the specified app_id and is in an authorized state.
"""
installed_app = await api.installed_app(installed_app_id)
if installed_app.installed_app_status != InstalledAppStatus.AUTHORIZED:
raise RuntimeWarning(
"Installed SmartApp instance '{}' ({}) is not AUTHORIZED but instead {}".format(
installed_app.display_name,
installed_app.installed_app_id,
installed_app.installed_app_status,
)
)
return installed_app
def validate_webhook_requirements(hass: HomeAssistantType) -> bool:
"""Ensure Home Assistant is setup properly to receive webhooks."""
if hass.components.cloud.async_active_subscription():
return True
if hass.data[DOMAIN][CONF_CLOUDHOOK_URL] is not None:
return True
return get_webhook_url(hass).lower().startswith("https://")
def get_webhook_url(hass: HomeAssistantType) -> str:
"""
Get the URL of the webhook.
Return the cloudhook if available, otherwise local webhook.
"""
cloudhook_url = hass.data[DOMAIN][CONF_CLOUDHOOK_URL]
if hass.components.cloud.async_active_subscription() and cloudhook_url is not None:
return cloudhook_url
return webhook.async_generate_url(hass, hass.data[DOMAIN][CONF_WEBHOOK_ID])
def _get_app_template(hass: HomeAssistantType):
try:
endpoint = f"at {get_url(hass, allow_cloud=False, prefer_external=True)}"
except NoURLAvailableError:
endpoint = ""
cloudhook_url = hass.data[DOMAIN][CONF_CLOUDHOOK_URL]
if cloudhook_url is not None:
endpoint = "via Nabu Casa"
description = f"{hass.config.location_name} {endpoint}"
return {
"app_name": APP_NAME_PREFIX + str(uuid4()),
"display_name": "Home Assistant",
"description": description,
"webhook_target_url": get_webhook_url(hass),
"app_type": APP_TYPE_WEBHOOK,
"single_instance": True,
"classifications": [CLASSIFICATION_AUTOMATION],
}
async def create_app(hass: HomeAssistantType, api):
"""Create a SmartApp for this instance of hass."""
# Create app from template attributes
template = _get_app_template(hass)
app = App()
for key, value in template.items():
setattr(app, key, value)
app, client = await api.create_app(app)
_LOGGER.debug("Created SmartApp '%s' (%s)", app.app_name, app.app_id)
# Set unique hass id in settings
settings = AppSettings(app.app_id)
settings.settings[SETTINGS_APP_ID] = app.app_id
settings.settings[SETTINGS_INSTANCE_ID] = hass.data[DOMAIN][CONF_INSTANCE_ID]
await api.update_app_settings(settings)
_LOGGER.debug(
"Updated App Settings for SmartApp '%s' (%s)", app.app_name, app.app_id
)
# Set oauth scopes
oauth = AppOAuth(app.app_id)
oauth.client_name = APP_OAUTH_CLIENT_NAME
oauth.scope.extend(APP_OAUTH_SCOPES)
await api.update_app_oauth(oauth)
_LOGGER.debug("Updated App OAuth for SmartApp '%s' (%s)", app.app_name, app.app_id)
return app, client
async def update_app(hass: HomeAssistantType, app):
"""Ensure the SmartApp is up-to-date and update if necessary."""
template = _get_app_template(hass)
template.pop("app_name") # don't update this
update_required = False
for key, value in template.items():
if getattr(app, key) != value:
update_required = True
setattr(app, key, value)
if update_required:
await app.save()
_LOGGER.debug(
"SmartApp '%s' (%s) updated with latest settings", app.app_name, app.app_id
)
def setup_smartapp(hass, app):
"""
Configure an individual SmartApp in hass.
Register the SmartApp with the SmartAppManager so that hass will service
lifecycle events (install, event, etc...). A unique SmartApp is created
for each SmartThings account that is configured in hass.
"""
manager = hass.data[DOMAIN][DATA_MANAGER]
smartapp = manager.smartapps.get(app.app_id)
if smartapp:
# already setup
return smartapp
smartapp = manager.register(app.app_id, app.webhook_public_key)
smartapp.name = app.display_name
smartapp.description = app.description
smartapp.permissions.extend(APP_OAUTH_SCOPES)
return smartapp
async def setup_smartapp_endpoint(hass: HomeAssistantType):
"""
Configure the SmartApp webhook in hass.
    SmartApps are an extension point within the SmartThings ecosystem and are
    used to receive push updates (i.e. device updates) from the cloud.
"""
data = hass.data.get(DOMAIN)
if data:
# already setup
return
# Get/create config to store a unique id for this hass instance.
store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
config = await store.async_load()
if not config:
# Create config
config = {
CONF_INSTANCE_ID: str(uuid4()),
CONF_WEBHOOK_ID: secrets.token_hex(),
CONF_CLOUDHOOK_URL: None,
}
await store.async_save(config)
# Register webhook
webhook.async_register(
hass, DOMAIN, "SmartApp", config[CONF_WEBHOOK_ID], smartapp_webhook
)
# Create webhook if eligible
cloudhook_url = config.get(CONF_CLOUDHOOK_URL)
if (
cloudhook_url is None
and hass.components.cloud.async_active_subscription()
and not hass.config_entries.async_entries(DOMAIN)
):
cloudhook_url = await hass.components.cloud.async_create_cloudhook(
config[CONF_WEBHOOK_ID]
)
config[CONF_CLOUDHOOK_URL] = cloudhook_url
await store.async_save(config)
_LOGGER.debug("Created cloudhook '%s'", cloudhook_url)
# SmartAppManager uses a dispatcher to invoke callbacks when push events
# occur. Use hass' implementation instead of the built-in one.
dispatcher = Dispatcher(
signal_prefix=SIGNAL_SMARTAPP_PREFIX,
connect=functools.partial(async_dispatcher_connect, hass),
send=functools.partial(async_dispatcher_send, hass),
)
# Path is used in digital signature validation
path = (
urlparse(cloudhook_url).path
if cloudhook_url
else webhook.async_generate_path(config[CONF_WEBHOOK_ID])
)
manager = SmartAppManager(path, dispatcher=dispatcher)
manager.connect_install(functools.partial(smartapp_install, hass))
manager.connect_update(functools.partial(smartapp_update, hass))
manager.connect_uninstall(functools.partial(smartapp_uninstall, hass))
hass.data[DOMAIN] = {
DATA_MANAGER: manager,
CONF_INSTANCE_ID: config[CONF_INSTANCE_ID],
DATA_BROKERS: {},
CONF_WEBHOOK_ID: config[CONF_WEBHOOK_ID],
# Will not be present if not enabled
CONF_CLOUDHOOK_URL: config.get(CONF_CLOUDHOOK_URL),
}
_LOGGER.debug(
"Setup endpoint for %s",
cloudhook_url
if cloudhook_url
else webhook.async_generate_url(hass, config[CONF_WEBHOOK_ID]),
)
async def unload_smartapp_endpoint(hass: HomeAssistantType):
"""Tear down the component configuration."""
if DOMAIN not in hass.data:
return
# Remove the cloudhook if it was created
cloudhook_url = hass.data[DOMAIN][CONF_CLOUDHOOK_URL]
if cloudhook_url and hass.components.cloud.async_is_logged_in():
await hass.components.cloud.async_delete_cloudhook(
hass.data[DOMAIN][CONF_WEBHOOK_ID]
)
# Remove cloudhook from storage
store = hass.helpers.storage.Store(STORAGE_VERSION, STORAGE_KEY)
await store.async_save(
{
CONF_INSTANCE_ID: hass.data[DOMAIN][CONF_INSTANCE_ID],
CONF_WEBHOOK_ID: hass.data[DOMAIN][CONF_WEBHOOK_ID],
CONF_CLOUDHOOK_URL: None,
}
)
_LOGGER.debug("Cloudhook '%s' was removed", cloudhook_url)
# Remove the webhook
webhook.async_unregister(hass, hass.data[DOMAIN][CONF_WEBHOOK_ID])
# Disconnect all brokers
for broker in hass.data[DOMAIN][DATA_BROKERS].values():
broker.disconnect()
# Remove all handlers from manager
hass.data[DOMAIN][DATA_MANAGER].dispatcher.disconnect_all()
# Remove the component data
hass.data.pop(DOMAIN)
async def smartapp_sync_subscriptions(
hass: HomeAssistantType,
auth_token: str,
location_id: str,
installed_app_id: str,
devices,
):
"""Synchronize subscriptions of an installed up."""
api = SmartThings(async_get_clientsession(hass), auth_token)
tasks = []
async def create_subscription(target: str):
sub = Subscription()
sub.installed_app_id = installed_app_id
sub.location_id = location_id
sub.source_type = SourceType.CAPABILITY
sub.capability = target
try:
await api.create_subscription(sub)
_LOGGER.debug(
"Created subscription for '%s' under app '%s'", target, installed_app_id
)
except Exception as error: # pylint:disable=broad-except
_LOGGER.error(
"Failed to create subscription for '%s' under app '%s': %s",
target,
installed_app_id,
error,
)
async def delete_subscription(sub: SubscriptionEntity):
try:
await api.delete_subscription(installed_app_id, sub.subscription_id)
_LOGGER.debug(
"Removed subscription for '%s' under app '%s' because it was no longer needed",
sub.capability,
installed_app_id,
)
except Exception as error: # pylint:disable=broad-except
_LOGGER.error(
"Failed to remove subscription for '%s' under app '%s': %s",
sub.capability,
installed_app_id,
error,
)
# Build set of capabilities and prune unsupported ones
capabilities = set()
for device in devices:
capabilities.update(device.capabilities)
# Remove items not defined in the library
capabilities.intersection_update(CAPABILITIES)
# Remove unused capabilities
capabilities.difference_update(IGNORED_CAPABILITIES)
capability_count = len(capabilities)
if capability_count > SUBSCRIPTION_WARNING_LIMIT:
_LOGGER.warning(
"Some device attributes may not receive push updates and there may be subscription "
"creation failures under app '%s' because %s subscriptions are required but "
"there is a limit of %s per app",
installed_app_id,
capability_count,
SUBSCRIPTION_WARNING_LIMIT,
)
_LOGGER.debug(
"Synchronizing subscriptions for %s capabilities under app '%s': %s",
capability_count,
installed_app_id,
capabilities,
)
# Get current subscriptions and find differences
subscriptions = await api.subscriptions(installed_app_id)
for subscription in subscriptions:
if subscription.capability in capabilities:
capabilities.remove(subscription.capability)
else:
# Delete the subscription
tasks.append(delete_subscription(subscription))
# Remaining capabilities need subscriptions created
tasks.extend([create_subscription(c) for c in capabilities])
if tasks:
await asyncio.gather(*tasks)
else:
_LOGGER.debug("Subscriptions for app '%s' are up-to-date", installed_app_id)
async def _continue_flow(
hass: HomeAssistantType,
app_id: str,
location_id: str,
installed_app_id: str,
refresh_token: str,
):
"""Continue a config flow if one is in progress for the specific installed app."""
unique_id = format_unique_id(app_id, location_id)
flow = next(
(
flow
for flow in hass.config_entries.flow.async_progress()
if flow["handler"] == DOMAIN and flow["context"]["unique_id"] == unique_id
),
None,
)
if flow is not None:
await hass.config_entries.flow.async_configure(
flow["flow_id"],
{
CONF_INSTALLED_APP_ID: installed_app_id,
CONF_REFRESH_TOKEN: refresh_token,
},
)
_LOGGER.debug(
"Continued config flow '%s' for SmartApp '%s' under parent app '%s'",
flow["flow_id"],
installed_app_id,
app_id,
)
async def smartapp_install(hass: HomeAssistantType, req, resp, app):
"""Handle a SmartApp installation and continue the config flow."""
await _continue_flow(
hass, app.app_id, req.location_id, req.installed_app_id, req.refresh_token
)
_LOGGER.debug(
"Installed SmartApp '%s' under parent app '%s'",
req.installed_app_id,
app.app_id,
)
async def smartapp_update(hass: HomeAssistantType, req, resp, app):
"""Handle a SmartApp update and either update the entry or continue the flow."""
entry = next(
(
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.data.get(CONF_INSTALLED_APP_ID) == req.installed_app_id
),
None,
)
if entry:
hass.config_entries.async_update_entry(
entry, data={**entry.data, CONF_REFRESH_TOKEN: req.refresh_token}
)
_LOGGER.debug(
"Updated config entry '%s' for SmartApp '%s' under parent app '%s'",
entry.entry_id,
req.installed_app_id,
app.app_id,
)
await _continue_flow(
hass, app.app_id, req.location_id, req.installed_app_id, req.refresh_token
)
_LOGGER.debug(
"Updated SmartApp '%s' under parent app '%s'", req.installed_app_id, app.app_id
)
async def smartapp_uninstall(hass: HomeAssistantType, req, resp, app):
"""
Handle when a SmartApp is removed from a location by the user.
Find and delete the config entry representing the integration.
"""
entry = next(
(
entry
for entry in hass.config_entries.async_entries(DOMAIN)
if entry.data.get(CONF_INSTALLED_APP_ID) == req.installed_app_id
),
None,
)
if entry:
        # No need to add this as a job: the current coroutine was invoked
        # from the dispatcher and is not being awaited.
await hass.config_entries.async_remove(entry.entry_id)
_LOGGER.debug(
"Uninstalled SmartApp '%s' under parent app '%s'",
req.installed_app_id,
app.app_id,
)
async def smartapp_webhook(hass: HomeAssistantType, webhook_id: str, request):
"""
Handle a smartapp lifecycle event callback from SmartThings.
Requests from SmartThings are digitally signed and the SmartAppManager
validates the signature for authenticity.
"""
manager = hass.data[DOMAIN][DATA_MANAGER]
data = await request.json()
result = await manager.handle_request(data, request.headers)
return web.json_response(result)
|
from functools import partial
from functional.util import compose, parallelize
class ExecutionStrategies(object):
"""
Enum like object listing the types of execution strategies.
"""
PRE_COMPUTE = 0
PARALLEL = 1
class ExecutionEngine(object):
"""
Class to perform serial execution of a Sequence evaluation.
"""
def evaluate(self, sequence, transformations):
"""
Execute the sequence of transformations in serial
        :param sequence: Sequence to evaluate
:param transformations: Transformations to apply
:return: Resulting sequence or value
"""
# pylint: disable=no-self-use
result = sequence
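        # PRE_COMPUTE transformations require the whole intermediate sequence to
        # be materialized as a list before their function runs; all other
        # transformations receive the intermediate result directly.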
for transform in transformations:
strategies = transform.execution_strategies
if strategies is not None and ExecutionStrategies.PRE_COMPUTE in strategies:
result = transform.function(list(result))
else:
result = transform.function(result)
return iter(result)
class ParallelExecutionEngine(ExecutionEngine):
"""
Class to perform parallel execution of a Sequence evaluation.
"""
def __init__(self, processes=None, partition_size=None):
"""
Set the number of processes for parallel execution.
:param processes: Number of parallel Processes
"""
super(ParallelExecutionEngine, self).__init__()
self.processes = processes
self.partition_size = partition_size
def evaluate(self, sequence, transformations):
"""
Execute the sequence of transformations in parallel
        :param sequence: Sequence to evaluate
:param transformations: Transformations to apply
:return: Resulting sequence or value
"""
result = sequence
parallel = partial(
parallelize, processes=self.processes, partition_size=self.partition_size
)
staged = []
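        # Consecutive PARALLEL transformations are collected in `staged` and
        # later combined into a single callable; they are prepended because
        # compose() is assumed to apply its arguments right to left, which
        # preserves the original pipeline order.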
for transform in transformations:
strategies = transform.execution_strategies or {}
if ExecutionStrategies.PARALLEL in strategies:
staged.insert(0, transform.function)
else:
if staged:
result = parallel(compose(*staged), result)
staged = []
if ExecutionStrategies.PRE_COMPUTE in strategies:
result = list(result)
result = transform.function(result)
if staged:
result = parallel(compose(*staged), result)
return iter(result)
|
from functools import partial
import os
import types
import numpy as np
import mne
info_ignored_fields = ('file_id', 'hpi_results', 'hpi_meas', 'meas_id',
'meas_date', 'highpass', 'lowpass', 'subject_info',
'hpi_subsystem', 'experimenter', 'description',
'proj_id', 'proj_name', 'line_freq', 'gantry_angle',
'dev_head_t', 'dig', 'bads', 'projs', 'ctf_head_t',
'dev_ctf_t')
ch_ignore_fields = ('logno', 'cal', 'range', 'scanno', 'coil_type', 'kind',
'loc', 'coord_frame', 'unit')
info_long_fields = ('hpi_meas', )
system_to_reader_fn_dict = {'neuromag306': mne.io.read_raw_fif,
'CNT': partial(mne.io.read_raw_cnt),
'CTF': partial(mne.io.read_raw_ctf,
clean_names=True),
'BTI': partial(mne.io.read_raw_bti,
head_shape_fname=None,
rename_channels=False,
sort_by_ch_name=False),
'EGI': mne.io.read_raw_egi,
'eximia': mne.io.read_raw_eximia}
ignore_channels_dict = {'BTI': ['MUz', 'MLx', 'MLy', 'MUx', 'MUy', 'MLz']}
drop_extra_chans_dict = {'EGI': ['STI 014', 'DIN1', 'DIN3',
'DIN7', 'DIN4', 'DIN5', 'DIN2'],
'eximia': ['GateIn', 'Trig1', 'Trig2']}
system_decimal_accuracy_dict = {'CNT': 2}
pandas_not_found_warning_msg = 'The Pandas library is not installed. Not ' \
'returning the original trialinfo matrix as ' \
'metadata.'
def _has_h5py():
try:
import h5py # noqa
return True
except ImportError:
return False
def _remove_ignored_ch_fields(info):
if 'chs' in info:
for cur_ch in info['chs']:
for cur_field in ch_ignore_fields:
if cur_field in cur_ch:
del cur_ch[cur_field]
def _remove_long_info_fields(info):
for cur_field in info_long_fields:
if cur_field in info:
del info[cur_field]
def _remove_ignored_info_fields(info):
for cur_field in info_ignored_fields:
if cur_field in info:
del info[cur_field]
_remove_ignored_ch_fields(info)
def get_data_paths(system):
"""Return common paths for all tests."""
test_data_folder_ft = os.path.join(mne.datasets.testing.data_path(),
'fieldtrip/ft_test_data', system)
return test_data_folder_ft
def get_cfg_local(system):
"""Return cfg_local field for the system."""
from mne.externals.pymatreader import read_mat
cfg_local = read_mat(os.path.join(get_data_paths(system), 'raw_v7.mat'),
['cfg_local'])['cfg_local']
return cfg_local
def get_raw_info(system):
"""Return the info dict of the raw data."""
cfg_local = get_cfg_local(system)
raw_data_file = os.path.join(mne.datasets.testing.data_path(),
cfg_local['file_name'])
reader_function = system_to_reader_fn_dict[system]
info = reader_function(raw_data_file, preload=False).info
info['comps'] = []
return info
def get_raw_data(system, drop_extra_chs=False):
"""Find, load and process the raw data."""
cfg_local = get_cfg_local(system)
raw_data_file = os.path.join(mne.datasets.testing.data_path(),
cfg_local['file_name'])
reader_function = system_to_reader_fn_dict[system]
raw_data = reader_function(raw_data_file, preload=True)
crop = min(cfg_local['crop'], np.max(raw_data.times))
if system == 'eximia':
crop -= 0.5 * (1.0 / raw_data.info['sfreq'])
raw_data.crop(0, crop)
raw_data.del_proj('all')
raw_data.info['comps'] = []
raw_data.drop_channels(cfg_local['removed_chan_names'])
if system in ['EGI']:
raw_data._data[0:-1, :] = raw_data._data[0:-1, :] * 1e6
if system in ['CNT']:
raw_data._data = raw_data._data * 1e6
if system in ignore_channels_dict:
raw_data.drop_channels(ignore_channels_dict[system])
if system in drop_extra_chans_dict and drop_extra_chs:
raw_data.drop_channels(drop_extra_chans_dict[system])
return raw_data
def get_epochs(system):
"""Find, load and process the epoched data."""
cfg_local = get_cfg_local(system)
raw_data = get_raw_data(system)
if cfg_local['eventtype'] in raw_data.ch_names:
stim_channel = cfg_local['eventtype']
else:
stim_channel = 'STI 014'
if system == 'CNT':
events, event_id = mne.events_from_annotations(raw_data)
events[:, 0] = events[:, 0] + 1
else:
events = mne.find_events(raw_data, stim_channel=stim_channel,
shortest_event=1)
if isinstance(cfg_local['eventvalue'], np.ndarray):
event_id = list(cfg_local['eventvalue'].astype('int'))
else:
event_id = [int(cfg_local['eventvalue'])]
event_id = [id for id in event_id if id in events[:, 2]]
epochs = mne.Epochs(raw_data, events=events,
event_id=event_id,
tmin=-cfg_local['prestim'],
tmax=cfg_local['poststim'], baseline=None)
return epochs
def get_evoked(system):
"""Find, load and process the avg data."""
epochs = get_epochs(system)
return epochs.average(picks=np.arange(len(epochs.ch_names)))
def check_info_fields(expected, actual, has_raw_info, ignore_long=True):
"""
Check if info fields are equal.
Some fields are ignored.
"""
expected = expected.info.copy()
actual = actual.info.copy()
if not has_raw_info:
_remove_ignored_info_fields(expected)
_remove_ignored_info_fields(actual)
    if ignore_long:
_remove_long_info_fields(expected)
_remove_long_info_fields(actual)
assert_deep_almost_equal(expected, actual)
def check_data(expected, actual, system):
"""Check data for equality."""
decimal = 7
if system in system_decimal_accuracy_dict:
decimal = system_decimal_accuracy_dict[system]
np.testing.assert_almost_equal(expected, actual, decimal=decimal)
def assert_deep_almost_equal(expected, actual, *args, **kwargs):
"""
Assert that two complex structures have almost equal contents.
    Compares lists, dicts and tuples recursively. Checks numeric values
    using :func:`numpy.testing.assert_almost_equal` and checks all other
    values with :func:`numpy.testing.assert_equal`.
    Accepts additional positional and keyword arguments and passes them
    intact to assert_almost_equal() (that's how you specify comparison
    precision).
This code has been adapted from
https://github.com/larsbutler/oq-engine/blob/master/tests/utils/helpers.py
"""
is_root = '__trace' not in kwargs
trace = kwargs.pop('__trace', 'ROOT')
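    # '__trace' accumulates the path of keys/indices leading to the failing
    # element so the final AssertionError can report where in the nested
    # structure the mismatch occurred.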
if isinstance(expected, np.ndarray) and expected.size == 0:
expected = None
if isinstance(actual, np.ndarray) and actual.size == 0:
actual = None
try:
if isinstance(expected, (int, float, complex)):
np.testing.assert_almost_equal(expected, actual, *args, **kwargs)
elif isinstance(expected, (list, tuple, np.ndarray,
types.GeneratorType)):
if isinstance(expected, types.GeneratorType):
expected = list(expected)
actual = list(actual)
np.testing.assert_equal(len(expected), len(actual))
for index in range(len(expected)):
v1, v2 = expected[index], actual[index]
assert_deep_almost_equal(v1, v2,
__trace=repr(index), *args, **kwargs)
elif isinstance(expected, dict):
np.testing.assert_equal(set(expected), set(actual))
for key in expected:
assert_deep_almost_equal(expected[key], actual[key],
__trace=repr(key), *args, **kwargs)
else:
np.testing.assert_equal(expected, actual)
except AssertionError as exc:
exc.__dict__.setdefault('traces', []).append(trace)
if is_root:
trace = ' -> '.join(reversed(exc.traces))
message = ''
try:
message = exc.message
except AttributeError:
pass
exc = AssertionError("%s\nTRACE: %s" % (message, trace))
raise exc
def assert_warning_in_record(warning_message, warn_record):
"""Assert that a warning message is in the records."""
all_messages = [str(w.message) for w in warn_record]
assert warning_message in all_messages
|
import unittest
import mock
from kalliope.signals.order import Order
class TestOrder(unittest.TestCase):
def test_on_notification_received(self):
testing_order = Order()
# received valid notification skip_trigger
notification = "skip_trigger"
payload = {
"status": True
}
with mock.patch("kalliope.signals.order.order.Order.switch_trigger") as mock_switch_trigger_method:
testing_order.on_notification_received(notification=notification, payload=payload)
mock_switch_trigger_method.assert_called_once_with(payload)
# received valid notification skip_trigger_max_retry
notification = "skip_trigger_max_retry"
payload = {
"max_retry": 5
}
with mock.patch("kalliope.signals.order.order.Order.set_counter_max_retry") as mock_set_counter_max_retry_method:
testing_order.on_notification_received(notification=notification, payload=payload)
mock_set_counter_max_retry_method.assert_called_once()
# received valid notification skip_trigger_decrease_max_retry
notification = "skip_trigger_decrease_max_retry"
payload = None
with mock.patch(
"kalliope.signals.order.order.Order.decrease_max_retry") as mock_decrease_max_retry_method:
testing_order.on_notification_received(notification=notification, payload=payload)
mock_decrease_max_retry_method.assert_called_once()
def test_set_counter_max_retry(self):
# test valid payload
testing_order = Order()
payload = {
"max_retry": 5
}
testing_order.set_counter_max_retry(payload)
self.assertEqual(testing_order.counter_max_retry, 5)
# test invalid payload
testing_order = Order()
payload = {
"max_retry": -12
}
testing_order.set_counter_max_retry(payload)
self.assertEqual(testing_order.counter_max_retry, 0)
testing_order = Order()
payload = {
"wrong_key": 5
}
testing_order.set_counter_max_retry(payload)
self.assertEqual(testing_order.counter_max_retry, 0)
    def test_decrease_max_retry(self):
# counter should not move because 0 by default
testing_order = Order()
testing_order.decrease_max_retry()
self.assertEqual(testing_order.counter_max_retry, 0)
# update the counter
testing_order = Order()
testing_order.counter_max_retry = 5
testing_order.decrease_max_retry()
self.assertEqual(testing_order.counter_max_retry, 4)
# update the counter and reach 0
testing_order = Order()
testing_order.skip_trigger = True
testing_order.counter_max_retry = 1
testing_order.decrease_max_retry()
self.assertEqual(testing_order.counter_max_retry, 0)
self.assertFalse(testing_order.skip_trigger)
def test_switch_trigger(self):
# valid payload
testing_order = Order()
payload = {
"status": True
}
testing_order.switch_trigger(payload)
self.assertTrue(testing_order.skip_trigger)
testing_order = Order()
payload = {
"status": "True"
}
testing_order.switch_trigger(payload)
self.assertTrue(testing_order.skip_trigger)
testing_order = Order()
payload = {
"status": False
}
testing_order.switch_trigger(payload)
self.assertFalse(testing_order.skip_trigger)
testing_order = Order()
payload = {
"status": "False"
}
testing_order.switch_trigger(payload)
self.assertFalse(testing_order.skip_trigger)
# invalid payload
testing_order = Order()
payload = {
"non-existing": "False"
}
testing_order.switch_trigger(payload)
self.assertFalse(testing_order.skip_trigger)
testing_order = Order()
payload = {
"status": "test"
}
testing_order.switch_trigger(payload)
self.assertFalse(testing_order.skip_trigger)
if __name__ == '__main__':
unittest.main()
|
import logging
from pyemby import EmbyServer
import voluptuous as vol
from homeassistant.components.media_player import PLATFORM_SCHEMA, MediaPlayerEntity
from homeassistant.components.media_player.const import (
MEDIA_TYPE_CHANNEL,
MEDIA_TYPE_MOVIE,
MEDIA_TYPE_MUSIC,
MEDIA_TYPE_TVSHOW,
SUPPORT_NEXT_TRACK,
SUPPORT_PAUSE,
SUPPORT_PLAY,
SUPPORT_PREVIOUS_TRACK,
SUPPORT_SEEK,
SUPPORT_STOP,
)
from homeassistant.const import (
CONF_API_KEY,
CONF_HOST,
CONF_PORT,
CONF_SSL,
DEVICE_DEFAULT_NAME,
EVENT_HOMEASSISTANT_START,
EVENT_HOMEASSISTANT_STOP,
STATE_IDLE,
STATE_OFF,
STATE_PAUSED,
STATE_PLAYING,
)
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
MEDIA_TYPE_TRAILER = "trailer"
MEDIA_TYPE_GENERIC_VIDEO = "video"
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 8096
DEFAULT_SSL_PORT = 8920
DEFAULT_SSL = False
SUPPORT_EMBY = (
SUPPORT_PAUSE
| SUPPORT_PREVIOUS_TRACK
| SUPPORT_NEXT_TRACK
| SUPPORT_STOP
| SUPPORT_SEEK
| SUPPORT_PLAY
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_API_KEY): cv.string,
vol.Optional(CONF_HOST, default=DEFAULT_HOST): cv.string,
vol.Optional(CONF_PORT): cv.port,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
}
)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Emby platform."""
host = config.get(CONF_HOST)
key = config.get(CONF_API_KEY)
port = config.get(CONF_PORT)
ssl = config[CONF_SSL]
if port is None:
port = DEFAULT_SSL_PORT if ssl else DEFAULT_PORT
_LOGGER.debug("Setting up Emby server at: %s:%s", host, port)
emby = EmbyServer(host, key, port, ssl, hass.loop)
active_emby_devices = {}
inactive_emby_devices = {}
@callback
def device_update_callback(data):
"""Handle devices which are added to Emby."""
new_devices = []
active_devices = []
for dev_id in emby.devices:
active_devices.append(dev_id)
if (
dev_id not in active_emby_devices
and dev_id not in inactive_emby_devices
):
new = EmbyDevice(emby, dev_id)
active_emby_devices[dev_id] = new
new_devices.append(new)
elif dev_id in inactive_emby_devices:
if emby.devices[dev_id].state != "Off":
add = inactive_emby_devices.pop(dev_id)
active_emby_devices[dev_id] = add
_LOGGER.debug("Showing %s, item: %s", dev_id, add)
add.set_available(True)
if new_devices:
_LOGGER.debug("Adding new devices: %s", new_devices)
async_add_entities(new_devices, True)
@callback
def device_removal_callback(data):
"""Handle the removal of devices from Emby."""
if data in active_emby_devices:
rem = active_emby_devices.pop(data)
inactive_emby_devices[data] = rem
_LOGGER.debug("Inactive %s, item: %s", data, rem)
rem.set_available(False)
@callback
def start_emby(event):
"""Start Emby connection."""
emby.start()
async def stop_emby(event):
"""Stop Emby connection."""
await emby.stop()
emby.add_new_devices_callback(device_update_callback)
emby.add_stale_devices_callback(device_removal_callback)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, start_emby)
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_STOP, stop_emby)
class EmbyDevice(MediaPlayerEntity):
"""Representation of an Emby device."""
def __init__(self, emby, device_id):
"""Initialize the Emby device."""
_LOGGER.debug("New Emby Device initialized with ID: %s", device_id)
self.emby = emby
self.device_id = device_id
self.device = self.emby.devices[self.device_id]
self._available = True
self.media_status_last_position = None
self.media_status_received = None
async def async_added_to_hass(self):
"""Register callback."""
self.emby.add_update_callback(self.async_update_callback, self.device_id)
@callback
def async_update_callback(self, msg):
"""Handle device updates."""
# Check if we should update progress
if self.device.media_position:
if self.device.media_position != self.media_status_last_position:
self.media_status_last_position = self.device.media_position
self.media_status_received = dt_util.utcnow()
elif not self.device.is_nowplaying:
            # No position and nothing playing any more; reset the stored values
self.media_status_last_position = None
self.media_status_received = None
self.async_write_ha_state()
@property
def available(self):
"""Return True if entity is available."""
return self._available
def set_available(self, value):
"""Set available property."""
self._available = value
@property
def unique_id(self):
"""Return the id of this emby client."""
return self.device_id
@property
def supports_remote_control(self):
"""Return control ability."""
return self.device.supports_remote_control
@property
def name(self):
"""Return the name of the device."""
return f"Emby {self.device.name}" or DEVICE_DEFAULT_NAME
@property
def should_poll(self):
"""Return True if entity has to be polled for state."""
return False
@property
def state(self):
"""Return the state of the device."""
state = self.device.state
if state == "Paused":
return STATE_PAUSED
if state == "Playing":
return STATE_PLAYING
if state == "Idle":
return STATE_IDLE
if state == "Off":
return STATE_OFF
@property
def app_name(self):
"""Return current user as app_name."""
# Ideally the media_player object would have a user property.
return self.device.username
@property
def media_content_id(self):
"""Content ID of current playing media."""
return self.device.media_id
@property
def media_content_type(self):
"""Content type of current playing media."""
media_type = self.device.media_type
if media_type == "Episode":
return MEDIA_TYPE_TVSHOW
if media_type == "Movie":
return MEDIA_TYPE_MOVIE
if media_type == "Trailer":
return MEDIA_TYPE_TRAILER
if media_type == "Music":
return MEDIA_TYPE_MUSIC
if media_type == "Video":
return MEDIA_TYPE_GENERIC_VIDEO
if media_type == "Audio":
return MEDIA_TYPE_MUSIC
if media_type == "TvChannel":
return MEDIA_TYPE_CHANNEL
return None
@property
def media_duration(self):
"""Return the duration of current playing media in seconds."""
return self.device.media_runtime
@property
def media_position(self):
"""Return the position of current playing media in seconds."""
return self.media_status_last_position
@property
def media_position_updated_at(self):
"""
When was the position of the current playing media valid.
Returns value from homeassistant.util.dt.utcnow().
"""
return self.media_status_received
@property
def media_image_url(self):
"""Return the image URL of current playing media."""
return self.device.media_image_url
@property
def media_title(self):
"""Return the title of current playing media."""
return self.device.media_title
@property
def media_season(self):
"""Season of current playing media (TV Show only)."""
return self.device.media_season
@property
def media_series_title(self):
"""Return the title of the series of current playing media (TV)."""
return self.device.media_series_title
@property
def media_episode(self):
"""Return the episode of current playing media (TV only)."""
return self.device.media_episode
@property
def media_album_name(self):
"""Return the album name of current playing media (Music only)."""
return self.device.media_album_name
@property
def media_artist(self):
"""Return the artist of current playing media (Music track only)."""
return self.device.media_artist
@property
def media_album_artist(self):
"""Return the album artist of current playing media (Music only)."""
return self.device.media_album_artist
@property
def supported_features(self):
"""Flag media player features that are supported."""
if self.supports_remote_control:
return SUPPORT_EMBY
return 0
async def async_media_play(self):
"""Play media."""
await self.device.media_play()
async def async_media_pause(self):
"""Pause the media player."""
await self.device.media_pause()
async def async_media_stop(self):
"""Stop the media player."""
await self.device.media_stop()
async def async_media_next_track(self):
"""Send next track command."""
await self.device.media_next()
async def async_media_previous_track(self):
"""Send next track command."""
await self.device.media_previous()
async def async_media_seek(self, position):
"""Send seek command."""
await self.device.media_seek(position)
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import pickle
from absl._enum_module import enum
from absl.flags import _argument_parser
from absl.flags import _exceptions
from absl.flags import _flag
from absl.testing import absltest
from absl.testing import parameterized
class FlagTest(absltest.TestCase):
def setUp(self):
self.flag = _flag.Flag(
_argument_parser.ArgumentParser(),
_argument_parser.ArgumentSerializer(),
'fruit', 'apple', 'help')
def test_default_unparsed(self):
flag = _flag.Flag(
_argument_parser.ArgumentParser(),
_argument_parser.ArgumentSerializer(),
'fruit', 'apple', 'help')
self.assertEqual('apple', flag.default_unparsed)
flag = _flag.Flag(
_argument_parser.IntegerParser(),
_argument_parser.ArgumentSerializer(),
'number', '1', 'help')
self.assertEqual('1', flag.default_unparsed)
flag = _flag.Flag(
_argument_parser.IntegerParser(),
_argument_parser.ArgumentSerializer(),
'number', 1, 'help')
self.assertEqual(1, flag.default_unparsed)
def test_set_default_overrides_current_value(self):
self.assertEqual('apple', self.flag.value)
self.flag._set_default('orange')
self.assertEqual('orange', self.flag.value)
def test_set_default_overrides_current_value_when_not_using_default(self):
self.flag.using_default_value = False
self.assertEqual('apple', self.flag.value)
self.flag._set_default('orange')
self.assertEqual('apple', self.flag.value)
def test_pickle(self):
with self.assertRaisesRegexp(TypeError, "can't pickle Flag objects"):
pickle.dumps(self.flag)
def test_copy(self):
self.flag.value = 'orange'
with self.assertRaisesRegexp(
TypeError, 'Flag does not support shallow copies'):
copy.copy(self.flag)
flag2 = copy.deepcopy(self.flag)
self.assertEqual(flag2.value, 'orange')
flag2.value = 'mango'
self.assertEqual(flag2.value, 'mango')
self.assertEqual(self.flag.value, 'orange')
class BooleanFlagTest(parameterized.TestCase):
@parameterized.parameters(('', '(no help available)'),
('Is my test brilliant?', 'Is my test brilliant?'))
def test_help_text(self, helptext_input, helptext_output):
f = _flag.BooleanFlag('a_bool', False, helptext_input)
self.assertEqual(helptext_output, f.help)
class EnumFlagTest(parameterized.TestCase):
@parameterized.parameters(
('', '<apple|orange>: (no help available)'),
('Type of fruit.', '<apple|orange>: Type of fruit.'))
def test_help_text(self, helptext_input, helptext_output):
f = _flag.EnumFlag('fruit', 'apple', helptext_input, ['apple', 'orange'])
self.assertEqual(helptext_output, f.help)
def test_empty_values(self):
with self.assertRaises(ValueError):
_flag.EnumFlag('fruit', None, 'help', [])
class Fruit(enum.Enum):
APPLE = 1
ORANGE = 2
class EmptyEnum(enum.Enum):
pass
class EnumClassFlagTest(parameterized.TestCase):
@parameterized.parameters(
('', '<apple|orange>: (no help available)'),
('Type of fruit.', '<apple|orange>: Type of fruit.'))
def test_help_text_case_insensitive(self, helptext_input, helptext_output):
f = _flag.EnumClassFlag('fruit', None, helptext_input, Fruit)
self.assertEqual(helptext_output, f.help)
@parameterized.parameters(
('', '<APPLE|ORANGE>: (no help available)'),
('Type of fruit.', '<APPLE|ORANGE>: Type of fruit.'))
def test_help_text_case_sensitive(self, helptext_input, helptext_output):
f = _flag.EnumClassFlag(
'fruit', None, helptext_input, Fruit, case_sensitive=True)
self.assertEqual(helptext_output, f.help)
def test_requires_enum(self):
with self.assertRaises(TypeError):
_flag.EnumClassFlag('fruit', None, 'help', ['apple', 'orange'])
def test_requires_non_empty_enum_class(self):
with self.assertRaises(ValueError):
_flag.EnumClassFlag('empty', None, 'help', EmptyEnum)
def test_accepts_literal_default(self):
f = _flag.EnumClassFlag('fruit', Fruit.APPLE, 'A sample enum flag.', Fruit)
self.assertEqual(Fruit.APPLE, f.value)
def test_accepts_string_default(self):
f = _flag.EnumClassFlag('fruit', 'ORANGE', 'A sample enum flag.', Fruit)
self.assertEqual(Fruit.ORANGE, f.value)
def test_case_sensitive_rejects_default_with_wrong_case(self):
with self.assertRaises(_exceptions.IllegalFlagValueError):
_flag.EnumClassFlag(
'fruit', 'oranGe', 'A sample enum flag.', Fruit, case_sensitive=True)
def test_case_insensitive_accepts_string_default(self):
f = _flag.EnumClassFlag(
'fruit', 'oranGe', 'A sample enum flag.', Fruit, case_sensitive=False)
self.assertEqual(Fruit.ORANGE, f.value)
def test_default_value_does_not_exist(self):
with self.assertRaises(_exceptions.IllegalFlagValueError):
_flag.EnumClassFlag('fruit', 'BANANA', 'help', Fruit)
class MultiEnumClassFlagTest(parameterized.TestCase):
@parameterized.named_parameters(
('NoHelpSupplied', '', '<apple|orange>: (no help available);\n '
'repeat this option to specify a list of values', False),
('WithHelpSupplied', 'Type of fruit.',
'<APPLE|ORANGE>: Type of fruit.;\n '
'repeat this option to specify a list of values', True))
def test_help_text(self, helptext_input, helptext_output, case_sensitive):
f = _flag.MultiEnumClassFlag(
'fruit', None, helptext_input, Fruit, case_sensitive=case_sensitive)
self.assertEqual(helptext_output, f.help)
def test_requires_enum(self):
with self.assertRaises(TypeError):
_flag.MultiEnumClassFlag('fruit', None, 'help', ['apple', 'orange'])
def test_requires_non_empty_enum_class(self):
with self.assertRaises(ValueError):
_flag.MultiEnumClassFlag('empty', None, 'help', EmptyEnum)
def test_rejects_wrong_case_when_case_sensitive(self):
with self.assertRaisesRegex(_exceptions.IllegalFlagValueError,
'<APPLE|ORANGE>'):
_flag.MultiEnumClassFlag(
'fruit', ['APPLE', 'Orange'],
'A sample enum flag.',
Fruit,
case_sensitive=True)
def test_accepts_case_insensitive(self):
f = _flag.MultiEnumClassFlag('fruit', ['apple', 'APPLE'],
'A sample enum flag.', Fruit)
self.assertListEqual([Fruit.APPLE, Fruit.APPLE], f.value)
def test_accepts_literal_default(self):
f = _flag.MultiEnumClassFlag('fruit', Fruit.APPLE, 'A sample enum flag.',
Fruit)
self.assertListEqual([Fruit.APPLE], f.value)
def test_accepts_list_of_literal_default(self):
f = _flag.MultiEnumClassFlag('fruit', [Fruit.APPLE, Fruit.ORANGE],
'A sample enum flag.', Fruit)
self.assertListEqual([Fruit.APPLE, Fruit.ORANGE], f.value)
def test_accepts_string_default(self):
f = _flag.MultiEnumClassFlag('fruit', 'ORANGE', 'A sample enum flag.',
Fruit)
self.assertListEqual([Fruit.ORANGE], f.value)
def test_accepts_list_of_string_default(self):
f = _flag.MultiEnumClassFlag('fruit', ['ORANGE', 'APPLE'],
'A sample enum flag.', Fruit)
self.assertListEqual([Fruit.ORANGE, Fruit.APPLE], f.value)
def test_default_value_does_not_exist(self):
with self.assertRaisesRegex(_exceptions.IllegalFlagValueError,
'<apple|banana>'):
_flag.MultiEnumClassFlag('fruit', 'BANANA', 'help', Fruit)
if __name__ == '__main__':
absltest.main()
|
from flask import Flask, jsonify, Blueprint
from flasgger import swag_from, Swagger
try:
import simplejson as json
except ImportError:
import json
try:
from http import HTTPStatus
except ImportError:
import httplib as HTTPStatus
swagger_description = 'test_refs_basePath_API.yaml'
app = Flask(__name__)
swagger = Swagger(app, template_file=swagger_description)
api_blueprint = Blueprint('api', 'api', url_prefix='/api')
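# The Swagger template loaded from the YAML file above is reused by swag_from()
# below, so the POST payload is validated against its GetCostRequest definition.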
@api_blueprint.route('/get_cost', methods=['POST'])
@swag_from(swagger.template, definition='GetCostRequest', validation=True)
def get_cost():
result = dict(description='The best place',
cost=dict(currency='EUR', value=123456))
return jsonify([result])
app.register_blueprint(api_blueprint)
def test_swag(client, specs_data):
"""
    This test runs automatically in Travis CI
:param client: Flask app test client
:param specs_data: {'url': {swag_specs}} for every spec in app
"""
with client.get('apispec_1.json') as response:
assert response.status_code == HTTPStatus.OK
response_data = json.loads(response.data)
assert response_data.get('basePath') == '/api/', \
'wrong basePath: %r' % response_data
paths = response_data.get('paths')
assert '/get_cost' in paths, \
'get_cost NOK: %r' % response_data
        assert '/api/get_cost' not in paths, \
'/api/get_cost NOK: %r' % response_data
req_data = dict(level=2,
location=dict(name='my preferred location',
position=dict(latitude=47.352735,
longitude=0.593682)))
with client.post('/api/get_cost', data=json.dumps(req_data),
content_type='application/json') as response:
assert response.status_code == HTTPStatus.OK, \
'bad status: %r' % response.data
if __name__ == '__main__':
app.run(debug=True)
|
import logging
from homeassistant.components.alarm_control_panel import (
FORMAT_NUMBER,
AlarmControlPanelEntity,
)
from homeassistant.components.alarm_control_panel.const import (
SUPPORT_ALARM_ARM_AWAY,
SUPPORT_ALARM_ARM_CUSTOM_BYPASS,
SUPPORT_ALARM_ARM_HOME,
SUPPORT_ALARM_ARM_NIGHT,
)
from homeassistant.const import (
CONF_PIN,
STATE_ALARM_ARMED_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME,
STATE_ALARM_ARMED_NIGHT,
STATE_ALARM_ARMING,
STATE_ALARM_DISARMED,
STATE_ALARM_TRIGGERED,
)
from .const import (
CONF_CODE_ARM_REQUIRED,
CONF_CODE_DISARM_REQUIRED,
CONF_HA_STATES_TO_RISCO,
CONF_RISCO_STATES_TO_HA,
DATA_COORDINATOR,
DEFAULT_OPTIONS,
DOMAIN,
RISCO_ARM,
RISCO_GROUPS,
RISCO_PARTIAL_ARM,
)
from .entity import RiscoEntity
_LOGGER = logging.getLogger(__name__)
STATES_TO_SUPPORTED_FEATURES = {
STATE_ALARM_ARMED_AWAY: SUPPORT_ALARM_ARM_AWAY,
STATE_ALARM_ARMED_CUSTOM_BYPASS: SUPPORT_ALARM_ARM_CUSTOM_BYPASS,
STATE_ALARM_ARMED_HOME: SUPPORT_ALARM_ARM_HOME,
STATE_ALARM_ARMED_NIGHT: SUPPORT_ALARM_ARM_NIGHT,
}
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up the Risco alarm control panel."""
coordinator = hass.data[DOMAIN][config_entry.entry_id][DATA_COORDINATOR]
options = {**DEFAULT_OPTIONS, **config_entry.options}
entities = [
RiscoAlarm(coordinator, partition_id, config_entry.data[CONF_PIN], options)
for partition_id in coordinator.data.partitions
]
async_add_entities(entities, False)
class RiscoAlarm(AlarmControlPanelEntity, RiscoEntity):
"""Representation of a Risco partition."""
def __init__(self, coordinator, partition_id, code, options):
"""Init the partition."""
super().__init__(coordinator)
self._partition_id = partition_id
self._partition = self.coordinator.data.partitions[self._partition_id]
self._code = code
self._code_arm_required = options[CONF_CODE_ARM_REQUIRED]
self._code_disarm_required = options[CONF_CODE_DISARM_REQUIRED]
self._risco_to_ha = options[CONF_RISCO_STATES_TO_HA]
self._ha_to_risco = options[CONF_HA_STATES_TO_RISCO]
self._supported_states = 0
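        # Build the supported-features bitmask from the HA alarm states the
        # user mapped to Risco states in the config entry options.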
for state in self._ha_to_risco:
self._supported_states |= STATES_TO_SUPPORTED_FEATURES[state]
def _get_data_from_coordinator(self):
self._partition = self.coordinator.data.partitions[self._partition_id]
@property
def device_info(self):
"""Return device info for this device."""
return {
"identifiers": {(DOMAIN, self.unique_id)},
"name": self.name,
"manufacturer": "Risco",
}
@property
def name(self):
"""Return the name of the partition."""
return f"Risco {self._risco.site_name} Partition {self._partition_id}"
@property
def unique_id(self):
"""Return a unique id for that partition."""
return f"{self._risco.site_uuid}_{self._partition_id}"
@property
def state(self):
"""Return the state of the device."""
if self._partition.triggered:
return STATE_ALARM_TRIGGERED
if self._partition.arming:
return STATE_ALARM_ARMING
if self._partition.disarmed:
return STATE_ALARM_DISARMED
if self._partition.armed:
return self._risco_to_ha[RISCO_ARM]
if self._partition.partially_armed:
for group, armed in self._partition.groups.items():
if armed:
return self._risco_to_ha[group]
return self._risco_to_ha[RISCO_PARTIAL_ARM]
return None
@property
def supported_features(self):
"""Return the list of supported features."""
return self._supported_states
@property
def code_arm_required(self):
"""Whether the code is required for arm actions."""
return self._code_arm_required
@property
def code_format(self):
"""Return one or more digits/characters."""
return FORMAT_NUMBER
def _validate_code(self, code):
"""Validate given code."""
return code == self._code
async def async_alarm_disarm(self, code=None):
"""Send disarm command."""
if self._code_disarm_required and not self._validate_code(code):
_LOGGER.warning("Wrong code entered for disarming")
return
await self._call_alarm_method("disarm")
async def async_alarm_arm_home(self, code=None):
"""Send arm home command."""
await self._arm(STATE_ALARM_ARMED_HOME, code)
async def async_alarm_arm_away(self, code=None):
"""Send arm away command."""
await self._arm(STATE_ALARM_ARMED_AWAY, code)
async def async_alarm_arm_night(self, code=None):
"""Send arm night command."""
await self._arm(STATE_ALARM_ARMED_NIGHT, code)
async def async_alarm_arm_custom_bypass(self, code=None):
"""Send arm custom bypass command."""
await self._arm(STATE_ALARM_ARMED_CUSTOM_BYPASS, code)
async def _arm(self, mode, code):
if self._code_arm_required and not self._validate_code(code):
_LOGGER.warning("Wrong code entered for %s", mode)
return
risco_state = self._ha_to_risco[mode]
if not risco_state:
_LOGGER.warning("No mapping for mode %s", mode)
return
if risco_state in RISCO_GROUPS:
await self._call_alarm_method("group_arm", risco_state)
else:
await self._call_alarm_method(risco_state)
async def _call_alarm_method(self, method, *args):
alarm = await getattr(self._risco, method)(self._partition_id, *args)
self._partition = alarm.partitions[self._partition_id]
self.async_write_ha_state()
|
from homeassistant.components.mqtt.subscription import (
async_subscribe_topics,
async_unsubscribe_topics,
)
from homeassistant.core import callback
from tests.async_mock import ANY
from tests.common import async_fire_mqtt_message
async def test_subscribe_topics(hass, mqtt_mock, caplog):
"""Test subscription to topics."""
calls1 = []
@callback
def record_calls1(*args):
"""Record calls."""
calls1.append(args)
calls2 = []
@callback
def record_calls2(*args):
"""Record calls."""
calls2.append(args)
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{
"test_topic1": {"topic": "test-topic1", "msg_callback": record_calls1},
"test_topic2": {"topic": "test-topic2", "msg_callback": record_calls2},
},
)
async_fire_mqtt_message(hass, "test-topic1", "test-payload1")
assert len(calls1) == 1
assert calls1[0][0].topic == "test-topic1"
assert calls1[0][0].payload == "test-payload1"
assert len(calls2) == 0
async_fire_mqtt_message(hass, "test-topic2", "test-payload2")
assert len(calls1) == 1
assert len(calls2) == 1
assert calls2[0][0].topic == "test-topic2"
assert calls2[0][0].payload == "test-payload2"
await async_unsubscribe_topics(hass, sub_state)
async_fire_mqtt_message(hass, "test-topic1", "test-payload")
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
async def test_modify_topics(hass, mqtt_mock, caplog):
"""Test modification of topics."""
calls1 = []
@callback
def record_calls1(*args):
"""Record calls."""
calls1.append(args)
calls2 = []
@callback
def record_calls2(*args):
"""Record calls."""
calls2.append(args)
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{
"test_topic1": {"topic": "test-topic1", "msg_callback": record_calls1},
"test_topic2": {"topic": "test-topic2", "msg_callback": record_calls2},
},
)
async_fire_mqtt_message(hass, "test-topic1", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 0
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1_1", "msg_callback": record_calls1}},
)
async_fire_mqtt_message(hass, "test-topic1", "test-payload")
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 1
assert len(calls2) == 1
async_fire_mqtt_message(hass, "test-topic1_1", "test-payload")
assert len(calls1) == 2
assert calls1[1][0].topic == "test-topic1_1"
assert calls1[1][0].payload == "test-payload"
assert len(calls2) == 1
await async_unsubscribe_topics(hass, sub_state)
async_fire_mqtt_message(hass, "test-topic1_1", "test-payload")
async_fire_mqtt_message(hass, "test-topic2", "test-payload")
assert len(calls1) == 2
assert len(calls2) == 1
async def test_qos_encoding_default(hass, mqtt_mock, caplog):
"""Test default qos and encoding."""
@callback
def msg_callback(*args):
"""Do nothing."""
pass
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}},
)
mqtt_mock.async_subscribe.assert_called_once_with("test-topic1", ANY, 0, "utf-8")
async def test_qos_encoding_custom(hass, mqtt_mock, caplog):
"""Test custom qos and encoding."""
@callback
def msg_callback(*args):
"""Do nothing."""
pass
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{
"test_topic1": {
"topic": "test-topic1",
"msg_callback": msg_callback,
"qos": 1,
"encoding": "utf-16",
}
},
)
mqtt_mock.async_subscribe.assert_called_once_with("test-topic1", ANY, 1, "utf-16")
async def test_no_change(hass, mqtt_mock, caplog):
"""Test subscription to topics without change."""
@callback
def msg_callback(*args):
"""Do nothing."""
pass
sub_state = None
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}},
)
call_count = mqtt_mock.async_subscribe.call_count
sub_state = await async_subscribe_topics(
hass,
sub_state,
{"test_topic1": {"topic": "test-topic1", "msg_callback": msg_callback}},
)
assert call_count == mqtt_mock.async_subscribe.call_count
|
import functools
from typing import Optional, FrozenSet
from PyQt5.QtCore import pyqtSlot, QObject
from qutebrowser.config import config
from qutebrowser.misc import debugcachestats
from qutebrowser.utils import jinja, log
def set_register(obj: QObject,
                 stylesheet: Optional[str] = None, *,
update: bool = True) -> None:
"""Set the stylesheet for an object.
Also, register an update when the config is changed.
Args:
obj: The object to set the stylesheet for and register.
Must have a STYLESHEET attribute if stylesheet is not given.
stylesheet: The stylesheet to use.
update: Whether to update the stylesheet on config changes.
"""
observer = _StyleSheetObserver(obj, stylesheet, update)
observer.register()
@debugcachestats.register()
@functools.lru_cache()
def _render_stylesheet(stylesheet: str) -> str:
"""Render the given stylesheet jinja template."""
with jinja.environment.no_autoescape():
template = jinja.environment.from_string(stylesheet)
return template.render(conf=config.val)
def init() -> None:
config.instance.changed.connect(_render_stylesheet.cache_clear)
class _StyleSheetObserver(QObject):
"""Set the stylesheet on the given object and update it on changes.
Attributes:
_obj: The object to observe.
_stylesheet: The stylesheet template to use.
_options: The config options that the stylesheet uses. When it's not
necessary to listen for config changes, this attribute may be
None.
"""
def __init__(self, obj: QObject,
stylesheet: Optional[str], update: bool) -> None:
super().__init__()
self._obj = obj
self._update = update
# We only need to hang around if we are asked to update.
if update:
self.setParent(self._obj)
if stylesheet is None:
self._stylesheet: str = obj.STYLESHEET
else:
self._stylesheet = stylesheet
if update:
self._options: Optional[FrozenSet[str]] = jinja.template_config_variables(
self._stylesheet)
else:
self._options = None
def _get_stylesheet(self) -> str:
"""Format a stylesheet based on a template.
Return:
The formatted template as string.
"""
return _render_stylesheet(self._stylesheet)
@pyqtSlot(str)
def _maybe_update_stylesheet(self, option: str) -> None:
"""Update the stylesheet for obj if the option changed affects it."""
assert self._options is not None
if option in self._options:
self._obj.setStyleSheet(self._get_stylesheet())
def register(self) -> None:
"""Do a first update and listen for more."""
qss = self._get_stylesheet()
log.config.vdebug( # type: ignore[attr-defined]
"stylesheet for {}: {}".format(self._obj.__class__.__name__, qss))
self._obj.setStyleSheet(qss)
if self._update:
config.instance.changed.connect(self._maybe_update_stylesheet)
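# A minimal usage sketch (illustrative only, not executable on its own): a widget
# declares a jinja template in a STYLESHEET class attribute and calls
# set_register() once; the observer then re-applies the rendered stylesheet
# whenever a config option referenced in the template changes. The widget class
# and the option name below are hypothetical.
#
#     class ExampleWidget(QWidget):
#         STYLESHEET = "QWidget { font: {{ conf.fonts.statusbar }}; }"
#
#         def __init__(self, parent=None):
#             super().__init__(parent)
#             set_register(self)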
|
import logging
import hdate
from homeassistant.const import SUN_EVENT_SUNSET
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.sun import get_astral_event_date
import homeassistant.util.dt as dt_util
from . import DOMAIN, SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Jewish calendar sensor platform."""
if discovery_info is None:
return
sensors = [
JewishCalendarSensor(hass.data[DOMAIN], sensor, sensor_info)
for sensor, sensor_info in SENSOR_TYPES["data"].items()
]
sensors.extend(
JewishCalendarTimeSensor(hass.data[DOMAIN], sensor, sensor_info)
for sensor, sensor_info in SENSOR_TYPES["time"].items()
)
async_add_entities(sensors)
class JewishCalendarSensor(Entity):
"""Representation of an Jewish calendar sensor."""
def __init__(self, data, sensor, sensor_info):
"""Initialize the Jewish calendar sensor."""
self._location = data["location"]
self._type = sensor
self._name = f"{data['name']} {sensor_info[0]}"
self._icon = sensor_info[1]
self._hebrew = data["language"] == "hebrew"
self._candle_lighting_offset = data["candle_lighting_offset"]
self._havdalah_offset = data["havdalah_offset"]
self._diaspora = data["diaspora"]
self._state = None
self._prefix = data["prefix"]
self._holiday_attrs = {}
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unique_id(self) -> str:
"""Generate a unique id."""
return f"{self._prefix}_{self._type}"
@property
def icon(self):
"""Icon to display in the front end."""
return self._icon
@property
def state(self):
"""Return the state of the sensor."""
return self._state
async def async_update(self):
"""Update the state of the sensor."""
now = dt_util.now()
_LOGGER.debug("Now: %s Location: %r", now, self._location)
today = now.date()
sunset = dt_util.as_local(
get_astral_event_date(self.hass, SUN_EVENT_SUNSET, today)
)
_LOGGER.debug("Now: %s Sunset: %s", now, sunset)
daytime_date = hdate.HDate(today, diaspora=self._diaspora, hebrew=self._hebrew)
# The Jewish day starts after darkness (called "tzais") and finishes at
# sunset ("shkia"). The time in between is a gray area (aka "Bein
# Hashmashot" - literally: "in between the sun and the moon").
# For some sensors, it is more interesting to consider the date to be
# tomorrow based on sunset ("shkia"), for others based on "tzais".
# Hence the following variables.
after_tzais_date = after_shkia_date = daytime_date
today_times = self.make_zmanim(today)
if now > sunset:
after_shkia_date = daytime_date.next_day
if today_times.havdalah and now > today_times.havdalah:
after_tzais_date = daytime_date.next_day
self._state = self.get_state(daytime_date, after_shkia_date, after_tzais_date)
_LOGGER.debug("New value for %s: %s", self._type, self._state)
def make_zmanim(self, date):
"""Create a Zmanim object."""
return hdate.Zmanim(
date=date,
location=self._location,
candle_lighting_offset=self._candle_lighting_offset,
havdalah_offset=self._havdalah_offset,
hebrew=self._hebrew,
)
@property
def device_state_attributes(self):
"""Return the state attributes."""
if self._type != "holiday":
return {}
return self._holiday_attrs
def get_state(self, daytime_date, after_shkia_date, after_tzais_date):
"""For a given type of sensor, return the state."""
# Terminology note: by convention in py-libhdate library, "upcoming"
# refers to "current" or "upcoming" dates.
if self._type == "date":
return after_shkia_date.hebrew_date
if self._type == "weekly_portion":
# Compute the weekly portion based on the upcoming shabbat.
return after_tzais_date.upcoming_shabbat.parasha
if self._type == "holiday":
self._holiday_attrs["id"] = after_shkia_date.holiday_name
self._holiday_attrs["type"] = after_shkia_date.holiday_type.name
self._holiday_attrs["type_id"] = after_shkia_date.holiday_type.value
return after_shkia_date.holiday_description
if self._type == "omer_count":
return after_shkia_date.omer_day
if self._type == "daf_yomi":
return daytime_date.daf_yomi
return None
class JewishCalendarTimeSensor(JewishCalendarSensor):
"""Implement attrbutes for sensors returning times."""
@property
def state(self):
"""Return the state of the sensor."""
return dt_util.as_utc(self._state) if self._state is not None else None
@property
def device_class(self):
"""Return the class of this sensor."""
return "timestamp"
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {}
if self._state is None:
return attrs
attrs["timestamp"] = self._state.timestamp()
return attrs
def get_state(self, daytime_date, after_shkia_date, after_tzais_date):
"""For a given type of sensor, return the state."""
if self._type == "upcoming_shabbat_candle_lighting":
times = self.make_zmanim(
after_tzais_date.upcoming_shabbat.previous_day.gdate
)
return times.candle_lighting
if self._type == "upcoming_candle_lighting":
times = self.make_zmanim(
after_tzais_date.upcoming_shabbat_or_yom_tov.first_day.previous_day.gdate
)
return times.candle_lighting
if self._type == "upcoming_shabbat_havdalah":
times = self.make_zmanim(after_tzais_date.upcoming_shabbat.gdate)
return times.havdalah
if self._type == "upcoming_havdalah":
times = self.make_zmanim(
after_tzais_date.upcoming_shabbat_or_yom_tov.last_day.gdate
)
return times.havdalah
times = self.make_zmanim(dt_util.now()).zmanim
return times[self._type]
|
from homeassistant.components import mysensors
from homeassistant.components.device_tracker import DOMAIN
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.util import slugify
async def async_setup_scanner(hass, config, async_see, discovery_info=None):
"""Set up the MySensors device scanner."""
new_devices = mysensors.setup_mysensors_platform(
hass,
DOMAIN,
discovery_info,
MySensorsDeviceScanner,
device_args=(hass, async_see),
)
if not new_devices:
return False
for device in new_devices:
gateway_id = id(device.gateway)
dev_id = (gateway_id, device.node_id, device.child_id, device.value_type)
async_dispatcher_connect(
hass,
mysensors.const.CHILD_CALLBACK.format(*dev_id),
device.async_update_callback,
)
async_dispatcher_connect(
hass,
mysensors.const.NODE_CALLBACK.format(gateway_id, device.node_id),
device.async_update_callback,
)
return True
class MySensorsDeviceScanner(mysensors.device.MySensorsDevice):
"""Represent a MySensors scanner."""
def __init__(self, hass, async_see, *args):
"""Set up instance."""
super().__init__(*args)
self.async_see = async_see
self.hass = hass
async def _async_update_callback(self):
"""Update the device."""
await self.async_update()
node = self.gateway.sensors[self.node_id]
child = node.children[self.child_id]
position = child.values[self.value_type]
latitude, longitude, _ = position.split(",")
await self.async_see(
dev_id=slugify(self.name),
host_name=self.name,
gps=(latitude, longitude),
battery=node.battery_level,
attributes=self.device_state_attributes,
)
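# Illustrative note (doctest-style sketch, sample values are made up): the GPS
# child value arrives as a single "latitude,longitude,altitude" string, which
# _async_update_callback() splits before passing the coordinates to async_see().
#
# >>> latitude, longitude, _ = "52.5200,13.4050,34".split(",")
# >>> (latitude, longitude)
# ('52.5200', '13.4050')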
|
import os
import sys
curdir = os.path.dirname(os.path.abspath(__file__))
os.chdir(curdir)
sys.path.insert(0, '../')
sys.path.insert(0, '../../ceph')
import unittest
from cephstats import process_ceph_status
class TestCephStats(unittest.TestCase):
"""
Test collect ceph data
"""
def test_sample_data(self):
"""
Get ceph information from sample data
"""
        ret = {'rd': '8643000.0', 'wr': '4821000.0', 'iops': '481'}
        with open('sample.txt') as f:
            self.assertEqual(process_ceph_status(f.read()), ret)
def test_sample_data_noio(self):
"""
Get ceph information from sample data, missing the 'client io'
"""
f = open('sample-noio.txt')
self.assertEqual(process_ceph_status(f.read()), {})
f.close()
if __name__ == '__main__':
unittest.main()
|
import logging
from typing import Any, Dict, Optional
from pyownet import protocol
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import StateType
from .const import SENSOR_TYPES
_LOGGER = logging.getLogger(__name__)
class OneWire(Entity):
"""Implementation of a 1-Wire sensor."""
def __init__(self, name, device_file, sensor_type, device_info=None):
"""Initialize the sensor."""
self._name = f"{name} {sensor_type.capitalize()}"
self._device_file = device_file
self._device_class = SENSOR_TYPES[sensor_type][2]
self._unit_of_measurement = SENSOR_TYPES[sensor_type][1]
self._device_info = device_info
self._state = None
self._value_raw = None
@property
def name(self) -> Optional[str]:
"""Return the name of the sensor."""
return self._name
@property
def state(self) -> StateType:
"""Return the state of the sensor."""
if "count" in self._unit_of_measurement:
return int(self._state)
return self._state
@property
def device_class(self) -> Optional[str]:
"""Return the class of this device."""
return self._device_class
@property
def unit_of_measurement(self) -> Optional[str]:
"""Return the unit the value is expressed in."""
return self._unit_of_measurement
@property
def device_state_attributes(self) -> Optional[Dict[str, Any]]:
"""Return the state attributes of the sensor."""
return {"device_file": self._device_file, "raw_value": self._value_raw}
@property
def unique_id(self) -> Optional[str]:
"""Return a unique ID."""
return self._device_file
@property
def device_info(self) -> Optional[Dict[str, Any]]:
"""Return device specific attributes."""
return self._device_info
class OneWireProxy(OneWire):
"""Implementation of a 1-Wire sensor through owserver."""
def __init__(self, name, device_file, sensor_type, device_info, owproxy):
"""Initialize the sensor."""
super().__init__(name, device_file, sensor_type, device_info)
self._owproxy = owproxy
def _read_value_ownet(self):
"""Read a value from the owserver."""
return self._owproxy.read(self._device_file).decode().lstrip()
def update(self):
"""Get the latest data from the device."""
value = None
value_read = False
try:
value_read = self._read_value_ownet()
except protocol.Error as exc:
_LOGGER.error("Owserver failure in read(), got: %s", exc)
if value_read:
value = round(float(value_read), 1)
self._value_raw = float(value_read)
self._state = value
|
from homeassistant.core import State
from homeassistant.setup import async_setup_component
VALID_OPTION1 = "Option A"
VALID_OPTION2 = "Option B"
VALID_OPTION3 = "Option C"
VALID_OPTION4 = "Option D"
VALID_OPTION5 = "Option E"
VALID_OPTION6 = "Option F"
INVALID_OPTION = "Option X"
VALID_OPTION_SET1 = [VALID_OPTION1, VALID_OPTION2, VALID_OPTION3]
VALID_OPTION_SET2 = [VALID_OPTION4, VALID_OPTION5, VALID_OPTION6]
ENTITY = "input_select.test_select"
async def test_reproducing_states(hass, caplog):
"""Test reproducing Input select states."""
# Setup entity
assert await async_setup_component(
hass,
"input_select",
{
"input_select": {
"test_select": {"options": VALID_OPTION_SET1, "initial": VALID_OPTION1}
}
},
)
# These calls should do nothing as entities already in desired state
await hass.helpers.state.async_reproduce_state(
[
State(ENTITY, VALID_OPTION1),
# Should not raise
State("input_select.non_existing", VALID_OPTION1),
],
)
# Test that entity is in desired state
assert hass.states.get(ENTITY).state == VALID_OPTION1
# Try reproducing with different state
await hass.helpers.state.async_reproduce_state(
[
State(ENTITY, VALID_OPTION3),
# Should not raise
State("input_select.non_existing", VALID_OPTION3),
],
)
# Test that we got the desired result
assert hass.states.get(ENTITY).state == VALID_OPTION3
# Test setting state to invalid state
await hass.helpers.state.async_reproduce_state([State(ENTITY, INVALID_OPTION)])
# The entity state should be unchanged
assert hass.states.get(ENTITY).state == VALID_OPTION3
# Test setting a different option set
await hass.helpers.state.async_reproduce_state(
[State(ENTITY, VALID_OPTION5, {"options": VALID_OPTION_SET2})]
)
# These should fail if options weren't changed to VALID_OPTION_SET2
assert hass.states.get(ENTITY).attributes["options"] == VALID_OPTION_SET2
assert hass.states.get(ENTITY).state == VALID_OPTION5
|
import logging
import voluptuous as vol
from homeassistant.components.binary_sensor import (
DEVICE_CLASS_MOVING,
PLATFORM_SCHEMA,
BinarySensorEntity,
)
import homeassistant.helpers.config_validation as cv
from . import (
ATTR_CREATED,
ATTR_IPV4_ADDRESS,
ATTR_IPV6_ADDRESS,
ATTR_MEMORY,
ATTR_NODE_ID,
ATTR_NODE_NAME,
ATTR_REGION,
ATTR_VCPUS,
CONF_NODES,
DATA_LINODE,
)
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Node"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_NODES): vol.All(cv.ensure_list, [cv.string])}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Linode droplet sensor."""
linode = hass.data.get(DATA_LINODE)
nodes = config.get(CONF_NODES)
dev = []
for node in nodes:
node_id = linode.get_node_id(node)
if node_id is None:
_LOGGER.error("Node %s is not available", node)
return
dev.append(LinodeBinarySensor(linode, node_id))
add_entities(dev, True)
class LinodeBinarySensor(BinarySensorEntity):
"""Representation of a Linode droplet sensor."""
def __init__(self, li, node_id):
"""Initialize a new Linode sensor."""
self._linode = li
self._node_id = node_id
self._state = None
self.data = None
self._attrs = {}
self._name = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def is_on(self):
"""Return true if the binary sensor is on."""
return self._state
@property
def device_class(self):
"""Return the class of this sensor."""
return DEVICE_CLASS_MOVING
@property
def device_state_attributes(self):
"""Return the state attributes of the Linode Node."""
return self._attrs
def update(self):
"""Update state of sensor."""
self._linode.update()
if self._linode.data is not None:
for node in self._linode.data:
if node.id == self._node_id:
self.data = node
if self.data is not None:
self._state = self.data.status == "running"
self._attrs = {
ATTR_CREATED: self.data.created,
ATTR_NODE_ID: self.data.id,
ATTR_NODE_NAME: self.data.label,
ATTR_IPV4_ADDRESS: self.data.ipv4,
ATTR_IPV6_ADDRESS: self.data.ipv6,
ATTR_MEMORY: self.data.specs.memory,
ATTR_REGION: self.data.region.country,
ATTR_VCPUS: self.data.specs.vcpus,
}
self._name = self.data.label
|
from homeassistant import data_entry_flow
from homeassistant.components.vesync import DOMAIN, config_flow
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from tests.async_mock import patch
from tests.common import MockConfigEntry
async def test_abort_already_setup(hass):
"""Test if we abort because component is already setup."""
flow = config_flow.VeSyncFlowHandler()
flow.hass = hass
MockConfigEntry(domain=DOMAIN, title="user", data={"user": "pass"}).add_to_hass(
hass
)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "single_instance_allowed"
async def test_invalid_login_error(hass):
"""Test if we return error for invalid username and password."""
test_dict = {CONF_USERNAME: "user", CONF_PASSWORD: "pass"}
flow = config_flow.VeSyncFlowHandler()
flow.hass = hass
with patch("pyvesync.vesync.VeSync.login", return_value=False):
result = await flow.async_step_user(user_input=test_dict)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"base": "invalid_auth"}
async def test_config_flow_configuration_yaml(hass):
"""Test config flow with configuration.yaml user input."""
test_dict = {CONF_USERNAME: "user", CONF_PASSWORD: "pass"}
flow = config_flow.VeSyncFlowHandler()
flow.hass = hass
with patch("pyvesync.vesync.VeSync.login", return_value=True):
result = await flow.async_step_import(test_dict)
assert result["data"].get(CONF_USERNAME) == test_dict[CONF_USERNAME]
assert result["data"].get(CONF_PASSWORD) == test_dict[CONF_PASSWORD]
async def test_config_flow_user_input(hass):
"""Test config flow with user input."""
flow = config_flow.VeSyncFlowHandler()
flow.hass = hass
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
with patch("pyvesync.vesync.VeSync.login", return_value=True):
result = await flow.async_step_user(
{CONF_USERNAME: "user", CONF_PASSWORD: "pass"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["data"][CONF_USERNAME] == "user"
assert result["data"][CONF_PASSWORD] == "pass"
|
import voluptuous as vol
from homeassistant.components.light import (
ATTR_BRIGHTNESS,
PLATFORM_SCHEMA,
SUPPORT_BRIGHTNESS,
LightEntity,
)
from homeassistant.const import CONF_LIGHTS
import homeassistant.helpers.config_validation as cv
from .base_class import SWITCHES_SCHEMA, PilightBaseDevice
from .const import CONF_DIMLEVEL_MAX, CONF_DIMLEVEL_MIN
LIGHTS_SCHEMA = SWITCHES_SCHEMA.extend(
{
vol.Optional(CONF_DIMLEVEL_MIN, default=0): cv.positive_int,
vol.Optional(CONF_DIMLEVEL_MAX, default=15): cv.positive_int,
}
)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_LIGHTS): vol.Schema({cv.string: LIGHTS_SCHEMA})}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Pilight platform."""
switches = config.get(CONF_LIGHTS)
devices = []
for dev_name, dev_config in switches.items():
devices.append(PilightLight(hass, dev_name, dev_config))
add_entities(devices)
class PilightLight(PilightBaseDevice, LightEntity):
"""Representation of a Pilight switch."""
def __init__(self, hass, name, config):
"""Initialize a switch."""
super().__init__(hass, name, config)
self._dimlevel_min = config.get(CONF_DIMLEVEL_MIN)
self._dimlevel_max = config.get(CONF_DIMLEVEL_MAX)
@property
def brightness(self):
"""Return the brightness."""
return self._brightness
@property
def supported_features(self):
"""Flag supported features."""
return SUPPORT_BRIGHTNESS
def turn_on(self, **kwargs):
"""Turn the switch on by calling pilight.send service with on code."""
# Update brightness only if provided as an argument.
# This will allow the switch to keep its previous brightness level.
dimlevel = None
if ATTR_BRIGHTNESS in kwargs:
self._brightness = kwargs[ATTR_BRIGHTNESS]
# Calculate pilight brightness (as a range of 0 to 15)
# By creating a percentage
percentage = self._brightness / 255
# Then calculate the dimmer range (aka amount of available brightness steps).
dimrange = self._dimlevel_max - self._dimlevel_min
# Finally calculate the pilight brightness.
# We add dimlevel_min back in to ensure the minimum is always reached.
dimlevel = int(percentage * dimrange + self._dimlevel_min)
self.set_state(turn_on=True, dimlevel=dimlevel)
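# A self-contained sketch (not used by the platform) of the brightness-to-dimlevel
# mapping implemented in PilightLight.turn_on() above, assuming the schema defaults
# CONF_DIMLEVEL_MIN=0 and CONF_DIMLEVEL_MAX=15.
def _example_brightness_to_dimlevel(brightness, dimlevel_min=0, dimlevel_max=15):
    """Map a Home Assistant brightness (0-255) to a pilight dimlevel."""
    percentage = brightness / 255
    dimrange = dimlevel_max - dimlevel_min
    return int(percentage * dimrange + dimlevel_min)
# _example_brightness_to_dimlevel(255) -> 15
# _example_brightness_to_dimlevel(128) -> 7
# _example_brightness_to_dimlevel(0) -> 0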
|
from homeassistant.components.directv.const import DOMAIN
from homeassistant.config_entries import (
ENTRY_STATE_LOADED,
ENTRY_STATE_NOT_LOADED,
ENTRY_STATE_SETUP_RETRY,
)
from homeassistant.helpers.typing import HomeAssistantType
from tests.components.directv import setup_integration
from tests.test_util.aiohttp import AiohttpClientMocker
# pylint: disable=redefined-outer-name
async def test_config_entry_not_ready(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the DirecTV configuration entry not ready."""
entry = await setup_integration(hass, aioclient_mock, setup_error=True)
assert entry.state == ENTRY_STATE_SETUP_RETRY
async def test_unload_config_entry(
hass: HomeAssistantType, aioclient_mock: AiohttpClientMocker
) -> None:
"""Test the DirecTV configuration entry unloading."""
entry = await setup_integration(hass, aioclient_mock)
assert entry.entry_id in hass.data[DOMAIN]
assert entry.state == ENTRY_STATE_LOADED
await hass.config_entries.async_unload(entry.entry_id)
await hass.async_block_till_done()
assert entry.entry_id not in hass.data[DOMAIN]
assert entry.state == ENTRY_STATE_NOT_LOADED
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from mock import Mock
from mock import patch
from diamond.collector import Collector
from twemproxy import TwemproxyCollector
try:
import simplejson as json
except ImportError:
import json
from os import path
###############################################################################
class TestTwemproxyCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('TwemproxyCollector', {
'interval': 60,
'hosts': ['localhost:22222'],
})
self.collector = TwemproxyCollector(config, None)
def test_import(self):
self.assertTrue(TwemproxyCollector)
@patch.object(Collector, 'publish')
def test_should_work_with_real_data(self, publish_mock):
patch_raw_stats1 = patch.object(
TwemproxyCollector,
'get_raw_stats',
Mock(return_value=json.loads(self.getFixture(
'stats1').getvalue())))
patch_raw_stats2 = patch.object(
TwemproxyCollector,
'get_raw_stats',
Mock(return_value=json.loads(self.getFixture(
'stats2').getvalue())))
patch_raw_stats1.start()
self.collector.collect()
patch_raw_stats1.stop()
self.assertPublishedMany(publish_mock, {})
patch_raw_stats2.start()
self.collector.collect()
patch_raw_stats2.stop()
with open(path.join(path.dirname(__file__),
'metrics.json'), 'rb') as fp:
metrics = json.load(fp)
self.setDocExample(collector=self.collector.__class__.__name__,
metrics=metrics,
defaultpath=self.collector.config['path'])
self.assertPublishedMany(publish_mock, metrics)
###############################################################################
if __name__ == "__main__":
unittest.main()
|
from functools import reduce
import sys
import tensorflow as tf
import tensornetwork
from examples.wavefunctions.trotter import trotter_prepare_gates
def inner(psi1, psi2):
"""Computes the inner product <psi1|psi2>.
Args:
psi1: A tensor representing the first wavefunction.
psi2: A tensor representing the second wavefunction.
Returns:
inner_product: The vector inner product.
"""
return tf.reduce_sum(tf.math.conj(psi1) * psi2)
def apply_op(psi, op, n1, pbc=False):
"""Apply a local operator to a wavefunction.
The number of dimensions of the tensor representing the wavefunction `psi`
is taken to be the number of lattice sites `N`.
The operator acts nontrivially on sites `n1` to `n1 + k - 1` of psi, where
`0 <= n1 < N`, and is expected to have `2*k` dimensions.
The first `k` dimensions represent the output and the last `k` dimensions
represent the input, to be contracted with `psi`.
Args:
psi: An `N`-dimensional tensor representing the wavefunction.
op: Tensor with `2 * k` dimensions. The operator to apply.
n1: The number of the leftmost site at which to apply the operator.
pbc: If `True`, use periodic boundary conditions, so that site `N` is
identified with site `0`. Otherwise, site `N-1` has no neighbors to the
right.
Returns:
psi_final: The result of applying `op` to `psi`.
"""
n_psi = tensornetwork.Node(psi, backend="tensorflow")
site_edges = n_psi.get_all_edges()
site_edges, n_op = _apply_op_network(site_edges, op, n1, pbc)
n_res = tensornetwork.contract_between(
n_op, n_psi, output_edge_order=site_edges)
return n_res.tensor
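# A short usage sketch (illustrative only; relies on the TensorFlow backend
# imported above): apply a single-site Pauli-Z operator to site 0 of a two-site
# wavefunction. A k-site operator must have 2 * k dimensions, so a one-site
# operator is just a 2x2 matrix here.
def _example_apply_single_site_op():
  """Return Z applied to site 0 of (|00> + |11>) / sqrt(2)."""
  psi = tf.constant([[1.0, 0.0], [0.0, 1.0]]) / tf.sqrt(2.0)
  pauli_z = tf.constant([[1.0, 0.0], [0.0, -1.0]])
  # Flips the sign of the amplitudes where site 0 is in state |1>.
  return apply_op(psi, pauli_z, 0)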
def _apply_op_network(site_edges, op, n1, pbc=False):
N = len(site_edges)
op_sites = len(op.shape) // 2
n_op = tensornetwork.Node(op, backend="tensorflow")
for m in range(op_sites):
target_site = (n1 + m) % N if pbc else n1 + m
tensornetwork.connect(n_op[op_sites + m], site_edges[target_site])
site_edges[target_site] = n_op[m]
return site_edges, n_op
def expval(psi, op, n1, pbc=False):
"""Expectation value of a k-local operator, acting on sites n1 to n1 + k-1.
In braket notation: <psi|op(n1)|psi>
The number of dimensions of the tensor representing the wavefunction `psi`
is taken to be the number of lattice sites `N`.
Args:
psi: An `N`-dimensional tensor representing the wavefunction.
op: Tensor with `2 * k` dimensions. The operator to apply.
n1: The number of the leftmost site at which to apply the operator.
pbc: If `True`, use periodic boundary conditions, so that site `N` is
identified with site `0`. Otherwise, site `N-1` has no neighbors to the
right.
Returns:
expval: The expectation value.
"""
n_psi = tensornetwork.Node(psi, backend="tensorflow")
site_edges = n_psi.get_all_edges()
site_edges, n_op = _apply_op_network(site_edges, op, n1, pbc)
n_op_psi = n_op @ n_psi
n_psi_conj = tensornetwork.Node(tf.math.conj(psi), backend="tensorflow")
for i in range(len(site_edges)):
tensornetwork.connect(site_edges[i], n_psi_conj[i])
res = n_psi_conj @ n_op_psi
return res.tensor
def evolve_trotter(psi,
H,
step_size,
num_steps,
euclidean=False,
callback=None):
"""Evolve an initial wavefunction psi using a trotter decomposition of H.
If the evolution is euclidean, the wavefunction will be normalized after
each step.
Args:
psi: An `N`-dimensional tensor representing the initial wavefunction.
H: A list of `N-1` tensors representing nearest-neighbor operators.
step_size: The trotter step size.
num_steps: The number of trotter steps to take.
euclidean: If `True`, evolve in Euclidean (imaginary) time.
callback: Optional callback function for monitoring the evolution.
Returns:
psi_t: The final wavefunction.
t: The final time.
"""
num_sites = len(psi.shape)
layers = trotter_prepare_gates(H, step_size, num_sites, euclidean)
return _evolve_trotter_gates(
psi, layers, step_size, num_steps, euclidean=euclidean, callback=callback)
def _evolve_trotter_gates(psi,
layers,
step_size,
num_steps,
euclidean=False,
callback=None):
"""Evolve an initial wavefunction psi via gates specified in `layers`.
If the evolution is euclidean, the wavefunction will be normalized
after each step.
"""
t = 0.0
for i in range(num_steps):
psi = apply_circuit(psi, layers)
if euclidean:
psi = tf.divide(psi, tf.norm(psi))
t += step_size
if callback is not None:
callback(psi, t, i)
return psi, t
def evolve_trotter_defun(psi,
H,
step_size,
num_steps,
euclidean=False,
callback=None,
batch_size=1):
"""Evolve an initial wavefunction psi using a trotter decomposition of H.
If the evolution is euclidean, the wavefunction will be normalized after
each step.
  In this version, `batch_size` steps are "compiled" into a computational graph
  using `tf.function`, which greatly decreases overhead.
Args:
psi: An `N`-dimensional tensor representing the initial wavefunction.
H: A list of `N-1` tensors representing nearest-neighbor operators.
step_size: The trotter step size.
num_steps: The number of trotter steps to take.
euclidean: If `True`, evolve in Euclidean (imaginary) time.
callback: Optional callback function for monitoring the evolution.
batch_size: The number of steps to unroll in the computational graph.
Returns:
psi_t: The final wavefunction.
t: The final time.
"""
n_batches, rem = divmod(num_steps, batch_size)
step_size = tf.cast(step_size, psi.dtype)
num_sites = len(psi.shape)
layers = trotter_prepare_gates(H, step_size, num_sites, euclidean)
t = 0.0
for i in range(n_batches):
psi, t_b = _evolve_trotter_gates_defun(
psi, layers, step_size, batch_size, euclidean=euclidean, callback=None)
t += t_b
if callback is not None:
callback(psi, t, (i + 1) * batch_size - 1)
if rem > 0:
psi, t_b = _evolve_trotter_gates_defun(
psi, layers, step_size, rem, euclidean=euclidean, callback=None)
t += t_b
return psi, t
#@tf.contrib.eager.defun(autograph=True)
@tf.function(autograph=True)
def _evolve_trotter_gates_defun(psi,
layers,
step_size,
num_steps,
euclidean=False,
callback=None):
return _evolve_trotter_gates(
psi, layers, step_size, num_steps, euclidean=euclidean, callback=callback)
def apply_circuit(psi, layers):
"""Applies a quantum circuit to a wavefunction.
The circuit consists of a sequence of layers, with each layer consisting
of non-overlapping gates.
Args:
psi: An `N`-dimensional tensor representing the initial wavefunction.
layers: A sequence of layers. Each layer is a sequence of gates, with
each index of a layer corresponding to a site in `psi`. The `i`th gate
of a layer acts on sites `i` to `i + k - 1`, where `k` is the range of
the gate. Gates may not overlap within a layer.
Returns:
psi_t: The final wavefunction.
"""
num_sites = len(psi.shape)
n_psi = tensornetwork.Node(psi, backend="tensorflow")
site_edges = n_psi.get_all_edges()
nodes = [n_psi]
for gates in layers:
skip = 0
for n in range(num_sites):
if n < len(gates):
gate = gates[n]
else:
gate = None
if skip > 0:
if gate is not None:
raise ValueError(
"Overlapping gates in same layer at site {}!".format(n))
skip -= 1
elif gate is not None:
site_edges, n_gate = _apply_op_network(site_edges, gate, n)
nodes.append(n_gate)
# keep track of how many sites this gate included
op_sites = len(gate.shape) // 2
skip = op_sites - 1
# NOTE: This may not be the optimal order if transpose costs are considered.
n_psi = reduce(tensornetwork.contract_between, nodes)
n_psi.reorder_edges(site_edges)
return n_psi.tensor
|
class StateNameMixin:
"""
This class is inherited by classes which deal with state names of variables.
    The state names are stored in instances of `StateNameMixin`. The conversion between
    state numbers and names is also handled by methods in this class.
"""
def store_state_names(self, variables, cardinality, state_names):
"""
        Store the state names for the given variables and build the lookup maps.
Example
-------
>>> import numpy as np
>>> from pgmpy.factors.discrete import DiscreteFactor
>>> sn = {'speed': ['low', 'medium', 'high'],
... 'switch': ['on', 'off'],
... 'time': ['day', 'night']}
>>> phi = DiscreteFactor(variables=['speed', 'switch', 'time'],
... cardinality=[3, 2, 2],
... values=np.ones(12),
... state_names=sn)
>>> print(phi.state_names)
"""
if state_names:
for key, value in state_names.items():
if not isinstance(value, (list, tuple)):
raise ValueError(
"The state names must be for the form: {variable: list_of_states}"
)
elif not len(set(value)) == len(value):
raise ValueError(f"Repeated statenames for variable: {key}")
            # Make a copy, so that the original object doesn't get modified after operations.
self.state_names = state_names.copy()
# Create maps for easy access to specific state names of state numbers.
if state_names:
self.name_to_no = {}
self.no_to_name = {}
for key, values in self.state_names.items():
self.name_to_no[key] = {
name: no for no, name in enumerate(self.state_names[key])
}
self.no_to_name[key] = {
no: name for no, name in enumerate(self.state_names[key])
}
else:
self.state_names = {
var: list(range(int(cardinality[index])))
for index, var in enumerate(variables)
}
self.name_to_no = {
var: {i: i for i in range(int(cardinality[index]))}
for index, var in enumerate(variables)
}
self.no_to_name = self.name_to_no.copy()
def get_state_names(self, var, state_no):
"""
Given `var` and `state_no` returns the state name.
"""
if self.state_names:
return self.no_to_name[var][state_no]
else:
return state_no
def get_state_no(self, var, state_name):
"""
Given `var` and `state_name` return the state number.
"""
if self.state_names:
return self.name_to_no[var][state_name]
else:
return state_name
def add_state_names(self, phi1):
"""
Updates the attributes of this class with another factor `phi1`.
Parameters
----------
phi1: Instance of pgmpy.factors.DiscreteFactor
The factor whose states and variables need to be added.
"""
self.state_names.update(phi1.state_names)
self.name_to_no.update(phi1.name_to_no)
self.no_to_name.update(phi1.no_to_name)
def del_state_names(self, var_list):
"""
Deletes the state names for variables in var_list
"""
for var in var_list:
del self.state_names[var]
del self.name_to_no[var]
del self.no_to_name[var]
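# Continuing the DiscreteFactor example shown in store_state_names() above, the
# lookup helpers translate between state names and state numbers (illustrative
# doctest-style sketch):
#
# >>> phi.get_state_no('speed', 'medium')
# 1
# >>> phi.get_state_names('speed', 2)
# 'high'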
|
import logging
from kalliope.core import NeuronModule
from kalliope.core.NeuronModule import MissingParameterException
from kalliope.core.NotificationManager import NotificationManager
logging.basicConfig()
logger = logging.getLogger("kalliope")
class Signals(NeuronModule):
def __init__(self, **kwargs):
super(Signals, self).__init__(**kwargs)
# get the command
self.notification = kwargs.get('notification', None)
self.payload = kwargs.get('payload', None)
if self._is_parameters_ok():
logger.debug("[Signals] Send a notification to all subscribed classes, notification: '%s', payload: %s"
% (self.notification, self.payload))
NotificationManager.send_notification(self.notification, self.payload)
def _is_parameters_ok(self):
if self.notification is None:
            raise MissingParameterException("[Signals] This neuron requires a 'notification' parameter")
return True
|
import logging
import attr
import pytest
from qutebrowser.utils import usertypes, message
@attr.s
class Message:
"""Information about a shown message."""
level = attr.ib()
text = attr.ib()
class MessageMock:
"""Helper object for message_mock.
Attributes:
        Message: An object representing a message.
messages: A list of Message objects.
"""
def __init__(self):
self.messages = []
def _record_message(self, level, text):
log_levels = {
usertypes.MessageLevel.error: logging.ERROR,
usertypes.MessageLevel.info: logging.INFO,
usertypes.MessageLevel.warning: logging.WARNING,
}
log_level = log_levels[level]
logging.getLogger('messagemock').log(log_level, text)
self.messages.append(Message(level, text))
def getmsg(self, level=None):
"""Get the only message in self.messages.
        Fails an assertion if there are multiple or no messages.
Args:
level: The message level to check against, or None.
"""
assert len(self.messages) == 1
msg = self.messages[0]
if level is not None:
assert msg.level == level
return msg
def patch(self):
"""Start recording messages."""
message.global_bridge.show_message.connect(self._record_message)
message.global_bridge._connected = True
def unpatch(self):
"""Stop recording messages."""
message.global_bridge.show_message.disconnect(self._record_message)
@pytest.fixture
def message_mock():
"""Fixture to get a MessageMock."""
mmock = MessageMock()
mmock.patch()
yield mmock
mmock.unpatch()
|
from unittest import mock
from homeassistant.components import binary_sensor
from homeassistant.const import STATE_OFF, STATE_ON
def test_state():
"""Test binary sensor state."""
sensor = binary_sensor.BinarySensorEntity()
assert STATE_OFF == sensor.state
with mock.patch(
"homeassistant.components.binary_sensor.BinarySensorEntity.is_on",
new=False,
):
assert STATE_OFF == binary_sensor.BinarySensorEntity().state
with mock.patch(
"homeassistant.components.binary_sensor.BinarySensorEntity.is_on",
new=True,
):
assert STATE_ON == binary_sensor.BinarySensorEntity().state
def test_deprecated_base_class(caplog):
"""Test deprecated base class."""
class CustomBinarySensor(binary_sensor.BinarySensorDevice):
pass
CustomBinarySensor()
assert "BinarySensorDevice is deprecated, modify CustomBinarySensor" in caplog.text
|
import re
from collections import defaultdict
from typing import Optional, Pattern
from django.utils.functional import SimpleLazyObject
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import gettext
from django.utils.translation import gettext_lazy as _
from weblate.checks.base import SourceCheck, TargetCheck
PYTHON_PRINTF_MATCH = re.compile(
r"""
%( # initial %
(?:\((?P<key>[^)]+)\))? # Python style variables, like %(var)s
(?P<fullvar>
[ +#-]* # flags
(?:\d+)? # width
(?:\.\d+)? # precision
(hh|h|l|ll)? # length formatting
(?P<type>[a-zA-Z%]) # type (%s, %d, etc.)
|) # incomplete format string
)""",
re.VERBOSE,
)
PHP_PRINTF_MATCH = re.compile(
r"""
%( # initial %
(?:(?P<ord>\d+)\$)? # variable order, like %1$s
(?P<fullvar>
[ +#-]* # flags
(?:\d+)? # width
(?:\.\d+)? # precision
(hh|h|l|ll)? # length formatting
(?P<type>[a-zA-Z%]) # type (%s, %d, etc.)
|) # incomplete format string
)""",
re.VERBOSE,
)
C_PRINTF_MATCH = re.compile(
r"""
%( # initial %
(?:(?P<ord>\d+)\$)? # variable order, like %1$s
(?P<fullvar>
[ +#'-]* # flags
(?:\d+)? # width
(?:\.\d+)? # precision
(hh|h|l|ll)? # length formatting
(?P<type>[a-zA-Z%]) # type (%s, %d, etc.)
|) # incomplete format string
)""",
re.VERBOSE,
)
PYTHON_BRACE_MATCH = re.compile(
r"""
{( # initial {
| # blank for position based
(?P<field>
[0-9]+| # numerical
[_A-Za-z][_0-9A-Za-z]* # identifier
)
(?P<attr>
\.[_A-Za-z][_0-9A-Za-z]* # attribute identifier
|\[[^]]+\] # index identifier
)*
(?P<conversion>
![rsa]
)?
(?P<format_spec>
:
.? # fill
[<>=^]? # align
[+ -]? # sign
[#]? # alternate
0? # 0 prefix
(?:[1-9][0-9]*)? # width
,? # , separator
(?:\.[1-9][0-9]*)? # precision
[bcdeEfFgGnosxX%]? # type
)?
)} # trailing }
""",
re.VERBOSE,
)
C_SHARP_MATCH = re.compile(
r"""
{ # initial {
(?P<arg>\d+) # variable order
(?P<width>
[,-?\s]+ # flags
(?:\d+)? # width
(?:\.\d+)? # precision
)?
(?P<format>
: # ':' identifier
((
[a-zA-Z0#.,\s]* # type
(?:\d+)? # numerical
))?
)?
} # Ending }
""",
re.VERBOSE,
)
JAVA_MATCH = re.compile(
r"""
%((?![\s]) # initial % (no space after)
(?:(?P<ord>\d+)\$)? # variable order, like %1$s
(?P<fullvar>
[-.#+0,(]* # flags
(?:\d+)? # width
(?:\.\d+)? # precision
(?P<type>
((?<![tT])[tT][A-Za-z]|[A-Za-z])) # type (%s, %d, %te, etc.)
)
)
""",
re.VERBOSE,
)
JAVA_MESSAGE_MATCH = re.compile(
r"""
{ # initial {
(?P<arg>\d+) # variable order
\s*
(
,\s*(?P<format>[a-z]+) # format type
(,\s*(?P<style>\S+))? # format style
)?
\s*
} # Ending }
""",
re.VERBOSE,
)
I18NEXT_MATCH = re.compile(
r"""
(
\$t\((.+?)\) # nesting
|
{{(.+?)}} # interpolation
)
""",
re.VERBOSE,
)
ES_TEMPLATE_MATCH = re.compile(
r"""
\${ # start symbol
\s* # ignore whitespace
(([^}]+)) # variable name
\s* # ignore whitespace
} # end symbol
""",
re.VERBOSE,
)
PERCENT_MATCH = re.compile(r"(%([a-zA-Z0-9_]+)%)")
VUE_MATCH = re.compile(
r"(%?{([^}]+)}|@(?:\.[a-z]+)?:(?:\([^)]+\)|[a-z_.]+))", re.IGNORECASE
)
WHITESPACE = re.compile(r"\s+")
def c_format_is_position_based(string):
return "$" not in string and string != "%"
def python_format_is_position_based(string):
return "(" not in string and string != "%"
def name_format_is_position_based(string):
return string == ""
FLAG_RULES = {
"python-format": (PYTHON_PRINTF_MATCH, python_format_is_position_based),
"php-format": (PHP_PRINTF_MATCH, c_format_is_position_based),
"c-format": (C_PRINTF_MATCH, c_format_is_position_based),
"perl-format": (C_PRINTF_MATCH, c_format_is_position_based),
"javascript-format": (C_PRINTF_MATCH, c_format_is_position_based),
"python-brace-format": (PYTHON_BRACE_MATCH, name_format_is_position_based),
"c-sharp-format": (C_SHARP_MATCH, name_format_is_position_based),
"java-format": (JAVA_MATCH, c_format_is_position_based),
}
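# Illustrative doctest-style sketch (not used by the checks): what the regular
# expressions above extract and how the *_is_position_based helpers classify it.
# BaseFormatCheck.extract_matches() keeps the first group of each match, i.e. the
# placeholder without its leading "%".
#
# >>> [m[0] for m in PYTHON_PRINTF_MATCH.findall("Hello %(name)s, %d items")]
# ['(name)s', 'd']
# >>> python_format_is_position_based('(name)s')
# False
# >>> python_format_is_position_based('d')
# True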
class BaseFormatCheck(TargetCheck):
"""Base class for fomat string checks."""
regexp: Optional[Pattern[str]] = None
default_disabled = True
def check_target_unit(self, sources, targets, unit):
"""Check single unit, handling plurals."""
return any(self.check_generator(sources, targets, unit))
def check_generator(self, sources, targets, unit):
# Special case languages with single plural form
if len(sources) > 1 and len(targets) == 1:
yield self.check_format(sources[1], targets[0], False)
return
        # Use plural as source in case the singular lacks the format string and the plural has it
if (
len(sources) > 1
and not self.extract_matches(sources[0])
and self.extract_matches(sources[1])
):
source = sources[1]
else:
source = sources[0]
# Fetch plural examples
plural_examples = SimpleLazyObject(lambda: unit.translation.plural.examples)
# Check singular
yield self.check_format(
source,
targets[0],
            # Allow skipping the format string when there is a single plural form, or
            # in the special case of a 0/1 plural. This is technically wrong, but in
            # many cases 0 will not occur, so avoid triggering too many false positives.
len(sources) > 1
and (len(plural_examples[0]) == 1 or plural_examples[0] == ["0", "1"]),
)
# Do we have more to check?
if len(sources) == 1:
return
# Check plurals against plural from source
for i, target in enumerate(targets[1:]):
yield self.check_format(
sources[1], target, len(plural_examples[i + 1]) == 1
)
def format_string(self, string):
return string
def cleanup_string(self, text):
return text
def normalize(self, matches):
return matches
def extract_matches(self, string):
return [self.cleanup_string(x[0]) for x in self.regexp.findall(string)]
def check_format(self, source, target, ignore_missing):
"""Generic checker for format strings."""
if not target or not source:
return False
uses_position = True
# Calculate value
src_matches = self.extract_matches(source)
if src_matches:
uses_position = any(self.is_position_based(x) for x in src_matches)
tgt_matches = self.extract_matches(target)
if not uses_position:
src_matches = set(src_matches)
tgt_matches = set(tgt_matches)
if src_matches != tgt_matches:
# Ignore mismatch in percent position
if self.normalize(src_matches) == self.normalize(tgt_matches):
return False
# We can ignore missing format strings
# for first of plurals
if ignore_missing and tgt_matches < src_matches:
return False
if not uses_position:
missing = sorted(src_matches - tgt_matches)
extra = sorted(tgt_matches - src_matches)
else:
missing = []
extra = []
for i in range(min(len(src_matches), len(tgt_matches))):
if src_matches[i] != tgt_matches[i]:
missing.append(src_matches[i])
extra.append(tgt_matches[i])
missing.extend(src_matches[len(tgt_matches) :])
extra.extend(tgt_matches[len(src_matches) :])
return {"missing": missing, "extra": extra}
return False
def is_position_based(self, string):
return False
def check_single(self, source, target, unit):
"""We don't check target strings here."""
return False
def check_highlight(self, source, unit):
if self.should_skip(unit):
return []
ret = []
match_objects = self.regexp.finditer(source)
for match in match_objects:
ret.append((match.start(), match.end(), match.group()))
return ret
def format_result(self, result):
if result["missing"]:
yield gettext("Following format strings are missing: %s") % ", ".join(
self.format_string(x) for x in sorted(set(result["missing"]))
)
if result["extra"]:
yield gettext("Following format strings are extra: %s") % ", ".join(
self.format_string(x) for x in sorted(set(result["extra"]))
)
def get_description(self, check_obj):
unit = check_obj.unit
checks = self.check_generator(
unit.get_source_plurals(), unit.get_target_plurals(), unit
)
errors = []
# Merge plurals
results = defaultdict(list)
for result in checks:
if result:
for key, value in result.items():
results[key].extend(value)
if results:
errors.extend(self.format_result(results))
if errors:
return mark_safe("<br />".join(escape(error) for error in errors))
return super().get_description(check_obj)
class BasePrintfCheck(BaseFormatCheck):
"""Base class for printf based format checks."""
def __init__(self):
super().__init__()
self.regexp, self._is_position_based = FLAG_RULES[self.enable_string]
def is_position_based(self, string):
return self._is_position_based(string)
def normalize(self, matches):
return [m for m in matches if m != "%"]
def format_string(self, string):
return f"%{string}"
def cleanup_string(self, text):
"""Remove locale specific code from format string."""
if "'" in text:
return text.replace("'", "")
return text
class PythonFormatCheck(BasePrintfCheck):
"""Check for Python format string."""
check_id = "python_format"
name = _("Python format")
description = _("Python format string does not match source")
class PHPFormatCheck(BasePrintfCheck):
"""Check for PHP format string."""
check_id = "php_format"
name = _("PHP format")
description = _("PHP format string does not match source")
class CFormatCheck(BasePrintfCheck):
"""Check for C format string."""
check_id = "c_format"
name = _("C format")
description = _("C format string does not match source")
class PerlFormatCheck(CFormatCheck):
"""Check for Perl format string."""
check_id = "perl_format"
name = _("Perl format")
description = _("Perl format string does not match source")
class JavaScriptFormatCheck(CFormatCheck):
"""Check for JavaScript format string."""
check_id = "javascript_format"
name = _("JavaScript format")
description = _("JavaScript format string does not match source")
class PythonBraceFormatCheck(BaseFormatCheck):
"""Check for Python format string."""
check_id = "python_brace_format"
name = _("Python brace format")
description = _("Python brace format string does not match source")
regexp = PYTHON_BRACE_MATCH
def is_position_based(self, string):
return name_format_is_position_based(string)
def format_string(self, string):
return "{%s}" % string
class CSharpFormatCheck(BaseFormatCheck):
"""Check for C# format string."""
check_id = "c_sharp_format"
name = _("C# format")
description = _("C# format string does not match source")
regexp = C_SHARP_MATCH
def is_position_based(self, string):
return name_format_is_position_based(string)
def format_string(self, string):
return "{%s}" % string
class JavaFormatCheck(BasePrintfCheck):
"""Check for Java format string."""
check_id = "java_format"
name = _("Java format")
description = _("Java format string does not match source")
class JavaMessageFormatCheck(BaseFormatCheck):
"""Check for Java MessageFormat string."""
check_id = "java_messageformat"
name = _("Java MessageFormat")
description = _("Java MessageFormat string does not match source")
regexp = JAVA_MESSAGE_MATCH
def format_string(self, string):
return "{%s}" % string
def should_skip(self, unit):
if "auto-java-messageformat" in unit.all_flags and "{0" in unit.source:
return False
return super().should_skip(unit)
def check_format(self, source, target, ignore_missing):
"""Generic checker for format strings."""
if not target or not source:
return False
result = super().check_format(source, target, ignore_missing)
# Even number of quotes
if target.count("'") % 2 != 0:
if not result:
result = {"missing": [], "extra": []}
result["missing"].append("'")
return result
def format_result(self, result):
if "'" in result["missing"]:
result["missing"].remove("'")
yield gettext("You need to pair up an apostrophe with another one.")
yield from super().format_result(result)
class I18NextInterpolationCheck(BaseFormatCheck):
check_id = "i18next_interpolation"
name = _("i18next interpolation")
description = _("The i18next interpolation does not match source")
regexp = I18NEXT_MATCH
def cleanup_string(self, text):
return WHITESPACE.sub("", text)
class ESTemplateLiteralsCheck(BaseFormatCheck):
"""Check for ES template literals."""
check_id = "es_format"
name = _("ECMAScript template literals")
description = _("ECMAScript template literals do not match source")
regexp = ES_TEMPLATE_MATCH
def cleanup_string(self, text):
return WHITESPACE.sub("", text)
def format_string(self, string):
return f"${{{string}}}"
class PercentPlaceholdersCheck(BaseFormatCheck):
check_id = "percent_placeholders"
name = _("Percent placeholders")
description = _("The percent placeholders do not match source")
regexp = PERCENT_MATCH
class VueFormattingCheck(BaseFormatCheck):
check_id = "vue_format"
name = _("Vue I18n formatting")
description = _("The Vue I18n formatting does not match source")
regexp = VUE_MATCH
class MultipleUnnamedFormatsCheck(SourceCheck):
check_id = "unnamed_format"
name = _("Multiple unnamed variables")
description = _(
"There are multiple unnamed variables in the string, "
"making it impossible for translators to reorder them"
)
def check_source_unit(self, source, unit):
"""Check source string."""
rules = [FLAG_RULES[flag] for flag in unit.all_flags if flag in FLAG_RULES]
if not rules:
return False
found = 0
for regexp, is_position_based in rules:
for match in regexp.findall(source[0]):
if is_position_based(match[0]):
found += 1
if found >= 2:
return True
return False
|
from typing import Text, Union, Optional, Sequence, Tuple
from tensornetwork.tensor import Tensor
from tensornetwork import ncon_interface
def _check_backends(tensors: Sequence[Tensor], fname: str) -> Tuple[bool, str]:
""" Checks that each of tensors has the same backend, returning True and an
empty string if so, or False and an error string if not.
Args:
tensors: The list of tensors whose backends to check.
fname: The name of the calling function, which will go into the errstring.
Returns:
(flag, errstr): Whether all backends agree, and an error message if not.
"""
backend_names = [tensor.backend.name for tensor in tensors]
backends_check = [backend_names[0] == name for name in backend_names[1:]]
all_backends_same = all(backends_check)
errstr = ""
if not all_backends_same:
errstr = "All Tensors fed to " + fname + "must have the same backend."
errstr += "Backends were: \n"
errstr += str([name + "\n" for name in backend_names])
return all_backends_same, errstr
def tensordot(a: Tensor, b: Tensor,
axes: Union[int, Sequence[Sequence[int]]]) -> Tensor:
"""Do a tensordot (contraction) of Tensors `a` and `b` over the given axes.
The behaviour of this function largely matches that of np.tensordot.
Args:
a: A Tensor.
b: Another Tensor.
axes: Two lists of integers. These values are the contraction
axes. A single integer may also be supplied, in which case both
tensors are contracted over this axis.
Raises:
ValueError, if a and b have different backends.
Returns:
The result of the tensordot, a Tensor.
"""
if a.backend.name != b.backend.name:
errstr = "Tried to Tensordot Tensors with differing backends \n"
errstr += a.backend.name + "and " + b.backend.name + "."
raise ValueError(errstr)
out_array = a.backend.tensordot(a.array, b.array, axes)
out_tensor = Tensor(out_array, backend=a.backend)
return out_tensor
def reshape(tensor: Tensor, new_shape: Sequence[int]) -> Tensor:
"""Reshape Tensor to the given shape.
Args:
tensor: Tensor to reshape.
new_shape: The new shape.
Returns:
The reshaped Tensor.
"""
return tensor.reshape(new_shape)
def transpose(tensor: Tensor, perm: Optional[Sequence[int]] = None) -> Tensor:
""" Return a new `Tensor` transposed according to the permutation set
by `axes`. By default the axes are reversed.
Args:
axes: The permutation. If None (default) the index order is reversed.
Returns:
The transposed `Tensor`.
"""
return tensor.transpose(perm=perm)
def take_slice(tensor: Tensor, start_indices: Tuple[int, ...],
slice_sizes: Tuple[int, ...]) -> Tensor:
"""Obtains a slice of a Tensor based on start_indices and slice_sizes.
Args:
    tensor: A Tensor.
start_indices: Tuple of integers denoting start indices of slice.
slice_sizes: Tuple of integers denoting size of slice along each axis.
Returns:
The slice, a Tensor.
"""
sliced = tensor.backend.slice(tensor.array, start_indices, slice_sizes)
sliced_tensor = Tensor(sliced, backend=tensor.backend)
return sliced_tensor
def shape(tensor: Tensor) -> Tuple[int, ...]:
"""Get the shape of a Tensor as a tuple of integers.
Args:
    tensor: A Tensor.
Returns:
The shape of the input Tensor.
"""
return tensor.shape
def sqrt(tensor: Tensor) -> Tensor:
"""Take the square root (element wise) of a given Tensor."""
out_array = tensor.backend.sqrt(tensor.array)
return Tensor(out_array, backend=tensor.backend)
def outer(tensor1: Tensor, tensor2: Tensor) -> Tensor:
"""Calculate the outer product of the two given Tensors."""
tensors = [tensor1, tensor2]
all_backends_same, errstr = _check_backends(tensors, "outer")
if not all_backends_same:
raise ValueError(errstr)
out_data = tensor1.backend.outer_product(tensor1.array, tensor2.array)
return Tensor(out_data, backend=tensor1.backend)
def einsum(expression: Text, *tensors: Tensor, optimize: bool) -> Tensor:
"""Calculate sum of products of Tensors according to expression."""
all_backends_same, errstr = _check_backends(tensors, "einsum")
if not all_backends_same:
raise ValueError(errstr)
backend = tensors[0].backend
arrays = [tensor.array for tensor in tensors]
result_data = backend.einsum(expression, *arrays, optimize=optimize)
return Tensor(result_data, backend=backend)
def conj(tensor: Tensor) -> Tensor:
"""
Return the complex conjugate of `Tensor`
Args:
    tensor: A Tensor.
Returns:
The complex conjugated Tensor.
"""
return tensor.conj()
def hconj(tensor: Tensor, perm: Optional[Sequence[int]] = None) -> Tensor:
""" The Hermitian conjugated tensor; e.g. the complex conjugate tranposed
by the permutation set be `axes`. By default the axes are reversed.
Args:
tensor: The Tensor to conjugate.
axes: The permutation. If None (default) the index order is reversed.
Returns:
The Hermitian conjugated `Tensor`.
"""
return tensor.hconj(perm=perm)
def sin(tensor: Tensor) -> Tensor:
"""
Return sin of `Tensor`.
Args:
    tensor: A Tensor.
Returns:
Tensor
"""
out_array = tensor.backend.sin(tensor.array)
return Tensor(out_array, backend=tensor.backend)
def cos(tensor: Tensor) -> Tensor:
"""
Return cos of `Tensor`.
Args:
    tensor: A Tensor.
Returns:
Tensor
"""
out_array = tensor.backend.cos(tensor.array)
return Tensor(out_array, backend=tensor.backend)
def exp(tensor: Tensor) -> Tensor:
"""
Return elementwise exp of `Tensor`.
Args:
    tensor: A Tensor.
Returns:
Tensor
"""
out_array = tensor.backend.exp(tensor.array)
return Tensor(out_array, backend=tensor.backend)
def log(tensor: Tensor) -> Tensor:
"""
Return elementwise natural logarithm of `Tensor`.
Args:
    tensor: A Tensor.
Returns:
Tensor
"""
out_array = tensor.backend.log(tensor.array)
return Tensor(out_array, backend=tensor.backend)
def diagonal(tensor: Tensor, offset: int = 0, axis1: int = -2,
axis2: int = -1) -> Tensor:
"""
Extracts the offset'th diagonal from the matrix slice of tensor indexed
by (axis1, axis2).
Args:
tensor: A Tensor.
offset: Offset of the diagonal from the main diagonal.
axis1, axis2: Indices of the matrix slice to extract from.
Returns:
out : A 1D Tensor storing the elements of the selected diagonal.
"""
backend = tensor.backend
result = backend.diagonal(tensor.array, offset=offset, axis1=axis1,
axis2=axis2)
return Tensor(result, backend=backend)
def diagflat(tensor: Tensor, k: int = 0) -> Tensor:
"""
Flattens tensor and places its elements at the k'th diagonal of a new
(tensor.size + k, tensor.size + k) `Tensor` of zeros.
Args:
tensor: A Tensor.
k : The elements of tensor will be stored at this diagonal.
Returns:
out : A (tensor.size + k, tensor.size + k) `Tensor` with the elements
of tensor on its kth diagonal.
"""
backend = tensor.backend
result = backend.diagflat(tensor.array, k=k)
return Tensor(result, backend=backend)
def trace(tensor: Tensor, offset: int = 0, axis1: int = -2,
axis2: int = -1) -> Tensor:
"""Calculate the sum along diagonal entries of the given Tensor. The
  entries of the offset'th diagonal of the matrix slice of tensor indexed by
(axis1, axis2) are summed.
Args:
tensor: A Tensor.
offset: Offset of the diagonal from the main diagonal.
axis1, axis2: Indices of the matrix slice to extract from.
Returns:
out: The trace.
"""
backend = tensor.backend
result = backend.trace(tensor.array, offset=offset, axis1=axis1,
axis2=axis2)
return Tensor(result, backend=backend)
def sign(tensor: Tensor) -> Tensor:
""" Returns the sign of the elements of Tensor.
"""
backend = tensor.backend
result = backend.sign(tensor.array)
return Tensor(result, backend=backend)
# pylint: disable=redefined-builtin
def abs(tensor: Tensor) -> Tensor:
""" Returns the absolute value of the elements of Tensor.
"""
backend = tensor.backend
result = backend.abs(tensor.array)
return Tensor(result, backend=backend)
def pivot(tensor: Tensor, pivot_axis: int = -1) -> Tensor:
""" Reshapes tensor into a matrix about the pivot_axis. Equivalent to
tensor.reshape(prod(tensor.shape[:pivot_axis]),
prod(tensor.shape[pivot_axis:])).
Args:
tensor: The input tensor.
pivot_axis: Axis to pivot around.
"""
backend = tensor.backend
result = backend.pivot(tensor.array, pivot_axis=pivot_axis)
return Tensor(result, backend=backend)
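# Shape sketch (hypothetical helper, not part of the original module): the
# reshape formula from the docstring above, spelled out with plain NumPy.
def _pivot_shape_example():
  import numpy as np
  arr = np.zeros((2, 3, 4))
  assert arr.reshape(2, 3 * 4).shape == (2, 12)  # pivot_axis = 1
  assert arr.reshape(2 * 3, 4).shape == (6, 4)   # pivot_axis = 2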
def kron(tensorA: Tensor, tensorB: Tensor) -> Tensor:
"""
Compute the (tensor) kronecker product between `tensorA` and
`tensorB`. `tensorA` and `tensorB` can be tensors of any
even order (i.e. `tensorA.ndim % 2 == 0`, `tensorB.ndim % 2 == 0`).
The returned tensor has index ordering such that when reshaped into
  a matrix with `pivot = tensorA.ndim//2 + tensorB.ndim//2`,
the resulting matrix is identical to the result of numpy's
`np.kron(matrixA, matrixB)`, with `matrixA, matrixB` matrices
obtained from reshaping `tensorA` and `tensorB` into matrices with
`pivotA = tensorA.ndim//2`, `pivotB = tensorB.ndim//2`
Example:
    `tensorA.shape = (2,3,4,5)`, `tensorB.shape = (6,7)` ->
`kron(tensorA, tensorB).shape = (2, 3, 6, 4, 5, 7)`
Args:
tensorA: A `Tensor`.
tensorB: A `Tensor`.
Returns:
Tensor: The kronecker product.
Raises:
    ValueError: - If backends are not matching.
- If ndims of the input tensors are not even.
"""
  tensors = [tensorA, tensorB]
all_backends_same, errstr = _check_backends(tensors, "kron")
if not all_backends_same:
raise ValueError(errstr)
ndimA, ndimB = tensorA.ndim, tensorB.ndim
if ndimA % 2 != 0:
raise ValueError(f"kron only supports tensors with even number of legs."
f"found tensorA.ndim = {ndimA}")
if ndimB % 2 != 0:
raise ValueError(f"kron only supports tensors with even number of legs."
f"found tensorB.ndim = {ndimB}")
backend = tensorA.backend
incoming = list(range(ndimA // 2)) + list(range(ndimA, ndimA + ndimB // 2))
outgoing = list(range(ndimA // 2, ndimA)) + list(
range(ndimA + ndimB // 2, ndimA + ndimB))
arr = backend.transpose(
backend.outer_product(tensorA.array, tensorB.array), incoming + outgoing)
return Tensor(arr, backend=backend)
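# Illustrative check (hypothetical helper, not part of the original module):
# for two matrices, the incoming/outgoing ordering used above, pivoted about
# the middle axis, reproduces numpy's np.kron. Plain NumPy arrays are used
# here instead of `Tensor`s.
def _kron_matrix_example():
  import numpy as np
  a, b = np.arange(6.).reshape(2, 3), np.arange(20.).reshape(4, 5)
  outer_ab = np.tensordot(a, b, axes=0)       # [i, j, k, l] = a[i, j] * b[k, l]
  arr = np.transpose(outer_ab, (0, 2, 1, 3))  # incoming + outgoing for matrices
  assert np.allclose(arr.reshape(2 * 4, 3 * 5), np.kron(a, b))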
|
from datetime import timedelta
import logging
import dlipower
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA, SwitchEntity
from homeassistant.const import (
CONF_HOST,
CONF_NAME,
CONF_PASSWORD,
CONF_TIMEOUT,
CONF_USERNAME,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.util import Throttle
_LOGGER = logging.getLogger(__name__)
CONF_CYCLETIME = "cycletime"
DEFAULT_NAME = "DINRelay"
DEFAULT_USERNAME = "admin"
DEFAULT_PASSWORD = "admin"
DEFAULT_TIMEOUT = 20
DEFAULT_CYCLETIME = 2
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=5)
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_USERNAME, default=DEFAULT_USERNAME): cv.string,
vol.Optional(CONF_PASSWORD, default=DEFAULT_PASSWORD): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): vol.All(
vol.Coerce(int), vol.Range(min=1, max=600)
),
vol.Optional(CONF_CYCLETIME, default=DEFAULT_CYCLETIME): vol.All(
vol.Coerce(int), vol.Range(min=1, max=600)
),
}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Find and return DIN III Relay switch."""
host = config[CONF_HOST]
controller_name = config[CONF_NAME]
user = config[CONF_USERNAME]
pswd = config[CONF_PASSWORD]
tout = config[CONF_TIMEOUT]
cycl = config[CONF_CYCLETIME]
power_switch = dlipower.PowerSwitch(
hostname=host, userid=user, password=pswd, timeout=tout, cycletime=cycl
)
if not power_switch.verify():
_LOGGER.error("Could not connect to DIN III Relay")
return False
outlets = []
parent_device = DINRelayDevice(power_switch)
outlets.extend(
DINRelay(controller_name, parent_device, outlet) for outlet in power_switch[0:]
)
add_entities(outlets)
class DINRelay(SwitchEntity):
"""Representation of an individual DIN III relay port."""
def __init__(self, controller_name, parent_device, outlet):
"""Initialize the DIN III Relay switch."""
self._controller_name = controller_name
self._parent_device = parent_device
self._outlet = outlet
self._outlet_number = self._outlet.outlet_number
self._name = self._outlet.description
self._state = self._outlet.state == "ON"
@property
def name(self):
"""Return the display name of this relay."""
return f"{self._controller_name}_{self._name}"
@property
def is_on(self):
"""Return true if relay is on."""
return self._state
def turn_on(self, **kwargs):
"""Instruct the relay to turn on."""
self._outlet.on()
def turn_off(self, **kwargs):
"""Instruct the relay to turn off."""
self._outlet.off()
def update(self):
"""Trigger update for all switches on the parent device."""
self._parent_device.update()
outlet_status = self._parent_device.get_outlet_status(self._outlet_number)
self._name = outlet_status[1]
self._state = outlet_status[2] == "ON"
class DINRelayDevice:
"""Device representation for per device throttling."""
def __init__(self, power_switch):
"""Initialize the DINRelay device."""
self._power_switch = power_switch
self._statuslist = None
def get_outlet_status(self, outlet_number):
"""Get status of outlet from cached status list."""
return self._statuslist[outlet_number - 1]
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def update(self):
"""Fetch new state data for this device."""
self._statuslist = self._power_switch.statuslist()
|
import itertools
import posixpath
import socket
import sys
from http import client
import vobject
from radicale import app, httputils
from radicale import item as radicale_item
from radicale import pathutils, rights, storage, xmlutils
from radicale.log import logger
MIMETYPE_TAGS = {value: key for key, value in xmlutils.MIMETYPES.items()}
def prepare(vobject_items, path, content_type, permissions, parent_permissions,
tag=None, write_whole_collection=None):
if (write_whole_collection or permissions and not parent_permissions):
write_whole_collection = True
tag = radicale_item.predict_tag_of_whole_collection(
vobject_items, MIMETYPE_TAGS.get(content_type))
if not tag:
raise ValueError("Can't determine collection tag")
collection_path = pathutils.strip_path(path)
elif (write_whole_collection is not None and not write_whole_collection or
not permissions and parent_permissions):
write_whole_collection = False
if tag is None:
tag = radicale_item.predict_tag_of_parent_collection(vobject_items)
collection_path = posixpath.dirname(pathutils.strip_path(path))
props = None
stored_exc_info = None
items = []
try:
if tag:
radicale_item.check_and_sanitize_items(
vobject_items, is_collection=write_whole_collection, tag=tag)
if write_whole_collection and tag == "VCALENDAR":
vobject_components = []
vobject_item, = vobject_items
for content in ("vevent", "vtodo", "vjournal"):
vobject_components.extend(
getattr(vobject_item, "%s_list" % content, []))
vobject_components_by_uid = itertools.groupby(
sorted(vobject_components, key=radicale_item.get_uid),
radicale_item.get_uid)
for _, components in vobject_components_by_uid:
vobject_collection = vobject.iCalendar()
for component in components:
vobject_collection.add(component)
item = radicale_item.Item(collection_path=collection_path,
vobject_item=vobject_collection)
item.prepare()
items.append(item)
elif write_whole_collection and tag == "VADDRESSBOOK":
for vobject_item in vobject_items:
item = radicale_item.Item(collection_path=collection_path,
vobject_item=vobject_item)
item.prepare()
items.append(item)
elif not write_whole_collection:
vobject_item, = vobject_items
item = radicale_item.Item(collection_path=collection_path,
vobject_item=vobject_item)
item.prepare()
items.append(item)
if write_whole_collection:
props = {}
if tag:
props["tag"] = tag
if tag == "VCALENDAR" and vobject_items:
if hasattr(vobject_items[0], "x_wr_calname"):
calname = vobject_items[0].x_wr_calname.value
if calname:
props["D:displayname"] = calname
if hasattr(vobject_items[0], "x_wr_caldesc"):
caldesc = vobject_items[0].x_wr_caldesc.value
if caldesc:
props["C:calendar-description"] = caldesc
radicale_item.check_and_sanitize_props(props)
except Exception:
stored_exc_info = sys.exc_info()
# Use generator for items and delete references to free memory
# early
def items_generator():
while items:
yield items.pop(0)
return (items_generator(), tag, write_whole_collection, props,
stored_exc_info)
class ApplicationPutMixin:
def do_PUT(self, environ, base_prefix, path, user):
"""Manage PUT request."""
access = app.Access(self._rights, user, path)
if not access.check("w"):
return httputils.NOT_ALLOWED
try:
content = httputils.read_request_body(self.configuration, environ)
except RuntimeError as e:
logger.warning("Bad PUT request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
except socket.timeout:
logger.debug("Client timed out", exc_info=True)
return httputils.REQUEST_TIMEOUT
# Prepare before locking
content_type = environ.get("CONTENT_TYPE", "").split(";")[0]
try:
vobject_items = tuple(vobject.readComponents(content or ""))
except Exception as e:
logger.warning(
"Bad PUT request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
(prepared_items, prepared_tag, prepared_write_whole_collection,
prepared_props, prepared_exc_info) = prepare(
vobject_items, path, content_type,
bool(rights.intersect(access.permissions, "Ww")),
bool(rights.intersect(access.parent_permissions, "w")))
with self._storage.acquire_lock("w", user):
item = next(self._storage.discover(path), None)
parent_item = next(
self._storage.discover(access.parent_path), None)
if not parent_item:
return httputils.CONFLICT
write_whole_collection = (
isinstance(item, storage.BaseCollection) or
not parent_item.get_meta("tag"))
if write_whole_collection:
tag = prepared_tag
else:
tag = parent_item.get_meta("tag")
if write_whole_collection:
if ("w" if tag else "W") not in access.permissions:
return httputils.NOT_ALLOWED
elif "w" not in access.parent_permissions:
return httputils.NOT_ALLOWED
etag = environ.get("HTTP_IF_MATCH", "")
if not item and etag:
# Etag asked but no item found: item has been removed
return httputils.PRECONDITION_FAILED
if item and etag and item.etag != etag:
# Etag asked but item not matching: item has changed
return httputils.PRECONDITION_FAILED
match = environ.get("HTTP_IF_NONE_MATCH", "") == "*"
if item and match:
# Creation asked but item found: item can't be replaced
return httputils.PRECONDITION_FAILED
if (tag != prepared_tag or
prepared_write_whole_collection != write_whole_collection):
(prepared_items, prepared_tag, prepared_write_whole_collection,
prepared_props, prepared_exc_info) = prepare(
vobject_items, path, content_type,
bool(rights.intersect(access.permissions, "Ww")),
bool(rights.intersect(access.parent_permissions, "w")),
tag, write_whole_collection)
props = prepared_props
if prepared_exc_info:
logger.warning(
"Bad PUT request on %r: %s", path, prepared_exc_info[1],
exc_info=prepared_exc_info)
return httputils.BAD_REQUEST
if write_whole_collection:
try:
etag = self._storage.create_collection(
path, prepared_items, props).etag
except ValueError as e:
logger.warning(
"Bad PUT request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
else:
prepared_item, = prepared_items
if (item and item.uid != prepared_item.uid or
not item and parent_item.has_uid(prepared_item.uid)):
return self._webdav_error_response(
client.CONFLICT, "%s:no-uid-conflict" % (
"C" if tag == "VCALENDAR" else "CR"))
href = posixpath.basename(pathutils.strip_path(path))
try:
etag = parent_item.upload(href, prepared_item).etag
except ValueError as e:
logger.warning(
"Bad PUT request on %r: %s", path, e, exc_info=True)
return httputils.BAD_REQUEST
headers = {"ETag": etag}
return client.CREATED, headers, None
|
import sys
import simplejson as json
from docker_registry.core import exceptions
import docker_registry.storage as storage
store = storage.load()
def walk_all_tags():
for namespace_path in store.list_directory(store.repositories):
for repos_path in store.list_directory(namespace_path):
try:
for tag in store.list_directory(repos_path):
fname = tag.split('/').pop()
if not fname.startswith('tag_'):
continue
(namespace, repos) = repos_path.split('/')[-2:]
yield (namespace, repos, store.get_content(tag))
except OSError:
pass
def walk_ancestry(image_id):
try:
# Note(dmp): unicode patch
ancestry = store.get_json(store.image_ancestry_path(image_id))
return iter(ancestry)
except exceptions.FileNotFoundError:
print('Ancestry file for {0} is missing'.format(image_id))
return []
def get_image_checksum(image_id):
checksum_path = store.image_checksum_path(image_id)
if not store.exists(checksum_path):
return
checksum = store.get_content(checksum_path)
return checksum.strip()
def dump_json(all_repos, all_checksums, filename):
data = []
for ((namespace, repos), images) in all_repos.iteritems():
images_checksums = []
for i in set(images):
images_checksums.append({'id': i, 'checksum': all_checksums[i]})
data.append({
'namespace': namespace,
'repository': repos,
'images': images_checksums
})
with open(filename, 'w') as f:
json.dump(data, f, indent=4)
if __name__ == '__main__':
if len(sys.argv) < 2:
print('Usage: {0} <output_file>'.format(sys.argv[0]))
sys.exit(1)
all_repos = {}
all_checksums = {}
for (namespace, repos, image_id) in walk_all_tags():
key = (namespace, repos)
if key not in all_repos:
all_repos[key] = []
for i in walk_ancestry(image_id):
all_repos[key].append(i)
if i in all_checksums:
continue
all_checksums[i] = get_image_checksum(i)
dump_json(all_repos, all_checksums, sys.argv[1])
|
import asyncio
from datetime import timedelta
from pytile import async_login
from pytile.errors import SessionExpiredError, TileError
from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import callback
from homeassistant.helpers import aiohttp_client
from homeassistant.helpers.update_coordinator import (
CoordinatorEntity,
DataUpdateCoordinator,
UpdateFailed,
)
from .const import DATA_COORDINATOR, DOMAIN, LOGGER
PLATFORMS = ["device_tracker"]
DEVICE_TYPES = ["PHONE", "TILE"]
DEFAULT_ATTRIBUTION = "Data provided by Tile"
DEFAULT_ICON = "mdi:view-grid"
DEFAULT_UPDATE_INTERVAL = timedelta(minutes=2)
CONF_SHOW_INACTIVE = "show_inactive"
async def async_setup(hass, config):
"""Set up the Tile component."""
hass.data[DOMAIN] = {DATA_COORDINATOR: {}}
return True
async def async_setup_entry(hass, config_entry):
"""Set up Tile as config entry."""
websession = aiohttp_client.async_get_clientsession(hass)
client = await async_login(
config_entry.data[CONF_USERNAME],
config_entry.data[CONF_PASSWORD],
session=websession,
)
async def async_update_data():
"""Get new data from the API."""
try:
return await client.tiles.all()
except SessionExpiredError:
LOGGER.info("Tile session expired; creating a new one")
await client.async_init()
except TileError as err:
raise UpdateFailed(f"Error while retrieving data: {err}") from err
coordinator = DataUpdateCoordinator(
hass,
LOGGER,
name=config_entry.title,
update_interval=DEFAULT_UPDATE_INTERVAL,
update_method=async_update_data,
)
await coordinator.async_refresh()
hass.data[DOMAIN][DATA_COORDINATOR][config_entry.entry_id] = coordinator
for component in PLATFORMS:
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, component)
)
return True
async def async_unload_entry(hass, config_entry):
"""Unload a Tile config entry."""
unload_ok = all(
await asyncio.gather(
*[
hass.config_entries.async_forward_entry_unload(config_entry, component)
for component in PLATFORMS
]
)
)
if unload_ok:
hass.data[DOMAIN][DATA_COORDINATOR].pop(config_entry.entry_id)
return unload_ok
class TileEntity(CoordinatorEntity):
"""Define a generic Tile entity."""
def __init__(self, coordinator):
"""Initialize."""
super().__init__(coordinator)
self._attrs = {ATTR_ATTRIBUTION: DEFAULT_ATTRIBUTION}
self._name = None
self._unique_id = None
@property
def device_state_attributes(self):
"""Return the device state attributes."""
return self._attrs
@property
def icon(self):
"""Return the icon."""
return DEFAULT_ICON
@property
def name(self):
"""Return the name."""
return self._name
@property
def unique_id(self):
"""Return the unique ID of the entity."""
return self._unique_id
@callback
def _update_from_latest_data(self):
"""Update the entity from the latest data."""
raise NotImplementedError
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update():
"""Update the state."""
self._update_from_latest_data()
self.async_write_ha_state()
self.async_on_remove(self.coordinator.async_add_listener(update))
self._update_from_latest_data()
|
from __future__ import division
import numpy as np
import PIL.Image
import chainer
from chainercv import transforms
def scale_mask(mask, bbox, size):
"""Scale instance segmentation mask while keeping the aspect ratio.
This function exploits the sparsity of :obj:`mask` to speed up
resize operation.
The input image will be resized so that
the shorter edge will be scaled to length :obj:`size` after
resizing.
Args:
mask (array): An array whose shape is :math:`(R, H, W)`.
:math:`R` is the number of masks.
The dtype should be :obj:`numpy.bool`.
bbox (array): The bounding boxes around the masked region
of :obj:`mask`. This is expected to be the value
obtained by :obj:`bbox = chainercv.utils.mask_to_bbox(mask)`.
size (int): The length of the smaller edge.
Returns:
array:
An array whose shape is :math:`(R, H, W)`.
:math:`R` is the number of masks.
The dtype should be :obj:`numpy.bool`.
"""
xp = chainer.backends.cuda.get_array_module(mask)
mask = chainer.cuda.to_cpu(mask)
bbox = chainer.cuda.to_cpu(bbox)
R, H, W = mask.shape
if H < W:
out_size = (size, int(size * W / H))
scale = size / H
else:
out_size = (int(size * H / W), size)
scale = size / W
bbox[:, :2] = np.floor(bbox[:, :2])
bbox[:, 2:] = np.ceil(bbox[:, 2:])
bbox = bbox.astype(np.int32)
scaled_bbox = bbox * scale
scaled_bbox[:, :2] = np.floor(scaled_bbox[:, :2])
scaled_bbox[:, 2:] = np.ceil(scaled_bbox[:, 2:])
scaled_bbox = scaled_bbox.astype(np.int32)
out_mask = xp.zeros((R,) + out_size, dtype=np.bool)
for i, (m, bb, scaled_bb) in enumerate(
zip(mask, bbox, scaled_bbox)):
cropped_m = m[bb[0]:bb[2], bb[1]:bb[3]]
h = scaled_bb[2] - scaled_bb[0]
w = scaled_bb[3] - scaled_bb[1]
cropped_m = transforms.resize(
cropped_m[None].astype(np.float32),
(h, w),
interpolation=PIL.Image.NEAREST)[0]
if xp != np:
cropped_m = xp.array(cropped_m)
out_mask[i, scaled_bb[0]:scaled_bb[2],
scaled_bb[1]:scaled_bb[3]] = cropped_m
return out_mask
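# Shape sketch (illustrative comment only): for mask.shape == (R, 300, 400) and
# size == 600, the shorter edge H == 300 is scaled to 600, so the returned array
# has shape (R, 600, 800); each mask is resized within its bounding box and
# pasted back at the scaled box location.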
|
import unicodedata
import os
from functools import reduce
from collections import deque
###{standalone
import sys, re
import logging
logger = logging.getLogger("lark")
logger.addHandler(logging.StreamHandler())
# Set to highest level, since we have some warnings amongst the code
# By default, we should not output any log messages
logger.setLevel(logging.CRITICAL)
Py36 = (sys.version_info[:2] >= (3, 6))
NO_VALUE = object()
def classify(seq, key=None, value=None):
d = {}
for item in seq:
k = key(item) if (key is not None) else item
v = value(item) if (value is not None) else item
if k in d:
d[k].append(v)
else:
d[k] = [v]
return d
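# Illustrative usage (hypothetical helper, not part of the original module):
# classify groups the items of a sequence into a dict keyed by key(item).
def _classify_example():
    assert classify([1, 2, 3, 4, 5], key=lambda n: n % 2) == {1: [1, 3, 5], 0: [2, 4]}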
def _deserialize(data, namespace, memo):
if isinstance(data, dict):
if '__type__' in data: # Object
class_ = namespace[data['__type__']]
return class_.deserialize(data, memo)
elif '@' in data:
return memo[data['@']]
return {key:_deserialize(value, namespace, memo) for key, value in data.items()}
elif isinstance(data, list):
return [_deserialize(value, namespace, memo) for value in data]
return data
class Serialize(object):
"""Safe-ish serialization interface that doesn't rely on Pickle
Attributes:
__serialize_fields__ (List[str]): Fields (aka attributes) to serialize.
__serialize_namespace__ (list): List of classes that deserialization is allowed to instantiate.
Should include all field types that aren't builtin types.
"""
def memo_serialize(self, types_to_memoize):
memo = SerializeMemoizer(types_to_memoize)
return self.serialize(memo), memo.serialize()
def serialize(self, memo=None):
if memo and memo.in_types(self):
return {'@': memo.memoized.get(self)}
fields = getattr(self, '__serialize_fields__')
res = {f: _serialize(getattr(self, f), memo) for f in fields}
res['__type__'] = type(self).__name__
postprocess = getattr(self, '_serialize', None)
if postprocess:
postprocess(res, memo)
return res
@classmethod
def deserialize(cls, data, memo):
namespace = getattr(cls, '__serialize_namespace__', {})
namespace = {c.__name__:c for c in namespace}
fields = getattr(cls, '__serialize_fields__')
if '@' in data:
return memo[data['@']]
inst = cls.__new__(cls)
for f in fields:
try:
setattr(inst, f, _deserialize(data[f], namespace, memo))
except KeyError as e:
raise KeyError("Cannot find key for class", cls, e)
postprocess = getattr(inst, '_deserialize', None)
if postprocess:
postprocess()
return inst
class SerializeMemoizer(Serialize):
"A version of serialize that memoizes objects to reduce space"
__serialize_fields__ = 'memoized',
def __init__(self, types_to_memoize):
self.types_to_memoize = tuple(types_to_memoize)
self.memoized = Enumerator()
def in_types(self, value):
return isinstance(value, self.types_to_memoize)
def serialize(self):
return _serialize(self.memoized.reversed(), None)
@classmethod
def deserialize(cls, data, namespace, memo):
return _deserialize(data, namespace, memo)
try:
STRING_TYPE = basestring
except NameError: # Python 3
STRING_TYPE = str
import types
from functools import wraps, partial
from contextlib import contextmanager
Str = type(u'')
try:
classtype = types.ClassType # Python2
except AttributeError:
classtype = type # Python3
def smart_decorator(f, create_decorator):
if isinstance(f, types.FunctionType):
return wraps(f)(create_decorator(f, True))
elif isinstance(f, (classtype, type, types.BuiltinFunctionType)):
return wraps(f)(create_decorator(f, False))
elif isinstance(f, types.MethodType):
return wraps(f)(create_decorator(f.__func__, True))
elif isinstance(f, partial):
# wraps does not work for partials in 2.7: https://bugs.python.org/issue3445
return wraps(f.func)(create_decorator(lambda *args, **kw: f(*args[1:], **kw), True))
else:
return create_decorator(f.__func__.__call__, True)
try:
import regex
except ImportError:
regex = None
import sre_parse
import sre_constants
categ_pattern = re.compile(r'\\p{[A-Za-z_]+}')
def get_regexp_width(expr):
if regex:
# Since `sre_parse` cannot deal with Unicode categories of the form `\p{Mn}`, we replace these with
# a simple letter, which makes no difference as we are only trying to get the possible lengths of the regex
# match here below.
regexp_final = re.sub(categ_pattern, 'A', expr)
else:
if re.search(categ_pattern, expr):
raise ImportError('`regex` module must be installed in order to use Unicode categories.', expr)
regexp_final = expr
try:
return [int(x) for x in sre_parse.parse(regexp_final).getwidth()]
except sre_constants.error:
raise ValueError(expr)
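# Illustrative usage (hypothetical helper, not part of the original module):
# the returned pair is the minimum and maximum possible match length.
def _get_regexp_width_example():
    assert get_regexp_width(r'a{2,5}') == [2, 5]
    assert get_regexp_width(r'ab?c') == [2, 3]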
###}
_ID_START = 'Lu', 'Ll', 'Lt', 'Lm', 'Lo', 'Mn', 'Mc', 'Pc'
_ID_CONTINUE = _ID_START + ('Nd', 'Nl',)
def _test_unicode_category(s, categories):
if len(s) != 1:
return all(_test_unicode_category(char, categories) for char in s)
return s == '_' or unicodedata.category(s) in categories
def is_id_continue(s):
"""
Checks if all characters in `s` are alphanumeric characters (Unicode standard, so diacritics, indian vowels, non-latin
numbers, etc. all pass). Synonymous with a Python `ID_CONTINUE` identifier. See PEP 3131 for details.
"""
return _test_unicode_category(s, _ID_CONTINUE)
def is_id_start(s):
"""
Checks if all characters in `s` are alphabetic characters (Unicode standard, so diacritics, indian vowels, non-latin
numbers, etc. all pass). Synonymous with a Python `ID_START` identifier. See PEP 3131 for details.
"""
return _test_unicode_category(s, _ID_START)
def dedup_list(l):
"""Given a list (l) will removing duplicates from the list,
preserving the original order of the list. Assumes that
the list entries are hashable."""
dedup = set()
return [x for x in l if not (x in dedup or dedup.add(x))]
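# Illustrative usage (hypothetical helper, not part of the original module):
# only the first occurrence of each element survives, in the original order.
def _dedup_list_example():
    assert dedup_list([1, 2, 1, 3, 2]) == [1, 2, 3]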
try:
from contextlib import suppress # Python 3
except ImportError:
@contextmanager
def suppress(*excs):
'''Catch and dismiss the provided exception
>>> x = 'hello'
>>> with suppress(IndexError):
... x = x[10]
>>> x
'hello'
'''
try:
yield
except excs:
pass
try:
compare = cmp
except NameError:
def compare(a, b):
if a == b:
return 0
elif a > b:
return 1
return -1
class Enumerator(Serialize):
def __init__(self):
self.enums = {}
def get(self, item):
if item not in self.enums:
self.enums[item] = len(self.enums)
return self.enums[item]
def __len__(self):
return len(self.enums)
def reversed(self):
r = {v: k for k, v in self.enums.items()}
assert len(r) == len(self.enums)
return r
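# Illustrative usage (hypothetical helper, not part of the original module):
# get() hands out consecutive ids in order of first appearance; reversed()
# maps those ids back to the original items.
def _enumerator_example():
    enum = Enumerator()
    assert (enum.get('a'), enum.get('b'), enum.get('a')) == (0, 1, 0)
    assert enum.reversed() == {0: 'a', 1: 'b'}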
def combine_alternatives(lists):
"""
    Accepts a list of alternatives, and enumerates all their possible concatenations.
Examples:
>>> combine_alternatives([range(2), [4,5]])
[[0, 4], [0, 5], [1, 4], [1, 5]]
>>> combine_alternatives(["abc", "xy", '$'])
[['a', 'x', '$'], ['a', 'y', '$'], ['b', 'x', '$'], ['b', 'y', '$'], ['c', 'x', '$'], ['c', 'y', '$']]
>>> combine_alternatives([])
[[]]
"""
if not lists:
return [[]]
assert all(l for l in lists), lists
init = [[x] for x in lists[0]]
return reduce(lambda a,b: [i+[j] for i in a for j in b], lists[1:], init)
class FS:
open = open
exists = os.path.exists
def isascii(s):
""" str.isascii only exists in python3.7+ """
try:
return s.isascii()
except AttributeError:
try:
s.encode('ascii')
return True
except (UnicodeDecodeError, UnicodeEncodeError):
return False
class fzset(frozenset):
def __repr__(self):
return '{%s}' % ', '.join(map(repr, self))
def classify_bool(seq, pred):
true_elems = []
false_elems = []
for elem in seq:
if pred(elem):
true_elems.append(elem)
else:
false_elems.append(elem)
return true_elems, false_elems
def bfs(initial, expand):
open_q = deque(list(initial))
visited = set(open_q)
while open_q:
node = open_q.popleft()
yield node
for next_node in expand(node):
if next_node not in visited:
visited.add(next_node)
open_q.append(next_node)
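# Illustrative usage (hypothetical helper, not part of the original module):
# nodes are yielded in breadth-first order and each node is expanded at most once.
def _bfs_example():
    graph = {1: [2, 3], 2: [4], 3: [4], 4: []}
    assert list(bfs([1], lambda node: graph[node])) == [1, 2, 3, 4]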
def _serialize(value, memo):
if isinstance(value, Serialize):
return value.serialize(memo)
elif isinstance(value, list):
return [_serialize(elem, memo) for elem in value]
elif isinstance(value, frozenset):
return list(value) # TODO reversible?
elif isinstance(value, dict):
return {key:_serialize(elem, memo) for key, elem in value.items()}
# assert value is None or isinstance(value, (int, float, str, tuple)), value
return value
|
import asyncio
from logging import Logger
from typing import Any, Awaitable, Callable, Optional
from homeassistant.core import HassJob, HomeAssistant, callback
class Debouncer:
"""Class to rate limit calls to a specific command."""
def __init__(
self,
hass: HomeAssistant,
logger: Logger,
*,
cooldown: float,
immediate: bool,
function: Optional[Callable[..., Awaitable[Any]]] = None,
):
"""Initialize debounce.
immediate: indicate if the function needs to be called right away and
wait <cooldown> until executing next invocation.
function: optional and can be instantiated later.
"""
self.hass = hass
self.logger = logger
self._function = function
self.cooldown = cooldown
self.immediate = immediate
self._timer_task: Optional[asyncio.TimerHandle] = None
self._execute_at_end_of_timer: bool = False
self._execute_lock = asyncio.Lock()
self._job: Optional[HassJob] = None if function is None else HassJob(function)
@property
def function(self) -> Optional[Callable[..., Awaitable[Any]]]:
"""Return the function being wrapped by the Debouncer."""
return self._function
@function.setter
def function(self, function: Callable[..., Awaitable[Any]]) -> None:
"""Update the function being wrapped by the Debouncer."""
self._function = function
if self._job is None or function != self._job.target:
self._job = HassJob(function)
async def async_call(self) -> None:
"""Call the function."""
assert self.function is not None
if self._timer_task:
if not self._execute_at_end_of_timer:
self._execute_at_end_of_timer = True
return
# Locked means a call is in progress. Any call is good, so abort.
if self._execute_lock.locked():
return
if not self.immediate:
self._execute_at_end_of_timer = True
self._schedule_timer()
return
async with self._execute_lock:
# Abort if timer got set while we're waiting for the lock.
if self._timer_task:
return
await self.hass.async_add_hass_job(self._job) # type: ignore
self._schedule_timer()
async def _handle_timer_finish(self) -> None:
"""Handle a finished timer."""
assert self.function is not None
self._timer_task = None
if not self._execute_at_end_of_timer:
return
self._execute_at_end_of_timer = False
# Locked means a call is in progress. Any call is good, so abort.
if self._execute_lock.locked():
return
async with self._execute_lock:
# Abort if timer got set while we're waiting for the lock.
if self._timer_task:
return # type: ignore
try:
await self.hass.async_add_hass_job(self._job) # type: ignore
except Exception: # pylint: disable=broad-except
self.logger.exception("Unexpected exception from %s", self.function)
self._schedule_timer()
@callback
def async_cancel(self) -> None:
"""Cancel any scheduled call."""
if self._timer_task:
self._timer_task.cancel()
self._timer_task = None
self._execute_at_end_of_timer = False
@callback
def _schedule_timer(self) -> None:
"""Schedule a timer."""
self._timer_task = self.hass.loop.call_later(
self.cooldown,
lambda: self.hass.async_create_task(self._handle_timer_finish()),
)
|
import logging
import re
import requests
import voluptuous as vol
from homeassistant.components.device_tracker import (
DOMAIN,
PLATFORM_SCHEMA,
DeviceScanner,
)
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
HTTP_OK,
HTTP_UNAUTHORIZED,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
_DDWRT_DATA_REGEX = re.compile(r"\{(\w+)::([^\}]*)\}")
_MAC_REGEX = re.compile(r"(([0-9A-Fa-f]{1,2}\:){5}[0-9A-Fa-f]{1,2})")
DEFAULT_SSL = False
DEFAULT_VERIFY_SSL = True
CONF_WIRELESS_ONLY = "wireless_only"
DEFAULT_WIRELESS_ONLY = True
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PASSWORD): cv.string,
vol.Required(CONF_USERNAME): cv.string,
vol.Optional(CONF_SSL, default=DEFAULT_SSL): cv.boolean,
vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean,
vol.Optional(CONF_WIRELESS_ONLY, default=DEFAULT_WIRELESS_ONLY): cv.boolean,
}
)
def get_scanner(hass, config):
"""Validate the configuration and return a DD-WRT scanner."""
try:
return DdWrtDeviceScanner(config[DOMAIN])
except ConnectionError:
return None
class DdWrtDeviceScanner(DeviceScanner):
"""This class queries a wireless router running DD-WRT firmware."""
def __init__(self, config):
"""Initialize the DD-WRT scanner."""
self.protocol = "https" if config[CONF_SSL] else "http"
self.verify_ssl = config[CONF_VERIFY_SSL]
self.host = config[CONF_HOST]
self.username = config[CONF_USERNAME]
self.password = config[CONF_PASSWORD]
self.wireless_only = config[CONF_WIRELESS_ONLY]
self.last_results = {}
self.mac2name = {}
# Test the router is accessible
url = f"{self.protocol}://{self.host}/Status_Wireless.live.asp"
data = self.get_ddwrt_data(url)
if not data:
raise ConnectionError("Cannot connect to DD-Wrt router")
def scan_devices(self):
"""Scan for new devices and return a list with found device IDs."""
self._update_info()
return self.last_results
def get_device_name(self, device):
"""Return the name of the given device or None if we don't know."""
# If not initialised and not already scanned and not found.
if device not in self.mac2name:
url = f"{self.protocol}://{self.host}/Status_Lan.live.asp"
data = self.get_ddwrt_data(url)
if not data:
return None
dhcp_leases = data.get("dhcp_leases")
if not dhcp_leases:
return None
# Remove leading and trailing quotes and spaces
cleaned_str = dhcp_leases.replace('"', "").replace("'", "").replace(" ", "")
elements = cleaned_str.split(",")
num_clients = int(len(elements) / 5)
self.mac2name = {}
for idx in range(0, num_clients):
# The data is a single array
# every 5 elements represents one host, the MAC
# is the third element and the name is the first.
mac_index = (idx * 5) + 2
if mac_index < len(elements):
mac = elements[mac_index]
self.mac2name[mac] = elements[idx * 5]
return self.mac2name.get(device)
def _update_info(self):
"""Ensure the information from the DD-WRT router is up to date.
Return boolean if scanning successful.
"""
_LOGGER.debug("Checking ARP")
endpoint = "Wireless" if self.wireless_only else "Lan"
url = f"{self.protocol}://{self.host}/Status_{endpoint}.live.asp"
data = self.get_ddwrt_data(url)
if not data:
return False
self.last_results = []
if self.wireless_only:
active_clients = data.get("active_wireless")
else:
active_clients = data.get("arp_table")
if not active_clients:
return False
# The DD-WRT UI uses its own data format and then
# regex's out values so this is done here too
# Remove leading and trailing single quotes.
clean_str = active_clients.strip().strip("'")
elements = clean_str.split("','")
self.last_results.extend(item for item in elements if _MAC_REGEX.match(item))
return True
def get_ddwrt_data(self, url):
"""Retrieve data from DD-WRT and return parsed result."""
try:
response = requests.get(
url,
auth=(self.username, self.password),
timeout=4,
verify=self.verify_ssl,
)
except requests.exceptions.Timeout:
_LOGGER.exception("Connection to the router timed out")
return
if response.status_code == HTTP_OK:
return _parse_ddwrt_response(response.text)
if response.status_code == HTTP_UNAUTHORIZED:
# Authentication error
_LOGGER.exception(
"Failed to authenticate, check your username and password"
)
return
_LOGGER.error("Invalid response from DD-WRT: %s", response)
def _parse_ddwrt_response(data_str):
"""Parse the DD-WRT data format."""
return dict(_DDWRT_DATA_REGEX.findall(data_str))
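# Illustrative usage (hypothetical helper, not part of the original module):
# values embedded as "{name::value}" pairs, per _DDWRT_DATA_REGEX above, are
# collected into a plain dict.
def _parse_ddwrt_response_example():
    raw = "{wan_ipaddr::10.0.0.2}{active_wireless::'aa:bb:cc:dd:ee:ff','...'}"
    assert _parse_ddwrt_response(raw) == {
        "wan_ipaddr": "10.0.0.2",
        "active_wireless": "'aa:bb:cc:dd:ee:ff','...'",
    }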
|
from typing import NewType, TYPE_CHECKING
from redbot.core.commands import BadArgument
from redbot.core.i18n import Translator
from redbot.core.utils.chat_formatting import inline
_ = Translator("Economy", __file__)
# Duplicate of redbot.cogs.cleanup.converters.PositiveInt
PositiveInt = NewType("PositiveInt", int)
if TYPE_CHECKING:
positive_int = PositiveInt
else:
def positive_int(arg: str) -> int:
try:
ret = int(arg)
except ValueError:
raise BadArgument(_("{arg} is not an integer.").format(arg=inline(arg)))
if ret <= 0:
raise BadArgument(_("{arg} is not a positive integer.").format(arg=inline(arg)))
return ret
|
from collections import Counter
from itertools import chain
class FeatsFromSpacyDoc(object):
def __init__(self,
use_lemmas=False,
entity_types_to_censor=set(),
tag_types_to_censor=set(),
strip_final_period=False):
'''
Parameters
----------
use_lemmas : bool, optional
False by default
entity_types_to_censor : set, optional
empty by default
tag_types_to_censor : set, optional
empty by default
strip_final_period : bool, optional
if you know that spacy is going to mess up parsing, strip final period. default no.
'''
self._use_lemmas = use_lemmas
assert type(entity_types_to_censor) == set
assert type(tag_types_to_censor) == set
self._entity_types_to_censor = entity_types_to_censor
self._tag_types_to_censor = tag_types_to_censor
self._strip_final_period = strip_final_period
def _post_process_term(self, term):
if self._strip_final_period and (term.strip().endswith('.') or term.strip().endswith(',')):
term = term.strip()[:-1]
return term
def get_doc_metadata(self, doc):
return Counter()
def get_feats(self, doc):
'''
Parameters
----------
doc, Spacy Docs
Returns
-------
Counter (unigram, bigram) -> count
'''
ngram_counter = Counter()
for sent in doc.sents:
unigrams = self._get_unigram_feats(sent)
bigrams = self._get_bigram_feats(unigrams)
ngram_counter += Counter(chain(unigrams, bigrams))
return ngram_counter
def _get_bigram_feats(self, unigrams):
if len(unigrams) > 1:
bigrams = map(' '.join, zip(unigrams[:-1], unigrams[1:]))
else:
bigrams = []
return bigrams
def _get_unigram_feats(self, sent):
unigrams = []
for tok in sent:
if tok.pos_ not in ('PUNCT', 'SPACE', 'X'):
if tok.ent_type_ in self._entity_types_to_censor:
unigrams.append('_' + tok.ent_type_)
elif tok.tag_ in self._tag_types_to_censor:
unigrams.append(tok.tag_)
elif self._use_lemmas and tok.lemma_.strip():
unigrams.append(self._post_process_term(tok.lemma_.strip().lower()))
elif tok.lower_.strip():
unigrams.append(self._post_process_term(tok.lower_.strip()))
return unigrams
def has_metadata_term_list(self):
'''
Returns True if there is a meta data term list associated with object, False if not.
Returns
-------
bool
'''
return False
def get_top_model_term_lists(self):
raise Exception("No topic models associated with these features.")
|
import os
import pytest
from nikola import __main__
from .helper import cd
from .test_demo_build import prepare_demo_site
from .test_empty_build import ( # NOQA
test_archive_exists,
test_avoid_double_slash_in_rss,
test_check_files,
test_check_links,
test_index_in_sitemap,
)
@pytest.fixture(scope="module")
def build(target_dir):
"""Fill the site with demo content and build it."""
prepare_demo_site(target_dir)
build_dir = os.path.join(target_dir, "posts")
with cd(build_dir):
__main__.main(["build"])
|
from django.contrib.sites.models import Site
from django.core import mail
from django.test import TestCase
from django.test.utils import override_settings
from django.urls import reverse
from django.utils import timezone
import django_comments as comments
from django_comments.forms import CommentForm
from django_comments.moderation import moderator as moderator_stack
from zinnia.managers import PUBLISHED
from zinnia.models.author import Author
from zinnia.models.entry import Entry
from zinnia.moderator import EntryCommentModerator
from zinnia.signals import connect_discussion_signals
from zinnia.signals import disconnect_discussion_signals
from zinnia.signals import disconnect_entry_signals
from zinnia.tests.utils import skip_if_custom_user
@skip_if_custom_user
@override_settings(
TEMPLATES=[
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'OPTIONS': {
'loaders': [
'zinnia.tests.utils.VoidLoader',
]
}
}
]
)
class CommentModeratorTestCase(TestCase):
"""Test cases for the moderator"""
def setUp(self):
disconnect_entry_signals()
disconnect_discussion_signals()
self.site = Site.objects.get_current()
self.author = Author.objects.create(username='admin',
email='admin@example.com')
params = {'title': 'My test entry',
'content': 'My test entry',
'slug': 'my-test-entry',
'status': PUBLISHED}
self.entry = Entry.objects.create(**params)
self.entry.sites.add(self.site)
self.entry.authors.add(self.author)
def test_email(self):
comment = comments.get_model().objects.create(
comment='My Comment', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
self.assertEqual(len(mail.outbox), 0)
moderator = EntryCommentModerator(Entry)
moderator.email_reply = False
moderator.email_authors = False
moderator.mail_comment_notification_recipients = []
moderator.email(comment, self.entry, 'request')
self.assertEqual(len(mail.outbox), 0)
moderator.email_reply = True
moderator.email_authors = True
moderator.mail_comment_notification_recipients = ['admin@example.com']
moderator.email(comment, self.entry, 'request')
self.assertEqual(len(mail.outbox), 1)
def test_do_email_notification(self):
comment = comments.get_model().objects.create(
comment='My Comment', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
self.assertEqual(len(mail.outbox), 0)
moderator = EntryCommentModerator(Entry)
moderator.mail_comment_notification_recipients = ['admin@example.com']
moderator.do_email_notification(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 1)
def test_do_email_authors(self):
comment = comments.get_model().objects.create(
comment='My Comment', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
self.assertEqual(len(mail.outbox), 0)
moderator = EntryCommentModerator(Entry)
moderator.email_authors = True
moderator.mail_comment_notification_recipients = [
'admin@example.com', 'webmaster@example.com']
moderator.do_email_authors(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 0)
moderator.mail_comment_notification_recipients = []
moderator.do_email_authors(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 1)
def test_do_email_authors_without_email(self):
"""
https://github.com/Fantomas42/django-blog-zinnia/issues/145
"""
comment = comments.get_model().objects.create(
comment='My Comment', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
self.assertEqual(len(mail.outbox), 0)
moderator = EntryCommentModerator(Entry)
moderator.email_authors = True
moderator.mail_comment_notification_recipients = []
contributor = Author.objects.create(username='contributor',
email='contrib@example.com')
self.entry.authors.add(contributor)
moderator.do_email_authors(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(
set(mail.outbox[0].to),
set(['admin@example.com', 'contrib@example.com']))
mail.outbox = []
contributor.email = ''
contributor.save()
moderator.do_email_authors(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, ['admin@example.com'])
def test_do_email_reply(self):
comment = comments.get_model().objects.create(
comment='My Comment 1', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
moderator = EntryCommentModerator(Entry)
moderator.email_reply = True
moderator.mail_comment_notification_recipients = [
'admin@example.com', 'webmaster@example.com']
moderator.do_email_reply(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 0)
comment = comments.get_model().objects.create(
comment='My Comment 2', user_email='user_1@example.com',
content_object=self.entry, is_public=True,
submit_date=timezone.now(), site=self.site)
moderator.do_email_reply(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 0)
comment = comments.get_model().objects.create(
comment='My Comment 3', user_email='user_2@example.com',
content_object=self.entry, is_public=True,
submit_date=timezone.now(), site=self.site)
moderator.do_email_reply(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].bcc, ['user_1@example.com'])
comment = comments.get_model().objects.create(
comment='My Comment 4', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
moderator.do_email_reply(comment, self.entry, self.site)
self.assertEqual(len(mail.outbox), 2)
self.assertEqual(
set(mail.outbox[1].bcc),
set(['user_1@example.com', 'user_2@example.com']))
def test_moderate(self):
comment = comments.get_model().objects.create(
comment='My Comment', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
moderator = EntryCommentModerator(Entry)
moderator.auto_moderate_comments = True
moderator.spam_checker_backends = ()
self.assertTrue(moderator.moderate(comment, self.entry, 'request'))
moderator.auto_moderate_comments = False
self.assertFalse(moderator.moderate(comment, self.entry, 'request'))
moderator.spam_checker_backends = (
'zinnia.spam_checker.backends.all_is_spam',)
self.assertTrue(moderator.moderate(comment, self.entry, 'request'))
def test_moderate_comment_on_entry_without_author(self):
self.entry.authors.clear()
comment = comments.get_model().objects.create(
comment='My Comment', user=self.author, is_public=True,
content_object=self.entry, submit_date=timezone.now(),
site=self.site)
moderator = EntryCommentModerator(Entry)
moderator.auto_moderate_comments = False
moderator.spam_checker_backends = (
'zinnia.spam_checker.backends.all_is_spam',)
self.assertTrue(moderator.moderate(comment, self.entry, 'request'))
def test_integrity_error_on_duplicate_spam_comments(self):
class AllIsSpamModerator(EntryCommentModerator):
spam_checker_backends = [
'zinnia.spam_checker.backends.all_is_spam']
moderator_stack.unregister(Entry)
moderator_stack.register(Entry, AllIsSpamModerator)
datas = {'name': 'Jim Bob',
'email': 'jim.bob@example.com',
'url': '',
'comment': 'This is my comment'}
f = CommentForm(self.entry)
datas.update(f.initial)
url = reverse('comments-post-comment')
self.assertEqual(self.entry.comment_count, 0)
connect_discussion_signals()
self.client.post(url, datas)
self.client.post(url, datas)
disconnect_discussion_signals()
self.assertEqual(comments.get_model().objects.count(), 1)
entry_reloaded = Entry.objects.get(pk=self.entry.pk)
self.assertEqual(entry_reloaded.comment_count, 0)
def test_comment_count_denormalization(self):
class AllIsSpamModerator(EntryCommentModerator):
spam_checker_backends = [
'zinnia.spam_checker.backends.all_is_spam']
class NoMailNoSpamModerator(EntryCommentModerator):
def email(self, *ka, **kw):
pass
def moderate(self, *ka, **kw):
return False
datas = {'name': 'Jim Bob',
'email': 'jim.bob@example.com',
'url': '',
'comment': 'This is my comment'}
f = CommentForm(self.entry)
datas.update(f.initial)
url = reverse('comments-post-comment')
moderator_stack.unregister(Entry)
moderator_stack.register(Entry, AllIsSpamModerator)
self.assertEqual(self.entry.comment_count, 0)
connect_discussion_signals()
self.client.post(url, datas)
entry_reloaded = Entry.objects.get(pk=self.entry.pk)
self.assertEqual(entry_reloaded.comment_count, 0)
moderator_stack.unregister(Entry)
moderator_stack.register(Entry, NoMailNoSpamModerator)
datas['comment'] = 'This a published comment'
self.client.post(url, datas)
disconnect_discussion_signals()
entry_reloaded = Entry.objects.get(pk=self.entry.pk)
self.assertEqual(entry_reloaded.comment_count, 1)
|
import os.path
from django.core import mail
from django.urls import reverse
from weblate.trans.models import Announcement, Component, Project, Translation
from weblate.trans.tests.test_views import ViewTestCase
from weblate.utils.data import data_dir
from weblate.utils.files import remove_tree
class RemovalTest(ViewTestCase):
def test_translation(self):
self.make_manager()
kwargs = {"lang": "cs"}
kwargs.update(self.kw_component)
url = reverse("remove_translation", kwargs=kwargs)
response = self.client.post(url, {"confirm": ""}, follow=True)
self.assertContains(
response, "The slug does not match the one marked for deletion!"
)
response = self.client.post(url, {"confirm": "test/test/cs"}, follow=True)
self.assertContains(response, "Translation has been removed.")
def test_component(self):
self.make_manager()
url = reverse("remove_component", kwargs=self.kw_component)
response = self.client.post(url, {"confirm": ""}, follow=True)
self.assertContains(
response, "The slug does not match the one marked for deletion!"
)
response = self.client.post(url, {"confirm": "test/test"}, follow=True)
self.assertContains(
response, "Translation component was scheduled for removal."
)
def test_project(self):
self.make_manager()
url = reverse("remove_project", kwargs=self.kw_project)
response = self.client.post(url, {"confirm": ""}, follow=True)
self.assertContains(
response, "The slug does not match the one marked for deletion!"
)
response = self.client.post(url, {"confirm": "test"}, follow=True)
self.assertContains(response, "Project was scheduled for removal.")
def test_project_language(self):
self.make_manager()
self.assertEqual(Translation.objects.count(), 4)
url = reverse(
"remove-project-language",
kwargs={"project": self.project.slug, "lang": "cs"},
)
response = self.client.post(url, {"confirm": ""}, follow=True)
self.assertContains(
response, "The slug does not match the one marked for deletion!"
)
response = self.client.post(url, {"confirm": "test/cs"}, follow=True)
self.assertContains(response, "Language of the project was removed.")
self.assertEqual(Translation.objects.count(), 3)
class RenameTest(ViewTestCase):
def test_denied(self):
self.assertNotContains(
self.client.get(reverse("project", kwargs=self.kw_project)), "#rename"
)
self.assertNotContains(
self.client.get(reverse("component", kwargs=self.kw_component)), "#rename"
)
response = self.client.post(
reverse("rename", kwargs=self.kw_project), {"slug": "xxxx"}
)
self.assertEqual(response.status_code, 403)
response = self.client.post(
reverse("rename", kwargs=self.kw_component), {"slug": "xxxx"}
)
self.assertEqual(response.status_code, 403)
other = Project.objects.create(name="Other", slug="other")
response = self.client.post(
reverse("move", kwargs=self.kw_component), {"project": other.pk}
)
self.assertEqual(response.status_code, 403)
def test_move_component(self):
self.make_manager()
other = Project.objects.create(name="Other project", slug="other")
self.assertContains(
self.client.get(reverse("component", kwargs=self.kw_component)),
"Other project",
)
response = self.client.post(
reverse("move", kwargs=self.kw_component), {"project": other.pk}
)
self.assertRedirects(response, "/projects/other/test/")
component = Component.objects.get(pk=self.component.pk)
self.assertEqual(component.project.slug, "other")
self.assertIsNotNone(component.repository.last_remote_revision)
def test_rename_component(self):
self.make_manager()
self.assertContains(
self.client.get(reverse("component", kwargs=self.kw_component)), "#rename"
)
response = self.client.post(
reverse("rename", kwargs=self.kw_component), {"slug": "xxxx"}
)
self.assertRedirects(response, "/projects/test/xxxx/")
component = Component.objects.get(pk=self.component.pk)
self.assertEqual(component.slug, "xxxx")
self.assertIsNotNone(component.repository.last_remote_revision)
response = self.client.get(component.get_absolute_url())
self.assertContains(response, "/projects/test/xxxx/")
# Test rename redirect in middleware
response = self.client.get(reverse("component", kwargs=self.kw_component))
self.assertRedirects(response, component.get_absolute_url(), status_code=301)
def test_rename_project(self):
# Remove stale dir from previous tests
target = os.path.join(data_dir("vcs"), "xxxx")
if os.path.exists(target):
remove_tree(target)
self.make_manager()
self.assertContains(
self.client.get(reverse("project", kwargs=self.kw_project)), "#rename"
)
response = self.client.post(
reverse("rename", kwargs=self.kw_project), {"slug": "xxxx"}
)
self.assertRedirects(response, "/projects/xxxx/")
project = Project.objects.get(pk=self.project.pk)
self.assertEqual(project.slug, "xxxx")
for component in project.component_set.iterator():
self.assertIsNotNone(component.repository.last_remote_revision)
response = self.client.get(component.get_absolute_url())
self.assertContains(response, "/projects/xxxx/")
# Test rename redirect in middleware
response = self.client.get(reverse("project", kwargs=self.kw_project))
self.assertRedirects(response, project.get_absolute_url(), status_code=301)
def test_rename_project_conflict(self):
# Test rename conflict
self.make_manager()
Project.objects.create(name="Other project", slug="other")
response = self.client.post(
reverse("rename", kwargs=self.kw_project), {"slug": "other"}, follow=True
)
self.assertContains(response, "Project with this URL slug already exists.")
def test_rename_component_conflict(self):
# Test rename conflict
self.make_manager()
self.create_link_existing()
response = self.client.post(
reverse("rename", kwargs=self.kw_component), {"slug": "test2"}, follow=True
)
self.assertContains(
response, "Component with this URL slug already exists in the project."
)
class AnnouncementTest(ViewTestCase):
data = {"message": "Announcement testing", "category": "warning"}
outbox = 0
def perform_test(self, url):
response = self.client.post(url, self.data, follow=True)
self.assertEqual(response.status_code, 403)
self.make_manager()
# Add second user to receive notifications
self.project.add_user(self.anotheruser, "@Administration")
response = self.client.post(url, self.data, follow=True)
self.assertContains(response, self.data["message"])
self.assertEqual(len(mail.outbox), self.outbox)
def test_translation(self):
kwargs = {"lang": "cs"}
kwargs.update(self.kw_component)
url = reverse("announcement_translation", kwargs=kwargs)
self.perform_test(url)
def test_component(self):
url = reverse("announcement_component", kwargs=self.kw_component)
self.perform_test(url)
def test_project(self):
url = reverse("announcement_project", kwargs=self.kw_project)
self.perform_test(url)
def test_delete(self):
self.test_project()
message = Announcement.objects.all()[0]
self.client.post(reverse("announcement-delete", kwargs={"pk": message.pk}))
self.assertEqual(Announcement.objects.count(), 0)
def test_delete_deny(self):
message = Announcement.objects.create(message="test")
self.client.post(reverse("announcement-delete", kwargs={"pk": message.pk}))
self.assertEqual(Announcement.objects.count(), 1)
class AnnouncementNotifyTest(AnnouncementTest):
data = {"message": "Announcement testing", "category": "warning", "notify": "1"}
outbox = 1
|
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import Config, HomeAssistant
# Have to import for config_flow to work even if they are not used here
from .config_flow import smhi_locations # noqa: F401
from .const import DOMAIN # noqa: F401
DEFAULT_NAME = "smhi"
async def async_setup(hass: HomeAssistant, config: Config) -> bool:
"""Set up configured SMHI."""
# We allow setup only through config flow type of config
return True
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Set up SMHI forecast as config entry."""
hass.async_create_task(
hass.config_entries.async_forward_entry_setup(config_entry, "weather")
)
return True
async def async_unload_entry(hass: HomeAssistant, config_entry: ConfigEntry) -> bool:
"""Unload a config entry."""
await hass.config_entries.async_forward_entry_unload(config_entry, "weather")
return True
|
from homeassistant.components.google_assistant import helpers
from tests.async_mock import MagicMock
def mock_google_config_store(agent_user_ids=None):
"""Fake a storage for google assistant."""
store = MagicMock(spec=helpers.GoogleConfigStore)
if agent_user_ids is not None:
store.agent_user_ids = agent_user_ids
else:
store.agent_user_ids = {}
return store
class MockConfig(helpers.AbstractConfig):
"""Fake config that always exposes everything."""
def __init__(
self,
*,
secure_devices_pin=None,
should_expose=None,
should_2fa=None,
entity_config=None,
hass=None,
local_sdk_webhook_id=None,
local_sdk_user_id=None,
enabled=True,
agent_user_ids=None,
):
"""Initialize config."""
super().__init__(hass)
self._should_expose = should_expose
self._should_2fa = should_2fa
self._secure_devices_pin = secure_devices_pin
self._entity_config = entity_config or {}
self._local_sdk_webhook_id = local_sdk_webhook_id
self._local_sdk_user_id = local_sdk_user_id
self._enabled = enabled
self._store = mock_google_config_store(agent_user_ids)
@property
def enabled(self):
"""Return if Google is enabled."""
return self._enabled
@property
def secure_devices_pin(self):
"""Return secure devices pin."""
return self._secure_devices_pin
@property
def entity_config(self):
"""Return secure devices pin."""
return self._entity_config
@property
def local_sdk_webhook_id(self):
"""Return local SDK webhook id."""
return self._local_sdk_webhook_id
@property
def local_sdk_user_id(self):
"""Return local SDK webhook id."""
return self._local_sdk_user_id
def get_agent_user_id(self, context):
"""Get agent user ID making request."""
return context.user_id
def should_expose(self, state):
"""Expose it all."""
return self._should_expose is None or self._should_expose(state)
def should_2fa(self, state):
"""Expose it all."""
return self._should_2fa is None or self._should_2fa(state)
BASIC_CONFIG = MockConfig()
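# Illustrative sketch (not part of the original helpers): tests can build a more
# restrictive config by passing predicates. The entity-id prefix and the agent
# user id below are assumptions made for the example.
def _example_restricted_config():
    """Return a MockConfig that only exposes lights and never requires 2FA."""
    return MockConfig(
        should_expose=lambda state: state.entity_id.startswith("light."),
        should_2fa=lambda state: False,
        agent_user_ids={"example-agent-user-id": {}},
    )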
DEMO_DEVICES = [
{
"id": "light.kitchen_lights",
"name": {"name": "Kitchen Lights"},
"traits": [
"action.devices.traits.OnOff",
"action.devices.traits.Brightness",
"action.devices.traits.ColorSetting",
],
"type": "action.devices.types.LIGHT",
"willReportState": False,
},
{
"id": "switch.ac",
"name": {"name": "AC"},
"traits": ["action.devices.traits.OnOff"],
"type": "action.devices.types.OUTLET",
"willReportState": False,
},
{
"id": "switch.decorative_lights",
"name": {"name": "Decorative Lights"},
"traits": ["action.devices.traits.OnOff"],
"type": "action.devices.types.SWITCH",
"willReportState": False,
},
{
"id": "light.ceiling_lights",
"name": {
"name": "Roof Lights",
"nicknames": ["Roof Lights", "top lights", "ceiling lights"],
},
"traits": [
"action.devices.traits.OnOff",
"action.devices.traits.Brightness",
"action.devices.traits.ColorSetting",
],
"type": "action.devices.types.LIGHT",
"willReportState": False,
},
{
"id": "light.bed_light",
"name": {"name": "Bed Light"},
"traits": [
"action.devices.traits.OnOff",
"action.devices.traits.Brightness",
"action.devices.traits.ColorSetting",
"action.devices.traits.Modes",
],
"type": "action.devices.types.LIGHT",
"willReportState": False,
},
{
"id": "cover.living_room_window",
"name": {"name": "Living Room Window"},
"traits": ["action.devices.traits.OpenClose"],
"type": "action.devices.types.BLINDS",
"willReportState": False,
},
{
"id": "cover.hall_window",
"name": {"name": "Hall Window"},
"traits": ["action.devices.traits.OpenClose"],
"type": "action.devices.types.BLINDS",
"willReportState": False,
},
{
"id": "cover.garage_door",
"name": {"name": "Garage Door"},
"traits": ["action.devices.traits.OpenClose"],
"type": "action.devices.types.GARAGE",
"willReportState": False,
},
{
"id": "cover.kitchen_window",
"name": {"name": "Kitchen Window"},
"traits": ["action.devices.traits.OpenClose"],
"type": "action.devices.types.BLINDS",
"willReportState": False,
},
{
"id": "media_player.bedroom",
"name": {"name": "Bedroom"},
"traits": [
"action.devices.traits.OnOff",
"action.devices.traits.Volume",
"action.devices.traits.Modes",
"action.devices.traits.TransportControl",
"action.devices.traits.MediaState",
],
"type": "action.devices.types.SETTOP",
"willReportState": False,
},
{
"id": "media_player.living_room",
"name": {"name": "Living Room"},
"traits": [
"action.devices.traits.OnOff",
"action.devices.traits.Volume",
"action.devices.traits.Modes",
"action.devices.traits.TransportControl",
"action.devices.traits.MediaState",
],
"type": "action.devices.types.SETTOP",
"willReportState": False,
},
{
"id": "media_player.lounge_room",
"name": {"name": "Lounge room"},
"traits": [
"action.devices.traits.InputSelector",
"action.devices.traits.OnOff",
"action.devices.traits.Modes",
"action.devices.traits.TransportControl",
"action.devices.traits.MediaState",
],
"type": "action.devices.types.SETTOP",
"willReportState": False,
},
{
"id": "media_player.walkman",
"name": {"name": "Walkman"},
"traits": [
"action.devices.traits.OnOff",
"action.devices.traits.Volume",
"action.devices.traits.Modes",
"action.devices.traits.TransportControl",
"action.devices.traits.MediaState",
],
"type": "action.devices.types.SETTOP",
"willReportState": False,
},
{
"id": "fan.living_room_fan",
"name": {"name": "Living Room Fan"},
"traits": ["action.devices.traits.FanSpeed", "action.devices.traits.OnOff"],
"type": "action.devices.types.FAN",
"willReportState": False,
},
{
"id": "fan.ceiling_fan",
"name": {"name": "Ceiling Fan"},
"traits": ["action.devices.traits.FanSpeed", "action.devices.traits.OnOff"],
"type": "action.devices.types.FAN",
"willReportState": False,
},
{
"id": "climate.hvac",
"name": {"name": "Hvac"},
"traits": [
"action.devices.traits.TemperatureSetting",
"action.devices.traits.FanSpeed",
],
"type": "action.devices.types.THERMOSTAT",
"willReportState": False,
"attributes": {
"availableThermostatModes": "off,heat,cool,heatcool,auto,dry,fan-only",
"thermostatTemperatureUnit": "C",
},
},
{
"id": "climate.heatpump",
"name": {"name": "HeatPump"},
"traits": ["action.devices.traits.TemperatureSetting"],
"type": "action.devices.types.THERMOSTAT",
"willReportState": False,
},
{
"id": "climate.ecobee",
"name": {"name": "Ecobee"},
"traits": [
"action.devices.traits.TemperatureSetting",
"action.devices.traits.FanSpeed",
],
"type": "action.devices.types.THERMOSTAT",
"willReportState": False,
},
{
"id": "humidifier.humidifier",
"name": {"name": "Humidifier"},
"traits": [
"action.devices.traits.HumiditySetting",
"action.devices.traits.OnOff",
],
"type": "action.devices.types.HUMIDIFIER",
"willReportState": False,
"attributes": {"humiditySetpointRange": {"minPercent": 0, "maxPercent": 100}},
},
{
"id": "humidifier.dehumidifier",
"name": {"name": "Dehumidifier"},
"traits": [
"action.devices.traits.HumiditySetting",
"action.devices.traits.OnOff",
],
"type": "action.devices.types.DEHUMIDIFIER",
"willReportState": False,
"attributes": {"humiditySetpointRange": {"minPercent": 0, "maxPercent": 100}},
},
{
"id": "humidifier.hygrostat",
"name": {"name": "Hygrostat"},
"traits": [
"action.devices.traits.HumiditySetting",
"action.devices.traits.Modes",
"action.devices.traits.OnOff",
],
"type": "action.devices.types.HUMIDIFIER",
"willReportState": False,
"attributes": {"humiditySetpointRange": {"minPercent": 0, "maxPercent": 100}},
},
{
"id": "lock.front_door",
"name": {"name": "Front Door"},
"traits": ["action.devices.traits.LockUnlock"],
"type": "action.devices.types.LOCK",
"willReportState": False,
},
{
"id": "lock.kitchen_door",
"name": {"name": "Kitchen Door"},
"traits": ["action.devices.traits.LockUnlock"],
"type": "action.devices.types.LOCK",
"willReportState": False,
},
{
"id": "lock.openable_lock",
"name": {"name": "Openable Lock"},
"traits": ["action.devices.traits.LockUnlock"],
"type": "action.devices.types.LOCK",
"willReportState": False,
},
{
"id": "alarm_control_panel.alarm",
"name": {"name": "Alarm"},
"traits": ["action.devices.traits.ArmDisarm"],
"type": "action.devices.types.SECURITYSYSTEM",
"willReportState": False,
},
]
|
from Adafruit_BBIO import GPIO # pylint: disable=import-error
from homeassistant.const import EVENT_HOMEASSISTANT_START, EVENT_HOMEASSISTANT_STOP
DOMAIN = "bbb_gpio"
def setup(hass, config):
"""Set up the BeagleBone Black GPIO component."""
# pylint: disable=import-error
def cleanup_gpio(event):
"""Stuff to do before stopping."""
GPIO.cleanup()
def prepare_gpio(event):
"""Stuff to do when Home Assistant starts."""
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, cleanup_gpio)
hass.bus.listen_once(EVENT_HOMEASSISTANT_START, prepare_gpio)
return True
def setup_output(pin):
"""Set up a GPIO as output."""
GPIO.setup(pin, GPIO.OUT)
def setup_input(pin, pull_mode):
"""Set up a GPIO as input."""
GPIO.setup(pin, GPIO.IN, GPIO.PUD_DOWN if pull_mode == "DOWN" else GPIO.PUD_UP)
def write_output(pin, value):
"""Write a value to a GPIO."""
GPIO.output(pin, value)
def read_input(pin):
"""Read a value from a GPIO."""
return GPIO.input(pin) is GPIO.HIGH
def edge_detect(pin, event_callback, bounce):
"""Add detection for RISING and FALLING events."""
GPIO.add_event_detect(pin, GPIO.BOTH, callback=event_callback, bouncetime=bounce)
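# Illustrative sketch (not part of the original component): how a platform might
# combine these helpers. The pin name, pull mode, and debounce value are assumptions.
def _example_monitor_pin(pin="P8_12"):
    """Configure a pin as a pulled-up input and react to both edges."""
    def _on_edge(changed_pin):
        # Adafruit_BBIO passes the changed pin to the callback.
        print(changed_pin, read_input(changed_pin))
    setup_input(pin, "UP")
    edge_detect(pin, _on_edge, 50)  # 50 ms debounce window (assumption)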
|
import atexit
import os
import pytest
import sys
from kombu.exceptions import VersionMismatch
@pytest.fixture(scope='session')
def multiprocessing_workaround(request):
yield
    # Workaround for a multiprocessing bug where logging
    # is attempted after globals have already been collected at shutdown.
canceled = set()
try:
import multiprocessing.util
canceled.add(multiprocessing.util._exit_function)
except (AttributeError, ImportError):
pass
try:
atexit._exithandlers[:] = [
e for e in atexit._exithandlers if e[0] not in canceled
]
except AttributeError: # pragma: no cover
pass # Py3 missing _exithandlers
@pytest.fixture(autouse=True)
def zzz_reset_memory_transport_state():
yield
from kombu.transport import memory
memory.Transport.state.clear()
@pytest.fixture(autouse=True)
def test_cases_has_patching(request, patching):
if request.instance:
request.instance.patching = patching
@pytest.fixture
def hub(request):
from kombu.asynchronous import Hub, get_event_loop, set_event_loop
_prev_hub = get_event_loop()
hub = Hub()
set_event_loop(hub)
yield hub
if _prev_hub is not None:
set_event_loop(_prev_hub)
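# Illustrative sketch (not part of the original conftest): a test relying on the
# ``hub`` fixture above. The leading underscore keeps pytest from collecting it.
def _example_hub_is_active_event_loop(hub):
    from kombu.asynchronous import get_event_loop
    # While the fixture is active, the freshly created Hub is the global event loop.
    assert get_event_loop() is hub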
def find_distribution_modules(name=__name__, file=__file__):
current_dist_depth = len(name.split('.')) - 1
current_dist = os.path.join(os.path.dirname(file),
*([os.pardir] * current_dist_depth))
abs = os.path.abspath(current_dist)
dist_name = os.path.basename(abs)
for dirpath, dirnames, filenames in os.walk(abs):
package = (dist_name + dirpath[len(abs):]).replace('/', '.')
if '__init__.py' in filenames:
yield package
for filename in filenames:
if filename.endswith('.py') and filename != '__init__.py':
yield '.'.join([package, filename])[:-3]
def import_all_modules(name=__name__, file=__file__, skip=[]):
for module in find_distribution_modules(name, file):
if module not in skip:
print(f'preimporting {module!r} for coverage...')
try:
__import__(module)
except (ImportError, VersionMismatch, AttributeError):
pass
def is_in_coverage():
return (os.environ.get('COVER_ALL_MODULES') or
any('--cov' in arg for arg in sys.argv))
@pytest.fixture(scope='session')
def cover_all_modules():
# so coverage sees all our modules.
if is_in_coverage():
import_all_modules()
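# Illustrative usage note (assumption, not part of the original conftest):
# running e.g. ``COVER_ALL_MODULES=1 pytest --cov=kombu`` makes is_in_coverage()
# return True, so cover_all_modules() pre-imports every module for the coverage report.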
|
import os
import pytest
from molecule.command.init import role
@pytest.fixture
def _command_args():
return {
'dependency_name': 'galaxy',
'driver_name': 'docker',
'lint_name': 'ansible-lint',
'provisioner_name': 'ansible',
'role_name': 'test-role',
'scenario_name': 'default',
'subcommand': __name__,
'verifier_name': 'testinfra'
}
@pytest.fixture
def _instance(_command_args):
return role.Role(_command_args)
@pytest.fixture
def _resources_folder_path():
resources_folder_path = os.path.join(
os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
'resources')
return resources_folder_path
@pytest.fixture
def custom_template_dir(_resources_folder_path):
custom_template_dir_path = os.path.join(_resources_folder_path,
'custom_role_template')
return custom_template_dir_path
@pytest.fixture
def invalid_template_dir(_resources_folder_path):
invalid_role_template_path = os.path.join(_resources_folder_path,
'invalid_role_template')
return invalid_role_template_path
@pytest.fixture
def custom_readme_content(custom_template_dir):
readme_path = os.path.join(custom_template_dir,
'{{cookiecutter.role_name}}', 'README.md')
custom_readme_content = ""
with open(readme_path, 'r') as readme:
custom_readme_content = readme.read()
return custom_readme_content
def test_execute(temp_dir, _instance, patched_logger_info,
patched_logger_success):
_instance.execute()
msg = 'Initializing new role test-role...'
patched_logger_info.assert_called_once_with(msg)
assert os.path.isdir('./test-role')
assert os.path.isdir('./test-role/molecule/default')
assert os.path.isdir('./test-role/molecule/default/tests')
role_directory = os.path.join(temp_dir.strpath, 'test-role')
msg = 'Initialized role in {} successfully.'.format(role_directory)
patched_logger_success.assert_called_once_with(msg)
def test_execute_role_exists(temp_dir, _instance, patched_logger_critical):
_instance.execute()
with pytest.raises(SystemExit) as e:
_instance.execute()
assert 1 == e.value.code
msg = 'The directory test-role exists. Cannot create new role.'
patched_logger_critical.assert_called_once_with(msg)
def test_execute_with_custom_template(temp_dir, custom_template_dir,
custom_readme_content, _command_args):
_command_args['template'] = custom_template_dir
custom_template_instance = role.Role(_command_args)
custom_template_instance.execute()
readme_path = './test-role/README.md'
assert os.path.isfile(readme_path)
with open(readme_path, 'r') as readme:
assert readme.read() == custom_readme_content
assert os.path.isdir('./test-role/molecule/default')
assert os.path.isdir('./test-role/molecule/default/tests')
def test_execute_with_absent_template(temp_dir, _command_args,
patched_logger_critical):
incorrect_path = os.path.join("absent_template_dir")
_command_args['template'] = incorrect_path
absent_template_instance = role.Role(_command_args)
with pytest.raises(SystemExit) as e:
absent_template_instance.execute()
assert e.value.code == 1
patched_logger_critical.assert_called_once()
def test_execute_with_incorrect_template(temp_dir, invalid_template_dir,
_command_args,
patched_logger_critical):
_command_args['template'] = invalid_template_dir
invalid_template_instance = role.Role(_command_args)
with pytest.raises(SystemExit) as e:
invalid_template_instance.execute()
assert e.value.code == 1
patched_logger_critical.assert_called_once()
|
import pytest
from homeassistant import data_entry_flow
from homeassistant.components.velbus import config_flow
from homeassistant.const import CONF_NAME, CONF_PORT
from tests.async_mock import Mock, patch
from tests.common import MockConfigEntry
PORT_SERIAL = "/dev/ttyACME100"
PORT_TCP = "127.0.1.0.1:3788"
@pytest.fixture(name="controller_assert")
def mock_controller_assert():
"""Mock the velbus controller with an assert."""
with patch("velbus.Controller", side_effect=Exception()):
yield
@pytest.fixture(name="controller")
def mock_controller():
"""Mock a successful velbus controller."""
controller = Mock()
with patch("velbus.Controller", return_value=controller):
yield controller
def init_config_flow(hass):
"""Init a configuration flow."""
flow = config_flow.VelbusConfigFlow()
flow.hass = hass
return flow
async def test_user(hass, controller):
"""Test user config."""
flow = init_config_flow(hass)
result = await flow.async_step_user()
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["step_id"] == "user"
result = await flow.async_step_user(
{CONF_NAME: "Velbus Test Serial", CONF_PORT: PORT_SERIAL}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "velbus_test_serial"
assert result["data"][CONF_PORT] == PORT_SERIAL
result = await flow.async_step_user(
{CONF_NAME: "Velbus Test TCP", CONF_PORT: PORT_TCP}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "velbus_test_tcp"
assert result["data"][CONF_PORT] == PORT_TCP
async def test_user_fail(hass, controller_assert):
"""Test user config."""
flow = init_config_flow(hass)
result = await flow.async_step_user(
{CONF_NAME: "Velbus Test Serial", CONF_PORT: PORT_SERIAL}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_PORT: "cannot_connect"}
result = await flow.async_step_user(
{CONF_NAME: "Velbus Test TCP", CONF_PORT: PORT_TCP}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {CONF_PORT: "cannot_connect"}
async def test_import(hass, controller):
"""Test import step."""
flow = init_config_flow(hass)
result = await flow.async_step_import({CONF_PORT: PORT_TCP})
assert result["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
assert result["title"] == "velbus_import"
async def test_abort_if_already_setup(hass):
"""Test we abort if Daikin is already setup."""
flow = init_config_flow(hass)
MockConfigEntry(
domain="velbus", data={CONF_PORT: PORT_TCP, CONF_NAME: "velbus home"}
).add_to_hass(hass)
result = await flow.async_step_import(
{CONF_PORT: PORT_TCP, CONF_NAME: "velbus import test"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_ABORT
assert result["reason"] == "already_configured"
result = await flow.async_step_user(
{CONF_PORT: PORT_TCP, CONF_NAME: "velbus import test"}
)
assert result["type"] == data_entry_flow.RESULT_TYPE_FORM
assert result["errors"] == {"port": "already_configured"}
|
from array import array
from itertools import chain
import logging
from math import sqrt
import numpy as np
from scipy import sparse
from gensim.matutils import corpus2csc
from gensim.utils import SaveLoad, is_corpus
logger = logging.getLogger(__name__)
NON_NEGATIVE_NORM_ASSERTION_MESSAGE = (
u"sparse documents must not contain any explicit "
u"zero entries and the similarity matrix S must satisfy x^T * S * x >= 0 for any "
u"nonzero bag-of-words vector x."
)
class TermSimilarityIndex(SaveLoad):
"""
    Base class (common interface) for retrieving the most similar terms for a given term.
See Also
--------
:class:`~gensim.similarities.termsim.SparseTermSimilarityMatrix`
Build a term similarity matrix and compute the Soft Cosine Measure.
"""
def most_similar(self, term, topn=10):
"""Get most similar terms for a given term.
Return the most similar terms for a given term along with their similarities.
Parameters
----------
term : str
The term for which we are retrieving `topn` most similar terms.
topn : int, optional
The maximum number of most similar terms to `term` that will be retrieved.
Returns
-------
iterable of (str, float)
Most similar terms along with their similarities to `term`. Only terms distinct from
`term` must be returned.
"""
raise NotImplementedError
def __str__(self):
members = ', '.join('%s=%s' % pair for pair in vars(self).items())
return '%s(%s)' % (self.__class__.__name__, members)
class UniformTermSimilarityIndex(TermSimilarityIndex):
"""
Retrieves most similar terms for a given term under the hypothesis that the similarities between
distinct terms are uniform.
Parameters
----------
dictionary : :class:`~gensim.corpora.dictionary.Dictionary`
A dictionary that specifies the considered terms.
term_similarity : float, optional
The uniform similarity between distinct terms.
See Also
--------
:class:`~gensim.similarities.termsim.SparseTermSimilarityMatrix`
Build a term similarity matrix and compute the Soft Cosine Measure.
Notes
-----
This class is mainly intended for testing SparseTermSimilarityMatrix and other classes that
depend on the TermSimilarityIndex.
"""
def __init__(self, dictionary, term_similarity=0.5):
self.dictionary = sorted(dictionary.items())
self.term_similarity = term_similarity
def most_similar(self, t1, topn=10):
for __, (t2_index, t2) in zip(range(topn), (
(t2_index, t2) for t2_index, t2 in self.dictionary if t2 != t1)):
yield (t2, self.term_similarity)
class WordEmbeddingSimilarityIndex(TermSimilarityIndex):
"""
Use objects of this class to:
1) Compute cosine similarities between word embeddings.
2) Retrieve the closest word embeddings (by cosine similarity) to a given word embedding.
Parameters
----------
keyedvectors : :class:`~gensim.models.keyedvectors.KeyedVectors`
The word embeddings.
threshold : float, optional
Only embeddings more similar than `threshold` are considered when retrieving word embeddings
closest to a given word embedding.
exponent : float, optional
Take the word embedding similarities larger than `threshold` to the power of `exponent`.
kwargs : dict or None
A dict with keyword arguments that will be passed to the `keyedvectors.most_similar` method
when retrieving the word embeddings closest to a given word embedding.
See Also
--------
:class:`~gensim.similarities.termsim.SparseTermSimilarityMatrix`
Build a term similarity matrix and compute the Soft Cosine Measure.
"""
def __init__(self, keyedvectors, threshold=0.0, exponent=2.0, kwargs=None):
self.keyedvectors = keyedvectors
self.threshold = threshold
self.exponent = exponent
self.kwargs = kwargs or {}
super(WordEmbeddingSimilarityIndex, self).__init__()
def most_similar(self, t1, topn=10):
if t1 not in self.keyedvectors:
logger.debug('an out-of-dictionary term "%s"', t1)
else:
most_similar = self.keyedvectors.most_similar(positive=[t1], topn=topn, **self.kwargs)
for t2, similarity in most_similar:
if similarity > self.threshold:
yield (t2, similarity**self.exponent)
def _shortest_uint_dtype(max_value):
"""Get the shortest unsingned integer data-type required for representing values up to a given
maximum value.
Returns the shortest unsingned integer data-type required for representing values up to a given
maximum value.
Parameters
----------
max_value : int
The maximum value we wish to represent.
Returns
-------
data-type
The shortest unsigned integer data-type required for representing values up to a given
maximum value.
"""
if max_value < 2**8:
return np.uint8
elif max_value < 2**16:
return np.uint16
elif max_value < 2**32:
return np.uint32
return np.uint64
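# Illustrative sketch (not part of the original module): the dtype choices the
# helper above is expected to make for a few representative maximum values.
def _example_shortest_uint_dtype():
    assert _shortest_uint_dtype(255) is np.uint8      # fits in 8 bits
    assert _shortest_uint_dtype(256) is np.uint16     # needs 16 bits
    assert _shortest_uint_dtype(2**20) is np.uint32   # needs 32 bits
    assert _shortest_uint_dtype(2**40) is np.uint64   # anything larger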
def _create_source(index, dictionary, tfidf, symmetric, dominant, nonzero_limit, dtype):
"""Build a sparse term similarity matrix using a term similarity index.
Returns
-------
matrix : :class:`scipy.sparse.coo_matrix`
The sparse term similarity matrix.
"""
assert isinstance(index, TermSimilarityIndex)
assert dictionary is not None
matrix_order = len(dictionary)
if matrix_order == 0:
raise ValueError('Dictionary provided to SparseTermSimilarityMatrix must not be empty')
logger.info("constructing a sparse term similarity matrix using %s", index)
if nonzero_limit is None:
nonzero_limit = matrix_order
def tfidf_sort_key(term_index):
if isinstance(term_index, tuple):
term_index, *_ = term_index
term_idf = tfidf.idfs[term_index]
return (-term_idf, term_index)
if tfidf is None:
logger.info("iterating over columns in dictionary order")
columns = sorted(dictionary.keys())
else:
assert max(tfidf.idfs) == matrix_order - 1
logger.info("iterating over columns in tf-idf order")
columns = sorted(tfidf.idfs.keys(), key=tfidf_sort_key)
nonzero_counter_dtype = _shortest_uint_dtype(nonzero_limit)
column_nonzero = np.array([0] * matrix_order, dtype=nonzero_counter_dtype)
if dominant:
column_sum = np.zeros(matrix_order, dtype=dtype)
if symmetric:
assigned_cells = set()
row_buffer = array('Q')
column_buffer = array('Q')
if dtype is np.float16 or dtype is np.float32:
data_buffer = array('f')
elif dtype is np.float64:
data_buffer = array('d')
else:
raise ValueError('Dtype %s is unsupported, use numpy.float16, float32, or float64.' % dtype)
def cell_full(t1_index, t2_index, similarity):
if dominant and column_sum[t1_index] + abs(similarity) >= 1.0:
return True # after adding the similarity, the matrix would cease to be strongly diagonally dominant
assert column_nonzero[t1_index] <= nonzero_limit
if column_nonzero[t1_index] == nonzero_limit:
return True # after adding the similarity, the column would contain more than nonzero_limit elements
if symmetric and (t1_index, t2_index) in assigned_cells:
return True # a similarity has already been assigned to this cell
return False
def populate_buffers(t1_index, t2_index, similarity):
column_buffer.append(t1_index)
row_buffer.append(t2_index)
data_buffer.append(similarity)
column_nonzero[t1_index] += 1
if symmetric:
assigned_cells.add((t1_index, t2_index))
if dominant:
column_sum[t1_index] += abs(similarity)
try:
from tqdm import tqdm as progress_bar
except ImportError:
def progress_bar(iterable):
return iterable
for column_number, t1_index in enumerate(progress_bar(columns)):
column_buffer.append(column_number)
row_buffer.append(column_number)
data_buffer.append(1.0)
if nonzero_limit <= 0:
continue
t1 = dictionary[t1_index]
num_nonzero = column_nonzero[t1_index]
num_rows = nonzero_limit - num_nonzero
most_similar = [
(dictionary.token2id[term], similarity)
for term, similarity in index.most_similar(t1, topn=num_rows)
if term in dictionary.token2id
] if num_rows > 0 else []
if tfidf is None:
rows = sorted(most_similar)
else:
rows = sorted(most_similar, key=tfidf_sort_key)
for t2_index, similarity in rows:
if cell_full(t1_index, t2_index, similarity):
continue
if not symmetric:
populate_buffers(t1_index, t2_index, similarity)
elif not cell_full(t2_index, t1_index, similarity):
populate_buffers(t1_index, t2_index, similarity)
populate_buffers(t2_index, t1_index, similarity)
data_buffer = np.frombuffer(data_buffer, dtype=dtype)
row_buffer = np.frombuffer(row_buffer, dtype=np.uint64)
column_buffer = np.frombuffer(column_buffer, dtype=np.uint64)
matrix = sparse.coo_matrix((data_buffer, (row_buffer, column_buffer)), shape=(matrix_order, matrix_order))
logger.info(
"constructed a sparse term similarity matrix with %0.06f%% density",
100.0 * matrix.getnnz() / matrix_order**2,
)
return matrix
def _normalize_dense_vector(vector, matrix, normalization):
"""Normalize a dense vector after a change of basis.
Parameters
----------
vector : 1xN ndarray
A dense vector.
matrix : NxN ndarray
A change-of-basis matrix.
normalization : {True, False, 'maintain'}
Whether the vector will be L2-normalized (True; corresponds to the soft
cosine measure), maintain its L2-norm during the change of basis
('maintain'; corresponds to query expansion with partial membership),
or kept as-is (False; corresponds to query expansion).
Returns
-------
vector : ndarray
The normalized dense vector.
"""
if not normalization:
return vector
vector_norm = vector.T.dot(matrix).dot(vector)[0, 0]
assert vector_norm >= 0.0, NON_NEGATIVE_NORM_ASSERTION_MESSAGE
if normalization == 'maintain' and vector_norm > 0.0:
vector_norm /= vector.T.dot(vector)
vector_norm = sqrt(vector_norm)
normalized_vector = vector
if vector_norm > 0.0:
normalized_vector /= vector_norm
return normalized_vector
def _normalize_dense_corpus(corpus, matrix, normalization):
"""Normalize a dense corpus after a change of basis.
Parameters
----------
corpus : MxN ndarray
A dense corpus.
matrix : NxN ndarray
A change-of-basis matrix.
normalization : {True, False, 'maintain'}
Whether the vector will be L2-normalized (True; corresponds to the soft
cosine measure), maintain its L2-norm during the change of basis
('maintain'; corresponds to query expansion with partial membership),
or kept as-is (False; corresponds to query expansion).
Returns
-------
normalized_corpus : ndarray
The normalized dense corpus.
"""
if not normalization:
return corpus
# use the following equality: np.diag(A.T.dot(B).dot(A)) == A.T.dot(B).multiply(A.T).sum(axis=1).T
corpus_norm = np.multiply(corpus.T.dot(matrix), corpus.T).sum(axis=1).T
assert corpus_norm.min() >= 0.0, NON_NEGATIVE_NORM_ASSERTION_MESSAGE
if normalization == 'maintain':
corpus_norm /= np.multiply(corpus.T, corpus.T).sum(axis=1).T
corpus_norm = np.sqrt(corpus_norm)
normalized_corpus = np.multiply(corpus, 1.0 / corpus_norm)
normalized_corpus = np.nan_to_num(normalized_corpus) # account for division by zero
return normalized_corpus
def _normalize_sparse_corpus(corpus, matrix, normalization):
"""Normalize a sparse corpus after a change of basis.
Parameters
----------
corpus : MxN :class:`scipy.sparse.csc_matrix`
A sparse corpus.
matrix : NxN :class:`scipy.sparse.csc_matrix`
A change-of-basis matrix.
normalization : {True, False, 'maintain'}
Whether the vector will be L2-normalized (True; corresponds to the soft
cosine measure), maintain its L2-norm during the change of basis
('maintain'; corresponds to query expansion with partial membership),
or kept as-is (False; corresponds to query expansion).
Returns
-------
normalized_corpus : :class:`scipy.sparse.csc_matrix`
The normalized sparse corpus.
"""
if not normalization:
return corpus
# use the following equality: np.diag(A.T.dot(B).dot(A)) == A.T.dot(B).multiply(A.T).sum(axis=1).T
corpus_norm = corpus.T.dot(matrix).multiply(corpus.T).sum(axis=1).T
assert corpus_norm.min() >= 0.0, NON_NEGATIVE_NORM_ASSERTION_MESSAGE
if normalization == 'maintain':
corpus_norm /= corpus.T.multiply(corpus.T).sum(axis=1).T
corpus_norm = np.sqrt(corpus_norm)
normalized_corpus = corpus.multiply(sparse.csr_matrix(1.0 / corpus_norm))
normalized_corpus[normalized_corpus == np.inf] = 0 # account for division by zero
return normalized_corpus
class SparseTermSimilarityMatrix(SaveLoad):
"""
Builds a sparse term similarity matrix using a term similarity index.
Examples
--------
>>> from gensim.test.utils import common_texts
>>> from gensim.corpora import Dictionary
>>> from gensim.models import Word2Vec, WordEmbeddingSimilarityIndex
>>> from gensim.similarities import SoftCosineSimilarity, SparseTermSimilarityMatrix
>>> from gensim.similarities.index import AnnoyIndexer
>>> from scikits.sparse.cholmod import cholesky
>>>
>>> model = Word2Vec(common_texts, vector_size=20, min_count=1) # train word-vectors
>>> annoy = AnnoyIndexer(model, num_trees=2) # use annoy for faster word similarity lookups
>>> termsim_index = WordEmbeddingSimilarityIndex(model.wv, kwargs={'indexer': annoy})
>>> dictionary = Dictionary(common_texts)
>>> bow_corpus = [dictionary.doc2bow(document) for document in common_texts]
>>> similarity_matrix = SparseTermSimilarityMatrix(termsim_index, dictionary, symmetric=True, dominant=True)
>>> docsim_index = SoftCosineSimilarity(bow_corpus, similarity_matrix, num_best=10)
>>>
>>> query = 'graph trees computer'.split() # make a query
>>> sims = docsim_index[dictionary.doc2bow(query)] # calculate similarity of query to each doc from bow_corpus
>>>
>>> word_embeddings = cholesky(similarity_matrix.matrix).L() # obtain word embeddings from similarity matrix
Check out `Tutorial Notebook
<https://github.com/RaRe-Technologies/gensim/blob/develop/docs/notebooks/soft_cosine_tutorial.ipynb>`_
for more examples.
Parameters
----------
source : :class:`~gensim.similarities.termsim.TermSimilarityIndex` or :class:`scipy.sparse.spmatrix`
The source of the term similarity. Either a term similarity index that will be used for
building the term similarity matrix, or an existing sparse term similarity matrix that will
be encapsulated and stored in the matrix attribute. When a matrix is specified as the
source, any other parameters will be ignored.
dictionary : :class:`~gensim.corpora.dictionary.Dictionary` or None, optional
A dictionary that specifies a mapping between terms and the indices of rows and columns
of the resulting term similarity matrix. The dictionary may only be None when source is
a :class:`scipy.sparse.spmatrix`.
tfidf : :class:`gensim.models.tfidfmodel.TfidfModel` or None, optional
A model that specifies the relative importance of the terms in the dictionary. The columns
        of the term similarity matrix will be built in decreasing order of importance of
terms, or in the order of term identifiers if None.
symmetric : bool, optional
Whether the symmetry of the term similarity matrix will be enforced. Symmetry is a necessary
precondition for positive definiteness, which is necessary if you later wish to derive a
unique change-of-basis matrix from the term similarity matrix using Cholesky factorization.
Setting symmetric to False will significantly reduce memory usage during matrix construction.
    dominant : bool, optional
Whether the strict column diagonal dominance of the term similarity matrix will be enforced.
Strict diagonal dominance and symmetry are sufficient preconditions for positive
definiteness, which is necessary if you later wish to derive a change-of-basis matrix from
the term similarity matrix using Cholesky factorization.
nonzero_limit : int or None, optional
The maximum number of non-zero elements outside the diagonal in a single column of the
sparse term similarity matrix. If None, then no limit will be imposed.
dtype : numpy.dtype, optional
The data type of the sparse term similarity matrix.
Attributes
----------
matrix : :class:`scipy.sparse.csc_matrix`
The encapsulated sparse term similarity matrix.
Raises
------
ValueError
If `dictionary` is empty.
"""
def __init__(self, source, dictionary=None, tfidf=None, symmetric=True, dominant=False,
nonzero_limit=100, dtype=np.float32):
if not sparse.issparse(source):
index = source
args = (index, dictionary, tfidf, symmetric, dominant, nonzero_limit, dtype)
source = _create_source(*args)
assert sparse.issparse(source)
self.matrix = source.tocsc()
def inner_product(self, X, Y, normalized=(False, False)):
"""Get the inner product(s) between real vectors / corpora X and Y.
        Return the inner product(s) between real vectors / corpora X and Y expressed in a
non-orthogonal normalized basis, where the dot product between the basis vectors is given by
the sparse term similarity matrix.
Parameters
----------
        X : list of (int, float) or iterable of list of (int, float)
            A query vector / corpus in the sparse bag-of-words format.
        Y : list of (int, float) or iterable of list of (int, float)
            A document vector / corpus in the sparse bag-of-words format.
normalized : tuple of {True, False, 'maintain'}, optional
First/second value specifies whether the query/document vectors in the inner product
will be L2-normalized (True; corresponds to the soft cosine measure), maintain their
L2-norm during change of basis ('maintain'; corresponds to query expansion with partial
membership), or kept as-is (False; corresponds to query expansion; default).
Returns
-------
`self.matrix.dtype`, `scipy.sparse.csr_matrix`, or :class:`numpy.matrix`
The inner product(s) between `X` and `Y`.
References
----------
The soft cosine measure was perhaps first described by [sidorovetal14]_.
Further notes on the efficient implementation of the soft cosine measure are described by
[novotny18]_.
.. [sidorovetal14] Grigori Sidorov et al., "Soft Similarity and Soft Cosine Measure: Similarity
of Features in Vector Space Model", 2014, http://www.cys.cic.ipn.mx/ojs/index.php/CyS/article/view/2043/1921.
.. [novotny18] Vít Novotný, "Implementation Notes for the Soft Cosine Measure", 2018,
http://dx.doi.org/10.1145/3269206.3269317.
"""
if not X or not Y:
return self.matrix.dtype.type(0.0)
normalized_X, normalized_Y = normalized
valid_normalized_values = (True, False, 'maintain')
        if normalized_X not in valid_normalized_values:
            raise ValueError('{} is not a valid value of the `normalized` parameter'.format(normalized_X))
        if normalized_Y not in valid_normalized_values:
            raise ValueError('{} is not a valid value of the `normalized` parameter'.format(normalized_Y))
is_corpus_X, X = is_corpus(X)
is_corpus_Y, Y = is_corpus(Y)
if not is_corpus_X and not is_corpus_Y:
X = dict(X)
Y = dict(Y)
word_indices = np.array(sorted(set(chain(X, Y))))
dtype = self.matrix.dtype
X = np.array([X[i] if i in X else 0 for i in word_indices], dtype=dtype)
Y = np.array([Y[i] if i in Y else 0 for i in word_indices], dtype=dtype)
matrix = self.matrix[word_indices[:, None], word_indices].todense()
X = _normalize_dense_vector(X, matrix, normalized_X)
Y = _normalize_dense_vector(Y, matrix, normalized_Y)
result = X.T.dot(matrix).dot(Y)
if normalized_X is True and normalized_Y is True:
result = np.clip(result, -1.0, 1.0)
return result[0, 0]
elif not is_corpus_X or not is_corpus_Y:
if is_corpus_X and not is_corpus_Y:
X, Y = Y, X # make Y the corpus
is_corpus_X, is_corpus_Y = is_corpus_Y, is_corpus_X
normalized_X, normalized_Y = normalized_Y, normalized_X
transposed = True
else:
transposed = False
dtype = self.matrix.dtype
expanded_X = corpus2csc([X], num_terms=self.matrix.shape[0], dtype=dtype).T.dot(self.matrix)
word_indices = np.array(sorted(expanded_X.nonzero()[1]))
del expanded_X
X = dict(X)
X = np.array([X[i] if i in X else 0 for i in word_indices], dtype=dtype)
Y = corpus2csc(Y, num_terms=self.matrix.shape[0], dtype=dtype)[word_indices, :].todense()
matrix = self.matrix[word_indices[:, None], word_indices].todense()
X = _normalize_dense_vector(X, matrix, normalized_X)
Y = _normalize_dense_corpus(Y, matrix, normalized_Y)
result = X.dot(matrix).dot(Y)
if normalized_X is True and normalized_Y is True:
result = np.clip(result, -1.0, 1.0)
if transposed:
result = result.T
return result
else: # if is_corpus_X and is_corpus_Y:
dtype = self.matrix.dtype
X = corpus2csc(X if is_corpus_X else [X], num_terms=self.matrix.shape[0], dtype=dtype)
Y = corpus2csc(Y if is_corpus_Y else [Y], num_terms=self.matrix.shape[0], dtype=dtype)
matrix = self.matrix
X = _normalize_sparse_corpus(X, matrix, normalized_X)
Y = _normalize_sparse_corpus(Y, matrix, normalized_Y)
result = X.T.dot(matrix).dot(Y)
if normalized_X is True and normalized_Y is True:
result.data = np.clip(result.data, -1.0, 1.0)
return result
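# Illustrative sketch (not part of the original module): computing the soft cosine
# measure between two bag-of-words vectors with an already built matrix. The
# ``similarity_matrix`` and ``dictionary`` arguments are assumptions supplied by the caller.
def _example_soft_cosine(similarity_matrix, dictionary):
    query = dictionary.doc2bow("graph trees".split())
    document = dictionary.doc2bow("graph minors survey".split())
    # normalized=(True, True) L2-normalizes both sides, giving a value in [-1, 1].
    return similarity_matrix.inner_product(query, document, normalized=(True, True))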
|
from homeassistant.const import DATA_RATE_MEGABITS_PER_SECOND
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.restore_state import RestoreEntity
from . import DATA_UPDATED, DOMAIN as FASTDOTCOM_DOMAIN
ICON = "mdi:speedometer"
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the Fast.com sensor."""
async_add_entities([SpeedtestSensor(hass.data[FASTDOTCOM_DOMAIN])])
class SpeedtestSensor(RestoreEntity):
"""Implementation of a FAst.com sensor."""
def __init__(self, speedtest_data):
"""Initialize the sensor."""
self._name = "Fast.com Download"
self.speedtest_client = speedtest_data
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return DATA_RATE_MEGABITS_PER_SECOND
@property
def icon(self):
"""Return icon."""
return ICON
@property
def should_poll(self):
"""Return the polling requirement for this sensor."""
return False
async def async_added_to_hass(self):
"""Handle entity which will be added."""
await super().async_added_to_hass()
self.async_on_remove(
async_dispatcher_connect(
self.hass, DATA_UPDATED, self._schedule_immediate_update
)
)
state = await self.async_get_last_state()
if not state:
return
self._state = state.state
def update(self):
"""Get the latest data and update the states."""
data = self.speedtest_client.data
if data is None:
return
self._state = data["download"]
@callback
def _schedule_immediate_update(self):
self.async_schedule_update_ha_state(True)
|
import logging
import numpy as np
import pymongo
from pandas import DataFrame
from pandas.util.testing import assert_frame_equal
from ._config import FW_POINTERS_CONFIG_KEY, FwPointersCfg
logger = logging.getLogger(__name__)
NP_OBJECT_DTYPE = np.dtype('O')
# Avoid import-time extra logic
_use_new_count_api = None
def get_fwptr_config(version):
return FwPointersCfg[version.get(FW_POINTERS_CONFIG_KEY, FwPointersCfg.DISABLED.name)]
def _detect_new_count_api():
    try:
        mongo_v = [int(v) for v in pymongo.version.split('.')]
        # count_documents() arrived in pymongo 3.7; compare (major, minor) as a tuple
        # so that later major versions (e.g. 4.x) are detected correctly.
        return (mongo_v[0], mongo_v[1]) >= (3, 7)
    except Exception:
        return False
def indent(s, num_spaces):
s = s.split('\n')
s = [(num_spaces * ' ') + line for line in s]
s = '\n'.join(s)
return s
def are_equals(o1, o2, **kwargs):
try:
if isinstance(o1, DataFrame):
            assert_frame_equal(o1, o2, **kwargs)
return True
return o1 == o2
except Exception:
return False
def enable_sharding(arctic, library_name, hashed=True, key='symbol'):
"""
Enable sharding on a library
    Parameters
    ----------
arctic: `arctic.Arctic` Arctic class
library_name: `basestring` library name
hashed: `bool` if True, use hashed sharding, if False, use range sharding
See https://docs.mongodb.com/manual/core/hashed-sharding/,
as well as https://docs.mongodb.com/manual/core/ranged-sharding/ for details.
key: `basestring` key to be used for sharding. Defaults to 'symbol', applicable to
all of Arctic's built-in stores except for BSONStore, which typically uses '_id'.
See https://docs.mongodb.com/manual/core/sharding-shard-key/ for details.
"""
c = arctic._conn
lib = arctic[library_name]._arctic_lib
dbname = lib._db.name
library_name = lib.get_top_level_collection().name
try:
c.admin.command('enablesharding', dbname)
except pymongo.errors.OperationFailure as e:
if 'already enabled' not in str(e):
raise
if not hashed:
logger.info("Range sharding '" + key + "' on: " + dbname + '.' + library_name)
c.admin.command('shardCollection', dbname + '.' + library_name, key={key: 1})
else:
logger.info("Hash sharding '" + key + "' on: " + dbname + '.' + library_name)
c.admin.command('shardCollection', dbname + '.' + library_name, key={key: 'hashed'})
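# Illustrative sketch (not part of the original module): enabling hashed sharding
# on a versioned library. The Arctic connection and library name are assumptions
# supplied by the caller.
def _example_shard_library(arctic_store):
    enable_sharding(arctic_store, 'research.eod_prices', hashed=True, key='symbol')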
def mongo_count(collection, filter=None, **kwargs):
filter = {} if filter is None else filter
global _use_new_count_api
_use_new_count_api = _detect_new_count_api() if _use_new_count_api is None else _use_new_count_api
    # Temporary compatibility shim for pymongo>=3.7 that also avoids deprecation warnings
if _use_new_count_api:
# Projection is ignored for count_documents
return collection.count_documents(filter=filter, **kwargs)
else:
return collection.count(filter=filter, **kwargs)
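# Illustrative usage note (assumption, not part of the original module): callers pass
# a pymongo collection plus an optional filter, e.g.
#   mongo_count(library.get_top_level_collection(), filter={'symbol': 'EURUSD'})
# and the helper transparently picks count_documents() or the legacy count().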
|
from test import CollectorTestCase
from test import get_collector_config
from test import unittest
from passenger_stats import PassengerCollector
##########################################################################
class TestPassengerCollector(CollectorTestCase):
def setUp(self):
config = get_collector_config('PassengerCollector', {})
self.collector = PassengerCollector(config, None)
def test_import(self):
self.assertTrue(PassengerCollector)
##########################################################################
if __name__ == "__main__":
unittest.main()
|
from homeassistant.const import CONF_NAME, DEVICE_CLASS_BATTERY, PERCENTAGE
from homeassistant.helpers.entity import Entity
from . import CONF_SERIAL, LIGHTWAVE_LINK
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Find and return battery."""
if discovery_info is None:
return
batteries = []
lwlink = hass.data[LIGHTWAVE_LINK]
for device_config in discovery_info.values():
name = device_config[CONF_NAME]
serial = device_config[CONF_SERIAL]
batteries.append(LightwaveBattery(name, lwlink, serial))
async_add_entities(batteries)
class LightwaveBattery(Entity):
"""Lightwave TRV Battery."""
def __init__(self, name, lwlink, serial):
"""Initialize the Lightwave Trv battery sensor."""
self._name = name
self._state = None
self._lwlink = lwlink
self._serial = serial
@property
def device_class(self):
"""Return the device class of the sensor."""
return DEVICE_CLASS_BATTERY
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unit_of_measurement(self):
"""Return the state of the sensor."""
return PERCENTAGE
def update(self):
"""Communicate with a Lightwave RTF Proxy to get state."""
(dummy_temp, dummy_targ, battery, dummy_output) = self._lwlink.read_trv_status(
self._serial
)
self._state = battery
|