code (string, 22 to 1.05M chars) | apis (sequence, 1 to 3.31k items) | extract_api (string, 75 to 3.25M chars)
---|---|---|
from django.contrib import admin
from .models import SearchResult
# Register your models here.
class SearchResultAdmin(admin.ModelAdmin):
fields = ["query", "heading", "url", "text"]
admin.site.register(SearchResult, SearchResultAdmin) | [
"django.contrib.admin.site.register"
] | [((189, 241), 'django.contrib.admin.site.register', 'admin.site.register', (['SearchResult', 'SearchResultAdmin'], {}), '(SearchResult, SearchResultAdmin)\n', (208, 241), False, 'from django.contrib import admin\n')] |
import asyncio
import os
import tempfile
from contextlib import ExitStack
from typing import Text, Optional, List, Union, Dict
from rasa.importers.importer import TrainingDataImporter
from rasa import model
from rasa.model import FingerprintComparisonResult
from rasa.core.domain import Domain
from rasa.utils.common import TempDirectoryPath
from rasa.cli.utils import (
print_success,
print_warning,
print_error,
bcolors,
print_color,
)
from rasa.constants import DEFAULT_MODELS_PATH, DEFAULT_CORE_SUBDIRECTORY_NAME
def train(
domain: Text,
config: Text,
training_files: Union[Text, List[Text]],
output: Text = DEFAULT_MODELS_PATH,
force_training: bool = False,
fixed_model_name: Optional[Text] = None,
persist_nlu_training_data: bool = False,
additional_arguments: Optional[Dict] = None,
loop: Optional[asyncio.AbstractEventLoop] = None,
) -> Optional[Text]:
if loop is None:
try:
loop = asyncio.get_event_loop()
except RuntimeError:
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
return loop.run_until_complete(
train_async(
domain=domain,
config=config,
training_files=training_files,
output_path=output,
force_training=force_training,
fixed_model_name=fixed_model_name,
persist_nlu_training_data=persist_nlu_training_data,
additional_arguments=additional_arguments,
)
)
async def train_async(
domain: Union[Domain, Text],
config: Dict[Text, Text],
training_files: Optional[Union[Text, List[Text]]],
output_path: Text = DEFAULT_MODELS_PATH,
force_training: bool = False,
fixed_model_name: Optional[Text] = None,
persist_nlu_training_data: bool = False,
additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
"""Trains a Rasa model (Core and NLU).
Args:
domain: Path to the domain file.
config: Dict of paths to the config for Core and NLU. Keys are language codes
training_files: Paths to the training data for Core and NLU.
output_path: Output path.
force_training: If `True` retrain model even if data has not changed.
fixed_model_name: Name of model to be stored.
persist_nlu_training_data: `True` if the NLU training data should be persisted
with the model.
additional_arguments: Additional training parameters.
Returns:
Path of the trained model archive.
"""
# file_importer = TrainingDataImporter.load_from_config(
# config, domain, training_files
# )
with ExitStack() as stack:
train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))
# bf mod
from rasa_addons.importers import BotfrontFileImporter
file_importer = BotfrontFileImporter(config, domain, training_files)
# domain = await file_importer.get_domain()
# if domain.is_empty():
# return await handle_domain_if_not_exists(
# file_importer, output_path, fixed_model_name
# )
# /bf mod
return await _train_async_internal(
file_importer,
train_path,
output_path,
force_training,
fixed_model_name,
persist_nlu_training_data,
additional_arguments,
)
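# --- Added usage sketch (not part of the original module) ---
# A minimal illustration of driving the async training entry point above from
# synchronous code. The file paths and the per-language config mapping are
# hypothetical placeholders, and importing this module as `rasa.train` is an
# assumption based on where this function lives in Rasa 1.x.
#
#     import asyncio
#     from rasa.train import train_async
#
#     model_path = asyncio.get_event_loop().run_until_complete(
#         train_async(
#             domain="domain.yml",
#             config={"en": "config-en.yml", "fr": "config-fr.yml"},
#             training_files=["data/"],
#             output_path="models/",
#         )
#     )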
async def handle_domain_if_not_exists(
file_importer: TrainingDataImporter, output_path, fixed_model_name
):
nlu_model_only = await _train_nlu_with_validated_data(
file_importer, output=output_path, fixed_model_name=fixed_model_name
)
print_warning(
"Core training was skipped because no valid domain file was found. Only an nlu-model was created."
"Please specify a valid domain using '--domain' argument or check if the provided domain file exists."
)
return nlu_model_only
async def _train_async_internal(
file_importer: TrainingDataImporter,
train_path: Text,
output_path: Text,
force_training: bool,
fixed_model_name: Optional[Text],
persist_nlu_training_data: bool,
additional_arguments: Optional[Dict],
) -> Optional[Text]:
"""Trains a Rasa model (Core and NLU). Use only from `train_async`.
Args:
file_importer: `TrainingDataImporter` which supplies the training data.
train_path: Directory in which to train the model.
output_path: Output path.
force_training: If `True` retrain model even if data has not changed.
persist_nlu_training_data: `True` if the NLU training data should be persisted
with the model.
fixed_model_name: Name of model to be stored.
additional_arguments: Additional training parameters.
Returns:
Path of the trained model archive.
"""
stories, nlu_data = await asyncio.gather(
file_importer.get_stories(), file_importer.get_nlu_data()
)
# if stories.is_empty() and nlu_data.is_empty():
# print_error(
# "No training data given. Please provide stories and NLU data in "
# "order to train a Rasa model using the '--data' argument."
# )
# return
# if nlu_data.is_empty():
# print_warning("No NLU data present. Just a Rasa Core model will be trained.")
# return await _train_core_with_validated_data(
# file_importer,
# output=output_path,
# fixed_model_name=fixed_model_name,
# additional_arguments=additional_arguments,
# )
new_fingerprint = await model.model_fingerprint(file_importer)
old_model = model.get_latest_model(output_path)
fingerprint_comparison = FingerprintComparisonResult(force_training=force_training)
if not force_training:
fingerprint_comparison = model.should_retrain(
new_fingerprint, old_model, train_path
)
# bf mod >
if fingerprint_comparison.nlu is True:  # replace True with list of all langs
fingerprint_comparison.nlu = list(new_fingerprint.get("nlu-config", {}).keys())
domain = await file_importer.get_domain()
core_untrainable = domain.is_empty() or stories.is_empty()
nlu_untrainable = [l for l, d in nlu_data.items() if d.is_empty()]
fingerprint_comparison.core = fingerprint_comparison.core and not core_untrainable
fingerprint_comparison.nlu = [l for l in fingerprint_comparison.nlu if l not in nlu_untrainable]
if core_untrainable:
print_color("Skipping Core training since domain or stories are empty.", color=bcolors.OKBLUE)
for lang in nlu_untrainable:
print_color("No NLU data found for language <{}>, skipping training...".format(lang), color=bcolors.OKBLUE)
# </ bf mod
if fingerprint_comparison.is_training_required():
await _do_training(
file_importer,
output_path=output_path,
train_path=train_path,
fingerprint_comparison_result=fingerprint_comparison,
fixed_model_name=fixed_model_name,
persist_nlu_training_data=persist_nlu_training_data,
additional_arguments=additional_arguments,
)
return model.package_model(
fingerprint=new_fingerprint,
output_directory=output_path,
train_path=train_path,
fixed_model_name=fixed_model_name,
)
print_success(
"Nothing changed. You can use the old model stored at '{}'."
"".format(os.path.abspath(old_model))
)
return old_model
async def _do_training(
file_importer: TrainingDataImporter,
output_path: Text,
train_path: Text,
fingerprint_comparison_result: Optional[FingerprintComparisonResult] = None,
fixed_model_name: Optional[Text] = None,
persist_nlu_training_data: bool = False,
additional_arguments: Optional[Dict] = None,
):
if not fingerprint_comparison_result:
fingerprint_comparison_result = FingerprintComparisonResult()
if fingerprint_comparison_result.should_retrain_core():
await _train_core_with_validated_data(
file_importer,
output=output_path,
train_path=train_path,
fixed_model_name=fixed_model_name,
additional_arguments=additional_arguments,
)
elif fingerprint_comparison_result.should_retrain_nlg():
print_color(
"Core stories/configuration did not change. "
"Only the templates section has been changed. A new model with "
"the updated templates will be created.",
color=bcolors.OKBLUE,
)
await model.update_model_with_new_domain(file_importer, train_path)
else:
print_color(
"Core stories/configuration did not change. No need to retrain Core model.",
color=bcolors.OKBLUE,
)
if fingerprint_comparison_result.should_retrain_nlu():
await _train_nlu_with_validated_data(
file_importer,
output=output_path,
train_path=train_path,
fixed_model_name=fixed_model_name,
retrain_nlu=fingerprint_comparison_result.nlu,
persist_nlu_training_data=persist_nlu_training_data,
)
else:
print_color(
"NLU data/configuration did not change. No need to retrain NLU model.",
color=bcolors.OKBLUE,
)
def train_core(
domain: Union[Domain, Text],
config: Text,
stories: Text,
output: Text,
train_path: Optional[Text] = None,
fixed_model_name: Optional[Text] = None,
additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
loop = asyncio.get_event_loop()
return loop.run_until_complete(
train_core_async(
domain=domain,
config=config,
stories=stories,
output=output,
train_path=train_path,
fixed_model_name=fixed_model_name,
additional_arguments=additional_arguments,
)
)
async def train_core_async(
domain: Union[Domain, Text],
config: Text,
stories: Text,
output: Text,
train_path: Optional[Text] = None,
fixed_model_name: Optional[Text] = None,
additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
"""Trains a Core model.
Args:
domain: Path to the domain file.
config: Path to the config file for Core.
stories: Path to the Core training data.
output: Output path.
train_path: If `None` the model will be trained in a temporary
directory, otherwise in the provided directory.
fixed_model_name: Name of model to be stored.
additional_arguments: Additional training parameters.
Returns:
If `train_path` is given it returns the path to the model archive,
otherwise the path to the directory with the trained model files.
"""
file_importer = TrainingDataImporter.load_core_importer_from_config(
config, domain, [stories]
)
domain = await file_importer.get_domain()
if domain.is_empty():
print_error(
"Core training was skipped because no valid domain file was found. "
"Please specify a valid domain using '--domain' argument or check if the provided domain file exists."
)
return None
if not await file_importer.get_stories():
print_error(
"No stories given. Please provide stories in order to "
"train a Rasa Core model using the '--stories' argument."
)
return
return await _train_core_with_validated_data(
file_importer,
output=output,
train_path=train_path,
fixed_model_name=fixed_model_name,
additional_arguments=additional_arguments,
)
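# --- Added usage sketch (not part of the original module) ---
# Example of the Core-only entry point `train_core` defined above; all paths
# are hypothetical placeholders.
#
#     model_path = train_core(
#         domain="domain.yml",
#         config="config.yml",
#         stories="data/stories.md",
#         output="models/",
#     )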
async def _train_core_with_validated_data(
file_importer: TrainingDataImporter,
output: Text,
train_path: Optional[Text] = None,
fixed_model_name: Optional[Text] = None,
additional_arguments: Optional[Dict] = None,
) -> Optional[Text]:
"""Train Core with validated training and config data."""
import rasa.core.train
with ExitStack() as stack:
if train_path:
# If the train path was provided, do nothing on exit.
_train_path = train_path
else:
# Otherwise, create a temp train path and clean it up on exit.
_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))
# normal (not compare) training
print_color("Training Core model...", color=bcolors.OKBLUE)
domain, config = await asyncio.gather(
file_importer.get_domain(), file_importer.get_config()
)
await rasa.core.train(
domain_file=domain,
training_resource=file_importer,
output_path=os.path.join(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME),
policy_config=config,
additional_arguments=additional_arguments,
)
print_color("Core model training completed.", color=bcolors.OKBLUE)
if train_path is None:
# Only Core was trained.
new_fingerprint = await model.model_fingerprint(file_importer)
return model.package_model(
fingerprint=new_fingerprint,
output_directory=output,
train_path=_train_path,
fixed_model_name=fixed_model_name,
model_prefix="core-",
)
return _train_path
def train_nlu(
config: Text,
nlu_data: Text,
output: Text,
train_path: Optional[Text] = None,
fixed_model_name: Optional[Text] = None,
persist_nlu_training_data: bool = False,
) -> Optional[Text]:
"""Trains an NLU model.
Args:
config: Path to the config file for NLU.
nlu_data: Path to the NLU training data.
output: Output path.
train_path: If `None` the model will be trained in a temporary
directory, otherwise in the provided directory.
fixed_model_name: Name of the model to be stored.
persist_nlu_training_data: `True` if the NLU training data should be persisted
with the model.
Returns:
If `train_path` is given it returns the path to the model archive,
otherwise the path to the directory with the trained model files.
"""
loop = asyncio.get_event_loop()
return loop.run_until_complete(
_train_nlu_async(
config,
nlu_data,
output,
train_path,
fixed_model_name,
persist_nlu_training_data,
)
)
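# --- Added usage sketch (not part of the original module) ---
# Example of the synchronous NLU-only entry point `train_nlu` defined above;
# the config and data paths are hypothetical placeholders.
#
#     model_path = train_nlu(
#         config="config.yml",
#         nlu_data="data/nlu.md",
#         output="models/",
#         persist_nlu_training_data=True,
#     )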
async def _train_nlu_async(
config: Text,
nlu_data: Text,
output: Text,
train_path: Optional[Text] = None,
fixed_model_name: Optional[Text] = None,
persist_nlu_training_data: bool = False,
):
if not nlu_data:
print_error(
"No NLU data given. Please provide NLU data in order to train "
"a Rasa NLU model using the '--nlu' argument."
)
return
# Training NLU only, hence the training files still have to be selected
file_importer = TrainingDataImporter.load_nlu_importer_from_config(
config, training_data_paths=[nlu_data]
)
training_datas = await file_importer.get_nlu_data()
if training_datas.is_empty():
print_error(
f"Path '{nlu_data}' doesn't contain valid NLU data in it. "
"Please verify the data format. "
"The NLU model training will be skipped now."
)
return
return await _train_nlu_with_validated_data(
file_importer,
output=output,
train_path=train_path,
fixed_model_name=fixed_model_name,
persist_nlu_training_data=persist_nlu_training_data,
)
async def _train_nlu_with_validated_data(
file_importer: TrainingDataImporter,
output: Text,
train_path: Optional[Text] = None,
fixed_model_name: Optional[Text] = None,
persist_nlu_training_data: bool = False,
retrain_nlu: Union[bool, List[Text]] = True
) -> Optional[Text]:
"""Train NLU with validated training and config data."""
import rasa.nlu.train
with ExitStack() as stack:
models = {}
from rasa.nlu import config as cfg_loader
if train_path:
# If the train path was provided, do nothing on exit.
_train_path = train_path
else:
# Otherwise, create a temp train path and clean it up on exit.
_train_path = stack.enter_context(TempDirectoryPath(tempfile.mkdtemp()))
# bf mod
config = await file_importer.get_nlu_config(retrain_nlu)
for lang in config:
if config[lang]:
print_color("Start training {} NLU model ...".format(lang), color=bcolors.OKBLUE)
_, models[lang], _ = await rasa.nlu.train(
config[lang],
file_importer,
_train_path,
fixed_model_name="nlu-{}".format(lang),
persist_nlu_training_data=persist_nlu_training_data,
)
else:
print_color("NLU data for language <{}> didn't change, skipping training...".format(lang), color=bcolors.OKBLUE)
# /bf mod
print_color("NLU model training completed.", color=bcolors.OKBLUE)
if train_path is None:
# Only NLU was trained
new_fingerprint = await model.model_fingerprint(file_importer)
return model.package_model(
fingerprint=new_fingerprint,
output_directory=output,
train_path=_train_path,
fixed_model_name=fixed_model_name,
model_prefix="nlu-",
)
return _train_path
| [
"rasa.model.FingerprintComparisonResult",
"rasa_addons.importers.BotfrontFileImporter",
"rasa.model.update_model_with_new_domain",
"rasa.model.should_retrain",
"asyncio.new_event_loop",
"rasa.cli.utils.print_color",
"rasa.cli.utils.print_warning",
"rasa.importers.importer.TrainingDataImporter.load_nlu_importer_from_config",
"contextlib.ExitStack",
"rasa.model.package_model",
"asyncio.get_event_loop",
"tempfile.mkdtemp",
"rasa.cli.utils.print_error",
"rasa.importers.importer.TrainingDataImporter.load_core_importer_from_config",
"rasa.model.model_fingerprint",
"os.path.join",
"rasa.model.get_latest_model",
"os.path.abspath",
"asyncio.set_event_loop"
] | [((3729, 3952), 'rasa.cli.utils.print_warning', 'print_warning', (['"""Core training was skipped because no valid domain file was found. Only an nlu-model was created.Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."""'], {}), '(\n "Core training was skipped because no valid domain file was found. Only an nlu-model was created.Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."\n )\n', (3742, 3952), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((5750, 5785), 'rasa.model.get_latest_model', 'model.get_latest_model', (['output_path'], {}), '(output_path)\n', (5772, 5785), False, 'from rasa import model\n'), ((5815, 5873), 'rasa.model.FingerprintComparisonResult', 'FingerprintComparisonResult', ([], {'force_training': 'force_training'}), '(force_training=force_training)\n', (5842, 5873), False, 'from rasa.model import FingerprintComparisonResult\n'), ((9790, 9814), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (9812, 9814), False, 'import asyncio\n'), ((11124, 11202), 'rasa.importers.importer.TrainingDataImporter.load_core_importer_from_config', 'TrainingDataImporter.load_core_importer_from_config', (['config', 'domain', '[stories]'], {}), '(config, domain, [stories])\n', (11175, 11202), False, 'from rasa.importers.importer import TrainingDataImporter\n'), ((14616, 14640), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (14638, 14640), False, 'import asyncio\n'), ((15390, 15484), 'rasa.importers.importer.TrainingDataImporter.load_nlu_importer_from_config', 'TrainingDataImporter.load_nlu_importer_from_config', (['config'], {'training_data_paths': '[nlu_data]'}), '(config,\n training_data_paths=[nlu_data])\n', (15440, 15484), False, 'from rasa.importers.importer import TrainingDataImporter\n'), ((2709, 2720), 'contextlib.ExitStack', 'ExitStack', ([], {}), '()\n', (2718, 2720), False, 'from contextlib import ExitStack\n'), ((2916, 2968), 'rasa_addons.importers.BotfrontFileImporter', 'BotfrontFileImporter', (['config', 'domain', 'training_files'], {}), '(config, domain, training_files)\n', (2936, 2968), False, 'from rasa_addons.importers import BotfrontFileImporter\n'), ((5695, 5733), 'rasa.model.model_fingerprint', 'model.model_fingerprint', (['file_importer'], {}), '(file_importer)\n', (5718, 5733), False, 'from rasa import model\n'), ((5934, 5994), 'rasa.model.should_retrain', 'model.should_retrain', (['new_fingerprint', 'old_model', 'train_path'], {}), '(new_fingerprint, old_model, train_path)\n', (5954, 5994), False, 'from rasa import model\n'), ((6604, 6702), 'rasa.cli.utils.print_color', 'print_color', (['"""Skipping Core training since domain or stories are empty."""'], {'color': 'bcolors.OKBLUE'}), "('Skipping Core training since domain or stories are empty.',\n color=bcolors.OKBLUE)\n", (6615, 6702), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((7305, 7446), 'rasa.model.package_model', 'model.package_model', ([], {'fingerprint': 'new_fingerprint', 'output_directory': 'output_path', 'train_path': 'train_path', 'fixed_model_name': 'fixed_model_name'}), '(fingerprint=new_fingerprint, output_directory=\n output_path, train_path=train_path, fixed_model_name=fixed_model_name)\n', (7324, 7446), False, 'from rasa import model\n'), ((8080, 8109), 'rasa.model.FingerprintComparisonResult', 'FingerprintComparisonResult', ([], 
{}), '()\n', (8107, 8109), False, 'from rasa.model import FingerprintComparisonResult\n'), ((9378, 9492), 'rasa.cli.utils.print_color', 'print_color', (['"""NLU data/configuration did not change. No need to retrain NLU model."""'], {'color': 'bcolors.OKBLUE'}), "(\n 'NLU data/configuration did not change. No need to retrain NLU model.',\n color=bcolors.OKBLUE)\n", (9389, 9492), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((11297, 11488), 'rasa.cli.utils.print_error', 'print_error', (['"""Core training was skipped because no valid domain file was found. Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."""'], {}), '(\n "Core training was skipped because no valid domain file was found. Please specify a valid domain using \'--domain\' argument or check if the provided domain file exists."\n )\n', (11308, 11488), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((11591, 11724), 'rasa.cli.utils.print_error', 'print_error', (['"""No stories given. Please provide stories in order to train a Rasa Core model using the \'--stories\' argument."""'], {}), '(\n "No stories given. Please provide stories in order to train a Rasa Core model using the \'--stories\' argument."\n )\n', (11602, 11724), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((12353, 12364), 'contextlib.ExitStack', 'ExitStack', ([], {}), '()\n', (12362, 12364), False, 'from contextlib import ExitStack\n'), ((12724, 12783), 'rasa.cli.utils.print_color', 'print_color', (['"""Training Core model..."""'], {'color': 'bcolors.OKBLUE'}), "('Training Core model...', color=bcolors.OKBLUE)\n", (12735, 12783), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((13206, 13273), 'rasa.cli.utils.print_color', 'print_color', (['"""Core model training completed."""'], {'color': 'bcolors.OKBLUE'}), "('Core model training completed.', color=bcolors.OKBLUE)\n", (13217, 13273), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((15121, 15251), 'rasa.cli.utils.print_error', 'print_error', (['"""No NLU data given. Please provide NLU data in order to train a Rasa NLU model using the \'--nlu\' argument."""'], {}), '(\n "No NLU data given. Please provide NLU data in order to train a Rasa NLU model using the \'--nlu\' argument."\n )\n', (15132, 15251), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((15594, 15750), 'rasa.cli.utils.print_error', 'print_error', (['f"""Path \'{nlu_data}\' doesn\'t contain valid NLU data in it. Please verify the data format. The NLU model training will be skipped now."""'], {}), '(\n f"Path \'{nlu_data}\' doesn\'t contain valid NLU data in it. Please verify the data format. 
The NLU model training will be skipped now."\n )\n', (15605, 15750), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((16444, 16455), 'contextlib.ExitStack', 'ExitStack', ([], {}), '()\n', (16453, 16455), False, 'from contextlib import ExitStack\n'), ((17559, 17625), 'rasa.cli.utils.print_color', 'print_color', (['"""NLU model training completed."""'], {'color': 'bcolors.OKBLUE'}), "('NLU model training completed.', color=bcolors.OKBLUE)\n", (17570, 17625), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((974, 998), 'asyncio.get_event_loop', 'asyncio.get_event_loop', ([], {}), '()\n', (996, 998), False, 'import asyncio\n'), ((7608, 7634), 'os.path.abspath', 'os.path.abspath', (['old_model'], {}), '(old_model)\n', (7623, 7634), False, 'import os\n'), ((8493, 8683), 'rasa.cli.utils.print_color', 'print_color', (['"""Core stories/configuration did not change. Only the templates section has been changed. A new model with the updated templates will be created."""'], {'color': 'bcolors.OKBLUE'}), "(\n 'Core stories/configuration did not change. Only the templates section has been changed. A new model with the updated templates will be created.'\n , color=bcolors.OKBLUE)\n", (8504, 8683), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((8833, 8953), 'rasa.cli.utils.print_color', 'print_color', (['"""Core stories/configuration did not change. No need to retrain Core model."""'], {'color': 'bcolors.OKBLUE'}), "(\n 'Core stories/configuration did not change. No need to retrain Core model.'\n , color=bcolors.OKBLUE)\n", (8844, 8953), False, 'from rasa.cli.utils import print_success, print_warning, print_error, bcolors, print_color\n'), ((13437, 13600), 'rasa.model.package_model', 'model.package_model', ([], {'fingerprint': 'new_fingerprint', 'output_directory': 'output', 'train_path': '_train_path', 'fixed_model_name': 'fixed_model_name', 'model_prefix': '"""core-"""'}), "(fingerprint=new_fingerprint, output_directory=output,\n train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix\n ='core-')\n", (13456, 13600), False, 'from rasa import model\n'), ((17788, 17950), 'rasa.model.package_model', 'model.package_model', ([], {'fingerprint': 'new_fingerprint', 'output_directory': 'output', 'train_path': '_train_path', 'fixed_model_name': 'fixed_model_name', 'model_prefix': '"""nlu-"""'}), "(fingerprint=new_fingerprint, output_directory=output,\n train_path=_train_path, fixed_model_name=fixed_model_name, model_prefix\n ='nlu-')\n", (17807, 17950), False, 'from rasa import model\n'), ((1047, 1071), 'asyncio.new_event_loop', 'asyncio.new_event_loop', ([], {}), '()\n', (1069, 1071), False, 'import asyncio\n'), ((1084, 1112), 'asyncio.set_event_loop', 'asyncio.set_event_loop', (['loop'], {}), '(loop)\n', (1106, 1112), False, 'import asyncio\n'), ((2790, 2808), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (2806, 2808), False, 'import tempfile\n'), ((8753, 8814), 'rasa.model.update_model_with_new_domain', 'model.update_model_with_new_domain', (['file_importer', 'train_path'], {}), '(file_importer, train_path)\n', (8787, 8814), False, 'from rasa import model\n'), ((13379, 13417), 'rasa.model.model_fingerprint', 'model.model_fingerprint', (['file_importer'], {}), '(file_importer)\n', (13402, 13417), False, 'from rasa import model\n'), ((17729, 17767), 'rasa.model.model_fingerprint', 'model.model_fingerprint', 
(['file_importer'], {}), '(file_importer)\n', (17752, 17767), False, 'from rasa import model\n'), ((12654, 12672), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (12670, 12672), False, 'import tempfile\n'), ((13040, 13097), 'os.path.join', 'os.path.join', (['_train_path', 'DEFAULT_CORE_SUBDIRECTORY_NAME'], {}), '(_train_path, DEFAULT_CORE_SUBDIRECTORY_NAME)\n', (13052, 13097), False, 'import os\n'), ((16816, 16834), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (16832, 16834), False, 'import tempfile\n')] |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author:
''' PNASNet in PyTorch.
Paper: Progressive Neural Architecture Search
'''
from easyai.base_name.block_name import NormalizationType, ActivationType
from easyai.base_name.backbone_name import BackboneName
from easyai.model.backbone.utility.base_backbone import *
from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock
from easyai.model.base_block.cls.pnasnet_block import CellA, CellB
__all__ = ['pnasnet_A', 'pnasnet_B']
class PNASNet(BaseBackbone):
def __init__(self, data_channel=3, num_cells=6,
num_planes=44, block=CellA,
bnName=NormalizationType.BatchNormalize2d,
activationName=ActivationType.ReLU):
super().__init__()
self.set_name(BackboneName.PNASNetA)
self.data_channel = data_channel
self.num_cells = num_cells
self.block = block
self.activation_name = activationName
self.bn_name = bnName
self.first_output = num_planes
self.in_planes = self.first_output
self.create_block_list()
def create_block_list(self):
self.block_out_channels = []
self.index = 0
layer1 = ConvBNActivationBlock(in_channels=self.data_channel,
out_channels=self.first_output,
kernel_size=3,
stride=1,
padding=1,
bias=False,
bnName=self.bn_name,
activationName=self.activation_name)
self.add_block_list(layer1.get_name(), layer1, self.first_output)
self.make_layer(self.first_output, self.num_cells)
self.downsample(self.first_output * 2)
self.make_layer(self.first_output * 2, self.num_cells)
self.downsample(self.first_output * 4)
self.make_layer(self.first_output * 4, self.num_cells)
def make_layer(self, planes, num_cells):
for _ in range(num_cells):
temp_block = self.block(self.in_planes, planes, stride=1,
bn_name=self.bn_name, activation_name=self.activation_name)
self.add_block_list(temp_block.get_name(), temp_block, planes)
self.in_planes = planes
def downsample(self, planes):
down_block = self.block(self.in_planes, planes, stride=2,
bn_name=self.bn_name, activation_name=self.activation_name)
self.add_block_list(down_block.get_name(), down_block, planes)
self.in_planes = planes
def forward(self, x):
output_list = []
for block in self._modules.values():
x = block(x)
output_list.append(x)
return output_list
def pnasnet_A(data_channel):
model = PNASNet(data_channel=data_channel,
num_cells=6,
num_planes=44,
block=CellA)
model.set_name(BackboneName.PNASNetA)
return model
def pnasnet_B(data_channel):
model = PNASNet(data_channel=data_channel,
num_cells=6, num_planes=32,
block=CellB)
model.set_name(BackboneName.PNASNetB)
return model
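# --- Added usage sketch (not part of the original file) ---
# Minimal illustration of building the backbones defined above. It assumes the
# easyai package is importable and that BaseBackbone is a torch.nn.Module, so
# calling the model returns the list of per-block outputs produced by forward().
#
#     import torch
#
#     model = pnasnet_A(data_channel=3)
#     outputs = model(torch.randn(1, 3, 32, 32))
#     print(len(outputs))  # one tensor per registered block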
| [
"easyai.model.base_block.utility.utility_block.ConvBNActivationBlock"
] | [((1225, 1425), 'easyai.model.base_block.utility.utility_block.ConvBNActivationBlock', 'ConvBNActivationBlock', ([], {'in_channels': 'self.data_channel', 'out_channels': 'self.first_output', 'kernel_size': '(3)', 'stride': '(1)', 'padding': '(1)', 'bias': '(False)', 'bnName': 'self.bn_name', 'activationName': 'self.activation_name'}), '(in_channels=self.data_channel, out_channels=self.\n first_output, kernel_size=3, stride=1, padding=1, bias=False, bnName=\n self.bn_name, activationName=self.activation_name)\n', (1246, 1425), False, 'from easyai.model.base_block.utility.utility_block import ConvBNActivationBlock\n')] |
# -*- coding: utf-8 -*-
import json
import os
import math
import logging
import requests
import time
from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox
def get_access_token(token):
resp = None
request_count = 0
url = "https://api.cesium.com/v1/assets/1/endpoint"
while True:
if request_count > 4:
break
try:
request_count += 1
param = {'access_token': token}
resp = requests.get(url, params=param, timeout=2)
if resp.status_code != 200:
continue
break
except Exception as e:
resp = None
time.sleep(3)
if resp is None:
return None
resp_json = resp.json()
return resp_json.get('accessToken')
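# --- Added usage note (not part of the original module) ---
# get_access_token exchanges a Cesium ion account token for the short-lived
# asset access token used by the tile requests below, retrying up to five
# times; the token string here is a placeholder.
#
#     access_token = get_access_token('<your-cesium-ion-token>')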
class TerrainDownloaderThread(BaseDownloaderThread):
URL = "https://assets.cesium.com/1/{z}/{x}/{y}.terrain?extensions=octvertexnormals-watermask&v=1.1.0"
def __init__(self, root_dir, bbox, token, task_q, logger=None, write_db=False):
super(TerrainDownloaderThread, self).__init__(
root_dir, bbox, task_q, logger, write_db=write_db, db_file_name='Terrain.db')
self.token = token
self._init_metadata(
format='terrain',
bounds='%f,%f,%f,%f' % (self.bbox.min_lng, self.bbox.min_lat, self.bbox.max_lng, self.bbox.max_lat))
def get_url(self, x, y, z):
return self.URL.format(x=x, y=y, z=z)
def _download(self, x, y, z):
file_path = '%s/%s/%i/%i/%i.%s' % (self.root_dir, 'Terrain', z, x, y, 'terrain')
if os.path.exists(file_path):
self._data2DB(x, y, z, file_path)
return 0
os.makedirs(os.path.dirname(file_path), exist_ok=True)
resp = None
require_count = 0
_url = ''
access_token = get_access_token(self.token)
if access_token is None:
return -1
param = {'extensions': 'octvertexnormals-watermask', 'v': '1.1.0', 'access_token': access_token}
while True:
if require_count > 4: break
try:
_url = self.get_url(x, y, z)
resp = requests.get(_url, params=param, stream=True, timeout=2)
break
except Exception as e:
resp = None
time.sleep(3)
require_count += 1
if resp is None:
return -1
if resp.status_code != 200:
return -1
try:
with open(file_path, 'wb') as f:
for chunk in resp.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
except Exception as e:
return -1
self._data2DB(x, y, z, file_path)
return 1
class TerrainDownloadEngine(DownloadEngine):
root_dir = ''
def __init__(self, root_dir, bbox, token, thread_num, logger=None, write_db=False):
super(TerrainDownloadEngine, self).__init__(bbox, thread_num, logger, write_db=write_db)
self.root_dir = root_dir
self.token = token
def bbox2xyz(self, bbox, z):
min_x, min_y = latlng2tile_terrain(bbox.min_lat, bbox.min_lng, z)
max_x, max_y = latlng2tile_terrain(bbox.max_lat, bbox.max_lng, z)
return math.floor(min_x), math.floor(min_y), math.ceil(max_x) + 1, math.ceil(max_y) + 1
def generate_metadata(self):
try:
metadatas = {
"attribution": "© Analytical Graphics Inc., © CGIAR-CSI, Produced using Copernicus data and "
"information funded by the European Union - EU-DEM layers",
"available": [
[
{
"endX": 1,
"endY": 0,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 3,
"endY": 1,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 7,
"endY": 3,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 15,
"endY": 7,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 31,
"endY": 15,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 63,
"endY": 31,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 127,
"endY": 63,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 255,
"endY": 127,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 511,
"endY": 255,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 1023,
"endY": 511,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 2047,
"endY": 1023,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 4095,
"endY": 2047,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 8191,
"endY": 4095,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 16383,
"endY": 8191,
"startX": 0,
"startY": 0
}
],
[
{
"endX": 32767,
"endY": 16383,
"startX": 0,
"startY": 0
}
]
],
"bounds": [-180, -90, 180, 90, ],
"description": "STK World Terrain Premium Tileset, v1.3. 10m - 30m resolution CONUS, 30m resolution "
"SRTM between 60N and 60S, 30m Europe. Minimum global coverage of 1000m.",
"extensions": ["watermask", "vertexnormals", "octvertexnormals", ],
"format": "quantized-mesh-1.0",
"maxzoom": 13,
"minzoom": 0,
"name": "world",
"projection": "EPSG:4326",
"scheme": "tms",
"tilejson": "2.1.0",
"tiles": ["{z}/{x}/{y}.terrain?v={version}", ],
"version": "1.31376.0"
}
_dir = os.path.join(self.root_dir, 'Terrain')
os.makedirs(_dir, exist_ok=True)
metadatas_path = os.path.join(_dir, 'layer.json')
with open(metadatas_path, 'w') as f:
json.dump(metadatas, f)
except Exception as e:
if self.logger is not None:
self.logger.exception(e)
def run(self):
try:
self.generate_metadata()
count = 0
bboxs = self.cut_bbox()
for bbox in bboxs:
_count = self.get_task_count(bbox)
count += _count
self.division_done_signal.emit(count)
for bbox in bboxs:
while True:
if not self.running:
time.sleep(0.01)
else:
break
task_q = self.get_task_queue(bbox)
self.threads = []
for i in range(self.thread_num):
thread = TerrainDownloaderThread(self.root_dir, self.bbox, self.token, task_q, self.logger,
write_db=self.write_db)
thread.sub_progressBar_updated_signal.connect(self.sub_update_progressBar)
self.threads.append(thread)
for thread in self.threads:
thread.start()
for thread in self.threads:
thread.wait()
for t in self.threads:
t.stop()
t.quit()
self.threads = []
self.download_done_signal.emit()
except Exception as e:
if self.logger is not None:
self.logger.error(e)
if __name__ == '__main__':
if 1:
logger = logging.getLogger('down')
try:
root = r'/Users/cugxy/Documents/data/downloader'
formatter = logging.Formatter('%(levelname)s-%(message)s')
hdlr = logging.StreamHandler()
log_file = os.path.join(root, 'down.log')
file_hdlr = logging.FileHandler(log_file)
file_hdlr.setFormatter(formatter)
logger.addHandler(file_hdlr)
logger.addHandler(hdlr)
logger.setLevel(logging.INFO)
min_lng = -180.0
max_lng = 180.0
min_lat = -90.0
max_lat = 90.0
start_zoom = 0
end_zoom = 5
bbox = BoundBox(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom)
d = TerrainDownloadEngine(root, bbox, '', 8, logger)  # '' is a placeholder; pass a real Cesium ion access token
d.start()
time.sleep(10000)
logger.error('main thread out')
except Exception as e:
logger.error(e)
if 0:
accessToken = get_access_token()
pass
| [
"logging.getLogger",
"os.path.exists",
"logging.StreamHandler",
"math.ceil",
"os.makedirs",
"map_download.cmd.BaseDownloader.latlng2tile_terrain",
"math.floor",
"logging.Formatter",
"map_download.cmd.BaseDownloader.BoundBox",
"os.path.join",
"requests.get",
"time.sleep",
"os.path.dirname",
"logging.FileHandler",
"json.dump"
] | [((1646, 1671), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (1660, 1671), False, 'import os\n'), ((3201, 3251), 'map_download.cmd.BaseDownloader.latlng2tile_terrain', 'latlng2tile_terrain', (['bbox.min_lat', 'bbox.min_lng', 'z'], {}), '(bbox.min_lat, bbox.min_lng, z)\n', (3220, 3251), False, 'from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox\n'), ((3275, 3325), 'map_download.cmd.BaseDownloader.latlng2tile_terrain', 'latlng2tile_terrain', (['bbox.max_lat', 'bbox.max_lng', 'z'], {}), '(bbox.max_lat, bbox.max_lng, z)\n', (3294, 3325), False, 'from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox\n'), ((10233, 10258), 'logging.getLogger', 'logging.getLogger', (['"""down"""'], {}), "('down')\n", (10250, 10258), False, 'import logging\n'), ((526, 568), 'requests.get', 'requests.get', (['url'], {'params': 'param', 'timeout': '(2)'}), '(url, params=param, timeout=2)\n', (538, 568), False, 'import requests\n'), ((1760, 1786), 'os.path.dirname', 'os.path.dirname', (['file_path'], {}), '(file_path)\n', (1775, 1786), False, 'import os\n'), ((3341, 3358), 'math.floor', 'math.floor', (['min_x'], {}), '(min_x)\n', (3351, 3358), False, 'import math\n'), ((3360, 3377), 'math.floor', 'math.floor', (['min_y'], {}), '(min_y)\n', (3370, 3377), False, 'import math\n'), ((8434, 8472), 'os.path.join', 'os.path.join', (['self.root_dir', '"""Terrain"""'], {}), "(self.root_dir, 'Terrain')\n", (8446, 8472), False, 'import os\n'), ((8485, 8517), 'os.makedirs', 'os.makedirs', (['_dir'], {'exist_ok': '(True)'}), '(_dir, exist_ok=True)\n', (8496, 8517), False, 'import os\n'), ((8547, 8579), 'os.path.join', 'os.path.join', (['_dir', '"""layer.json"""'], {}), "(_dir, 'layer.json')\n", (8559, 8579), False, 'import os\n'), ((10357, 10403), 'logging.Formatter', 'logging.Formatter', (['"""%(levelname)s-%(message)s"""'], {}), "('%(levelname)s-%(message)s')\n", (10374, 10403), False, 'import logging\n'), ((10423, 10446), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (10444, 10446), False, 'import logging\n'), ((10470, 10500), 'os.path.join', 'os.path.join', (['root', '"""down.log"""'], {}), "(root, 'down.log')\n", (10482, 10500), False, 'import os\n'), ((10525, 10554), 'logging.FileHandler', 'logging.FileHandler', (['log_file'], {}), '(log_file)\n', (10544, 10554), False, 'import logging\n'), ((10903, 10969), 'map_download.cmd.BaseDownloader.BoundBox', 'BoundBox', (['max_lat', 'max_lng', 'min_lat', 'min_lng', 'start_zoom', 'end_zoom'], {}), '(max_lat, max_lng, min_lat, min_lng, start_zoom, end_zoom)\n', (10911, 10969), False, 'from map_download.cmd.BaseDownloader import DownloadEngine, BaseDownloaderThread, latlng2tile_terrain, BoundBox\n'), ((11065, 11082), 'time.sleep', 'time.sleep', (['(10000)'], {}), '(10000)\n', (11075, 11082), False, 'import time\n'), ((719, 732), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (729, 732), False, 'import time\n'), ((2222, 2278), 'requests.get', 'requests.get', (['_url'], {'params': 'param', 'stream': '(True)', 'timeout': '(2)'}), '(_url, params=param, stream=True, timeout=2)\n', (2234, 2278), False, 'import requests\n'), ((3379, 3395), 'math.ceil', 'math.ceil', (['max_x'], {}), '(max_x)\n', (3388, 3395), False, 'import math\n'), ((3401, 3417), 'math.ceil', 'math.ceil', (['max_y'], {}), '(max_y)\n', (3410, 3417), False, 'import math\n'), ((8645, 8668), 'json.dump', 'json.dump', (['metadatas', 'f'], {}), 
'(metadatas, f)\n', (8654, 8668), False, 'import json\n'), ((2380, 2393), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (2390, 2393), False, 'import time\n'), ((9197, 9213), 'time.sleep', 'time.sleep', (['(0.01)'], {}), '(0.01)\n', (9207, 9213), False, 'import time\n')] |
"""Forms for RTD donations"""
import logging
from django import forms
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from readthedocs.payments.forms import StripeModelForm, StripeResourceMixin
from readthedocs.payments.utils import stripe
from .models import Supporter
log = logging.getLogger(__name__)
class SupporterForm(StripeResourceMixin, StripeModelForm):
"""Donation support sign up form
This extends the basic payment form, giving fields for credit card number,
expiry, and CVV. The proper Knockout data bindings are established on
:py:class:`StripeModelForm`
"""
class Meta:
model = Supporter
fields = (
'last_4_digits',
'name',
'email',
'dollars',
'logo_url',
'site_url',
'public',
)
labels = {
'public': _('Make this donation public'),
}
help_texts = {
'public': _('Your name and image will be displayed on the donation page'),
'email': _('Your email is used for Gravatar and so we can send you a receipt'),
'logo_url': _("URL of your company's logo, images should be 300x300 pixels or less"),
'dollars': _('Companies donating over $400 can specify a logo URL and site link'),
}
widgets = {
'dollars': forms.HiddenInput(attrs={
'data-bind': 'value: dollars'
}),
'logo_url': forms.TextInput(attrs={
'data-bind': 'value: logo_url, enable: urls_enabled'
}),
'site_url': forms.TextInput(attrs={
'data-bind': 'value: site_url, enable: urls_enabled'
}),
'last_4_digits': forms.TextInput(attrs={
'data-bind': 'valueInit: card_digits, value: card_digits'
}),
}
last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)
name = forms.CharField(required=True)
email = forms.CharField(required=True)
def __init__(self, *args, **kwargs):
self.user = kwargs.pop('user')
super(SupporterForm, self).__init__(*args, **kwargs)
def validate_stripe(self):
"""Call stripe for payment (not ideal here) and clean up logo < $200"""
dollars = self.cleaned_data['dollars']
if dollars < 200:
self.cleaned_data['logo_url'] = None
self.cleaned_data['site_url'] = None
stripe.Charge.create(
amount=int(self.cleaned_data['dollars']) * 100,
currency='usd',
source=self.cleaned_data['stripe_token'],
description='Read the Docs Sustained Engineering',
receipt_email=self.cleaned_data['email']
)
def save(self, commit=True):
supporter = super(SupporterForm, self).save(commit)
if commit and self.user is not None and self.user.is_authenticated():
supporter.user = self.user
supporter.save()
return supporter
class EthicalAdForm(StripeResourceMixin, StripeModelForm):
"""Payment form for ethical ads
This extends the basic payment form, giving fields for credit card number,
expiry, and CVV. The proper Knockout data bindings are established on
:py:class:`StripeModelForm`
"""
class Meta:
model = Supporter
fields = (
'last_4_digits',
'name',
'email',
'dollars',
)
help_texts = {
'email': _('Your email is used so we can send you a receipt'),
}
widgets = {
'dollars': forms.HiddenInput(attrs={
'data-bind': 'value: dollars'
}),
'last_4_digits': forms.TextInput(attrs={
'data-bind': 'valueInit: card_digits, value: card_digits'
}),
}
last_4_digits = forms.CharField(widget=forms.HiddenInput(), required=True)
name = forms.CharField(required=True)
email = forms.CharField(required=True)
def validate_stripe(self):
stripe.Charge.create(
amount=int(self.cleaned_data['dollars']) * 100,
currency='usd',
source=self.cleaned_data['stripe_token'],
description='Read the Docs Sponsorship Payment',
receipt_email=self.cleaned_data['email']
)
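# --- Added usage sketch (not part of the original module) ---
# How the SupporterForm above might be bound inside a Django view; the request
# handling is a hypothetical illustration, not Read the Docs' actual view code.
#
#     form = SupporterForm(data=request.POST, user=request.user)
#     if form.is_valid():
#         supporter = form.save()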
| [
"logging.getLogger",
"django.forms.HiddenInput",
"django.utils.translation.ugettext_lazy",
"django.forms.CharField",
"django.forms.TextInput"
] | [((322, 349), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (339, 349), False, 'import logging\n'), ((2007, 2037), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (2022, 2037), False, 'from django import forms\n'), ((2050, 2080), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (2065, 2080), False, 'from django import forms\n'), ((4009, 4039), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (4024, 4039), False, 'from django import forms\n'), ((4052, 4082), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(True)'}), '(required=True)\n', (4067, 4082), False, 'from django import forms\n'), ((919, 949), 'django.utils.translation.ugettext_lazy', '_', (['"""Make this donation public"""'], {}), "('Make this donation public')\n", (920, 949), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1006, 1069), 'django.utils.translation.ugettext_lazy', '_', (['"""Your name and image will be displayed on the donation page"""'], {}), "('Your name and image will be displayed on the donation page')\n", (1007, 1069), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1092, 1161), 'django.utils.translation.ugettext_lazy', '_', (['"""Your email is used for Gravatar and so we can send you a receipt"""'], {}), "('Your email is used for Gravatar and so we can send you a receipt')\n", (1093, 1161), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1187, 1259), 'django.utils.translation.ugettext_lazy', '_', (['"""URL of your company\'s logo, images should be 300x300 pixels or less"""'], {}), '("URL of your company\'s logo, images should be 300x300 pixels or less")\n', (1188, 1259), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1284, 1354), 'django.utils.translation.ugettext_lazy', '_', (['"""Companies donating over $400 can specify a logo URL and site link"""'], {}), "('Companies donating over $400 can specify a logo URL and site link')\n", (1285, 1354), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((1409, 1465), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {'attrs': "{'data-bind': 'value: dollars'}"}), "(attrs={'data-bind': 'value: dollars'})\n", (1426, 1465), False, 'from django import forms\n'), ((1521, 1598), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'data-bind': 'value: logo_url, enable: urls_enabled'}"}), "(attrs={'data-bind': 'value: logo_url, enable: urls_enabled'})\n", (1536, 1598), False, 'from django import forms\n'), ((1654, 1731), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'data-bind': 'value: site_url, enable: urls_enabled'}"}), "(attrs={'data-bind': 'value: site_url, enable: urls_enabled'})\n", (1669, 1731), False, 'from django import forms\n'), ((1792, 1878), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'data-bind': 'valueInit: card_digits, value: card_digits'}"}), "(attrs={'data-bind':\n 'valueInit: card_digits, value: card_digits'})\n", (1807, 1878), False, 'from django import forms\n'), ((1960, 1979), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1977, 1979), False, 'from django import forms\n'), ((3570, 3622), 'django.utils.translation.ugettext_lazy', '_', (['"""Your email is used so we can send you a receipt"""'], {}), "('Your email is used so we can send you a receipt')\n", (3571, 3622), True, 'from 
django.utils.translation import ugettext_lazy as _\n'), ((3677, 3733), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {'attrs': "{'data-bind': 'value: dollars'}"}), "(attrs={'data-bind': 'value: dollars'})\n", (3694, 3733), False, 'from django import forms\n'), ((3794, 3880), 'django.forms.TextInput', 'forms.TextInput', ([], {'attrs': "{'data-bind': 'valueInit: card_digits, value: card_digits'}"}), "(attrs={'data-bind':\n 'valueInit: card_digits, value: card_digits'})\n", (3809, 3880), False, 'from django import forms\n'), ((3962, 3981), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (3979, 3981), False, 'from django import forms\n')] |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .base import DataReaderBase
from ..tools import COL, _get_dates, to_float, to_int
import pandas as pd
#from pandas.tseries.frequencies import to_offset
from six.moves import cStringIO as StringIO
import logging
import traceback
import datetime
import json
import token, tokenize
def ymd_to_date(y, m, d):
"""
Returns date
>>> expiration = {u'd': 1, u'm': 12, u'y': 2014}
>>> ymd_to_date(**expiration)
datetime.date(2014, 12, 1)
>>> ymd_to_date(2014, 3, 1)
datetime.date(2014, 3, 1)
"""
return(datetime.date(year=y, month=m, day=d))
def date_to_ymd(date):
"""
Returns dict like {'y': ..., 'm': ..., 'd': ...}
>>> date_to_ymd(datetime.date(year=2010, month=1, day=3))
{'y': 2010, 'm': 1, 'd': 3}
"""
d = {
'y': date.year,
'm': date.month,
'd': date.day
}
return(d)
def fix_lazy_json(in_text):
"""
Handle lazy JSON - to fix expecting property name
this function fixes the json output from google
http://stackoverflow.com/questions/4033633/handling-lazy-json-in-python-expecting-property-name
"""
tokengen = tokenize.generate_tokens(StringIO(in_text).readline)
result = []
for tokid, tokval, _, _, _ in tokengen:
# fix unquoted strings
if (tokid == token.NAME):
if tokval not in ['true', 'false', 'null', '-Infinity', 'Infinity', 'NaN']:
tokid = token.STRING
tokval = u'"%s"' % tokval
# fix single-quoted strings
elif (tokid == token.STRING):
if tokval.startswith ("'"):
tokval = u'"%s"' % tokval[1:-1].replace ('"', '\\"')
# remove invalid commas
elif (tokid == token.OP) and ((tokval == '}') or (tokval == ']')):
if (len(result) > 0) and (result[-1][1] == ','):
result.pop()
result.append((tokid, tokval))
return tokenize.untokenize(result)
def json_decode(json_string):
try:
ret = json.loads(json_string)
except:
json_string = fix_lazy_json(json_string)
ret = json.loads(json_string)
return ret
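# --- Added usage note (not part of the original module) ---
# json_decode falls back to fix_lazy_json when strict parsing fails, so the
# "lazy" JSON returned by Google Finance (unquoted keys, single quotes,
# trailing commas) still loads, e.g.:
#
#     json_decode("{expirations: [{y: 2014, m: 12, d: 1},]}")
#     # -> {'expirations': [{'y': 2014, 'm': 12, 'd': 1}]}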
class DataReaderGoogleFinanceOptions(DataReaderBase):
"""
DataReader to fetch data from Google Finance Options
see https://www.google.com/finance/option_chain
https://github.com/makmac213/python-google-option-chain
http://www.drtomstarke.com/index.php/option-chains-from-google-finance-api
"""
def init(self, *args, **kwargs):
self._get_multi = self._get_multi_todict
def _get_one(self, name, *args, **kwargs):
return(self._get_one_raw(name, 'All', 'json'))
def _get_one_raw(self, symbol, typ='All', output='json', y='2014', m='12', d='1'):
url = "https://www.google.com/finance/option_chain"
params = {
'q': symbol,
'type': typ,
'output': output,
}
data = self._get_content(url, params)
d = {}
lst = []
for typ in [u'puts', u'calls']:
df_typ = pd.DataFrame(data[typ])
df_typ['Type'] = typ
lst.append(df_typ)
del data[typ]
for i, expiration in enumerate(data['expirations']):
params = {
'q': symbol,
'output': output,
'expy': expiration['y'],
'expm': expiration['m'],
'expd': expiration['d'],
}
data = self._get_content(url, params)
for typ in [u'puts', u'calls']:
df_typ = pd.DataFrame(data[typ])
df_typ['Type'] = typ
lst.append(df_typ)
del data[typ]
df = pd.concat(lst, axis=0, ignore_index=True)
d_cols = {
"a": "Ask",
"b": "Bid",
"p": "Last",
"strike": "Strike",
"expiry": "Expiry",
"vol": "Volume",
"name": "Name"
}
df = df.rename(columns=d_cols)
"""
d_cols = {
"a": "ask",
"b": "bid",
"c": "change",
"cid": "identity code",
"cp": "cp"
"cs": change direction. "chg" = up, "chr" = down, "chg"?
"e": # I think this tells us something about what country where the stock is traded. "OPRA" means USA.
"expiry": expiration date for this option
"name": I don't know. I have never seen a value for this
"oi": open interest. How many of these are currently being held by others.
See, http://www.investopedia.com/terms/o/openinterest.asp
"p": price, last
"s": option code.
Basically, Stock Symbol + 7 if mini option + date + "C" or "P" + price
"strike": "strike price for this option"
"vol": "the volume of options traded."
}
"""
for col in ['Ask', 'Bid', 'c', 'cp', 'Last', 'Strike']:
df[col] = df[col].map(to_float)
for col in ['Volume', 'oi', 'cid']:
df[col] = df[col].map(to_int)
df['Expiry'] = pd.to_datetime(df['Expiry'])
data['options'] = df
data['underlying_id'] = int(data['underlying_id'])
data['expiry'] = ymd_to_date(**data['expiry'])
for i, expiration in enumerate(data['expirations']):
data['expirations'][i] = ymd_to_date(**expiration)
#for col in ['Volume']:
# df[col] = df[col].fillna(0)
#d = {}
#d["options"] = df
#return(d)
return(data)
def _get_content(self, url, params):
#response = requests.get(url, params=params)
response = self.session.get(url, params=params)
if response.status_code == 200:
content_json = response.text
data = json_decode(content_json)
return(data)
if __name__ == "__main__":
import doctest
doctest.testmod()
| [
"json.loads",
"tokenize.untokenize",
"six.moves.cStringIO",
"doctest.testmod",
"datetime.date",
"pandas.DataFrame",
"pandas.concat",
"pandas.to_datetime"
] | [((588, 625), 'datetime.date', 'datetime.date', ([], {'year': 'y', 'month': 'm', 'day': 'd'}), '(year=y, month=m, day=d)\n', (601, 625), False, 'import datetime\n'), ((2146, 2173), 'tokenize.untokenize', 'tokenize.untokenize', (['result'], {}), '(result)\n', (2165, 2173), False, 'import token, tokenize\n'), ((6240, 6257), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (6255, 6257), False, 'import doctest\n'), ((2228, 2251), 'json.loads', 'json.loads', (['json_string'], {}), '(json_string)\n', (2238, 2251), False, 'import json\n'), ((3973, 4014), 'pandas.concat', 'pd.concat', (['lst'], {'axis': '(0)', 'ignore_index': '(True)'}), '(lst, axis=0, ignore_index=True)\n', (3982, 4014), True, 'import pandas as pd\n'), ((5409, 5437), 'pandas.to_datetime', 'pd.to_datetime', (["df['Expiry']"], {}), "(df['Expiry'])\n", (5423, 5437), True, 'import pandas as pd\n'), ((1207, 1224), 'six.moves.cStringIO', 'StringIO', (['in_text'], {}), '(in_text)\n', (1215, 1224), True, 'from six.moves import cStringIO as StringIO\n'), ((2327, 2350), 'json.loads', 'json.loads', (['json_string'], {}), '(json_string)\n', (2337, 2350), False, 'import json\n'), ((3274, 3297), 'pandas.DataFrame', 'pd.DataFrame', (['data[typ]'], {}), '(data[typ])\n', (3286, 3297), True, 'import pandas as pd\n'), ((3796, 3819), 'pandas.DataFrame', 'pd.DataFrame', (['data[typ]'], {}), '(data[typ])\n', (3808, 3819), True, 'import pandas as pd\n')] |
from django.db.models import Q
from django.shortcuts import render
from django.http import Http404
# Create your views here.
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.decorators import api_view
from .models import Product, Category
from .serializers import ProductSerializer, CategorySerializer
class LatestProductsList(APIView):
def get(self, request, format=None):
products = Product.objects.all()[0:4]
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
class ProductDetail(APIView):
def get_object(self, category_slug, product_slug):
try:
return Product.objects.filter(category__slug=category_slug).get(slug=product_slug)
except Product.DoesNotExist:
raise Http404
def get(self, request, category_slug, product_slug, format= None):
product = self.get_object(category_slug, product_slug)
serializer = ProductSerializer(product)
return Response(serializer.data)
class CategoryDetail(APIView):
def get_object(self, category_slug):
try:
return Category.objects.get(slug=category_slug)
except Category.DoesNotExist:
raise Http404
def get(self, request, category_slug, format= None):
category = self.get_object(category_slug)
serializer = CategorySerializer(category)
return Response(serializer.data)
@api_view(['POST'])
def search(request):
query = request.data.get('query', '')
if query:
products = Product.objects.filter(Q(name__icontains=query) | Q(description__icontains=query))
serializer = ProductSerializer(products, many=True)
return Response(serializer.data)
else:
return Response({"products": []}) | [
"rest_framework.response.Response",
"rest_framework.decorators.api_view",
"django.db.models.Q"
] | [((1481, 1499), 'rest_framework.decorators.api_view', 'api_view', (["['POST']"], {}), "(['POST'])\n", (1489, 1499), False, 'from rest_framework.decorators import api_view\n'), ((559, 584), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (567, 584), False, 'from rest_framework.response import Response\n'), ((1040, 1065), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (1048, 1065), False, 'from rest_framework.response import Response\n'), ((1453, 1478), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (1461, 1478), False, 'from rest_framework.response import Response\n'), ((1755, 1780), 'rest_framework.response.Response', 'Response', (['serializer.data'], {}), '(serializer.data)\n', (1763, 1780), False, 'from rest_framework.response import Response\n'), ((1806, 1832), 'rest_framework.response.Response', 'Response', (["{'products': []}"], {}), "({'products': []})\n", (1814, 1832), False, 'from rest_framework.response import Response\n'), ((1620, 1644), 'django.db.models.Q', 'Q', ([], {'name__icontains': 'query'}), '(name__icontains=query)\n', (1621, 1644), False, 'from django.db.models import Q\n'), ((1647, 1678), 'django.db.models.Q', 'Q', ([], {'description__icontains': 'query'}), '(description__icontains=query)\n', (1648, 1678), False, 'from django.db.models import Q\n')] |
##########################################################################
#
# Copyright (c) 2010-2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
from __future__ import with_statement
import os
import sys
import shutil
import unittest
import IECore
class TestBasicPreset( unittest.TestCase ) :
def testCopy( self ) :
testObj = IECore.Parameterised( "testParameterised1" )
testObj.parameters().addParameters(
[
IECore.BoolParameter( "a", "", True ),
IECore.FloatParameter( "b", "", 1.0 ),
]
)
testObj2 = IECore.Parameterised( "testParameterised2" )
testObj2.parameters().addParameters(
[
IECore.BoolParameter( "a", "", False ),
IECore.FloatParameter( "c", "", 0.0 ),
]
)
p = IECore.BasicPreset( testObj, testObj.parameters() )
self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )
self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) )
testObj.parameters()["a"].setTypedValue( False )
testObj.parameters()["b"].setTypedValue( 0.0 )
p( testObj, testObj.parameters() )
self.assertEqual( testObj.parameters()["a"].getTypedValue(), True )
self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 )
p2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()["a"], ) )
self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) )
self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) )
p2( testObj2, testObj2.parameters() )
self.assertEqual( testObj2.parameters()["a"].getTypedValue(), True )
self.assertEqual( testObj2.parameters()["c"].getTypedValue(), 0.0 )
def testLoad( self ) :
testObj = IECore.Parameterised( "testParameterised1" )
testObj.parameters().addParameters(
[
IECore.BoolParameter( "a", "", True ),
IECore.FloatParameter( "b", "", 1.0 ),
]
)
testObj2 = IECore.Parameterised( "testParameterised1" )
testObj2.parameters().addParameters(
[
IECore.BoolParameter( "a", "", False ),
IECore.FloatParameter( "c", "", 0.0 ),
]
)
savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) )
messageHandler = IECore.CapturingMessageHandler()
with messageHandler :
p = IECore.BasicPreset( os.path.join( savePath, "basicPresetLoadTest", "basicPresetLoadTest-1.cob" ) )
self.assertEqual( len( messageHandler.messages ), 0 )
self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )
self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) )
testObj.parameters()["a"].setTypedValue( False )
testObj.parameters()["b"].setTypedValue( 0.0 )
p( testObj, testObj.parameters() )
self.assertEqual( testObj.parameters()["a"].getTypedValue(), True )
self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 )
def testSave( self ) :
testObj = IECore.Parameterised( "testParameterised1" )
testObj.parameters().addParameters(
[
IECore.BoolParameter( "a", "", True ),
IECore.FloatParameter( "b", "", 1.0 ),
]
)
testObj2 = IECore.Parameterised( "testParameterised1" )
testObj2.parameters().addParameters(
[
IECore.BoolParameter( "a", "", False ),
IECore.FloatParameter( "c", "", 0.0 ),
]
)
savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) )
preset = IECore.BasicPreset( testObj, testObj.parameters() )
# Save for the classLoader and check its there, we test the 'loadability' later...
preset.save( savePath, "basicPresetTest" )
self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest", "basicPresetTest-1.cob" ) ) )
self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest", "basicPresetTest-1.py" ) ) )
# save without the classLoader and check its there
preset.save( savePath, "basicPresetTest", classLoadable=False )
self.assertTrue( os.path.isfile( os.path.join( savePath, "basicPresetTest.cob" ) ) )
# reload
p = IECore.BasicPreset( os.path.join( savePath, "basicPresetTest.cob" ) )
self.assertTrue( p.applicableTo( testObj, testObj.parameters() ) )
self.assertFalse( p.applicableTo( testObj2, testObj2.parameters() ) )
testObj.parameters()["a"].setTypedValue( False )
testObj.parameters()["b"].setTypedValue( 0.0 )
p( testObj, testObj.parameters() )
self.assertEqual( testObj.parameters()["a"].getTypedValue(), True )
self.assertEqual( testObj.parameters()["b"].getTypedValue(), 1.0 )
preset2 = IECore.BasicPreset( testObj, testObj.parameters(), parameters=( testObj.parameters()["a"], ) )
preset2.save( savePath, "basicPresetTest2", classLoadable=False )
#reload
p2 = IECore.BasicPreset( os.path.join( savePath, "basicPresetTest2.cob" ) )
self.assertTrue( p2.applicableTo( testObj, testObj.parameters() ) )
self.assertTrue( p2.applicableTo( testObj2, testObj.parameters() ) )
p2( testObj2, testObj2.parameters() )
self.assertEqual( testObj2.parameters()["a"].getTypedValue(), True )
self.assertEqual( testObj2.parameters()["c"].getTypedValue(), 0.0 )
def testClassLoader( self ) :
testObj = IECore.Parameterised( "testParameterised1" )
testObj.parameters().addParameters(
[
IECore.BoolParameter( "a", "", True ),
IECore.FloatParameter( "b", "", 1.0 ),
]
)
savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) )
preset = IECore.BasicPreset( testObj, testObj.parameters() )
preset.save( savePath, "basicPresetTestClassLoader" )
# make sure that no messages are emitted during loading
messageHandler = IECore.CapturingMessageHandler()
with messageHandler :
loader = IECore.ClassLoader( IECore.SearchPath( savePath ) )
p = loader.load( "basicPresetTestClassLoader" )()
self.assertEqual( len( messageHandler.messages ), 0 )
self.assertTrue( isinstance( p, IECore.BasicPreset ) )
p.metadata()
def testClasses( self ) :
testObj = IECore.Parameterised( "testParameterised1" )
testObj.parameters().addParameters(
[
IECore.BoolParameter( "a", "", True ),
IECore.ClassParameter( "b", "", "IECORE_OP_PATHS", os.path.join( "maths", "multiply" ), 2 ),
]
)
testObj2 = IECore.Parameterised( "testParameterised2" )
testObj2.parameters().addParameters(
[
IECore.ClassParameter( "c", "", "IECORE_OP_PATHS" ),
]
)
classes1 = testObj.parameters()["b"].getClass( True )
classes2 = testObj2.parameters()["c"].getClass( True )
self.assertNotEqual( classes1[1:], classes2[1:] )
p = IECore.BasicPreset( testObj, testObj.parameters()["b"] )
self.assertTrue( p.applicableTo( testObj, testObj.parameters()["b"] ) )
self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) )
self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()["c"] ) )
p( testObj2, testObj2.parameters()["c"] )
classes1 = testObj.parameters()["b"].getClass( True )
classes2 = testObj2.parameters()["c"].getClass( True )
self.assertEqual( classes1[1:], classes2[1:] )
def testClassVectors( self ) :
testObj = IECore.Parameterised( "testParameterised1" )
testObj.parameters().addParameters(
[
IECore.BoolParameter( "a", "", True ),
IECore.ClassVectorParameter( "b", "", "IECORE_OP_PATHS" ),
]
)
testObj.parameters()["b"].setClasses(
[
( "mult", os.path.join( "maths", "multiply" ), 2 ),
( "coIO", "compoundObjectInOut", 1 ),
]
)
testObj2 = IECore.Parameterised( "testParameterised2" )
testObj2.parameters().addParameters(
[
IECore.ClassVectorParameter( "c", "", "IECORE_OP_PATHS" ),
]
)
classes1 = [ c[1:] for c in testObj.parameters()["b"].getClasses( True ) ]
classes2 = [ c[1:] for c in testObj2.parameters()["c"].getClasses( True ) ]
self.assertNotEqual( classes1, classes2 )
p = IECore.BasicPreset( testObj, testObj.parameters()["b"] )
self.assertTrue( p.applicableTo( testObj, testObj.parameters()["b"] ) )
self.assertFalse( p.applicableTo( testObj, testObj.parameters() ) )
self.assertTrue( p.applicableTo( testObj2, testObj2.parameters()["c"] ) )
p( testObj2, testObj2.parameters()["c"] )
classes1 = [ c[1:] for c in testObj.parameters()["b"].getClasses( True ) ]
classes2 = [ c[1:] for c in testObj2.parameters()["c"].getClasses( True ) ]
self.assertEqual( classes1, classes2 )
def testCompoundVectorParameter( self ) :
p = IECore.Parameterised( "test" )
p.parameters().addParameters(
[
IECore.BoolParameter( "a", "", False ),
IECore.CompoundVectorParameter(
"c",
"",
members = [
IECore.StringVectorParameter( "s", "", IECore.StringVectorData() ),
IECore.BoolVectorParameter( "b", "", IECore.BoolVectorData() ),
]
)
]
)
p["c"]["s"].setValue( IECore.StringVectorData( [ "1", "2", "3" ] ) )
p["c"]["b"].setValue( IECore.BoolVectorData( [ True, False, True ] ) )
v = p.parameters().getValue().copy()
preset = IECore.BasicPreset( p, p.parameters() )
self.assertTrue( preset.applicableTo( p, p.parameters() ) )
p.parameters().setValue( p.parameters().defaultValue )
self.assertNotEqual( p.parameters().getValue(), v )
preset( p, p.parameters() )
self.assertEqual( p.parameters().getValue(), v )
def tearDown( self ) :
savePath = os.path.abspath( os.path.join( os.path.dirname( __file__ ), "data", "basicPreset" ) )
paths = (
os.path.join( savePath, "basicPresetTest" ),
os.path.join( savePath, "basicPresetTest.cob" ),
os.path.join( savePath, "basicPresetTest2.cob" ),
os.path.join( savePath, "basicPresetTestClassLoader" ),
)
for p in paths :
if os.path.isdir( p ) :
shutil.rmtree( p )
elif os.path.isfile( p ) :
os.remove( p )
if __name__ == "__main__":
unittest.main()
| [
"IECore.SearchPath",
"IECore.Parameterised",
"IECore.CapturingMessageHandler",
"IECore.BoolVectorData",
"os.path.join",
"shutil.rmtree",
"os.path.isfile",
"os.path.dirname",
"os.path.isdir",
"IECore.ClassVectorParameter",
"IECore.FloatParameter",
"IECore.StringVectorData",
"unittest.main",
"IECore.BoolParameter",
"IECore.ClassParameter",
"os.remove"
] | [((11326, 11341), 'unittest.main', 'unittest.main', ([], {}), '()\n', (11339, 11341), False, 'import unittest\n'), ((1977, 2019), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (1997, 2019), False, 'import IECore\n'), ((2174, 2216), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised2"""'], {}), "('testParameterised2')\n", (2194, 2216), False, 'import IECore\n'), ((3305, 3347), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (3325, 3347), False, 'import IECore\n'), ((3502, 3544), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (3522, 3544), False, 'import IECore\n'), ((3807, 3839), 'IECore.CapturingMessageHandler', 'IECore.CapturingMessageHandler', ([], {}), '()\n', (3837, 3839), False, 'import IECore\n'), ((4487, 4529), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (4507, 4529), False, 'import IECore\n'), ((4684, 4726), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (4704, 4726), False, 'import IECore\n'), ((6730, 6772), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (6750, 6772), False, 'import IECore\n'), ((7210, 7242), 'IECore.CapturingMessageHandler', 'IECore.CapturingMessageHandler', ([], {}), '()\n', (7240, 7242), False, 'import IECore\n'), ((7557, 7599), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (7577, 7599), False, 'import IECore\n'), ((7808, 7850), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised2"""'], {}), "('testParameterised2')\n", (7828, 7850), False, 'import IECore\n'), ((8669, 8711), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised1"""'], {}), "('testParameterised1')\n", (8689, 8711), False, 'import IECore\n'), ((9039, 9081), 'IECore.Parameterised', 'IECore.Parameterised', (['"""testParameterised2"""'], {}), "('testParameterised2')\n", (9059, 9081), False, 'import IECore\n'), ((9980, 10008), 'IECore.Parameterised', 'IECore.Parameterised', (['"""test"""'], {}), "('test')\n", (10000, 10008), False, 'import IECore\n'), ((5624, 5669), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest.cob"""'], {}), "(savePath, 'basicPresetTest.cob')\n", (5636, 5669), False, 'import os\n'), ((6309, 6355), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest2.cob"""'], {}), "(savePath, 'basicPresetTest2.cob')\n", (6321, 6355), False, 'import os\n'), ((10355, 10395), 'IECore.StringVectorData', 'IECore.StringVectorData', (["['1', '2', '3']"], {}), "(['1', '2', '3'])\n", (10378, 10395), False, 'import IECore\n'), ((10426, 10468), 'IECore.BoolVectorData', 'IECore.BoolVectorData', (['[True, False, True]'], {}), '([True, False, True])\n', (10447, 10468), False, 'import IECore\n'), ((10965, 11006), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest"""'], {}), "(savePath, 'basicPresetTest')\n", (10977, 11006), False, 'import os\n'), ((11013, 11058), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest.cob"""'], {}), "(savePath, 'basicPresetTest.cob')\n", (11025, 11058), False, 'import os\n'), ((11065, 11111), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest2.cob"""'], {}), "(savePath, 'basicPresetTest2.cob')\n", 
(11077, 11111), False, 'import os\n'), ((11118, 11170), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTestClassLoader"""'], {}), "(savePath, 'basicPresetTestClassLoader')\n", (11130, 11170), False, 'import os\n'), ((11204, 11220), 'os.path.isdir', 'os.path.isdir', (['p'], {}), '(p)\n', (11217, 11220), False, 'import os\n'), ((2069, 2104), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(True)'], {}), "('a', '', True)\n", (2089, 2104), False, 'import IECore\n'), ((2112, 2147), 'IECore.FloatParameter', 'IECore.FloatParameter', (['"""b"""', '""""""', '(1.0)'], {}), "('b', '', 1.0)\n", (2133, 2147), False, 'import IECore\n'), ((2267, 2303), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(False)'], {}), "('a', '', False)\n", (2287, 2303), False, 'import IECore\n'), ((2311, 2346), 'IECore.FloatParameter', 'IECore.FloatParameter', (['"""c"""', '""""""', '(0.0)'], {}), "('c', '', 0.0)\n", (2332, 2346), False, 'import IECore\n'), ((3397, 3432), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(True)'], {}), "('a', '', True)\n", (3417, 3432), False, 'import IECore\n'), ((3440, 3475), 'IECore.FloatParameter', 'IECore.FloatParameter', (['"""b"""', '""""""', '(1.0)'], {}), "('b', '', 1.0)\n", (3461, 3475), False, 'import IECore\n'), ((3595, 3631), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(False)'], {}), "('a', '', False)\n", (3615, 3631), False, 'import IECore\n'), ((3639, 3674), 'IECore.FloatParameter', 'IECore.FloatParameter', (['"""c"""', '""""""', '(0.0)'], {}), "('c', '', 0.0)\n", (3660, 3674), False, 'import IECore\n'), ((3732, 3757), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (3747, 3757), False, 'import os\n'), ((3892, 3966), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetLoadTest"""', '"""basicPresetLoadTest-1.cob"""'], {}), "(savePath, 'basicPresetLoadTest', 'basicPresetLoadTest-1.cob')\n", (3904, 3966), False, 'import os\n'), ((4579, 4614), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(True)'], {}), "('a', '', True)\n", (4599, 4614), False, 'import IECore\n'), ((4622, 4657), 'IECore.FloatParameter', 'IECore.FloatParameter', (['"""b"""', '""""""', '(1.0)'], {}), "('b', '', 1.0)\n", (4643, 4657), False, 'import IECore\n'), ((4777, 4813), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(False)'], {}), "('a', '', False)\n", (4797, 4813), False, 'import IECore\n'), ((4821, 4856), 'IECore.FloatParameter', 'IECore.FloatParameter', (['"""c"""', '""""""', '(0.0)'], {}), "('c', '', 0.0)\n", (4842, 4856), False, 'import IECore\n'), ((4914, 4939), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (4929, 4939), False, 'import os\n'), ((5199, 5265), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest"""', '"""basicPresetTest-1.cob"""'], {}), "(savePath, 'basicPresetTest', 'basicPresetTest-1.cob')\n", (5211, 5265), False, 'import os\n'), ((5307, 5372), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest"""', '"""basicPresetTest-1.py"""'], {}), "(savePath, 'basicPresetTest', 'basicPresetTest-1.py')\n", (5319, 5372), False, 'import os\n'), ((5534, 5579), 'os.path.join', 'os.path.join', (['savePath', '"""basicPresetTest.cob"""'], {}), "(savePath, 'basicPresetTest.cob')\n", (5546, 5579), False, 'import os\n'), ((6822, 6857), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(True)'], {}), "('a', '', True)\n", (6842, 6857), 
False, 'import IECore\n'), ((6865, 6900), 'IECore.FloatParameter', 'IECore.FloatParameter', (['"""b"""', '""""""', '(1.0)'], {}), "('b', '', 1.0)\n", (6886, 6900), False, 'import IECore\n'), ((6958, 6983), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (6973, 6983), False, 'import os\n'), ((7300, 7327), 'IECore.SearchPath', 'IECore.SearchPath', (['savePath'], {}), '(savePath)\n', (7317, 7327), False, 'import IECore\n'), ((7649, 7684), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(True)'], {}), "('a', '', True)\n", (7669, 7684), False, 'import IECore\n'), ((7901, 7950), 'IECore.ClassParameter', 'IECore.ClassParameter', (['"""c"""', '""""""', '"""IECORE_OP_PATHS"""'], {}), "('c', '', 'IECORE_OP_PATHS')\n", (7922, 7950), False, 'import IECore\n'), ((8761, 8796), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(True)'], {}), "('a', '', True)\n", (8781, 8796), False, 'import IECore\n'), ((8804, 8859), 'IECore.ClassVectorParameter', 'IECore.ClassVectorParameter', (['"""b"""', '""""""', '"""IECORE_OP_PATHS"""'], {}), "('b', '', 'IECORE_OP_PATHS')\n", (8831, 8859), False, 'import IECore\n'), ((9132, 9187), 'IECore.ClassVectorParameter', 'IECore.ClassVectorParameter', (['"""c"""', '""""""', '"""IECORE_OP_PATHS"""'], {}), "('c', '', 'IECORE_OP_PATHS')\n", (9159, 9187), False, 'import IECore\n'), ((10052, 10088), 'IECore.BoolParameter', 'IECore.BoolParameter', (['"""a"""', '""""""', '(False)'], {}), "('a', '', False)\n", (10072, 10088), False, 'import IECore\n'), ((10895, 10920), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (10910, 10920), False, 'import os\n'), ((11229, 11245), 'shutil.rmtree', 'shutil.rmtree', (['p'], {}), '(p)\n', (11242, 11245), False, 'import shutil\n'), ((11256, 11273), 'os.path.isfile', 'os.path.isfile', (['p'], {}), '(p)\n', (11270, 11273), False, 'import os\n'), ((7743, 7776), 'os.path.join', 'os.path.join', (['"""maths"""', '"""multiply"""'], {}), "('maths', 'multiply')\n", (7755, 7776), False, 'import os\n'), ((8932, 8965), 'os.path.join', 'os.path.join', (['"""maths"""', '"""multiply"""'], {}), "('maths', 'multiply')\n", (8944, 8965), False, 'import os\n'), ((11282, 11294), 'os.remove', 'os.remove', (['p'], {}), '(p)\n', (11291, 11294), False, 'import os\n'), ((10209, 10234), 'IECore.StringVectorData', 'IECore.StringVectorData', ([], {}), '()\n', (10232, 10234), False, 'import IECore\n'), ((10281, 10304), 'IECore.BoolVectorData', 'IECore.BoolVectorData', ([], {}), '()\n', (10302, 10304), False, 'import IECore\n')] |
from zeit.cms.i18n import MessageFactory as _
import zope.interface
import zope.schema
class IGlobalSettings(zope.interface.Interface):
"""Global CMS settings."""
default_year = zope.schema.Int(
title=_("Default year"),
min=1900,
max=2100)
default_volume = zope.schema.Int(
title=_("Default volume"),
min=1,
max=54)
def get_working_directory(template):
"""Return the collection which is the main working directory.
template:
Template which will be filled with year and volume. In
``template`` the placeholders $year and $volume will be replaced.
Example: 'online/$year/$volume/foo'
If the respective collection does not exist, it will be created before
returning it.
"""
| [
"zeit.cms.i18n.MessageFactory"
] | [((220, 237), 'zeit.cms.i18n.MessageFactory', '_', (['"""Default year"""'], {}), "('Default year')\n", (221, 237), True, 'from zeit.cms.i18n import MessageFactory as _\n'), ((328, 347), 'zeit.cms.i18n.MessageFactory', '_', (['"""Default volume"""'], {}), "('Default volume')\n", (329, 347), True, 'from zeit.cms.i18n import MessageFactory as _\n')] |
import imtreat
img = imtreat.imageManagerClass.openImageFunction("../images/soleil.png", 0)
img = imtreat.definedModesClass.detailEnhanceFunction(img)
imtreat.imageManagerClass.saveImageFunction("/Téléchargements/", "image_1", ".png", img)
| [
"imtreat.imageManagerClass.saveImageFunction",
"imtreat.imageManagerClass.openImageFunction",
"imtreat.definedModesClass.detailEnhanceFunction"
] | [((22, 92), 'imtreat.imageManagerClass.openImageFunction', 'imtreat.imageManagerClass.openImageFunction', (['"""../images/soleil.png"""', '(0)'], {}), "('../images/soleil.png', 0)\n", (65, 92), False, 'import imtreat\n'), ((100, 152), 'imtreat.definedModesClass.detailEnhanceFunction', 'imtreat.definedModesClass.detailEnhanceFunction', (['img'], {}), '(img)\n', (147, 152), False, 'import imtreat\n'), ((154, 246), 'imtreat.imageManagerClass.saveImageFunction', 'imtreat.imageManagerClass.saveImageFunction', (['"""/Téléchargements/"""', '"""image_1"""', '""".png"""', 'img'], {}), "('/Téléchargements/', 'image_1',\n '.png', img)\n", (197, 246), False, 'import imtreat\n')] |
import requests
words_list = requests.get("https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt").text
words_list = filter(lambda x: len(x) > 4, words_list.split('\n'))
path = input("Chemin d'écriture ? (words.txt) ")
if path == "":
path = "./words.txt"
with open(path, "w", encoding="utf-8") as file:
file.write('\n'.join(words_list)) | [
"requests.get"
] | [((30, 118), 'requests.get', 'requests.get', (['"""https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt"""'], {}), "(\n 'https://raw.githubusercontent.com/atebits/Words/master/Words/fr.txt')\n", (42, 118), False, 'import requests\n')] |
import unittest
from unittest import mock
import os
import subprocess
from testfixtures import TempDirectory
from simplegallery.upload.uploader_factory import get_uploader
class AWSUploaderTestCase(unittest.TestCase):
def test_no_location(self):
uploader = get_uploader('aws')
self.assertFalse(uploader.check_location(''))
@mock.patch('subprocess.run')
def test_upload_gallery(self, subprocess_run):
subprocess_run.return_value = subprocess.CompletedProcess([], returncode=0)
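        # a CompletedProcess with returncode 0 stands in for a successful `aws` CLI invocation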
with TempDirectory() as tempdir:
# Setup mock file and uploader
tempdir.write('index.html', b'')
gallery_path = os.path.join(tempdir.path, 'index.html')
uploader = get_uploader('aws')
# Test upload to bucket
uploader.upload_gallery('s3://testbucket/path/', gallery_path)
subprocess_run.assert_called_with(
['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])
# Test upload to bucket without prefix
uploader.upload_gallery('testbucket/path/', gallery_path)
subprocess_run.assert_called_with(
['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])
# Test upload to bucket without trailing /
uploader.upload_gallery('s3://testbucket/path', gallery_path)
subprocess_run.assert_called_with(
['aws', 's3', 'sync', gallery_path, 's3://testbucket/path/', '--exclude', '.DS_Store'])
if __name__ == '__main__':
unittest.main()
| [
"testfixtures.TempDirectory",
"subprocess.CompletedProcess",
"os.path.join",
"unittest.main",
"unittest.mock.patch",
"simplegallery.upload.uploader_factory.get_uploader"
] | [((352, 380), 'unittest.mock.patch', 'mock.patch', (['"""subprocess.run"""'], {}), "('subprocess.run')\n", (362, 380), False, 'from unittest import mock\n'), ((1607, 1622), 'unittest.main', 'unittest.main', ([], {}), '()\n', (1620, 1622), False, 'import unittest\n'), ((272, 291), 'simplegallery.upload.uploader_factory.get_uploader', 'get_uploader', (['"""aws"""'], {}), "('aws')\n", (284, 291), False, 'from simplegallery.upload.uploader_factory import get_uploader\n'), ((470, 515), 'subprocess.CompletedProcess', 'subprocess.CompletedProcess', (['[]'], {'returncode': '(0)'}), '([], returncode=0)\n', (497, 515), False, 'import subprocess\n'), ((530, 545), 'testfixtures.TempDirectory', 'TempDirectory', ([], {}), '()\n', (543, 545), False, 'from testfixtures import TempDirectory\n'), ((673, 713), 'os.path.join', 'os.path.join', (['tempdir.path', '"""index.html"""'], {}), "(tempdir.path, 'index.html')\n", (685, 713), False, 'import os\n'), ((737, 756), 'simplegallery.upload.uploader_factory.get_uploader', 'get_uploader', (['"""aws"""'], {}), "('aws')\n", (749, 756), False, 'from simplegallery.upload.uploader_factory import get_uploader\n')] |
# Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import os
import re
from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
log = logging.getLogger(__name__)
class SLURMEnvironment(ClusterEnvironment):
"""Cluster environment for training on a cluster managed by SLURM."""
@property
def creates_processes_externally(self) -> bool:
return True
@staticmethod
def detect() -> bool:
"""Returns ``True`` if the current process was launched on a SLURM cluster."""
return "SLURM_NTASKS" in os.environ
@property
def main_address(self) -> str:
# figure out the root node addr
slurm_nodelist = os.environ.get("SLURM_NODELIST")
if slurm_nodelist:
root_node = slurm_nodelist.split(" ")[0].split(",")[0]
else:
root_node = "127.0.0.1"
root_node = self.resolve_root_node_address(root_node)
os.environ["MASTER_ADDR"] = root_node
log.debug(f"MASTER_ADDR: {os.environ['MASTER_ADDR']}")
return root_node
@property
def main_port(self) -> int:
# -----------------------
# SLURM JOB = PORT number
# -----------------------
# this way every process knows what port to use
default_port = os.environ.get("SLURM_JOB_ID")
if default_port:
# use the last 4 numbers in the job id as the id
default_port = default_port[-4:]
# all ports should be in the 10k+ range
default_port = int(default_port) + 15000
else:
default_port = 12910
# -----------------------
# PORT NUMBER = MASTER_PORT
# -----------------------
# in case the user passed it in
if "MASTER_PORT" in os.environ:
default_port = os.environ["MASTER_PORT"]
else:
os.environ["MASTER_PORT"] = str(default_port)
log.debug(f"MASTER_PORT: {os.environ['MASTER_PORT']}")
return int(default_port)
def world_size(self) -> int:
return int(os.environ["SLURM_NTASKS"])
def set_world_size(self, size: int) -> None:
log.debug("SLURMEnvironment.set_world_size was called, but setting world size is not allowed. Ignored.")
def global_rank(self) -> int:
return int(os.environ["SLURM_PROCID"])
def set_global_rank(self, rank: int) -> None:
log.debug("SLURMEnvironment.set_global_rank was called, but setting global rank is not allowed. Ignored.")
def local_rank(self) -> int:
return int(os.environ["SLURM_LOCALID"])
def node_rank(self) -> int:
return int(os.environ["SLURM_NODEID"])
def resolve_root_node_address(self, root_node: str) -> str:
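        # SLURM may report the nodelist in a compressed form such as "host[5-9,12]";
        # reduce it to the first concrete host name (e.g. "host5").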
if "[" in root_node:
name, numbers = root_node.split("[", maxsplit=1)
number = numbers.split(",", maxsplit=1)[0]
if "-" in number:
number = number.split("-")[0]
number = re.sub("[^0-9]", "", number)
root_node = name + number
return root_node
| [
"logging.getLogger",
"re.sub",
"os.environ.get"
] | [((720, 747), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (737, 747), False, 'import logging\n'), ((1246, 1278), 'os.environ.get', 'os.environ.get', (['"""SLURM_NODELIST"""'], {}), "('SLURM_NODELIST')\n", (1260, 1278), False, 'import os\n'), ((1848, 1878), 'os.environ.get', 'os.environ.get', (['"""SLURM_JOB_ID"""'], {}), "('SLURM_JOB_ID')\n", (1862, 1878), False, 'import os\n'), ((3532, 3560), 're.sub', 're.sub', (['"""[^0-9]"""', '""""""', 'number'], {}), "('[^0-9]', '', number)\n", (3538, 3560), False, 'import re\n')] |
#
# Copyright (c) 2021 The GPflux Contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import numpy as np
import pytest
import tensorflow as tf
import tensorflow_probability as tfp
from gpflow.kullback_leiblers import gauss_kl
from gpflux.encoders import DirectlyParameterizedNormalDiag
from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer
tf.keras.backend.set_floatx("float64")
############
# Utilities
############
def _zero_one_normal_prior(w_dim):
""" N(0, I) prior """
return tfp.distributions.MultivariateNormalDiag(loc=np.zeros(w_dim), scale_diag=np.ones(w_dim))
def get_distributions_with_w_dim():
distributions = []
for d in [1, 5]:
mean = np.zeros(d)
scale_tri_l = np.eye(d)
mvn = tfp.distributions.MultivariateNormalTriL(mean, scale_tri_l)
std = np.ones(d)
mvn_diag = tfp.distributions.MultivariateNormalDiag(mean, std)
distributions.append((mvn, d))
distributions.append((mvn_diag, d))
return distributions
############
# Tests
############
@pytest.mark.parametrize("distribution, w_dim", get_distributions_with_w_dim())
def test_local_kls(distribution, w_dim):
lv = LatentVariableLayer(encoder=None, prior=distribution)
# test kl is 0 when posteriors == priors
posterior = distribution
assert lv._local_kls(posterior) == 0
# test kl > 0 when posteriors != priors
batch_size = 10
params = distribution.parameters
posterior_params = {
k: [v + 0.5 for _ in range(batch_size)]
for k, v in params.items()
if isinstance(v, np.ndarray)
}
posterior = lv.distribution_class(**posterior_params)
local_kls = lv._local_kls(posterior)
assert np.all(local_kls > 0)
assert local_kls.shape == (batch_size,)
@pytest.mark.parametrize("w_dim", [1, 5])
def test_local_kl_gpflow_consistency(w_dim):
num_data = 400
means = np.random.randn(num_data, w_dim)
encoder = DirectlyParameterizedNormalDiag(num_data, w_dim, means)
lv = LatentVariableLayer(encoder=encoder, prior=_zero_one_normal_prior(w_dim))
posteriors = lv._inference_posteriors(
[np.random.randn(num_data, 3), np.random.randn(num_data, 2)]
)
q_mu = posteriors.parameters["loc"]
q_sqrt = posteriors.parameters["scale_diag"]
gpflow_local_kls = gauss_kl(q_mu, q_sqrt)
tfp_local_kls = tf.reduce_sum(lv._local_kls(posteriors))
np.testing.assert_allclose(tfp_local_kls, gpflow_local_kls, rtol=1e-10)
class ArrayMatcher:
def __init__(self, expected):
self.expected = expected
def __eq__(self, actual):
return np.allclose(actual, self.expected, equal_nan=True)
@pytest.mark.parametrize("w_dim", [1, 5])
def test_latent_variable_layer_losses(mocker, w_dim):
num_data, x_dim, y_dim = 43, 3, 1
prior_shape = (w_dim,)
posteriors_shape = (num_data, w_dim)
prior = tfp.distributions.MultivariateNormalDiag(
loc=np.random.randn(*prior_shape),
scale_diag=np.random.randn(*prior_shape) ** 2,
)
posteriors = tfp.distributions.MultivariateNormalDiag(
loc=np.random.randn(*posteriors_shape),
scale_diag=np.random.randn(*posteriors_shape) ** 2,
)
encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag))
lv = LatentVariableLayer(encoder=encoder, prior=prior)
inputs = np.full((num_data, x_dim), np.nan)
targets = np.full((num_data, y_dim), np.nan)
observations = [inputs, targets]
encoder_inputs = np.concatenate(observations, axis=-1)
_ = lv(inputs)
encoder.assert_not_called()
assert lv.losses == [0.0]
_ = lv(inputs, observations=observations, training=True)
# assert_called_once_with uses == for comparison which fails on arrays
encoder.assert_called_once_with(ArrayMatcher(encoder_inputs), training=True)
expected_loss = [tf.reduce_mean(posteriors.kl_divergence(prior))]
np.testing.assert_equal(lv.losses, expected_loss) # also checks shapes match
@pytest.mark.parametrize("w_dim", [1, 5])
@pytest.mark.parametrize("seed2", [None, 42])
def test_latent_variable_layer_samples(mocker, test_data, w_dim, seed2):
seed = 123
inputs, targets = test_data
num_data, x_dim = inputs.shape
prior_shape = (w_dim,)
posteriors_shape = (num_data, w_dim)
prior = tfp.distributions.MultivariateNormalDiag(
loc=np.random.randn(*prior_shape),
scale_diag=np.random.randn(*prior_shape) ** 2,
)
posteriors = tfp.distributions.MultivariateNormalDiag(
loc=np.random.randn(*posteriors_shape),
scale_diag=np.random.randn(*posteriors_shape) ** 2,
)
encoder = mocker.Mock(return_value=(posteriors.loc, posteriors.scale.diag))
lv = LatentVariableLayer(prior=prior, encoder=encoder)
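    # reseeding before each call makes the layer's internal draw match the manual prior/posterior samples below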
tf.random.set_seed(seed)
sample_prior = lv(inputs, seed=seed2)
tf.random.set_seed(seed)
prior_expected = np.concatenate([inputs, prior.sample(num_data, seed=seed2)], axis=-1)
np.testing.assert_array_equal(sample_prior, prior_expected)
tf.random.set_seed(seed)
sample_posterior = lv(inputs, observations=[inputs, targets], training=True, seed=seed2)
tf.random.set_seed(seed)
posterior_expected = np.concatenate([inputs, posteriors.sample(seed=seed2)], axis=-1)
np.testing.assert_array_equal(sample_posterior, posterior_expected)
def test_no_tensorflow_metaclass_overwritten():
"""
LayerWithObservations is a subclass of tf.keras.layers.Layer (via TrackableLayer);
this test ensures that TrackableLayer does not have a metaclass, and hence by adding
the ABCMeta to LayerWithObservations we are not accidentally removing some required
TensorFlow magic metaclass.
"""
assert LayerWithObservations.__bases__ == (TrackableLayer,)
assert type(TrackableLayer) is type
assert type(LayerWithObservations) is abc.ABCMeta
| [
"numpy.testing.assert_equal",
"tensorflow_probability.distributions.MultivariateNormalDiag",
"tensorflow_probability.distributions.MultivariateNormalTriL",
"numpy.testing.assert_allclose",
"numpy.concatenate",
"numpy.testing.assert_array_equal",
"numpy.eye",
"numpy.allclose",
"gpflux.encoders.DirectlyParameterizedNormalDiag",
"numpy.ones",
"tensorflow.keras.backend.set_floatx",
"numpy.random.randn",
"gpflux.layers.LatentVariableLayer",
"tensorflow.random.set_seed",
"gpflow.kullback_leiblers.gauss_kl",
"pytest.mark.parametrize",
"numpy.zeros",
"numpy.full",
"numpy.all"
] | [((897, 935), 'tensorflow.keras.backend.set_floatx', 'tf.keras.backend.set_floatx', (['"""float64"""'], {}), "('float64')\n", (924, 935), True, 'import tensorflow as tf\n'), ((2330, 2370), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_dim"""', '[1, 5]'], {}), "('w_dim', [1, 5])\n", (2353, 2370), False, 'import pytest\n'), ((3216, 3256), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_dim"""', '[1, 5]'], {}), "('w_dim', [1, 5])\n", (3239, 3256), False, 'import pytest\n'), ((4542, 4582), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""w_dim"""', '[1, 5]'], {}), "('w_dim', [1, 5])\n", (4565, 4582), False, 'import pytest\n'), ((4584, 4628), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""seed2"""', '[None, 42]'], {}), "('seed2', [None, 42])\n", (4607, 4628), False, 'import pytest\n'), ((1728, 1781), 'gpflux.layers.LatentVariableLayer', 'LatentVariableLayer', ([], {'encoder': 'None', 'prior': 'distribution'}), '(encoder=None, prior=distribution)\n', (1747, 1781), False, 'from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer\n'), ((2261, 2282), 'numpy.all', 'np.all', (['(local_kls > 0)'], {}), '(local_kls > 0)\n', (2267, 2282), True, 'import numpy as np\n'), ((2447, 2479), 'numpy.random.randn', 'np.random.randn', (['num_data', 'w_dim'], {}), '(num_data, w_dim)\n', (2462, 2479), True, 'import numpy as np\n'), ((2494, 2549), 'gpflux.encoders.DirectlyParameterizedNormalDiag', 'DirectlyParameterizedNormalDiag', (['num_data', 'w_dim', 'means'], {}), '(num_data, w_dim, means)\n', (2525, 2549), False, 'from gpflux.encoders import DirectlyParameterizedNormalDiag\n'), ((2866, 2888), 'gpflow.kullback_leiblers.gauss_kl', 'gauss_kl', (['q_mu', 'q_sqrt'], {}), '(q_mu, q_sqrt)\n', (2874, 2888), False, 'from gpflow.kullback_leiblers import gauss_kl\n'), ((2955, 3026), 'numpy.testing.assert_allclose', 'np.testing.assert_allclose', (['tfp_local_kls', 'gpflow_local_kls'], {'rtol': '(1e-10)'}), '(tfp_local_kls, gpflow_local_kls, rtol=1e-10)\n', (2981, 3026), True, 'import numpy as np\n'), ((3841, 3890), 'gpflux.layers.LatentVariableLayer', 'LatentVariableLayer', ([], {'encoder': 'encoder', 'prior': 'prior'}), '(encoder=encoder, prior=prior)\n', (3860, 3890), False, 'from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer\n'), ((3905, 3939), 'numpy.full', 'np.full', (['(num_data, x_dim)', 'np.nan'], {}), '((num_data, x_dim), np.nan)\n', (3912, 3939), True, 'import numpy as np\n'), ((3954, 3988), 'numpy.full', 'np.full', (['(num_data, y_dim)', 'np.nan'], {}), '((num_data, y_dim), np.nan)\n', (3961, 3988), True, 'import numpy as np\n'), ((4047, 4084), 'numpy.concatenate', 'np.concatenate', (['observations'], {'axis': '(-1)'}), '(observations, axis=-1)\n', (4061, 4084), True, 'import numpy as np\n'), ((4461, 4510), 'numpy.testing.assert_equal', 'np.testing.assert_equal', (['lv.losses', 'expected_loss'], {}), '(lv.losses, expected_loss)\n', (4484, 4510), True, 'import numpy as np\n'), ((5277, 5326), 'gpflux.layers.LatentVariableLayer', 'LatentVariableLayer', ([], {'prior': 'prior', 'encoder': 'encoder'}), '(prior=prior, encoder=encoder)\n', (5296, 5326), False, 'from gpflux.layers import LatentVariableLayer, LayerWithObservations, TrackableLayer\n'), ((5332, 5356), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['seed'], {}), '(seed)\n', (5350, 5356), True, 'import tensorflow as tf\n'), ((5403, 5427), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['seed'], {}), '(seed)\n', (5421, 5427), 
True, 'import tensorflow as tf\n'), ((5523, 5582), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['sample_prior', 'prior_expected'], {}), '(sample_prior, prior_expected)\n', (5552, 5582), True, 'import numpy as np\n'), ((5588, 5612), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['seed'], {}), '(seed)\n', (5606, 5612), True, 'import tensorflow as tf\n'), ((5710, 5734), 'tensorflow.random.set_seed', 'tf.random.set_seed', (['seed'], {}), '(seed)\n', (5728, 5734), True, 'import tensorflow as tf\n'), ((5829, 5896), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['sample_posterior', 'posterior_expected'], {}), '(sample_posterior, posterior_expected)\n', (5858, 5896), True, 'import numpy as np\n'), ((1236, 1247), 'numpy.zeros', 'np.zeros', (['d'], {}), '(d)\n', (1244, 1247), True, 'import numpy as np\n'), ((1270, 1279), 'numpy.eye', 'np.eye', (['d'], {}), '(d)\n', (1276, 1279), True, 'import numpy as np\n'), ((1294, 1353), 'tensorflow_probability.distributions.MultivariateNormalTriL', 'tfp.distributions.MultivariateNormalTriL', (['mean', 'scale_tri_l'], {}), '(mean, scale_tri_l)\n', (1334, 1353), True, 'import tensorflow_probability as tfp\n'), ((1369, 1379), 'numpy.ones', 'np.ones', (['d'], {}), '(d)\n', (1376, 1379), True, 'import numpy as np\n'), ((1399, 1450), 'tensorflow_probability.distributions.MultivariateNormalDiag', 'tfp.distributions.MultivariateNormalDiag', (['mean', 'std'], {}), '(mean, std)\n', (1439, 1450), True, 'import tensorflow_probability as tfp\n'), ((3162, 3212), 'numpy.allclose', 'np.allclose', (['actual', 'self.expected'], {'equal_nan': '(True)'}), '(actual, self.expected, equal_nan=True)\n', (3173, 3212), True, 'import numpy as np\n'), ((1094, 1109), 'numpy.zeros', 'np.zeros', (['w_dim'], {}), '(w_dim)\n', (1102, 1109), True, 'import numpy as np\n'), ((1122, 1136), 'numpy.ones', 'np.ones', (['w_dim'], {}), '(w_dim)\n', (1129, 1136), True, 'import numpy as np\n'), ((2686, 2714), 'numpy.random.randn', 'np.random.randn', (['num_data', '(3)'], {}), '(num_data, 3)\n', (2701, 2714), True, 'import numpy as np\n'), ((2716, 2744), 'numpy.random.randn', 'np.random.randn', (['num_data', '(2)'], {}), '(num_data, 2)\n', (2731, 2744), True, 'import numpy as np\n'), ((3485, 3514), 'numpy.random.randn', 'np.random.randn', (['*prior_shape'], {}), '(*prior_shape)\n', (3500, 3514), True, 'import numpy as np\n'), ((3648, 3682), 'numpy.random.randn', 'np.random.randn', (['*posteriors_shape'], {}), '(*posteriors_shape)\n', (3663, 3682), True, 'import numpy as np\n'), ((4921, 4950), 'numpy.random.randn', 'np.random.randn', (['*prior_shape'], {}), '(*prior_shape)\n', (4936, 4950), True, 'import numpy as np\n'), ((5084, 5118), 'numpy.random.randn', 'np.random.randn', (['*posteriors_shape'], {}), '(*posteriors_shape)\n', (5099, 5118), True, 'import numpy as np\n'), ((3535, 3564), 'numpy.random.randn', 'np.random.randn', (['*prior_shape'], {}), '(*prior_shape)\n', (3550, 3564), True, 'import numpy as np\n'), ((3703, 3737), 'numpy.random.randn', 'np.random.randn', (['*posteriors_shape'], {}), '(*posteriors_shape)\n', (3718, 3737), True, 'import numpy as np\n'), ((4971, 5000), 'numpy.random.randn', 'np.random.randn', (['*prior_shape'], {}), '(*prior_shape)\n', (4986, 5000), True, 'import numpy as np\n'), ((5139, 5173), 'numpy.random.randn', 'np.random.randn', (['*posteriors_shape'], {}), '(*posteriors_shape)\n', (5154, 5173), True, 'import numpy as np\n')] |
from floodsystem.stationdata import build_station_list
from floodsystem.flood import stations_highest_rel_level
def run():
stations = build_station_list()
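    # the 10 stations with the highest relative water level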
    warning_stations = stations_highest_rel_level(stations, 10)
    for entry in warning_stations:
        print(entry[0].name, entry[1])
if __name__ == "__main__":
print("*** Task 2C: CUED Part IA Flood Warning System ***")
run() | [
"floodsystem.stationdata.build_station_list",
"floodsystem.flood.stations_highest_rel_level"
] | [((139, 159), 'floodsystem.stationdata.build_station_list', 'build_station_list', ([], {}), '()\n', (157, 159), False, 'from floodsystem.stationdata import build_station_list\n'), ((183, 223), 'floodsystem.flood.stations_highest_rel_level', 'stations_highest_rel_level', (['stations', '(10)'], {}), '(stations, 10)\n', (209, 223), False, 'from floodsystem.flood import stations_highest_rel_level\n')] |
import logging
import time
from datetime import timedelta
from typing import List
from homeassistant.components.binary_sensor import (
BinarySensorEntity,
DEVICE_CLASS_MOTION
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ATTRIBUTION
from homeassistant.core import HomeAssistant
from wyzeapy.base_client import Device, AccessTokenError
from wyzeapy.client import Client
from wyzeapy.types import PropertyIDs
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
ATTRIBUTION = "Data provided by Wyze"
SCAN_INTERVAL = timedelta(seconds=10)
async def async_setup_entry(hass: HomeAssistant, config_entry: ConfigEntry, async_add_entities):
_LOGGER.debug("""Creating new WyzeApi binary sensor component""")
client: Client = hass.data[DOMAIN][config_entry.entry_id]
def get_cameras() -> List[Device]:
try:
return client.get_cameras()
except AccessTokenError as e:
_LOGGER.warning(e)
client.reauthenticate()
return client.get_cameras()
cameras = [WyzeCameraMotion(client, camera) for camera in await hass.async_add_executor_job(get_cameras)]
async_add_entities(cameras, True)
class WyzeCameraMotion(BinarySensorEntity):
_on: bool
_available: bool
def __init__(self, wyzeapi_client: Client, device: Device):
self._client = wyzeapi_client
self._device = device
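        # Wyze event timestamps are in milliseconds, so pad the current epoch seconds with "000"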
self._last_event = int(str(int(time.time())) + "000")
@property
def device_info(self):
return {
"identifiers": {
(DOMAIN, self._device.mac)
},
"name": self.name,
"manufacturer": "WyzeLabs",
"model": self._device.product_model
}
@property
def available(self) -> bool:
return self._available
@property
def name(self):
"""Return the display name of this switch."""
return self._device.nickname
@property
def is_on(self):
"""Return true if switch is on."""
return self._on
@property
def unique_id(self):
return "{}-motion".format(self._device.mac)
@property
def device_state_attributes(self):
"""Return device attributes of the entity."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
"state": self.is_on,
"available": self.available,
"device model": self._device.product_model,
"mac": self.unique_id
}
@property
def device_class(self):
return DEVICE_CLASS_MOTION
def update(self):
try:
device_info = self._client.get_info(self._device)
except AccessTokenError:
self._client.reauthenticate()
device_info = self._client.get_info(self._device)
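        # get_info yields (property, value) pairs; only the availability flag is used here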
for property_id, value in device_info:
if property_id == PropertyIDs.AVAILABLE:
self._available = True if value == "1" else False
latest_event = self._client.get_latest_event(self._device)
if latest_event is not None:
if latest_event.event_ts > self._last_event:
self._on = True
self._last_event = latest_event.event_ts
else:
self._on = False
self._last_event = latest_event.event_ts
else:
self._on = False
| [
"logging.getLogger",
"datetime.timedelta",
"time.time"
] | [((500, 527), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (517, 527), False, 'import logging\n'), ((582, 603), 'datetime.timedelta', 'timedelta', ([], {'seconds': '(10)'}), '(seconds=10)\n', (591, 603), False, 'from datetime import timedelta\n'), ((1476, 1487), 'time.time', 'time.time', ([], {}), '()\n', (1485, 1487), False, 'import time\n')] |
"""
This script will modulate the blinky lights using the following algorithm:
1) uses user-provided location to obtain row of pixel data from bathy image
2) samples a 'number of LEDs' number of pixels from that row
3) shifts the sampled row data to center it at the location specified by user
4) displays resulting pixels on Blinky Tape
5) shifts next row by a given latitude, also specified by user
6) sleeps for user-specified period of time
Uses the following arguments:
-l/--location: tuple
Location of the user in tuple(lat, lon). This represents the center of the LED strip. Defaults to (0, 0)
-u/--update-interval: int
Update interval of the script, in minutes. Defaults to 10.
-p/--port: str
Serial port of the BlinkyLight (e.g., 'ttyAMA0', 'COM3'). Defaults to 'COM5'.
-d/--delta_latitude: int
Vertical change in latitude every update rate. May be 0, but this will result in a never-changing LEDs.
-i/--image: str
Name of the PNG image that contains the color coded pathymetric data.
The file current named mapserv.png was obtained using the following API:
https://www.gebco.net/data_and_products/gebco_web_services/web_map_service/mapserv?request=getmap&service=wms&BBOX=-90,-180,90,180&format=image/png&height=600&width=1200&crs=EPSG:4326&layers=GEBCO_LATEST_SUB_ICE_TOPO&version=1.3.0
In lieu of providing command line arguments, you may alternatively edit the defaults in bathy_config.json.
NOTE: runs via:
runfile('/BlinkyTape_Python/bathymetry_blink/bathymetry_blink.py', wdir='/BlinkyTape_Python/')
(C) 2021 <NAME> (https://joeycodes.dev)
MIT Licensed
"""
import optparse
import json
from blinkytape import BlinkyTape
from time import sleep
from PIL import Image
import numpy as np
import sys
MAX_ERRORS = 3
num_errors = 0
# Obtain default parameters
with open("./bathymetry_blink/bathy_config.json") as f:
config = json.load(f)
# Default Blinky Tape port on Raspberry Pi is /dev/ttyACM0
parser = optparse.OptionParser()
parser.add_option("-p", "--port", dest="portname",
help="serial port (ex: /dev/ttyACM0)", default=config["port"])
parser.add_option("-l", "--location", dest="location",
help="Location of the center of the LED strip (ex: 70,-110)", default=config["location"])
parser.add_option("-u", "--update-rate", dest="update_rate",
help="How often to update elevation profile (mins) (ex: 5)", default=config["update_rate"])
parser.add_option("-d", "--delta-latitude", dest="delta_latitude",
help="Change in latitude during update (ex: 5)", default=config["delta_latitude"])
parser.add_option("-n", "--num-leds", dest="num_leds",
help="Number of LEDs in strip (ex: 60)", default=config["num_leds"])
parser.add_option("-i", "--image", dest="image_name",
help="Name of the map/bathymetry image (ex: ./mapserv.png)", default=config["image"])
(options, args) = parser.parse_args()
if args:
print("Unknown parameters: " + args)
# grab the values provided by user (or defaults)
port = options.portname
loc = options.location
rate = options.update_rate
delta = options.delta_latitude
n_leds = options.num_leds
i_name = options.image_name
# Some visual indication that it works, for headless setups (green tape)
bt = BlinkyTape(port, n_leds)
bt.displayColor(0, 100, 0)
bt.show()
sleep(2)
while True:
try:
# first, load image
im = Image.open(i_name) # Can be many different formats.
cols, rows = im.size
a = np.asarray(im) # of shape (rows, cols, channels)
# map loc latitude to 0-based index
latitude_index = min(rows - 1, max(0, (int)(((loc[0] - -90) / (90 - -90)) * (rows - 0) + 0)))
longitude_index = min(cols - 1, max(0, (int)(((loc[1] - -180) / (180 - -180)) * (cols - 0) + 0)))
# update the location of the next row of elevation data to take
loc[0] += delta
loc[0] = ((loc[0] + 90) % 180) - 90 # wraps to next pole if overflow
print("Lat index: " + str(latitude_index))
print("Lon index: " + str(longitude_index))
print("Next latitude: " + str(loc[0]))
# grab the applicable pixel indices
indices = [(int)(x*(cols/n_leds)) for x in range(n_leds)]
# sample that row of pixel data
output_pixels = np.take(a[latitude_index], indices, axis=0)
# rotate the row to center around the specified longitude
output_pixels = np.roll(output_pixels, longitude_index, axis=0)
# send all pixel data to bt
for pixel in output_pixels:
print("Sending r: {}, g: {}, b: {}".format(*pixel))
bt.sendPixel(*pixel)
# finally, show the image
bt.show()
# delete variables for memory management
del a
del im
# Tape resets to stored pattern after a few seconds of inactivity
sleep(rate * 60) # Wait specified number of minutes
# sleep(10) # Wait specified number of minutes
except KeyboardInterrupt:
print("Keyboard interrupt, ending program.")
sys.exit()
except RuntimeError as e:
print("Encountered runtime error: " + e.args[0])
# flush any incomplete data
bt.show()
num_errors += 1
if num_errors > MAX_ERRORS:
sys.exit("Error count exceeds that allowed.")
| [
"PIL.Image.open",
"numpy.roll",
"numpy.asarray",
"optparse.OptionParser",
"time.sleep",
"numpy.take",
"blinkytape.BlinkyTape",
"sys.exit",
"json.load"
] | [((1945, 1968), 'optparse.OptionParser', 'optparse.OptionParser', ([], {}), '()\n', (1966, 1968), False, 'import optparse\n'), ((3304, 3328), 'blinkytape.BlinkyTape', 'BlinkyTape', (['port', 'n_leds'], {}), '(port, n_leds)\n', (3314, 3328), False, 'from blinkytape import BlinkyTape\n'), ((3366, 3374), 'time.sleep', 'sleep', (['(2)'], {}), '(2)\n', (3371, 3374), False, 'from time import sleep\n'), ((1863, 1875), 'json.load', 'json.load', (['f'], {}), '(f)\n', (1872, 1875), False, 'import json\n'), ((3438, 3456), 'PIL.Image.open', 'Image.open', (['i_name'], {}), '(i_name)\n', (3448, 3456), False, 'from PIL import Image\n'), ((3532, 3546), 'numpy.asarray', 'np.asarray', (['im'], {}), '(im)\n', (3542, 3546), True, 'import numpy as np\n'), ((4371, 4414), 'numpy.take', 'np.take', (['a[latitude_index]', 'indices'], {'axis': '(0)'}), '(a[latitude_index], indices, axis=0)\n', (4378, 4414), True, 'import numpy as np\n'), ((4514, 4561), 'numpy.roll', 'np.roll', (['output_pixels', 'longitude_index'], {'axis': '(0)'}), '(output_pixels, longitude_index, axis=0)\n', (4521, 4561), True, 'import numpy as np\n'), ((4971, 4987), 'time.sleep', 'sleep', (['(rate * 60)'], {}), '(rate * 60)\n', (4976, 4987), False, 'from time import sleep\n'), ((5172, 5182), 'sys.exit', 'sys.exit', ([], {}), '()\n', (5180, 5182), False, 'import sys\n'), ((5423, 5468), 'sys.exit', 'sys.exit', (['"""Error count exceeds that allowed."""'], {}), "('Error count exceeds that allowed.')\n", (5431, 5468), False, 'import sys\n')] |
# pylint: disable=protected-access
import os
import re
import pytest
from dagster import file_relative_path
from dagster.core.errors import DagsterInstanceMigrationRequired
from dagster.core.instance import DagsterInstance, InstanceRef
from dagster.utils.test import restore_directory
# test that we can load runs and events from an old instance
def test_0_6_4():
test_dir = file_relative_path(__file__, 'snapshot_0_6_4')
with restore_directory(test_dir):
instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
runs = instance.get_runs()
with pytest.raises(
DagsterInstanceMigrationRequired,
match=re.escape(
'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
'c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is '
'567bc23fd1ac. Please run `dagster instance migrate`.'
),
):
for run in runs:
instance.all_logs(run.run_id)
def test_0_6_6_sqlite_exc():
test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
with restore_directory(test_dir):
instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
runs = instance.get_runs()
# Note that this is a deliberate choice -- old runs are simply invisible, and their
# presence won't raise DagsterInstanceMigrationRequired. This is a reasonable choice since
# the runs.db has moved and otherwise we would have to do a check for the existence of an
# old runs.db every time we accessed the runs. Instead, we'll do this only in the upgrade
# method.
assert len(runs) == 0
run_ids = instance._event_storage.get_all_run_ids()
assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']
with pytest.raises(
DagsterInstanceMigrationRequired,
match=re.escape(
'Instance is out of date and must be migrated (SqliteEventLogStorage for run '
'89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is '
'567bc23fd1ac. Please run `dagster instance migrate`.'
),
):
instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')
def test_0_6_6_sqlite_migrate():
test_dir = file_relative_path(__file__, 'snapshot_0_6_6/sqlite')
assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))
with restore_directory(test_dir):
instance = DagsterInstance.from_ref(InstanceRef.from_dir(test_dir))
instance.upgrade()
runs = instance.get_runs()
assert len(runs) == 1
run_ids = instance._event_storage.get_all_run_ids()
assert run_ids == ['89296095-892d-4a15-aa0d-9018d1580945']
instance._event_storage.get_logs_for_run('89296095-892d-4a15-aa0d-9018d1580945')
assert not os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/runs.db'))
assert os.path.exists(file_relative_path(__file__, 'snapshot_0_6_6/sqlite/history/runs.db'))
| [
"dagster.file_relative_path",
"dagster.core.instance.InstanceRef.from_dir",
"dagster.utils.test.restore_directory",
"re.escape"
] | [((383, 429), 'dagster.file_relative_path', 'file_relative_path', (['__file__', '"""snapshot_0_6_4"""'], {}), "(__file__, 'snapshot_0_6_4')\n", (401, 429), False, 'from dagster import file_relative_path\n'), ((1092, 1145), 'dagster.file_relative_path', 'file_relative_path', (['__file__', '"""snapshot_0_6_6/sqlite"""'], {}), "(__file__, 'snapshot_0_6_6/sqlite')\n", (1110, 1145), False, 'from dagster import file_relative_path\n'), ((2393, 2446), 'dagster.file_relative_path', 'file_relative_path', (['__file__', '"""snapshot_0_6_6/sqlite"""'], {}), "(__file__, 'snapshot_0_6_6/sqlite')\n", (2411, 2446), False, 'from dagster import file_relative_path\n'), ((439, 466), 'dagster.utils.test.restore_directory', 'restore_directory', (['test_dir'], {}), '(test_dir)\n', (456, 466), False, 'from dagster.utils.test import restore_directory\n'), ((1155, 1182), 'dagster.utils.test.restore_directory', 'restore_directory', (['test_dir'], {}), '(test_dir)\n', (1172, 1182), False, 'from dagster.utils.test import restore_directory\n'), ((2473, 2534), 'dagster.file_relative_path', 'file_relative_path', (['__file__', '"""snapshot_0_6_6/sqlite/runs.db"""'], {}), "(__file__, 'snapshot_0_6_6/sqlite/runs.db')\n", (2491, 2534), False, 'from dagster import file_relative_path\n'), ((2647, 2674), 'dagster.utils.test.restore_directory', 'restore_directory', (['test_dir'], {}), '(test_dir)\n', (2664, 2674), False, 'from dagster.utils.test import restore_directory\n'), ((512, 542), 'dagster.core.instance.InstanceRef.from_dir', 'InstanceRef.from_dir', (['test_dir'], {}), '(test_dir)\n', (532, 542), False, 'from dagster.core.instance import DagsterInstance, InstanceRef\n'), ((1228, 1258), 'dagster.core.instance.InstanceRef.from_dir', 'InstanceRef.from_dir', (['test_dir'], {}), '(test_dir)\n', (1248, 1258), False, 'from dagster.core.instance import DagsterInstance, InstanceRef\n'), ((2566, 2635), 'dagster.file_relative_path', 'file_relative_path', (['__file__', '"""snapshot_0_6_6/sqlite/history/runs.db"""'], {}), "(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')\n", (2584, 2635), False, 'from dagster import file_relative_path\n'), ((2720, 2750), 'dagster.core.instance.InstanceRef.from_dir', 'InstanceRef.from_dir', (['test_dir'], {}), '(test_dir)\n', (2740, 2750), False, 'from dagster.core.instance import DagsterInstance, InstanceRef\n'), ((3191, 3260), 'dagster.file_relative_path', 'file_relative_path', (['__file__', '"""snapshot_0_6_6/sqlite/history/runs.db"""'], {}), "(__file__, 'snapshot_0_6_6/sqlite/history/runs.db')\n", (3209, 3260), False, 'from dagster import file_relative_path\n'), ((3098, 3159), 'dagster.file_relative_path', 'file_relative_path', (['__file__', '"""snapshot_0_6_6/sqlite/runs.db"""'], {}), "(__file__, 'snapshot_0_6_6/sqlite/runs.db')\n", (3116, 3159), False, 'from dagster import file_relative_path\n'), ((672, 900), 're.escape', 're.escape', (['"""Instance is out of date and must be migrated (SqliteEventLogStorage for run c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`."""'], {}), "(\n 'Instance is out of date and must be migrated (SqliteEventLogStorage for run c7a6c4d7-6c88-46d0-8baa-d4937c3cefe5). Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`.'\n )\n", (681, 900), False, 'import re\n'), ((1951, 2179), 're.escape', 're.escape', (['"""Instance is out of date and must be migrated (SqliteEventLogStorage for run 89296095-892d-4a15-aa0d-9018d1580945). 
Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`."""'], {}), "(\n 'Instance is out of date and must be migrated (SqliteEventLogStorage for run 89296095-892d-4a15-aa0d-9018d1580945). Database is at revision None, head is 567bc23fd1ac. Please run `dagster instance migrate`.'\n )\n", (1960, 2179), False, 'import re\n')] |
import matplotlib.pyplot as plt
import pandas as pd
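# Each helper below groups the crimes dataframe by one column and saves a
# percentage chart under ../charts/ (see the calls in the __main__ block at
# the bottom of the file).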
def group_by_category(df):
grouped = df.groupby(['CATEGORY']).size().to_frame('Crimes')
labels = ['Trespassing', 'Vehicle theft', 'General Theft',
'Damage to Property', 'Robbery', 'Homicide']
p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')
p.set_title('Crimes Percentage Grouped By Category')
p.get_legend().remove()
plt.savefig('../charts/category.png')
def group_by_time_of_day(df):
grouped = df.groupby(['TIME_OF_DAY']).size().to_frame('Crimes')
p = grouped.plot.pie(y='Crimes', labels=['Day', 'Evening', 'Night'], autopct='%1.1f%%')
p.set_title('Crimes Percentage Grouped By Time of Day')
p.get_legend().remove()
plt.savefig('../charts/time_of_day.png')
def group_by_day_of_the_week(df):
grouped = df.groupby(['DAY_OF_THE_WEEK']).size().to_frame('Crimes')
labels = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday']
p = grouped.plot.pie(y='Crimes', labels=labels, autopct='%1.1f%%')
p.set_title('Crimes Percentage Grouped By Day of The Week')
p.get_legend().remove()
plt.savefig('../charts/day_of_the_week.png')
def group_by_month(df):
grouped = df.groupby(['MONTH']).size().to_frame('Size')
grouped['Percentage'] = 100 * grouped['Size'] / len(df)
grouped = grouped.drop(columns='Size')
p = grouped.plot.bar()
p.set_title('Crimes Percentage Grouped By Month')
p.set_ylabel('Percentage of Crimes')
p.set_xlabel('Month')
p.get_legend().remove()
plt.savefig('../charts/month.png')
def group_by_year(df):
grouped = df.groupby(['YEAR']).size().to_frame('Crimes')
p = grouped.plot.pie(y='Crimes', autopct='%1.1f%%')
p.set_title('Crimes Percentage Grouped By Year')
p.get_legend().remove()
plt.savefig('../charts/year.png')
def group_by_territory(df):
grouped = df.groupby(['PDQ']).size().to_frame('Size')
grouped['Percentage'] = 100 * grouped['Size'] / len(df)
grouped = grouped.drop(columns='Size')
grouped.index = grouped.index.astype(int)
p = grouped.plot.bar()
p.set_title('Crimes Percentage Grouped By Territory')
p.set_ylabel('Percentage of Crimes')
p.set_xlabel('Territory Number')
p.get_legend().remove()
plt.savefig('../charts/territory.png')
if __name__ == '__main__':
df = pd.read_csv('../data/crimes_dataset_processed_incomplete.csv')
group_by_territory(df)
group_by_year(df)
group_by_month(df)
group_by_time_of_day(df)
group_by_day_of_the_week(df)
group_by_category(df)
| [
"matplotlib.pyplot.savefig",
"pandas.read_csv"
] | [((428, 465), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../charts/category.png"""'], {}), "('../charts/category.png')\n", (439, 465), True, 'import matplotlib.pyplot as plt\n'), ((749, 789), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../charts/time_of_day.png"""'], {}), "('../charts/time_of_day.png')\n", (760, 789), True, 'import matplotlib.pyplot as plt\n'), ((1156, 1200), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../charts/day_of_the_week.png"""'], {}), "('../charts/day_of_the_week.png')\n", (1167, 1200), True, 'import matplotlib.pyplot as plt\n'), ((1569, 1603), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../charts/month.png"""'], {}), "('../charts/month.png')\n", (1580, 1603), True, 'import matplotlib.pyplot as plt\n'), ((1830, 1863), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../charts/year.png"""'], {}), "('../charts/year.png')\n", (1841, 1863), True, 'import matplotlib.pyplot as plt\n'), ((2295, 2333), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""../charts/territory.png"""'], {}), "('../charts/territory.png')\n", (2306, 2333), True, 'import matplotlib.pyplot as plt\n'), ((2372, 2434), 'pandas.read_csv', 'pd.read_csv', (['"""../data/crimes_dataset_processed_incomplete.csv"""'], {}), "('../data/crimes_dataset_processed_incomplete.csv')\n", (2383, 2434), True, 'import pandas as pd\n')] |
# Unit tests
import unittest
def run_test_instance(unittestinstance, profile, committeesize, tests):
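    # Helper: runs every rule registered in rules_approval.MWRULES on the
    # given profile and compares the computed committees with the expected
    # ones in `tests`, once with resolute=False and once with resolute=True.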
import rules_approval
# all rules used?
for rule in rules_approval.MWRULES:
unittestinstance.assertTrue(rule in tests.keys())
for rule in tests.keys():
output = rules_approval.compute_rule(rule, profile,
committeesize,
resolute=False)
unittestinstance.assertEqual(
output, tests[rule], msg=rules_approval.MWRULES[rule] + " failed")
output = rules_approval.compute_rule(
rule, profile, committeesize, resolute=True)
unittestinstance.assertEqual(
len(output), 1,
msg=rules_approval.MWRULES[rule] + " failed with resolute=True")
unittestinstance.assertTrue(
output[0] in tests[rule],
msg=rules_approval.MWRULES[rule] + " failed with resolute=True")
class TestApprovalMultiwinner(unittest.TestCase):
def test_createprofiles(self):
from preferences import Profile
from preferences import DichotomousPreferences
num_cand = 7
prof = Profile(num_cand)
self.assertEqual(prof.add_preferences(
DichotomousPreferences([0, 4, 5])),
None)
with self.assertRaises(Exception):
prof.add_preferences(DichotomousPreferences([num_cand]))
with self.assertRaises(Exception):
prof.add_preferences(DichotomousPreferences([-1]))
self.assertEqual(prof.add_preferences([0, 4, 5]), None)
with self.assertRaises(Exception):
prof.add_preferences([0, 4, 5, "1"])
with self.assertRaises(Exception):
prof.add_preferences(["1", 0, 4, 5])
p1 = DichotomousPreferences([0, 4, 5])
p2 = DichotomousPreferences([1, 2])
self.assertEqual(prof.add_preferences([p1, p2]), None)
self.assertTrue(prof.has_unit_weights())
prof.add_preferences(DichotomousPreferences([0, 4, 5], 2.4))
self.assertFalse(prof.has_unit_weights())
self.assertEqual(prof.totalweight(), 6.4)
def test_mwrules__toofewcandidates(self):
from preferences import Profile
import rules_approval
profile = Profile(5)
committeesize = 4
preflist = [[0, 1, 2], [1], [1, 2], [0]]
profile.add_preferences(preflist)
for rule in rules_approval.MWRULES.keys():
with self.assertRaises(Exception):
rules_approval.compute_rule(rule, profile, committeesize)
with self.assertRaises(Exception):
rules_approval.compute_rule(rule, profile,
committeesize, resolute=True)
def test_mwrules_weightsconsidered(self):
from preferences import Profile
from preferences import DichotomousPreferences
import rules_approval
self.longMessage = True
profile = Profile(3)
profile.add_preferences(DichotomousPreferences([0]))
profile.add_preferences(DichotomousPreferences([0]))
profile.add_preferences(DichotomousPreferences([1], 5))
profile.add_preferences(DichotomousPreferences([0]))
committeesize = 1
for rule in rules_approval.MWRULES.keys():
if "monroe" in rule or "rule-x" in rule:
# Monroe and rule x only work with unit weights:
continue
result = rules_approval.compute_rule(rule, profile, committeesize)
self.assertTrue([1] in result,
msg=rule + " failed"+str(result))
def test_mwrules_correct_simple(self):
from preferences import Profile
import rules_approval
self.longMessage = True
profile = Profile(4)
profile.add_preferences([[0], [1], [2], [3]])
committeesize = 2
for rule in rules_approval.MWRULES.keys():
if rule == "greedy-monroe": # always returns one committee
continue
self.assertEqual(len(rules_approval.compute_rule(rule, profile,
committeesize)),
6, msg=rule + " failed")
for rule in rules_approval.MWRULES.keys():
self.assertEqual(len(rules_approval.compute_rule(rule, profile,
committeesize,
resolute=True)),
1, msg=rule + " failed with resolute=True")
def test_monroe_indivisible(self):
from preferences import Profile
import rules_approval
self.longMessage = True
profile = Profile(4)
profile.add_preferences([[0], [0], [0], [1, 2], [1, 2], [1], [3]])
committeesize = 3
for ilp in [True, False]:
# max Monroe score is 6 (even for committee [0, 1, 3])
self.assertEqual(
rules_approval.compute_monroe(profile, committeesize,
ilp=ilp, resolute=False),
[[0, 1, 2], [0, 1, 3], [0, 2, 3]])
# this test shows that tiebreaking is not (yet)
# implemented for opt-Phragmen
def test_optphrag_notiebreaking(self):
from preferences import Profile
from rules_approval import compute_rule
self.longMessage = True
profile = Profile(6)
profile.add_preferences([[0], [0], [1, 3], [1, 3], [1, 4],
[2, 4], [2, 5], [2, 5]])
committeesize = 3
self.assertEqual(
len(compute_rule("optphrag", profile, committeesize,
resolute=False)),
12)
def test_mwrules_correct_advanced_1(self):
from preferences import Profile
self.longMessage = True
committeesize = 4
profile = Profile(6)
preflist = [[0, 4, 5], [0], [1, 4, 5], [1],
[2, 4, 5], [2], [3, 4, 5], [3]]
profile.add_preferences(preflist)
tests1 = {
"seqpav": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"av": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"sav": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4],
[0, 1, 3, 5], [0, 1, 4, 5], [0, 2, 3, 4], [0, 2, 3, 5],
[0, 2, 4, 5], [0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"pav-ilp": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"pav-noilp": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"revseqpav": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"minimaxav-noilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
[0, 1, 3, 4], [0, 1, 3, 5], [0, 1, 4, 5],
[0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
[0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"minimaxav-ilp": [[0, 1, 2, 3], [0, 1, 2, 4], [0, 1, 2, 5],
[0, 1, 3, 4], [0, 1, 3, 5], [0, 1, 4, 5],
[0, 2, 3, 4], [0, 2, 3, 5], [0, 2, 4, 5],
[0, 3, 4, 5], [1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"phrag": [[0, 1, 4, 5], [0, 2, 4, 5], [0, 3, 4, 5],
[1, 2, 4, 5], [1, 3, 4, 5], [2, 3, 4, 5]],
"optphrag": [[0, 1, 2, 3]],
"cc-ilp": [[0, 1, 2, 3]],
"cc-noilp": [[0, 1, 2, 3]],
"seqcc": [[0, 1, 2, 4], [0, 1, 2, 5], [0, 1, 3, 4], [0, 1, 3, 5],
[0, 2, 3, 4], [0, 2, 3, 5], [1, 2, 3, 4], [1, 2, 3, 5]],
"revseqcc": [[0, 1, 2, 3]],
"monroe-ilp": [[0, 1, 2, 3]],
"monroe-noilp": [[0, 1, 2, 3]],
"greedy-monroe": [[0, 2, 3, 4]],
"slav-ilp": [[0, 1, 2, 3],
[0, 1, 2, 4], [0, 1, 2, 5],
[0, 1, 3, 4], [0, 1, 3, 5],
[0, 2, 3, 4], [0, 2, 3, 5],
[1, 2, 3, 4], [1, 2, 3, 5]],
"slav-noilp": [[0, 1, 2, 3],
[0, 1, 2, 4], [0, 1, 2, 5],
[0, 1, 3, 4], [0, 1, 3, 5],
[0, 2, 3, 4], [0, 2, 3, 5],
[1, 2, 3, 4], [1, 2, 3, 5]],
"seqslav": [[0, 1, 2, 4], [0, 1, 2, 5],
[0, 1, 3, 4], [0, 1, 3, 5],
[0, 2, 3, 4], [0, 2, 3, 5],
[1, 2, 3, 4], [1, 2, 3, 5]],
"rule-x": [[0, 1, 4, 5], [0, 2, 4, 5],
[0, 3, 4, 5], [1, 2, 4, 5],
[1, 3, 4, 5], [2, 3, 4, 5]],
"phragmen-enestroem": [[0, 1, 4, 5], [0, 2, 4, 5],
[0, 3, 4, 5], [1, 2, 4, 5],
[1, 3, 4, 5], [2, 3, 4, 5]],
}
run_test_instance(self, profile, committeesize, tests1)
# and now with reversed preflist
preflist.reverse()
for p in preflist:
p.reverse()
profile = Profile(6)
profile.add_preferences(preflist)
run_test_instance(self, profile, committeesize, tests1)
def test_mwrules_correct_advanced_2(self):
from preferences import Profile
self.longMessage = True
# and another profile
profile = Profile(5)
committeesize = 3
preflist = [[0, 1, 2], [0, 1, 2], [0, 1, 2], [0, 1, 2],
[0, 1, 2], [0, 1], [3, 4], [3, 4], [3]]
profile.add_preferences(preflist)
tests2 = {
"seqpav": [[0, 1, 3]],
"av": [[0, 1, 2]],
"sav": [[0, 1, 3]],
"pav-ilp": [[0, 1, 3]],
"pav-noilp": [[0, 1, 3]],
"revseqpav": [[0, 1, 3]],
"minimaxav-noilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
"minimaxav-ilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
"phrag": [[0, 1, 3]],
"optphrag": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
"cc-ilp": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
[1, 2, 3], [1, 3, 4]],
"cc-noilp": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
[1, 2, 3], [1, 3, 4]],
"seqcc": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
[1, 2, 3], [1, 3, 4]],
"revseqcc": [[0, 1, 3], [0, 2, 3], [0, 3, 4],
[1, 2, 3], [1, 3, 4]],
"monroe-ilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
"monroe-noilp": [[0, 1, 3], [0, 2, 3], [1, 2, 3]],
"greedy-monroe": [[0, 1, 3]],
"seqslav": [[0, 1, 3]],
"slav-ilp": [[0, 1, 3]],
"slav-noilp": [[0, 1, 3]],
"rule-x": [[0, 1, 3]],
"phragmen-enestroem": [[0, 1, 3]],
}
run_test_instance(self, profile, committeesize, tests2)
def test_mwrules_correct_advanced_3(self):
from preferences import Profile
self.longMessage = True
# and a third profile
profile = Profile(6)
committeesize = 4
preflist = [[0, 3, 4, 5], [1, 2], [0, 2, 5], [2],
[0, 1, 2, 3, 4], [0, 3, 4], [0, 2, 4], [0, 1]]
profile.add_preferences(preflist)
tests3 = {
"seqpav": [[0, 1, 2, 4]],
"av": [[0, 1, 2, 4], [0, 2, 3, 4]],
"sav": [[0, 1, 2, 4]],
"pav-ilp": [[0, 1, 2, 4]],
"pav-noilp": [[0, 1, 2, 4]],
"revseqpav": [[0, 1, 2, 4]],
"minimaxav-noilp": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 2, 3, 4], [0, 2, 3, 5],
[0, 2, 4, 5]],
"minimaxav-ilp": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 2, 3, 4], [0, 2, 3, 5],
[0, 2, 4, 5]],
"phrag": [[0, 1, 2, 4]],
"optphrag": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 1, 2, 5], [0, 2, 3, 4],
[0, 2, 3, 5], [0, 2, 4, 5],
[1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5]],
"cc-ilp": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 1, 2, 5], [0, 2, 3, 4],
[0, 2, 3, 5], [0, 2, 4, 5],
[1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5]],
"cc-noilp": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 1, 2, 5], [0, 2, 3, 4],
[0, 2, 3, 5], [0, 2, 4, 5],
[1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5]],
"seqcc": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 1, 2, 5], [0, 2, 3, 4],
[0, 2, 3, 5], [0, 2, 4, 5]],
"revseqcc": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 1, 2, 5], [0, 2, 3, 4],
[0, 2, 3, 5], [0, 2, 4, 5],
[1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5]],
"monroe-ilp": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 1, 2, 5], [0, 2, 3, 4],
[0, 2, 3, 5], [0, 2, 4, 5],
[1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5]],
"monroe-noilp": [[0, 1, 2, 3], [0, 1, 2, 4],
[0, 1, 2, 5], [0, 2, 3, 4],
[0, 2, 3, 5], [0, 2, 4, 5],
[1, 2, 3, 4], [1, 2, 3, 5],
[1, 2, 4, 5]],
"greedy-monroe": [[0, 1, 2, 3]],
"seqslav": [[0, 1, 2, 4]],
"slav-ilp": [[0, 1, 2, 4]],
"slav-noilp": [[0, 1, 2, 4]],
"rule-x": [[0, 1, 2, 4]],
"phragmen-enestroem": [[0, 1, 2, 4]],
}
run_test_instance(self, profile, committeesize, tests3)
def test_monroescore(self):
from preferences import Profile
from score_functions import monroescore_flowbased, monroescore_matching
self.longMessage = True
# and a third profile
profile = Profile(6)
preflist = [[0, 1], [1], [1, 3], [4], [2], [1, 5, 3]]
profile.add_preferences(preflist)
self.assertEqual(monroescore_flowbased(profile, [1, 3, 2]), 5)
self.assertEqual(monroescore_matching(profile, [1, 3, 2]), 5)
self.assertEqual(monroescore_flowbased(profile, [2, 1, 5]), 4)
self.assertEqual(monroescore_matching(profile, [2, 1, 5]), 4)
self.assertEqual(monroescore_flowbased(profile, [2, 4, 5]), 3)
self.assertEqual(monroescore_matching(profile, [2, 5, 4]), 3)
if __name__ == '__main__':
unittest.main()
| [
"rules_approval.compute_rule",
"score_functions.monroescore_matching",
"preferences.Profile",
"rules_approval.compute_monroe",
"rules_approval.MWRULES.keys",
"preferences.DichotomousPreferences",
"score_functions.monroescore_flowbased",
"unittest.main"
] | [((15333, 15348), 'unittest.main', 'unittest.main', ([], {}), '()\n', (15346, 15348), False, 'import unittest\n'), ((301, 374), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (['rule', 'profile', 'committeesize'], {'resolute': '(False)'}), '(rule, profile, committeesize, resolute=False)\n', (328, 374), False, 'import rules_approval\n'), ((599, 671), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (['rule', 'profile', 'committeesize'], {'resolute': '(True)'}), '(rule, profile, committeesize, resolute=True)\n', (626, 671), False, 'import rules_approval\n'), ((1198, 1215), 'preferences.Profile', 'Profile', (['num_cand'], {}), '(num_cand)\n', (1205, 1215), False, 'from preferences import Profile\n'), ((1808, 1841), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[0, 4, 5]'], {}), '([0, 4, 5])\n', (1830, 1841), False, 'from preferences import DichotomousPreferences\n'), ((1855, 1885), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[1, 2]'], {}), '([1, 2])\n', (1877, 1885), False, 'from preferences import DichotomousPreferences\n'), ((2302, 2312), 'preferences.Profile', 'Profile', (['(5)'], {}), '(5)\n', (2309, 2312), False, 'from preferences import Profile\n'), ((2451, 2480), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ([], {}), '()\n', (2478, 2480), False, 'import rules_approval\n'), ((3007, 3017), 'preferences.Profile', 'Profile', (['(3)'], {}), '(3)\n', (3014, 3017), False, 'from preferences import Profile\n'), ((3312, 3341), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ([], {}), '()\n', (3339, 3341), False, 'import rules_approval\n'), ((3836, 3846), 'preferences.Profile', 'Profile', (['(4)'], {}), '(4)\n', (3843, 3846), False, 'from preferences import Profile\n'), ((3948, 3977), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ([], {}), '()\n', (3975, 3977), False, 'import rules_approval\n'), ((4306, 4335), 'rules_approval.MWRULES.keys', 'rules_approval.MWRULES.keys', ([], {}), '()\n', (4333, 4335), False, 'import rules_approval\n'), ((4802, 4812), 'preferences.Profile', 'Profile', (['(4)'], {}), '(4)\n', (4809, 4812), False, 'from preferences import Profile\n'), ((5510, 5520), 'preferences.Profile', 'Profile', (['(6)'], {}), '(6)\n', (5517, 5520), False, 'from preferences import Profile\n'), ((6005, 6015), 'preferences.Profile', 'Profile', (['(6)'], {}), '(6)\n', (6012, 6015), False, 'from preferences import Profile\n'), ((9721, 9731), 'preferences.Profile', 'Profile', (['(6)'], {}), '(6)\n', (9728, 9731), False, 'from preferences import Profile\n'), ((10009, 10019), 'preferences.Profile', 'Profile', (['(5)'], {}), '(5)\n', (10016, 10019), False, 'from preferences import Profile\n'), ((11684, 11694), 'preferences.Profile', 'Profile', (['(6)'], {}), '(6)\n', (11691, 11694), False, 'from preferences import Profile\n'), ((14761, 14771), 'preferences.Profile', 'Profile', (['(6)'], {}), '(6)\n', (14768, 14771), False, 'from preferences import Profile\n'), ((2027, 2065), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[0, 4, 5]', '(2.4)'], {}), '([0, 4, 5], 2.4)\n', (2049, 2065), False, 'from preferences import DichotomousPreferences\n'), ((3050, 3077), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[0]'], {}), '([0])\n', (3072, 3077), False, 'from preferences import DichotomousPreferences\n'), ((3111, 3138), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[0]'], {}), '([0])\n', (3133, 3138), False, 'from preferences 
import DichotomousPreferences\n'), ((3172, 3202), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[1]', '(5)'], {}), '([1], 5)\n', (3194, 3202), False, 'from preferences import DichotomousPreferences\n'), ((3236, 3263), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[0]'], {}), '([0])\n', (3258, 3263), False, 'from preferences import DichotomousPreferences\n'), ((3507, 3564), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (['rule', 'profile', 'committeesize'], {}), '(rule, profile, committeesize)\n', (3534, 3564), False, 'import rules_approval\n'), ((14902, 14943), 'score_functions.monroescore_flowbased', 'monroescore_flowbased', (['profile', '[1, 3, 2]'], {}), '(profile, [1, 3, 2])\n', (14923, 14943), False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((14973, 15013), 'score_functions.monroescore_matching', 'monroescore_matching', (['profile', '[1, 3, 2]'], {}), '(profile, [1, 3, 2])\n', (14993, 15013), False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((15043, 15084), 'score_functions.monroescore_flowbased', 'monroescore_flowbased', (['profile', '[2, 1, 5]'], {}), '(profile, [2, 1, 5])\n', (15064, 15084), False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((15114, 15154), 'score_functions.monroescore_matching', 'monroescore_matching', (['profile', '[2, 1, 5]'], {}), '(profile, [2, 1, 5])\n', (15134, 15154), False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((15184, 15225), 'score_functions.monroescore_flowbased', 'monroescore_flowbased', (['profile', '[2, 4, 5]'], {}), '(profile, [2, 4, 5])\n', (15205, 15225), False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((15255, 15295), 'score_functions.monroescore_matching', 'monroescore_matching', (['profile', '[2, 5, 4]'], {}), '(profile, [2, 5, 4])\n', (15275, 15295), False, 'from score_functions import monroescore_flowbased, monroescore_matching\n'), ((1275, 1308), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[0, 4, 5]'], {}), '([0, 4, 5])\n', (1297, 1308), False, 'from preferences import DichotomousPreferences\n'), ((1405, 1439), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[num_cand]'], {}), '([num_cand])\n', (1427, 1439), False, 'from preferences import DichotomousPreferences\n'), ((1517, 1545), 'preferences.DichotomousPreferences', 'DichotomousPreferences', (['[-1]'], {}), '([-1])\n', (1539, 1545), False, 'from preferences import DichotomousPreferences\n'), ((2545, 2602), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (['rule', 'profile', 'committeesize'], {}), '(rule, profile, committeesize)\n', (2572, 2602), False, 'import rules_approval\n'), ((2666, 2738), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (['rule', 'profile', 'committeesize'], {'resolute': '(True)'}), '(rule, profile, committeesize, resolute=True)\n', (2693, 2738), False, 'import rules_approval\n'), ((5062, 5140), 'rules_approval.compute_monroe', 'rules_approval.compute_monroe', (['profile', 'committeesize'], {'ilp': 'ilp', 'resolute': '(False)'}), '(profile, committeesize, ilp=ilp, resolute=False)\n', (5091, 5140), False, 'import rules_approval\n'), ((5719, 5783), 'rules_approval.compute_rule', 'compute_rule', (['"""optphrag"""', 'profile', 'committeesize'], {'resolute': '(False)'}), "('optphrag', profile, committeesize, resolute=False)\n", (5731, 5783), False, 'from 
rules_approval import compute_rule\n'), ((4110, 4167), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (['rule', 'profile', 'committeesize'], {}), '(rule, profile, committeesize)\n', (4137, 4167), False, 'import rules_approval\n'), ((4370, 4442), 'rules_approval.compute_rule', 'rules_approval.compute_rule', (['rule', 'profile', 'committeesize'], {'resolute': '(True)'}), '(rule, profile, committeesize, resolute=True)\n', (4397, 4442), False, 'import rules_approval\n')] |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2016, 2017 Red Hat, Inc.
# Red Hat Author: <NAME> <<EMAIL>>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Client for applications relying on OpenID Connect for authentication."""
from __future__ import print_function
from copy import copy
import json
import logging
from threading import Lock
import time
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import socket
import os
try:
from urllib import urlencode
except ImportError:
from urllib.parse import urlencode
from uuid import uuid4 as uuidgen
import webbrowser
from wsgiref import simple_server
import requests
import sys
from openidc_client import release
# The ports that we will try to use for our webserver
WEB_PORTS = [12345, 23456]
class OpenIDCClient(object):
# Internal implementation of tokens:
# Every app id has its own token cache
# The token cache is a json serialized dict
# This dict contains uuid: token pairs
# Every "token" object is a json dict with the following keys:
# idp: The URL of the idp that issued the token
# sub: The subject that owns the token
# access_token: Token value
# token_type: Token type. Currently supported: "Bearer"
# expires_at: Token expiration UTC time. NOTE: Even if the expires_at
# indicates the token should still be valid, it may have been revoked by
# the user! Also, even if it has expired, we might still be able to
# refresh the token.
# refresh_token: The token we can use to refresh the access token
# scopes: A list of scopes that we had requested with the token
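    #
    # A purely illustrative cache entry (all values below are made up) could
    # look like:
    #   {"d1b9c2...": {"idp": "https://id.example.com/openidc/",
    #                  "sub": "someuser",
    #                  "access_token": "...",
    #                  "token_type": "Bearer",
    #                  "expires_at": 1500000000.0,
    #                  "refresh_token": "...",
    #                  "scopes": ["openid", "profile"]}}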
def __init__(self, app_identifier, id_provider, id_provider_mapping,
client_id, client_secret=None, use_post=False, useragent=None,
cachedir=None, printfd=sys.stdout):
"""Client for interacting with web services relying on OpenID Connect.
:param app_identifier: Identifier for storage of retrieved tokens
:param id_provider: URL of the identity provider to get tokens from
:param id_provider_mapping: Mapping with URLs to use for specific
endpoints on the IdP.
:kwarg use_post: Whether to use POST submission of client secrets
rather than Authorization header
:kwarg client_id: The Client Identifier used to request credentials
:kwarg client_secret: The client "secret" that goes with the client_id.
May be None if your IdP does not require you to use a secret.
:kwarg useragent: Useragent string to use. If not provided, defaults to
"python-openidc-client/VERSION"
:kwarg cachedir: The directory in which to store the token caches. Will
            be put through expanduser. Default is ~/.openidc. If this does not
exist and we are unable to create it, the OSError will be thrown.
        :kwarg printfd: The file object to print token instructions to.
"""
self.logger = logging.getLogger(__name__)
self.debug = self.logger.debug
self.app_id = app_identifier
self.use_post = use_post
self.idp = id_provider
self.idp_mapping = id_provider_mapping
self.client_id = client_id
self.client_secret = client_secret
self.useragent = useragent or 'python-openid-client/%s' % \
release.VERSION
self.cachedir = os.path.expanduser(cachedir or '~/.openidc')
self.last_returned_uuid = None
self.problem_reported = False
self.token_to_try = None
self._retrieved_code = None
# TODO: Make cache_lock a filesystem lock so we also lock across
# multiple invocations
self._cache_lock = Lock()
with self._cache_lock:
self.__refresh_cache()
self._valid_cache = []
self._printfd = printfd
def get_token(self, scopes, new_token=True):
"""Function to retrieve tokens with specific scopes.
This function will block until a token is retrieved if requested.
It is always safe to call this though, since if we already have a token
with the current app_identifier that has the required scopes, we will
return it.
This function will return a bearer token or None.
Note that the bearer token might have been revoked by the user or
expired.
In that case, you will want to call report_token_issue() to try to
renew the token or delete the token.
:kwarg scopes: A list of scopes required for the current client.
:kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
:rtype: string or None
:returns: String bearer token if possible or None
"""
if not isinstance(scopes, list):
raise ValueError('Scopes must be a list')
token = self._get_token_with_scopes(scopes)
if token:
# If we had a valid token, use that
self.last_returned_uuid = token[0]
self.problem_reported = False
return token[1]['access_token']
elif not new_token:
return None
# We did not have a valid token, now comes the hard part...
uuid = self._get_new_token(scopes)
if uuid:
self.last_returned_uuid = uuid
self.problem_reported = False
return self._cache[uuid]['access_token']
def report_token_issue(self):
"""Report an error with the last token that was returned.
This will attempt to renew the token that was last returned.
If that worked, we will return the new access token.
If it did not work, we will return None and remove this token from the
cache.
If you get an indication from your application that the token you sent
was invalid, you should call it.
You should explicitly NOT call this function if the token was valid but
your request failed due to a server error or because the account or
token was lacking specific permissions.
"""
if not self.last_returned_uuid:
raise Exception('Cannot report issue before requesting token')
if self.problem_reported:
# We were reported an issue before. Let's just remove this token.
self._delete_token(self.last_returned_uuid)
return None
refresh_result = self._refresh_token(self.last_returned_uuid)
if not refresh_result:
self._delete_token(self.last_returned_uuid)
return None
else:
self.problem_reported = True
return self._cache[self.last_returned_uuid]['access_token']
def send_request(self, *args, **kwargs):
"""Make an python-requests POST request.
Allarguments and keyword arguments are like the arguments to requests,
except for `scopes`, `new_token` and `auto_refresh` keyword arguments.
`scopes` is required.
        :kwarg scopes: Scopes required for this call. If a token with these
            scopes is not present, a new one will be requested unless
            new_token is False.
        :kwarg new_token: If True, we will actively request the user to get a
            new token with the current scopeset if we do not already have one.
        :kwarg auto_refresh: If False, will not try to automatically report
            token issues on 401. This helps with broken apps that may send a
            401 return code in incorrect cases.
        :kwarg http_method: The HTTP method to use; defaults to POST.
"""
ckwargs = copy(kwargs)
scopes = ckwargs.pop('scopes')
new_token = ckwargs.pop('new_token', True)
auto_refresh = ckwargs.pop('auto_refresh', True)
method = ckwargs.pop('http_method', 'POST')
is_retry = False
if self.token_to_try:
is_retry = True
token = self.token_to_try
self.token_to_try = None
else:
token = self.get_token(scopes, new_token=new_token)
if not token:
return None
if self.use_post:
if 'json' in ckwargs:
raise ValueError('Cannot provide json in a post call')
if method not in ['POST']:
raise ValueError('Cannot use POST tokens in %s method' %
method)
if 'data' not in ckwargs:
ckwargs['data'] = {}
ckwargs['data']['access_token'] = token
else:
if 'headers' not in ckwargs:
ckwargs['headers'] = {}
ckwargs['headers']['Authorization'] = 'Bearer %s' % token
resp = requests.request(method, *args, **ckwargs)
if resp.status_code == 401 and not is_retry:
if not auto_refresh:
return resp
self.token_to_try = self.report_token_issue()
if not self.token_to_try:
return resp
return self.send_request(*args, **kwargs)
elif resp.status_code == 401:
# We got a 401 and this is a retry. Report error
self.report_token_issue()
return resp
else:
return resp
@property
def _cachefile(self):
"""Property to get the cache file name for the current client.
        This ensures that whenever this file is touched, the cache lock is held
"""
assert self._cache_lock.locked()
return os.path.join(self.cachedir, 'oidc_%s.json' % self.app_id)
def __refresh_cache(self):
"""Refreshes the self._cache from the cache on disk.
Requires cache_lock to be held by caller."""
assert self._cache_lock.locked()
self.debug('Refreshing cache')
if not os.path.isdir(self.cachedir):
self.debug('Creating directory')
os.makedirs(self.cachedir)
if not os.path.exists(self._cachefile):
self.debug('Creating file')
with open(self._cachefile, 'w') as f:
f.write(json.dumps({}))
with open(self._cachefile, 'r') as f:
self._cache = json.loads(f.read())
self.debug('Loaded %i tokens', len(self._cache))
def _refresh_cache(self):
"""Refreshes the self._cache from the cache on disk.
cache_lock may not be held by anyone."""
with self._cache_lock:
self.__refresh_cache()
def __write_cache(self):
"""Wirtes self._cache to cache on disk.
Requires cache_lock to be held by caller."""
assert self._cache_lock.locked()
self.debug('Writing cache with %i tokens', len(self._cache))
with open(self._cachefile, 'w') as f:
f.write(json.dumps(self._cache))
def _add_token(self, token):
"""Adds a token to the cache and writes cache to disk.
cache_lock may not be held by anyone.
:param token: Dict of the token to be added to the cache
"""
uuid = uuidgen().hex
self.debug('Adding token %s to cache', uuid)
with self._cache_lock:
self.__refresh_cache()
self._cache[uuid] = token
self.__write_cache()
return uuid
def _update_token(self, uuid, toupdate):
"""Updates a token in the cache.
cache_lock may not be held by anyone.
:param token: UUID of the token to be updated
:param toupdate: Dict indicating which fields need to be updated
"""
self.debug('Updating token %s in cache, fields %s',
uuid, toupdate.keys())
with self._cache_lock:
self.__refresh_cache()
if uuid not in self._cache:
return None
self._cache[uuid].update(toupdate)
self.__write_cache()
return uuid
def _delete_token(self, uuid):
"""Removes a token from the cache and writes cache to disk.
cache_lock may not be held by anyone.
:param uuid: UUID of the token to be removed from cache
"""
self.debug('Removing token %s from cache', uuid)
with self._cache_lock:
self.__refresh_cache()
if uuid in self._cache:
self.debug('Removing token')
del self._cache[uuid]
self.__write_cache()
else:
self.debug('Token was already gone')
def _get_token_with_scopes(self, scopes):
"""Searches the cache for any tokens that have the requested scopes.
        It will prefer to return tokens whose expires_at is still in the
        future, but if no such tokens exist it will return the possibly
expired token: it might be refreshable.
:param scopes: List of scopes that need to be in the returned token
:rtype: (string, dict) or None
:returns: Token UUID and contents or None if no applicable tokens were
found
"""
possible_token = None
self.debug('Trying to get token with scopes %s', scopes)
for uuid in self._cache:
self.debug('Checking %s', uuid)
token = self._cache[uuid]
if token['idp'] != self.idp:
self.debug('Incorrect idp')
continue
if not set(scopes).issubset(set(token['scopes'])):
self.debug('Missing scope: %s not subset of %s',
set(scopes),
set(token['scopes']))
continue
            if token['expires_at'] > time.time():
# This is a token that's supposed to still be valid, prefer it
# over any others we have
self.debug('Not yet expired, returning')
return uuid, token
# This is a token that may or may not still be valid
self.debug('Possible')
possible_token = (uuid, token)
if possible_token:
self.debug('Returning possible token')
return possible_token
def _idp_url(self, method):
"""Returns the IdP URL for the requested method.
:param method: The method name in the IdP mapping dict.
:rtype: string
:returns: The IdP URL
"""
if method in self.idp_mapping:
return self.idp + self.idp_mapping[method]
else:
            raise ValueError('Idp Mapping did not include path for %s'
                             % method)
def _refresh_token(self, uuid):
"""Tries to refresh a token and put the refreshed token in self._cache
The caller is responsible for either removing the token if it could not
        be refreshed or saving the cache if renewal was successful.
:param uuid: The UUID of the cached token to attempt to refresh.
:rtype: bool
        :returns: True if the token was successfully refreshed, False otherwise
"""
oldtoken = self._cache[uuid]
self.debug('Refreshing token %s', uuid)
data = {'client_id': self.client_id,
'grant_type': 'refresh_token',
'refresh_token': oldtoken['refresh_token']}
if self.client_secret:
data['client_secret'] = self.client_secret
resp = requests.request(
'POST',
self._idp_url('Token'),
data=data)
resp.raise_for_status()
resp = resp.json()
if 'error' in resp:
self.debug('Unable to refresh, error: %s', resp['error'])
return False
self._update_token(
uuid,
{'access_token': resp['access_token'],
'token_type': resp['token_type'],
'refresh_token': resp['refresh_token'],
'expires_at': time.time() + resp['expires_in']})
self.debug('Refreshed until %s', self._cache[uuid]['expires_at'])
return True
def _get_server(self, app):
"""This function returns a SimpleServer with an available WEB_PORT."""
for port in WEB_PORTS:
try:
server = simple_server.make_server('0.0.0.0', port, app)
return server
except socket.error:
# This port did not work. Switch to next one
continue
def _get_new_token(self, scopes):
"""This function kicks off some magic.
We will start a new webserver on one of the WEB_PORTS, and then either
show the user a URL, or if possible, kick off their browser.
This URL will be the Authorization endpoint of the IdP with a request
for our client_id to get a new token with the specified scopes.
The webserver will then need to catch the return with either an
Authorization Code (that we will exchange for an access token) or the
cancellation message.
This function will store the new token in the local cache, add it to
the valid cache, and then return the UUID.
If the user cancelled (or we got another error), we will return None.
"""
def _token_app(environ, start_response):
query = environ['QUERY_STRING']
split = query.split('&')
kv = dict([v.split('=', 1) for v in split])
if 'error' in kv:
self.debug('Error code returned: %s (%s)',
kv['error'], kv.get('error_description'))
self._retrieved_code = False
else:
self._retrieved_code = kv['code']
# Just return a message
start_response('200 OK', [('Content-Type', 'text/plain')])
return [u'You can close this window and return to the CLI'.encode('ascii')]
self._retrieved_code = None
server = self._get_server(_token_app)
if not server:
raise Exception('We were unable to instantiate a webserver')
return_uri = 'http://localhost:%i/' % server.socket.getsockname()[1]
rquery = {}
rquery['scope'] = ' '.join(scopes)
rquery['response_type'] = 'code'
rquery['client_id'] = self.client_id
rquery['redirect_uri'] = return_uri
rquery['response_mode'] = 'query'
query = urlencode(rquery)
authz_url = '%s?%s' % (self._idp_url('Authorization'), query)
print('Please visit %s to grant authorization' % authz_url,
file=self._printfd)
webbrowser.open(authz_url)
server.handle_request()
server.server_close()
assert self._retrieved_code is not None
if self._retrieved_code is False:
# The user cancelled the request
self._retrieved_code = None
self.debug('User cancelled')
return None
self.debug('We got an authorization code!')
data = {'client_id': self.client_id,
'grant_type': 'authorization_code',
'redirect_uri': return_uri,
'code': self._retrieved_code}
if self.client_secret:
data['client_secret'] = self.client_secret
resp = requests.request(
'POST',
self._idp_url('Token'),
data=data)
resp.raise_for_status()
self._retrieved_code = None
resp = resp.json()
if 'error' in resp:
self.debug('Error exchanging authorization code: %s',
resp['error'])
return None
token = {'access_token': resp['access_token'],
'refresh_token': resp['refresh_token'],
'expires_at': time.time() + int(resp['expires_in']),
'idp': self.idp,
'token_type': resp['token_type'],
'scopes': scopes}
# AND WE ARE DONE! \o/
return self._add_token(token)
| [
"logging.getLogger",
"os.path.exists",
"os.makedirs",
"threading.Lock",
"json.dumps",
"os.path.join",
"webbrowser.open",
"requests.request",
"uuid.uuid4",
"os.path.isdir",
"urllib.parse.urlencode",
"copy.copy",
"time.time",
"os.path.expanduser",
"wsgiref.simple_server.make_server"
] | [((4037, 4064), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (4054, 4064), False, 'import logging\n'), ((4451, 4495), 'os.path.expanduser', 'os.path.expanduser', (["(cachedir or '~/.openidc')"], {}), "(cachedir or '~/.openidc')\n", (4469, 4495), False, 'import os\n'), ((4773, 4779), 'threading.Lock', 'Lock', ([], {}), '()\n', (4777, 4779), False, 'from threading import Lock\n'), ((8738, 8750), 'copy.copy', 'copy', (['kwargs'], {}), '(kwargs)\n', (8742, 8750), False, 'from copy import copy\n'), ((9836, 9878), 'requests.request', 'requests.request', (['method', '*args'], {}), '(method, *args, **ckwargs)\n', (9852, 9878), False, 'import requests\n'), ((10632, 10689), 'os.path.join', 'os.path.join', (['self.cachedir', "('oidc_%s.json' % self.app_id)"], {}), "(self.cachedir, 'oidc_%s.json' % self.app_id)\n", (10644, 10689), False, 'import os\n'), ((19386, 19403), 'urllib.parse.urlencode', 'urlencode', (['rquery'], {}), '(rquery)\n', (19395, 19403), False, 'from urllib.parse import urlencode\n'), ((19584, 19610), 'webbrowser.open', 'webbrowser.open', (['authz_url'], {}), '(authz_url)\n', (19599, 19610), False, 'import webbrowser\n'), ((10932, 10960), 'os.path.isdir', 'os.path.isdir', (['self.cachedir'], {}), '(self.cachedir)\n', (10945, 10960), False, 'import os\n'), ((11019, 11045), 'os.makedirs', 'os.makedirs', (['self.cachedir'], {}), '(self.cachedir)\n', (11030, 11045), False, 'import os\n'), ((11061, 11092), 'os.path.exists', 'os.path.exists', (['self._cachefile'], {}), '(self._cachefile)\n', (11075, 11092), False, 'import os\n'), ((12152, 12161), 'uuid.uuid4', 'uuidgen', ([], {}), '()\n', (12159, 12161), True, 'from uuid import uuid4 as uuidgen\n'), ((11890, 11913), 'json.dumps', 'json.dumps', (['self._cache'], {}), '(self._cache)\n', (11900, 11913), False, 'import json\n'), ((14717, 14728), 'time.time', 'time.time', ([], {}), '()\n', (14726, 14728), False, 'import time\n'), ((17244, 17291), 'wsgiref.simple_server.make_server', 'simple_server.make_server', (['"""0.0.0.0"""', 'port', 'app'], {}), "('0.0.0.0', port, app)\n", (17269, 17291), False, 'from wsgiref import simple_server\n'), ((20747, 20758), 'time.time', 'time.time', ([], {}), '()\n', (20756, 20758), False, 'import time\n'), ((11208, 11222), 'json.dumps', 'json.dumps', (['{}'], {}), '({})\n', (11218, 11222), False, 'import json\n'), ((16930, 16941), 'time.time', 'time.time', ([], {}), '()\n', (16939, 16941), False, 'import time\n')] |
import datetime
import uuid
import simplejson as json
from src.db.s3_client import Client as S3Client
from decimal import Decimal
def get_from_archive(archive_key):
''' Download a VP Save from S3.
:param str archive_key: The vp_save data's location (S3 bucket and file path). This value is required.
'''
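  # Illustrative call (hypothetical bucket and key):
  #   save = get_from_archive('my-archive-bucket/1234-abcd/1234-abcd.json')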
if archive_key is None or '/' not in archive_key:
raise ValueError()
bucket, key = archive_key.split('/', 1)
s3_client = S3Client()
try:
archive_object = json.loads(s3_client.get_object(bucket, key)['Body'].read(),parse_float=Decimal)
except Exception as e:
print('ERROR: Error downloading ' + key + ' from ' + bucket + ' bucket. ERROR\n%s' %e)
raise
return archive_object
def build(vp_save=None):
''' Builds and returns a valid vp_save object.
  Builds a new vp_save object by creating default values for
  required fields and combining them with any of the given attributes.
'''
  if vp_save is None:
    vp_save = {}
  vp_save['PK'] = str(uuid.uuid4())
# Set timestamps (for new data)
now = datetime.datetime.now().isoformat()
vp_save['date_created'] = now
vp_save['last_modified'] = now
vp_save['item_type'] = 'vp_save'
return vp_save
def archive(bucket, vp_save_pk, save_data):
  ''' Archives vp_save data to S3.
Uploads the save data object as a JSON file to S3. The location of the archive
depends on the bucket and the primary key of the save data. If the upload fails,
an exception is raised. If successful, returns the archive location.
:param str bucket: The name of the S3 bucket for the archive. This value is required.
:param str vp_save_pk: The vp_save PK to use as the name of the JSON file. This value is required.
:param obj save_data: The save data object to archive. This value is required.
'''
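  # Illustrative call (hypothetical bucket name):
  #   location = archive('my-archive-bucket', vp_save['PK'], vp_save)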
if bucket is None or len(bucket) <= 0:
raise ValueError()
if vp_save_pk is None or len(vp_save_pk) <= 0:
raise ValueError()
if not save_data:
raise ValueError()
archive_file = __archive_key(save_data) + '/' + vp_save_pk + '.json'
# Upload curation data to S3 archive bucket.
s3_client = S3Client()
try:
s3_client.put_object(
bytes(json.dumps(save_data).encode('UTF-8')),
bucket,
archive_file
)
except Exception as e:
print('ERROR: Error uploading ' + archive_file + ' to ' + bucket + ' bucket. ERROR\n%s' %e)
raise
archive_key_comps = [bucket, archive_file]
return '/'.join(archive_key_comps)
def __archive_key(save_data):
return save_data['PK']
| [
"simplejson.dumps",
"datetime.datetime.now",
"uuid.uuid4",
"src.db.s3_client.Client"
] | [((448, 458), 'src.db.s3_client.Client', 'S3Client', ([], {}), '()\n', (456, 458), True, 'from src.db.s3_client import Client as S3Client\n'), ((2070, 2080), 'src.db.s3_client.Client', 'S3Client', ([], {}), '()\n', (2078, 2080), True, 'from src.db.s3_client import Client as S3Client\n'), ((945, 957), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (955, 957), False, 'import uuid\n'), ((1002, 1025), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1023, 1025), False, 'import datetime\n'), ((2127, 2148), 'simplejson.dumps', 'json.dumps', (['save_data'], {}), '(save_data)\n', (2137, 2148), True, 'import simplejson as json\n')] |
"""
Basic usage
===========
This example presents the basic usage of brokenaxes
"""
import matplotlib.pyplot as plt
from brokenaxes import brokenaxes
import numpy as np
fig = plt.figure(figsize=(5,2))
bax = brokenaxes(xlims=((0, .1), (.4, .7)), ylims=((-1, .7), (.79, 1)), hspace=.05)
x = np.linspace(0, 1, 100)
bax.plot(x, np.sin(10 * x), label='sin')
bax.plot(x, np.cos(10 * x), label='cos')
bax.legend(loc=3)
bax.set_xlabel('time')
bax.set_ylabel('value')
| [
"matplotlib.pyplot.figure",
"brokenaxes.brokenaxes",
"numpy.linspace",
"numpy.cos",
"numpy.sin"
] | [((180, 206), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(5, 2)'}), '(figsize=(5, 2))\n', (190, 206), True, 'import matplotlib.pyplot as plt\n'), ((212, 299), 'brokenaxes.brokenaxes', 'brokenaxes', ([], {'xlims': '((0, 0.1), (0.4, 0.7))', 'ylims': '((-1, 0.7), (0.79, 1))', 'hspace': '(0.05)'}), '(xlims=((0, 0.1), (0.4, 0.7)), ylims=((-1, 0.7), (0.79, 1)),\n hspace=0.05)\n', (222, 299), False, 'from brokenaxes import brokenaxes\n'), ((294, 316), 'numpy.linspace', 'np.linspace', (['(0)', '(1)', '(100)'], {}), '(0, 1, 100)\n', (305, 316), True, 'import numpy as np\n'), ((329, 343), 'numpy.sin', 'np.sin', (['(10 * x)'], {}), '(10 * x)\n', (335, 343), True, 'import numpy as np\n'), ((370, 384), 'numpy.cos', 'np.cos', (['(10 * x)'], {}), '(10 * x)\n', (376, 384), True, 'import numpy as np\n')] |