code (string, lengths 22 to 1.05M) | apis (sequence, lengths 1 to 3.31k) | extract_api (string, lengths 75 to 3.25M)
---|---|---|
from functools import partial
from typing import Any, Optional, List
from torchvision.prototype.transforms import ImageNetEval
from torchvision.transforms.functional import InterpolationMode
from ...models.mobilenetv3 import MobileNetV3, _mobilenet_v3_conf, InvertedResidualConfig
from ._api import WeightsEnum, Weights
from ._meta import _IMAGENET_CATEGORIES
from ._utils import handle_legacy_interface, _ovewrite_named_param
__all__ = [
"MobileNetV3",
"MobileNet_V3_Large_Weights",
"MobileNet_V3_Small_Weights",
"mobilenet_v3_large",
"mobilenet_v3_small",
]
def _mobilenet_v3(
inverted_residual_setting: List[InvertedResidualConfig],
last_channel: int,
weights: Optional[WeightsEnum],
progress: bool,
**kwargs: Any,
) -> MobileNetV3:
if weights is not None:
_ovewrite_named_param(kwargs, "num_classes", len(weights.meta["categories"]))
model = MobileNetV3(inverted_residual_setting, last_channel, **kwargs)
if weights is not None:
model.load_state_dict(weights.get_state_dict(progress=progress))
return model
_COMMON_META = {
"task": "image_classification",
"architecture": "MobileNetV3",
"publication_year": 2019,
"size": (224, 224),
"min_size": (1, 1),
"categories": _IMAGENET_CATEGORIES,
"interpolation": InterpolationMode.BILINEAR,
}
class MobileNet_V3_Large_Weights(WeightsEnum):
ImageNet1K_V1 = Weights(
url="https://download.pytorch.org/models/mobilenet_v3_large-8738ca79.pth",
transforms=partial(ImageNetEval, crop_size=224),
meta={
**_COMMON_META,
"num_params": 5483032,
"recipe": "https://github.com/pytorch/vision/tree/main/references/classification#mobilenetv3-large--small",
"acc@1": 74.042,
"acc@5": 91.340,
},
)
ImageNet1K_V2 = Weights(
url="https://download.pytorch.org/models/mobilenet_v3_large-5c1a4163.pth",
transforms=partial(ImageNetEval, crop_size=224, resize_size=232),
meta={
**_COMMON_META,
"num_params": 5483032,
"recipe": "https://github.com/pytorch/vision/issues/3995#new-recipe-with-reg-tuning",
"acc@1": 75.274,
"acc@5": 92.566,
},
)
default = ImageNet1K_V2
class MobileNet_V3_Small_Weights(WeightsEnum):
ImageNet1K_V1 = Weights(
url="https://download.pytorch.org/models/mobilenet_v3_small-047dcff4.pth",
transforms=partial(ImageNetEval, crop_size=224),
meta={
**_COMMON_META,
"num_params": 2542856,
"recipe": "https://github.com/pytorch/vision/tree/main/references/classification#mobilenetv3-large--small",
"acc@1": 67.668,
"acc@5": 87.402,
},
)
default = ImageNet1K_V1
@handle_legacy_interface(weights=("pretrained", MobileNet_V3_Large_Weights.ImageNet1K_V1))
def mobilenet_v3_large(
*, weights: Optional[MobileNet_V3_Large_Weights] = None, progress: bool = True, **kwargs: Any
) -> MobileNetV3:
weights = MobileNet_V3_Large_Weights.verify(weights)
inverted_residual_setting, last_channel = _mobilenet_v3_conf("mobilenet_v3_large", **kwargs)
return _mobilenet_v3(inverted_residual_setting, last_channel, weights, progress, **kwargs)
@handle_legacy_interface(weights=("pretrained", MobileNet_V3_Small_Weights.ImageNet1K_V1))
def mobilenet_v3_small(
*, weights: Optional[MobileNet_V3_Small_Weights] = None, progress: bool = True, **kwargs: Any
) -> MobileNetV3:
weights = MobileNet_V3_Small_Weights.verify(weights)
inverted_residual_setting, last_channel = _mobilenet_v3_conf("mobilenet_v3_small", **kwargs)
return _mobilenet_v3(inverted_residual_setting, last_channel, weights, progress, **kwargs)
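# A minimal usage sketch (not from the original module), assuming the prototype
# weights API behaves as defined above; in particular, `weights.transforms()` is
# assumed to resolve to the `transforms` partial stored on the enum value.
def _example_usage():
    weights = MobileNet_V3_Large_Weights.ImageNet1K_V2
    preprocess = weights.transforms()  # ImageNetEval(crop_size=224, resize_size=232)
    model = mobilenet_v3_large(weights=weights, progress=True)
    model.eval()
    return model, preprocess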
| [
"functools.partial"
] | [((1531, 1567), 'functools.partial', 'partial', (['ImageNetEval'], {'crop_size': '(224)'}), '(ImageNetEval, crop_size=224)\n', (1538, 1567), False, 'from functools import partial\n'), ((1973, 2026), 'functools.partial', 'partial', (['ImageNetEval'], {'crop_size': '(224)', 'resize_size': '(232)'}), '(ImageNetEval, crop_size=224, resize_size=232)\n', (1980, 2026), False, 'from functools import partial\n'), ((2487, 2523), 'functools.partial', 'partial', (['ImageNetEval'], {'crop_size': '(224)'}), '(ImageNetEval, crop_size=224)\n', (2494, 2523), False, 'from functools import partial\n')] |
from django.urls import re_path
from django.views.generic import TemplateView
from .views import RegisterView, VerifyEmailView
urlpatterns = [
re_path(r'^$', RegisterView.as_view(), name='rest_register'),
re_path(r'^verify-email/$', VerifyEmailView.as_view(), name='rest_verify_email'),
    # This URL is used by django-allauth; the empty TemplateView is defined only so
    # that reverse() works inside the app, e.g. when the verification email is being
    # sent and its content has to be rendered.
    # account_confirm_email - you should override this view to handle the key in
    # your API client and then POST it to the /verify-email/ endpoint
    # (a hedged sketch of that step follows this list).
    # If you don't want to use the API for that step, just use the ConfirmEmailView
    # from django-allauth:
    # https://github.com/pennersr/django-allauth/blob/master/allauth/account/views.py
re_path(r'^account-confirm-email/(?P<key>[-:\w]+)/$', TemplateView.as_view(),
name='account_confirm_email'),
]
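# A hedged sketch (not part of the original urlconf) of the client-side step the
# comments above describe: take the key from the confirmation link and POST it to
# the verify-email endpoint. The host and URL prefix are illustrative assumptions.
def confirm_email_via_api(key: str) -> int:
    import requests  # assumption: available in the API client environment
    response = requests.post(
        "https://api.example.com/rest-auth/verify-email/",
        data={"key": key},
    )
    return response.status_code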
| [
"django.views.generic.TemplateView.as_view"
] | [((971, 993), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {}), '()\n', (991, 993), False, 'from django.views.generic import TemplateView\n')] |
"""dedupper_app URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from contacts import views
admin.autodiscover()
urlpatterns = [
path('', views.index, name='contact_index'),
path('', views.index, name='lead_index'),
path('contacts/', views.contacts, name='contacts'),
path('leads/', views.leads, name='leads'),
path('table/', views.table, name='table'),
path('plotly/', views.plotly, name='plotly'),
# url(r'^keys', views.upload, name='keys'),
# path('key-gen/', views.key_gen, name='key-gen'),
# path('heroku/', generic.ListView.as_view(model=models.Contact), name='heroku'),
# path('run/', views.run, name='run'),
# path('sorted/<id>', views.merge, name='merge'),
# path('sorted/export/<type>', views.download, name='export'),
# path('sorted/report/<type>', views.download_times, name='report'),
]
| [
"django.urls.path",
"django.contrib.admin.autodiscover"
] | [((727, 747), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (745, 747), False, 'from django.contrib import admin\n'), ((769, 812), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""contact_index"""'}), "('', views.index, name='contact_index')\n", (773, 812), False, 'from django.urls import path\n'), ((818, 858), 'django.urls.path', 'path', (['""""""', 'views.index'], {'name': '"""lead_index"""'}), "('', views.index, name='lead_index')\n", (822, 858), False, 'from django.urls import path\n'), ((864, 914), 'django.urls.path', 'path', (['"""contacts/"""', 'views.contacts'], {'name': '"""contacts"""'}), "('contacts/', views.contacts, name='contacts')\n", (868, 914), False, 'from django.urls import path\n'), ((920, 961), 'django.urls.path', 'path', (['"""leads/"""', 'views.leads'], {'name': '"""leads"""'}), "('leads/', views.leads, name='leads')\n", (924, 961), False, 'from django.urls import path\n'), ((967, 1008), 'django.urls.path', 'path', (['"""table/"""', 'views.table'], {'name': '"""table"""'}), "('table/', views.table, name='table')\n", (971, 1008), False, 'from django.urls import path\n'), ((1014, 1058), 'django.urls.path', 'path', (['"""plotly/"""', 'views.plotly'], {'name': '"""plotly"""'}), "('plotly/', views.plotly, name='plotly')\n", (1018, 1058), False, 'from django.urls import path\n')] |
#!/usr/bin/env python
# Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# [START app]
import os
import logging
import transform
import flask
import google.cloud.storage as gcs
# [start config]
app = flask.Flask(__name__)
# Configure this environment variable via app.yaml
CLOUD_STORAGE_BUCKET = os.environ['CLOUD_STORAGE_BUCKET']
#
logging.basicConfig(format='%(levelname)s: %(message)s', level=logging.INFO)
# [end config]
@app.route('/')
def welcome():
return '<html><a href="ingest">ingest last week</a> earthquake data</html>'
@app.route('/ingest')
def ingest_last_week():
try:
# verify that this is a cron job request
is_cron = flask.request.headers['X-Appengine-Cron']
logging.info('Received cron request {}'.format(is_cron))
# create png
url = 'http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_week.csv'
outfile = 'earthquakes.png'
status = 'scheduled ingest of {} to {}'.format(url, outfile)
logging.info(status)
transform.create_png(url, outfile)
# upload to cloud storage
client = gcs.Client()
bucket = client.get_bucket(CLOUD_STORAGE_BUCKET)
blob = gcs.Blob('earthquakes/earthquakes.png', bucket)
blob.upload_from_filename(outfile)
# change permissions
blob.make_public()
status = 'uploaded {} to {}'.format(outfile, blob.name)
logging.info(status)
except KeyError as e:
status = '<html>Sorry, this capability is accessible only by the Cron service, but I got a KeyError for {} -- try invoking it from <a href="{}"> the GCP console / AppEngine / taskqueues </a></html>'.format(
e, 'http://console.cloud.google.com/appengine/taskqueues?tab=CRON')
logging.info('Rejected non-Cron request')
return status
@app.errorhandler(500)
def server_error(e):
logging.exception('An error occurred during a request.')
return """
An internal error occurred: <pre>{}</pre>
See logs for full stacktrace.
""".format(e), 500
if __name__ == '__main__':
app.run(host='0.0.0.0', port=8080, debug=True)
# [END app]
| [
"logging.basicConfig",
"google.cloud.storage.Client",
"flask.Flask",
"logging.exception",
"transform.create_png",
"google.cloud.storage.Blob",
"logging.info"
] | [((727, 748), 'flask.Flask', 'flask.Flask', (['__name__'], {}), '(__name__)\n', (738, 748), False, 'import flask\n'), ((860, 936), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(levelname)s: %(message)s"""', 'level': 'logging.INFO'}), "(format='%(levelname)s: %(message)s', level=logging.INFO)\n", (879, 936), False, 'import logging\n'), ((2400, 2456), 'logging.exception', 'logging.exception', (['"""An error occurred during a request."""'], {}), "('An error occurred during a request.')\n", (2417, 2456), False, 'import logging\n'), ((1517, 1537), 'logging.info', 'logging.info', (['status'], {}), '(status)\n', (1529, 1537), False, 'import logging\n'), ((1546, 1580), 'transform.create_png', 'transform.create_png', (['url', 'outfile'], {}), '(url, outfile)\n', (1566, 1580), False, 'import transform\n'), ((1633, 1645), 'google.cloud.storage.Client', 'gcs.Client', ([], {}), '()\n', (1643, 1645), True, 'import google.cloud.storage as gcs\n'), ((1718, 1765), 'google.cloud.storage.Blob', 'gcs.Blob', (['"""earthquakes/earthquakes.png"""', 'bucket'], {}), "('earthquakes/earthquakes.png', bucket)\n", (1726, 1765), True, 'import google.cloud.storage as gcs\n'), ((1938, 1958), 'logging.info', 'logging.info', (['status'], {}), '(status)\n', (1950, 1958), False, 'import logging\n'), ((2289, 2330), 'logging.info', 'logging.info', (['"""Rejected non-Cron request"""'], {}), "('Rejected non-Cron request')\n", (2301, 2330), False, 'import logging\n')] |
import pyperclip
import random
import string
class Credential:
'''
    Class that models a user's credentials (username, site name, password) and can generate passwords.
'''
credential_list = []
def __init__(self,username,sitename,password):
self.username = username
self.password = password
self.sitename = sitename
def save_credential(self):
'''
        save_credential saves this credential object into credential_list
'''
Credential.credential_list.append(self)
@classmethod
def display_credential(cls, user_name):
'''
Class method to show the list of credentials saved
'''
users_credential_list = []
for credential in cls.credential_list:
if credential.username == user_name:
users_credential_list.append(credential)
return users_credential_list
def delete_credential(self):
'''
        delete_credential removes this saved credential from credential_list
'''
Credential.credential_list.remove(self)
def generate_password(self):
'''
        Generate a random password of a length chosen by the user via input()
'''
chars = "abcdefghijklmnopqrstuvwxyziABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890^?!?$%&/()=?`'+#*'~';:_,.-<>|"
password = ""
print("Use Char list = %s \n" % chars)
length = int(input("[*] Input Password Length: "))
while len(password) != length:
password = password + random.choice(chars)
if len(password) == length:
print("Password: %s" % password)
return password
@classmethod
def find_by_sitename(cls, sitename):
'''
Class method that takes a site name and returns the credential that matches that site
'''
for credential in cls.credential_list:
if credential.sitename == sitename:
return credential
@classmethod
def copy_credential(cls, sitename):
'''
        Class method that copies a credential's password to the clipboard, given its site name
'''
find_credential = Credential.find_by_sitename(sitename)
return pyperclip.copy(find_credential.password)
@classmethod
def credential_exist(cls, sitename):
'''
        Checks whether a credential for the given site name exists in credential_list.
        Returns:
            The site name if a matching credential exists, otherwise an empty string.
'''
the_credential = ""
for credential in Credential.credential_list:
if (credential.sitename == sitename):
the_credential = sitename
return the_credential
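# A minimal usage sketch (not from the original module) exercising the class API
# defined above; the account values are made up for illustration, and copying to
# the clipboard requires a working pyperclip backend.
def _example_credential_usage():
    credential = Credential("wanjiru", "twitter", "s3cretPass")
    credential.save_credential()
    assert Credential.credential_exist("twitter") == "twitter"
    found = Credential.find_by_sitename("twitter")
    Credential.copy_credential("twitter")  # copies found.password to the clipboard
    return Credential.display_credential("wanjiru"), found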
| [
"pyperclip.copy",
"random.choice"
] | [((2094, 2134), 'pyperclip.copy', 'pyperclip.copy', (['find_credential.password'], {}), '(find_credential.password)\n', (2108, 2134), False, 'import pyperclip\n'), ((1419, 1439), 'random.choice', 'random.choice', (['chars'], {}), '(chars)\n', (1432, 1439), False, 'import random\n')] |
import bpy
import random as rnd
from collections import Counter
import itertools as iter
feld_von, feld_bis = -4, 4
spielfeld_von, spielfeld_bis = feld_von-6, feld_bis+6
anz = int((feld_bis-feld_von)**3*.3)
spielfeld = {(rnd.randint(feld_von, feld_bis), rnd.randint(
feld_von, feld_bis), rnd.randint(feld_von, feld_bis)) for _ in range(anz)}
animate_frame = 8
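# Descriptive note (comments added for clarity, not in the original script):
# `nachbarn` yields the 26 cells of the 3-D Moore neighbourhood around `pos`, and
# `nächsteGeneration` applies a 3-D Game-of-Life style rule: a dead cell is born
# with exactly 6 live neighbours, a live cell survives with 5 to 8.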
def nachbarn(pos):
for x,y,z in iter.product(range(-1,2), repeat = 3):
if z == y == x == 0: continue
yield pos[0]+x, pos[1]+y, pos[2]+z
def nächsteGeneration(spielfeld):
nachb = Counter([p for pos in spielfeld for p in nachbarn(pos)])
return {pos for pos, anz in nachb.items() if anz == 6 or (anz in (5, 6, 7, 8) and pos in spielfeld)}
def scale_rotate(ob, scale, rot, fr):
ob.scale = (scale, scale, scale)
ob.rotation_euler.rotate_axis("Z", rot)
ob.keyframe_insert(data_path='rotation_euler', frame=fr)
ob.keyframe_insert(data_path='scale', frame=fr)
bpy.ops.mesh.primitive_cube_add(size=0.001, location=(0, 0, 0))
orig_cube = bpy.context.active_object
n = "cube"
m = orig_cube.data.copy()
cubes = {}
for x,y,z in iter.product(range(spielfeld_von,spielfeld_bis), repeat = 3):
o = bpy.data.objects.new(n, m)
o.location = (x, y, z)
cubes[x, y, z] = o
bpy.context.collection.objects.link(o)
o.select_set(False)
for i in range(200):
print(f'Durchlauf No. {i}, Anz. Zellen = {len(spielfeld)}')
spielfeld2 = nächsteGeneration(spielfeld)
dead = spielfeld - spielfeld2
new = spielfeld2 - spielfeld
spielfeld = spielfeld2
if not new and not dead:
break
for zelle in new | dead:
if zelle not in cubes:
continue
ob = cubes[zelle]
if zelle in new:
scale_rotate(ob, 0.001, -3.141/2, (i-1)*animate_frame)
scale_rotate(ob, 750, 3.141/2, i * animate_frame)
else:
scale_rotate(ob, 750, 3.141/2, (i-1) * animate_frame)
scale_rotate(ob, 0.001, -3.141/2, i * animate_frame)
if not spielfeld:
break
bpy.context.scene.frame_current = 1
| [
"bpy.context.collection.objects.link",
"bpy.data.objects.new",
"random.randint",
"bpy.ops.mesh.primitive_cube_add"
] | [((950, 1013), 'bpy.ops.mesh.primitive_cube_add', 'bpy.ops.mesh.primitive_cube_add', ([], {'size': '(0.001)', 'location': '(0, 0, 0)'}), '(size=0.001, location=(0, 0, 0))\n', (981, 1013), False, 'import bpy\n'), ((1182, 1208), 'bpy.data.objects.new', 'bpy.data.objects.new', (['n', 'm'], {}), '(n, m)\n', (1202, 1208), False, 'import bpy\n'), ((1257, 1295), 'bpy.context.collection.objects.link', 'bpy.context.collection.objects.link', (['o'], {}), '(o)\n', (1292, 1295), False, 'import bpy\n'), ((225, 256), 'random.randint', 'rnd.randint', (['feld_von', 'feld_bis'], {}), '(feld_von, feld_bis)\n', (236, 256), True, 'import random as rnd\n'), ((258, 289), 'random.randint', 'rnd.randint', (['feld_von', 'feld_bis'], {}), '(feld_von, feld_bis)\n', (269, 289), True, 'import random as rnd\n'), ((296, 327), 'random.randint', 'rnd.randint', (['feld_von', 'feld_bis'], {}), '(feld_von, feld_bis)\n', (307, 327), True, 'import random as rnd\n')] |
from dataclasses import dataclass
import numpy as np
import xarray as xr
from power_perceiver.load_prepared_batches.data_sources import PV
from power_perceiver.load_prepared_batches.data_sources.prepared_data_source import XarrayBatch
@dataclass
class ReduceNumPVSystems:
"""Reduce the number of PV systems per example to `requested_num_pv_systems`.
    Randomly select PV systems for each example. If fewer PV systems are available
    than requested, sample with replacement.
This is implemented as an xr_batch_processor so it can run after
SelectPVSystemsNearCenterOfImage.
"""
requested_num_pv_systems: int
def __post_init__(self):
self.rng = np.random.default_rng() # Seeded by seed_rngs worker_init_function
def __call__(self, xr_batch: XarrayBatch) -> XarrayBatch:
pv_batch = xr_batch[PV]
num_examples = len(pv_batch.example)
selection = np.zeros(shape=(num_examples, self.requested_num_pv_systems), dtype=np.int32)
for example_i in range(num_examples):
pv_mask_for_example = pv_batch.pv_mask.isel(example=example_i).values
all_indicies = np.nonzero(pv_mask_for_example)[0]
# Only allow a PV system to be chosen multiple times for this example if there are
# less available PV systems than requested PV systems.
replace = len(all_indicies) < self.requested_num_pv_systems
chosen_indicies = self.rng.choice(
all_indicies, size=self.requested_num_pv_systems, replace=replace
)
selection[example_i] = chosen_indicies
selection = xr.DataArray(selection, dims=("example", "pv_system"))
pv_batch = pv_batch.isel(pv_system=selection)
xr_batch[PV] = pv_batch
return xr_batch
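# A standalone sketch (not part of the original class) of the per-example selection
# rule implemented in __call__ above: sample with replacement only when fewer PV
# systems are available than requested.
def _select_pv_indices(
    available_indices: np.ndarray, requested: int, rng: np.random.Generator
) -> np.ndarray:
    replace = len(available_indices) < requested
    return rng.choice(available_indices, size=requested, replace=replace)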
| [
"numpy.nonzero",
"numpy.zeros",
"numpy.random.default_rng",
"xarray.DataArray"
] | [((713, 736), 'numpy.random.default_rng', 'np.random.default_rng', ([], {}), '()\n', (734, 736), True, 'import numpy as np\n'), ((942, 1019), 'numpy.zeros', 'np.zeros', ([], {'shape': '(num_examples, self.requested_num_pv_systems)', 'dtype': 'np.int32'}), '(shape=(num_examples, self.requested_num_pv_systems), dtype=np.int32)\n', (950, 1019), True, 'import numpy as np\n'), ((1659, 1713), 'xarray.DataArray', 'xr.DataArray', (['selection'], {'dims': "('example', 'pv_system')"}), "(selection, dims=('example', 'pv_system'))\n", (1671, 1713), True, 'import xarray as xr\n'), ((1175, 1206), 'numpy.nonzero', 'np.nonzero', (['pv_mask_for_example'], {}), '(pv_mask_for_example)\n', (1185, 1206), True, 'import numpy as np\n')] |
#encoding=utf8
# Generate a new log file each day (daily rotation)
import logging
import time
from logging.handlers import TimedRotatingFileHandler
#----------------------------------------------------------------------
if __name__ == "__main__":
logFilePath = "timed_test.log"
logger = logging.getLogger("YouLoggerName")
logger.setLevel(logging.INFO)
handler = TimedRotatingFileHandler(logFilePath,
when="d",
interval=1,
backupCount=7)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
handler.setLevel(logging.INFO)
logger.addHandler(handler)
for i in range(6):
logger.info("This is a info!")
logger.debug("This is a debug!")
        # time.sleep(61)
| [
"logging.getLogger",
"logging.Formatter",
"logging.handlers.TimedRotatingFileHandler"
] | [((257, 291), 'logging.getLogger', 'logging.getLogger', (['"""YouLoggerName"""'], {}), "('YouLoggerName')\n", (274, 291), False, 'import logging\n'), ((341, 415), 'logging.handlers.TimedRotatingFileHandler', 'TimedRotatingFileHandler', (['logFilePath'], {'when': '"""d"""', 'interval': '(1)', 'backupCount': '(7)'}), "(logFilePath, when='d', interval=1, backupCount=7)\n", (365, 415), False, 'from logging.handlers import TimedRotatingFileHandler\n'), ((549, 622), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s - %(name)s - %(levelname)s - %(message)s"""'], {}), "('%(asctime)s - %(name)s - %(levelname)s - %(message)s')\n", (566, 622), False, 'import logging\n')] |
from operator import eq
class PersonAction:
def __init__(self, action):
self.action = action
def __str__(self): return self.action
def __eq__(self, other):
return eq(self.action, other.action)
# Necessary when __cmp__ or __eq__ is defined
# in order to make this class usable as a
# dictionary key:
def __hash__(self):
return hash(self.action)
# Static fields; an enumeration of instances:
PersonAction.compliment = PersonAction("person compliments")
PersonAction.informing = PersonAction("person gives information about the service order")
PersonAction.query = PersonAction("person wants to know about his/her order")
PersonAction.angry = PersonAction("person is pissed off")
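# A minimal usage sketch (not from the original module): because __eq__ and __hash__
# both delegate to the action string, the enumeration instances work as dictionary
# keys and compare equal to freshly constructed instances with the same action.
_example_responses = {
    PersonAction.compliment: "thank the customer",
    PersonAction.angry: "escalate to a supervisor",
}
assert PersonAction("person compliments") in _example_responses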
| [
"operator.eq"
] | [((195, 224), 'operator.eq', 'eq', (['self.action', 'other.action'], {}), '(self.action, other.action)\n', (197, 224), False, 'from operator import eq\n')] |
# Tool Name :- MyServer
# Author :- LordReaper
# Date :- 13/11/2018 - 9/11/2019
# Powered By :- H1ckPro Software's
import sys
import os
from time import sleep
from core.system import *
if len(sys.argv)>1:
pass
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
sys.exit()
if sys.argv[1]=="-s":
if len(sys.argv)==2:
if system=="ubuntu":
os.system("sudo python3 core/s.py "+sys.argv[1])
else:
os.system("python3 core/s.py "+sys.argv[1])
elif len(sys.argv)==3:
if sys.argv[2]=="apache":
if system=="ubuntu":
os.system("sudo python3 core/server.py -apa")
else:
os.system("python3 core/server.py -apa")
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif len(sys.argv)==6:
if sys.argv[2]=="-php":
if system=="ubuntu":
os.system("sudo python3 core/server.py -php "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
os.system("python3 core/server.py -php "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
elif sys.argv[2]=="-py":
if system=="ubuntu":
os.system("sudo python3 core/server.py -py "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
os.system("python3 core/server.py -py "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
elif sys.argv[2]=="-ng":
if system=="ubuntu":
os.system("sudo python3 core/server.py -ng "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
os.system("python3 core/server.py -ng "+sys.argv[3]+" "+sys.argv[4]+" "+sys.argv[5])
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif len(sys.argv)==5:
if system=="ubuntu":
os.system("sudo python3 core/server.py -d "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
os.system("python3 core/server.py -d "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif sys.argv[1]=="-h":
if len(sys.argv)==2:
if system=="ubuntu":
os.system("sudo python3 core/s.py "+sys.argv[1])
else:
os.system("python3 core/s.py "+sys.argv[1])
elif len(sys.argv)==5:
if system=="ubuntu":
os.system("sudo python3 core/host.py "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
os.system("python3 core/host.py "+sys.argv[2]+" "+sys.argv[3]+" "+sys.argv[4])
else:
print ("error : invalid arguments")
print ("use : myserver --help for more information")
elif sys.argv[1]=="-db":
if len(sys.argv)==3:
if sys.argv[2]=="start":
if system=="ubuntu":
os.system("sudo python3 core/mysql.py "+sys.argv[2])
else:
os.system("python3 core/mysql.py "+sys.argv[2])
elif sys.argv[2]=="stop":
if system=="ubuntu":
os.system("sudo python3 core/mysql.py "+sys.argv[2])
else:
os.system("python3 core/mysql.py "+sys.argv[2])
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
elif sys.argv[1]=="rm":
if len(sys.argv)==3:
if sys.argv[2]=="-T" or sys.argv[2]=="-t":
if system=="ubuntu":
os.system("sudo python3 core/un.py")
else:
os.system("python3 core/un.py")
else:
print ("error : invalid arguments")
print ("use : myserver --help for more information")
else:
print ("error : invalid arguments")
print ("use : myserver --help for more information")
elif sys.argv[1]=="update":
if system=="ubuntu":
os.system("sudo python3 core/upd.py")
else:
os.system("python3 core/upd.py")
elif sys.argv[1]=="start":
if system=="ubuntu":
os.system("sudo python3 .MyServer.py")
else:
os.system("python3 .MyServer.py")
elif sys.argv[1]=="--help" or sys.argv[1]=="-help" or sys.argv[1]=="help":
print ("")
print ("Usage: myserver [command]... [arguments]...")
print ("")
print (" Commands:")
print (" -s <hostname> <port> <path> to start default localhost server.")
print (" -s -ng <hostname> <port> <path> to start php localhost server.")
print (" -s -php <hostname> <port> <path> to start php localhost server.")
print (" -s -py <hostname> <port> <path> to start python localhost server.")
print (" -h <hostname> <localhost_port> <port> to access localhost server on internet.")
print (" -db [start/stop] to start/stop MySQL database server.")
print (" -s apache to start apache web server.")
print (" update update MyServer.")
print (" rm -t uninstall MyServer.")
print (" start start MyServer menu.")
print ("")
else:
print ("error : invalid arguments !!")
print ("use : myserver --help for more information")
| [
"os.system",
"sys.exit"
] | [((319, 329), 'sys.exit', 'sys.exit', ([], {}), '()\n', (327, 329), False, 'import sys\n'), ((408, 458), 'os.system', 'os.system', (["('sudo python3 core/s.py ' + sys.argv[1])"], {}), "('sudo python3 core/s.py ' + sys.argv[1])\n", (417, 458), False, 'import os\n'), ((473, 518), 'os.system', 'os.system', (["('python3 core/s.py ' + sys.argv[1])"], {}), "('python3 core/s.py ' + sys.argv[1])\n", (482, 518), False, 'import os\n'), ((2183, 2233), 'os.system', 'os.system', (["('sudo python3 core/s.py ' + sys.argv[1])"], {}), "('sudo python3 core/s.py ' + sys.argv[1])\n", (2192, 2233), False, 'import os\n'), ((2248, 2293), 'os.system', 'os.system', (["('python3 core/s.py ' + sys.argv[1])"], {}), "('python3 core/s.py ' + sys.argv[1])\n", (2257, 2293), False, 'import os\n'), ((608, 653), 'os.system', 'os.system', (['"""sudo python3 core/server.py -apa"""'], {}), "('sudo python3 core/server.py -apa')\n", (617, 653), False, 'import os\n'), ((674, 714), 'os.system', 'os.system', (['"""python3 core/server.py -apa"""'], {}), "('python3 core/server.py -apa')\n", (683, 714), False, 'import os\n'), ((2349, 2446), 'os.system', 'os.system', (["('sudo python3 core/host.py ' + sys.argv[2] + ' ' + sys.argv[3] + ' ' + sys\n .argv[4])"], {}), "('sudo python3 core/host.py ' + sys.argv[2] + ' ' + sys.argv[3] +\n ' ' + sys.argv[4])\n", (2358, 2446), False, 'import os\n'), ((2449, 2541), 'os.system', 'os.system', (["('python3 core/host.py ' + sys.argv[2] + ' ' + sys.argv[3] + ' ' + sys.argv[4])"], {}), "('python3 core/host.py ' + sys.argv[2] + ' ' + sys.argv[3] + ' ' +\n sys.argv[4])\n", (2458, 2541), False, 'import os\n'), ((919, 1024), 'os.system', 'os.system', (["('sudo python3 core/server.py -php ' + sys.argv[3] + ' ' + sys.argv[4] +\n ' ' + sys.argv[5])"], {}), "('sudo python3 core/server.py -php ' + sys.argv[3] + ' ' + sys.\n argv[4] + ' ' + sys.argv[5])\n", (928, 1024), False, 'import os\n'), ((1030, 1129), 'os.system', 'os.system', (["('python3 core/server.py -php ' + sys.argv[3] + ' ' + sys.argv[4] + ' ' +\n sys.argv[5])"], {}), "('python3 core/server.py -php ' + sys.argv[3] + ' ' + sys.argv[4] +\n ' ' + sys.argv[5])\n", (1039, 1129), False, 'import os\n'), ((1806, 1909), 'os.system', 'os.system', (["('sudo python3 core/server.py -d ' + sys.argv[2] + ' ' + sys.argv[3] + ' ' +\n sys.argv[4])"], {}), "('sudo python3 core/server.py -d ' + sys.argv[2] + ' ' + sys.argv[\n 3] + ' ' + sys.argv[4])\n", (1815, 1909), False, 'import os\n'), ((1911, 2008), 'os.system', 'os.system', (["('python3 core/server.py -d ' + sys.argv[2] + ' ' + sys.argv[3] + ' ' + sys\n .argv[4])"], {}), "('python3 core/server.py -d ' + sys.argv[2] + ' ' + sys.argv[3] +\n ' ' + sys.argv[4])\n", (1920, 2008), False, 'import os\n'), ((2748, 2802), 'os.system', 'os.system', (["('sudo python3 core/mysql.py ' + sys.argv[2])"], {}), "('sudo python3 core/mysql.py ' + sys.argv[2])\n", (2757, 2802), False, 'import os\n'), ((2821, 2870), 'os.system', 'os.system', (["('python3 core/mysql.py ' + sys.argv[2])"], {}), "('python3 core/mysql.py ' + sys.argv[2])\n", (2830, 2870), False, 'import os\n'), ((3772, 3809), 'os.system', 'os.system', (['"""sudo python3 core/upd.py"""'], {}), "('sudo python3 core/upd.py')\n", (3781, 3809), False, 'import os\n'), ((3822, 3854), 'os.system', 'os.system', (['"""python3 core/upd.py"""'], {}), "('python3 core/upd.py')\n", (3831, 3854), False, 'import os\n'), ((1180, 1284), 'os.system', 'os.system', (["('sudo python3 core/server.py -py ' + sys.argv[3] + ' ' + sys.argv[4] + ' ' +\n sys.argv[5])"], {}), "('sudo python3 
core/server.py -py ' + sys.argv[3] + ' ' + sys.argv\n [4] + ' ' + sys.argv[5])\n", (1189, 1284), False, 'import os\n'), ((1290, 1388), 'os.system', 'os.system', (["('python3 core/server.py -py ' + sys.argv[3] + ' ' + sys.argv[4] + ' ' +\n sys.argv[5])"], {}), "('python3 core/server.py -py ' + sys.argv[3] + ' ' + sys.argv[4] +\n ' ' + sys.argv[5])\n", (1299, 1388), False, 'import os\n'), ((2934, 2988), 'os.system', 'os.system', (["('sudo python3 core/mysql.py ' + sys.argv[2])"], {}), "('sudo python3 core/mysql.py ' + sys.argv[2])\n", (2943, 2988), False, 'import os\n'), ((3007, 3056), 'os.system', 'os.system', (["('python3 core/mysql.py ' + sys.argv[2])"], {}), "('python3 core/mysql.py ' + sys.argv[2])\n", (3016, 3056), False, 'import os\n'), ((3409, 3445), 'os.system', 'os.system', (['"""sudo python3 core/un.py"""'], {}), "('sudo python3 core/un.py')\n", (3418, 3445), False, 'import os\n'), ((3466, 3497), 'os.system', 'os.system', (['"""python3 core/un.py"""'], {}), "('python3 core/un.py')\n", (3475, 3497), False, 'import os\n'), ((3910, 3948), 'os.system', 'os.system', (['"""sudo python3 .MyServer.py"""'], {}), "('sudo python3 .MyServer.py')\n", (3919, 3948), False, 'import os\n'), ((3962, 3995), 'os.system', 'os.system', (['"""python3 .MyServer.py"""'], {}), "('python3 .MyServer.py')\n", (3971, 3995), False, 'import os\n'), ((1439, 1543), 'os.system', 'os.system', (["('sudo python3 core/server.py -ng ' + sys.argv[3] + ' ' + sys.argv[4] + ' ' +\n sys.argv[5])"], {}), "('sudo python3 core/server.py -ng ' + sys.argv[3] + ' ' + sys.argv\n [4] + ' ' + sys.argv[5])\n", (1448, 1543), False, 'import os\n'), ((1549, 1647), 'os.system', 'os.system', (["('python3 core/server.py -ng ' + sys.argv[3] + ' ' + sys.argv[4] + ' ' +\n sys.argv[5])"], {}), "('python3 core/server.py -ng ' + sys.argv[3] + ' ' + sys.argv[4] +\n ' ' + sys.argv[5])\n", (1558, 1647), False, 'import os\n')] |
""" test gen_epub. """
from tmx2epub.gen_epub import gen_epub
def test_gen_epub2():
""" test_gen_epub2. """
from pathlib import Path
infile = r"tests\2.tmx"
stem = Path(infile).absolute().stem
outfile = f"{Path(infile).absolute().parent / stem}.epub"
assert gen_epub(infile, debug=True) == outfile
# assert 0
| [
"tmx2epub.gen_epub.gen_epub",
"pathlib.Path"
] | [((285, 313), 'tmx2epub.gen_epub.gen_epub', 'gen_epub', (['infile'], {'debug': '(True)'}), '(infile, debug=True)\n', (293, 313), False, 'from tmx2epub.gen_epub import gen_epub\n'), ((183, 195), 'pathlib.Path', 'Path', (['infile'], {}), '(infile)\n', (187, 195), False, 'from pathlib import Path\n'), ((229, 241), 'pathlib.Path', 'Path', (['infile'], {}), '(infile)\n', (233, 241), False, 'from pathlib import Path\n')] |
import json
import time
import random
import logging
import requests
import os
logging.basicConfig(level=logging.INFO)
base_url = os.getenv('BASE_URL', 'http://localhost') + ':' + os.getenv(
'DAPR_HTTP_PORT', '3500')
PUBSUB_NAME = 'order_pub_sub'
TOPIC = 'orders'
logging.info('Publishing to baseURL: %s, Pubsub Name: %s, Topic: %s' % (
base_url, PUBSUB_NAME, TOPIC))
for i in range(1, 10):
order = {'orderId': i}
# Publish an event/message using Dapr PubSub via HTTP Post
result = requests.post(
url='%s/v1.0/publish/%s/%s' % (base_url, PUBSUB_NAME, TOPIC),
json=order
)
logging.info('Published data: ' + json.dumps(order))
time.sleep(1)
| [
"logging.basicConfig",
"requests.post",
"os.getenv",
"json.dumps",
"time.sleep",
"logging.info"
] | [((80, 119), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (99, 119), False, 'import logging\n'), ((286, 393), 'logging.info', 'logging.info', (["('Publishing to baseURL: %s, Pubsub Name: %s, Topic: %s' % (base_url,\n PUBSUB_NAME, TOPIC))"], {}), "('Publishing to baseURL: %s, Pubsub Name: %s, Topic: %s' % (\n base_url, PUBSUB_NAME, TOPIC))\n", (298, 393), False, 'import logging\n'), ((182, 217), 'os.getenv', 'os.getenv', (['"""DAPR_HTTP_PORT"""', '"""3500"""'], {}), "('DAPR_HTTP_PORT', '3500')\n", (191, 217), False, 'import os\n'), ((530, 621), 'requests.post', 'requests.post', ([], {'url': "('%s/v1.0/publish/%s/%s' % (base_url, PUBSUB_NAME, TOPIC))", 'json': 'order'}), "(url='%s/v1.0/publish/%s/%s' % (base_url, PUBSUB_NAME, TOPIC),\n json=order)\n", (543, 621), False, 'import requests\n'), ((702, 715), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (712, 715), False, 'import time\n'), ((132, 173), 'os.getenv', 'os.getenv', (['"""BASE_URL"""', '"""http://localhost"""'], {}), "('BASE_URL', 'http://localhost')\n", (141, 173), False, 'import os\n'), ((678, 695), 'json.dumps', 'json.dumps', (['order'], {}), '(order)\n', (688, 695), False, 'import json\n')] |
# Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Utilities for Markov Chain Monte Carlo (MCMC) sampling.
@@effective_sample_size
@@potential_scale_reduction
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow.compat.v2 as tf
from tensorflow_probability.python import stats
from tensorflow_probability.python.internal import assert_util
from tensorflow_probability.python.internal import dtype_util
from tensorflow_probability.python.internal import nest_util
from tensorflow_probability.python.internal import prefer_static as ps
from tensorflow_probability.python.internal import tensorshape_util
from tensorflow.python.util import nest # pylint: disable=g-direct-tensorflow-import
__all__ = [
'effective_sample_size',
'potential_scale_reduction',
]
def effective_sample_size(states,
filter_threshold=0.,
filter_beyond_lag=None,
filter_beyond_positive_pairs=False,
cross_chain_dims=None,
validate_args=False,
name=None):
"""Estimate a lower bound on effective sample size for each independent chain.
Roughly speaking, "effective sample size" (ESS) is the size of an iid sample
with the same variance as `state`.
More precisely, given a stationary sequence of possibly correlated random
variables `X_1, X_2, ..., X_N`, identically distributed, ESS is the
number such that
```
Variance{ N**-1 * Sum{X_i} } = ESS**-1 * Variance{ X_1 }.
```
If the sequence is uncorrelated, `ESS = N`. If the sequence is positively
auto-correlated, `ESS` will be less than `N`. If there are negative
correlations, then `ESS` can exceed `N`.
Some math shows that, with `R_k` the auto-correlation sequence,
`R_k := Covariance{X_1, X_{1+k}} / Variance{X_1}`, we have
```
ESS(N) = N / [ 1 + 2 * ( (N - 1) / N * R_1 + ... + 1 / N * R_{N-1} ) ]
```
This function estimates the above by first estimating the auto-correlation.
Since `R_k` must be estimated using only `N - k` samples, it becomes
progressively noisier for larger `k`. For this reason, the summation over
`R_k` should be truncated at some number `filter_beyond_lag < N`. This
function provides two methods to perform this truncation.
* `filter_threshold` -- since many MCMC methods generate chains where `R_k >
0`, a reasonable criterion is to truncate at the first index where the
estimated auto-correlation becomes negative. This method does not estimate
the `ESS` of super-efficient chains (where `ESS > N`) correctly.
* `filter_beyond_positive_pairs` -- reversible MCMC chains produce
an auto-correlation sequence with the property that pairwise sums of the
elements of that sequence are positive [Geyer][1], i.e.
`R_{2k} + R_{2k + 1} > 0` for `k in {0, ..., N/2}`. Deviations are only
possible due to noise. This method truncates the auto-correlation sequence
where the pairwise sums become non-positive.
The arguments `filter_beyond_lag`, `filter_threshold` and
`filter_beyond_positive_pairs` are filters intended to remove noisy tail terms
from `R_k`. You can combine `filter_beyond_lag` with `filter_threshold` or
  `filter_beyond_positive_pairs`. E.g., combining `filter_beyond_lag` and
`filter_beyond_positive_pairs` means that terms are removed if they were to be
filtered under the `filter_beyond_lag` OR `filter_beyond_positive_pairs`
criteria.
This function can also compute cross-chain ESS following
[Vehtari et al. (2019)][2] by specifying the `cross_chain_dims` argument.
Cross-chain ESS takes into account the cross-chain variance to reduce the ESS
in cases where the chains are not mixing well. In general, this will be a
smaller number than computing the ESS for individual chains and then summing
them. In an extreme case where the chains have fallen into K non-mixing modes,
this function will return ESS ~ K. Even when chains are mixing well it is
  still preferable to compute cross-chain ESS via this method because it will
reduce the noise in the estimate of `R_k`, reducing the need for truncation.
Args:
states: `Tensor` or Python structure of `Tensor` objects. Dimension zero
should index identically distributed states.
filter_threshold: `Tensor` or Python structure of `Tensor` objects. Must
broadcast with `state`. The sequence of auto-correlations is truncated
after the first appearance of a term less than `filter_threshold`.
Setting to `None` means we use no threshold filter. Since `|R_k| <= 1`,
setting to any number less than `-1` has the same effect. Ignored if
`filter_beyond_positive_pairs` is `True`.
filter_beyond_lag: `Tensor` or Python structure of `Tensor` objects. Must
be `int`-like and scalar valued. The sequence of auto-correlations is
truncated to this length. Setting to `None` means we do not filter based
on the size of lags.
filter_beyond_positive_pairs: Python boolean. If `True`, only consider the
initial auto-correlation sequence where the pairwise sums are positive.
cross_chain_dims: An integer `Tensor` or a structure of integer `Tensors`
corresponding to each state component. If a list of `states` is provided,
then this argument should also be a list of the same length. Which
dimensions of `states` to treat as independent chains that ESS will be
summed over. If `None`, no summation is performed. Note this requires at
least 2 chains.
validate_args: Whether to add runtime checks of argument validity. If False,
and arguments are incorrect, correct behavior is not guaranteed.
name: `String` name to prepend to created ops.
Returns:
ess: `Tensor` structure parallel to `states`. The effective sample size of
each component of `states`. If `cross_chain_dims` is None, the shape will
be `states.shape[1:]`. Otherwise, the shape is `tf.reduce_mean(states,
cross_chain_dims).shape[1:]`.
Raises:
ValueError: If `states` and `filter_threshold` or `states` and
`filter_beyond_lag` are both structures of different shapes.
ValueError: If `cross_chain_dims` is not `None` and there are less than 2
chains.
#### Examples
We use ESS to estimate standard error.
```
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
target = tfd.MultivariateNormalDiag(scale_diag=[1., 2.])
# Get 1000 states from one chain.
states = tfp.mcmc.sample_chain(
num_burnin_steps=200,
num_results=1000,
current_state=tf.constant([0., 0.]),
trace_fn=None,
kernel=tfp.mcmc.HamiltonianMonteCarlo(
target_log_prob_fn=target.log_prob,
step_size=0.05,
num_leapfrog_steps=20))
states.shape
==> (1000, 2)
ess = effective_sample_size(states, filter_beyond_positive_pairs=True)
==> Shape (2,) Tensor
mean, variance = tf.nn.moments(states, axis=0)
standard_error = tf.sqrt(variance / ess)
```
#### References
  [1]: Charles J. Geyer, Practical Markov chain Monte Carlo (with discussion).
Statistical Science, 7:473-511, 1992.
  [2]: Aki Vehtari, Andrew Gelman, Daniel Simpson, Bob Carpenter, Paul-Christian Burkner. Rank-normalization, folding, and localization: An improved R-hat
for assessing convergence of MCMC, 2019. Retrieved from
http://arxiv.org/abs/1903.08008
"""
if cross_chain_dims is None:
cross_chain_dims = nest_util.broadcast_structure(states, None)
filter_beyond_lag = nest_util.broadcast_structure(states, filter_beyond_lag)
filter_threshold = nest_util.broadcast_structure(states, filter_threshold)
filter_beyond_positive_pairs = nest_util.broadcast_structure(
states, filter_beyond_positive_pairs)
# Process items, one at a time.
def single_state(*args):
return _effective_sample_size_single_state(
*args, validate_args=validate_args)
with tf.name_scope('effective_sample_size' if name is None else name):
return nest.map_structure_up_to(
states,
single_state,
states, filter_beyond_lag, filter_threshold,
filter_beyond_positive_pairs, cross_chain_dims)
def _effective_sample_size_single_state(states, filter_beyond_lag,
filter_threshold,
filter_beyond_positive_pairs,
cross_chain_dims,
validate_args):
"""ESS computation for one single Tensor argument."""
with tf.name_scope('effective_sample_size_single_state'):
states = tf.convert_to_tensor(states, name='states')
dt = states.dtype
# filter_beyond_lag == None ==> auto_corr is the full sequence.
auto_cov = stats.auto_correlation(
states, axis=0, max_lags=filter_beyond_lag, normalize=False)
n = _axis_size(states, axis=0)
if cross_chain_dims is not None:
num_chains = _axis_size(states, cross_chain_dims)
num_chains_ = tf.get_static_value(num_chains)
assertions = []
msg = ('When `cross_chain_dims` is not `None`, there must be > 1 chain '
'in `states`.')
if num_chains_ is not None:
if num_chains_ < 2:
raise ValueError(msg)
elif validate_args:
assertions.append(
assert_util.assert_greater(num_chains, 1., message=msg))
with tf.control_dependencies(assertions):
# We're computing the R[k] from equation 10 of Vehtari et al.
# (2019):
#
# R[k] := 1 - (W - 1/C * Sum_{c=1}^C s_c**2 R[k, c]) / (var^+),
#
# where:
# C := number of chains
# N := length of chains
# x_hat[c] := 1 / N Sum_{n=1}^N x[n, c], chain mean.
# x_hat := 1 / C Sum_{c=1}^C x_hat[c], overall mean.
# W := 1/C Sum_{c=1}^C s_c**2, within-chain variance.
# B := N / (C - 1) Sum_{c=1}^C (x_hat[c] - x_hat)**2, between chain
# variance.
# s_c**2 := 1 / (N - 1) Sum_{n=1}^N (x[n, c] - x_hat[c])**2, chain
# variance
# R[k, m] := auto_corr[k, m, ...], auto-correlation indexed by chain.
# var^+ := (N - 1) / N * W + B / N
cross_chain_dims = ps.non_negative_axis(
cross_chain_dims, ps.rank(states))
# B / N
between_chain_variance_div_n = _reduce_variance(
tf.reduce_mean(states, axis=0),
biased=False, # This makes the denominator be C - 1.
axis=cross_chain_dims - 1)
# W * (N - 1) / N
biased_within_chain_variance = tf.reduce_mean(auto_cov[0],
cross_chain_dims - 1)
# var^+
approx_variance = (
biased_within_chain_variance + between_chain_variance_div_n)
# 1/C * Sum_{c=1}^C s_c**2 R[k, c]
mean_auto_cov = tf.reduce_mean(auto_cov, cross_chain_dims)
auto_corr = 1. - (biased_within_chain_variance -
mean_auto_cov) / approx_variance
else:
auto_corr = auto_cov / auto_cov[:1]
num_chains = 1
# With R[k] := auto_corr[k, ...],
# ESS = N / {1 + 2 * Sum_{k=1}^N R[k] * (N - k) / N}
# = N / {-1 + 2 * Sum_{k=0}^N R[k] * (N - k) / N} (since R[0] = 1)
# approx N / {-1 + 2 * Sum_{k=0}^M R[k] * (N - k) / N}
# where M is the filter_beyond_lag truncation point chosen above.
# Get the factor (N - k) / N, and give it shape [M, 1,...,1], having total
# ndims the same as auto_corr
k = tf.range(0., _axis_size(auto_corr, axis=0))
nk_factor = (n - k) / n
if tensorshape_util.rank(auto_corr.shape) is not None:
new_shape = [-1] + [1] * (tensorshape_util.rank(auto_corr.shape) - 1)
else:
new_shape = tf.concat(
([-1],
tf.ones([tf.rank(auto_corr) - 1], dtype=tf.int32)),
axis=0)
nk_factor = tf.reshape(nk_factor, new_shape)
weighted_auto_corr = nk_factor * auto_corr
if filter_beyond_positive_pairs:
def _sum_pairs(x):
x_len = ps.shape(x)[0]
# For odd sequences, we drop the final value.
x = x[:x_len - x_len % 2]
new_shape = ps.concat([[x_len // 2, 2], ps.shape(x)[1:]], axis=0)
return tf.reduce_sum(tf.reshape(x, new_shape), 1)
# Pairwise sums are all positive for auto-correlation spectra derived from
# reversible MCMC chains.
# E.g. imagine the pairwise sums are [0.2, 0.1, -0.1, -0.2]
# Step 1: mask = [False, False, True, True]
mask = _sum_pairs(auto_corr) < 0.
# Step 2: mask = [0, 0, 1, 1]
mask = tf.cast(mask, dt)
# Step 3: mask = [0, 0, 1, 2]
mask = tf.cumsum(mask, axis=0)
# Step 4: mask = [1, 1, 0, 0]
mask = tf.maximum(1. - mask, 0.)
# N.B. this reduces the length of weighted_auto_corr by a factor of 2.
# It still works fine in the formula below.
weighted_auto_corr = _sum_pairs(weighted_auto_corr) * mask
elif filter_threshold is not None:
filter_threshold = tf.convert_to_tensor(
filter_threshold, dtype=dt, name='filter_threshold')
# Get a binary mask to zero out values of auto_corr below the threshold.
# mask[i, ...] = 1 if auto_corr[j, ...] > threshold for all j <= i,
# mask[i, ...] = 0, otherwise.
# So, along dimension zero, the mask will look like [1, 1, ..., 0, 0,...]
# Building step by step,
# Assume auto_corr = [1, 0.5, 0.0, 0.3], and filter_threshold = 0.2.
# Step 1: mask = [False, False, True, False]
mask = auto_corr < filter_threshold
# Step 2: mask = [0, 0, 1, 0]
mask = tf.cast(mask, dtype=dt)
# Step 3: mask = [0, 0, 1, 1]
mask = tf.cumsum(mask, axis=0)
# Step 4: mask = [1, 1, 0, 0]
mask = tf.maximum(1. - mask, 0.)
weighted_auto_corr *= mask
return num_chains * n / (-1 + 2 * tf.reduce_sum(weighted_auto_corr, axis=0))
def potential_scale_reduction(chains_states,
independent_chain_ndims=1,
split_chains=False,
validate_args=False,
name=None):
"""<NAME> Rubin (1992)'s potential scale reduction for chain convergence.
Given `N > 1` states from each of `C > 1` independent chains, the potential
scale reduction factor, commonly referred to as R-hat, measures convergence of
the chains (to the same target) by testing for equality of means.
Specifically, R-hat measures the degree to which variance (of the means)
between chains exceeds what one would expect if the chains were identically
distributed. See [Gelman and Rubin (1992)][1]; [Brooks and Gelman (1998)][2].
Some guidelines:
* The initial state of the chains should be drawn from a distribution
overdispersed with respect to the target.
* If all chains converge to the target, then as `N --> infinity`, R-hat --> 1.
Before that, R-hat > 1 (except in pathological cases, e.g. if the chain
paths were identical).
* The above holds for any number of chains `C > 1`. Increasing `C` does
improve effectiveness of the diagnostic.
* Sometimes, R-hat < 1.2 is used to indicate approximate convergence, but of
course this is problem-dependent. See [Brooks and Gelman (1998)][2].
* R-hat only measures non-convergence of the mean. If higher moments, or
other statistics are desired, a different diagnostic should be used. See
[Brooks and Gelman (1998)][2].
Args:
chains_states: `Tensor` or Python structure of `Tensor`s representing the
states of a Markov Chain at each result step. The `ith` state is
assumed to have shape `[Ni, Ci1, Ci2,...,CiD] + A`.
Dimension `0` indexes the `Ni > 1` result steps of the Markov Chain.
Dimensions `1` through `D` index the `Ci1 x ... x CiD` independent
chains to be tested for convergence to the same target.
The remaining dimensions, `A`, can have any shape (even empty).
independent_chain_ndims: Integer type `Tensor` with value `>= 1` giving the
number of dimensions, from `dim = 1` to `dim = D`, holding independent
chain results to be tested for convergence.
split_chains: Python `bool`. If `True`, divide samples from each chain into
first and second halves, treating these as separate chains. This makes
R-hat more robust to non-stationary chains, and is recommended in [3].
validate_args: Whether to add runtime checks of argument validity. If False,
and arguments are incorrect, correct behavior is not guaranteed.
name: `String` name to prepend to created tf. Default:
`potential_scale_reduction`.
Returns:
`Tensor` structure parallel to `chains_states` representing the
R-hat statistic for the state(s). Same `dtype` as `state`, and
shape equal to `state.shape[1 + independent_chain_ndims:]`.
Raises:
ValueError: If `independent_chain_ndims < 1`.
#### Examples
Diagnosing convergence by monitoring 10 chains that each attempt to
sample from a 2-variate normal.
```python
import tensorflow as tf
import tensorflow_probability as tfp
tfd = tfp.distributions
target = tfd.MultivariateNormalDiag(scale_diag=[1., 2.])
# Get 10 (2x) overdispersed initial states.
initial_state = target.sample(10) * 2.
==> (10, 2)
# Get 1000 samples from the 10 independent chains.
chains_states = tfp.mcmc.sample_chain(
num_burnin_steps=200,
num_results=1000,
current_state=initial_state,
trace_fn=None,
kernel=tfp.mcmc.HamiltonianMonteCarlo(
target_log_prob_fn=target.log_prob,
step_size=0.05,
num_leapfrog_steps=20))
chains_states.shape
==> (1000, 10, 2)
rhat = tfp.mcmc.diagnostic.potential_scale_reduction(
chains_states, independent_chain_ndims=1)
# The second dimension needed a longer burn-in.
rhat.eval()
==> [1.05, 1.3]
```
To see why R-hat is reasonable, let `X` be a random variable drawn uniformly
from the combined states (combined over all chains). Then, in the limit
`N, C --> infinity`, with `E`, `Var` denoting expectation and variance,
```R-hat = ( E[Var[X | chain]] + Var[E[X | chain]] ) / E[Var[X | chain]].```
Using the law of total variance, the numerator is the variance of the combined
states, and the denominator is the total variance minus the variance of the
  individual chain means. If the chains are all drawing from the same
distribution, they will have the same mean, and thus the ratio should be one.
#### References
  [1]: Stephen P. Brooks and Andrew Gelman. General Methods for Monitoring
Convergence of Iterative Simulations. _Journal of Computational and
Graphical Statistics_, 7(4), 1998.
  [2]: Andrew Gelman and Donald B. Rubin. Inference from Iterative Simulation
Using Multiple Sequences. _Statistical Science_, 7(4):457-472, 1992.
  [3]: Aki Vehtari, Andrew Gelman, Daniel Simpson, Bob Carpenter, Paul-Christian
Burkner. Rank-normalization, folding, and localization: An improved R-hat
for assessing convergence of MCMC, 2019. Retrieved from
http://arxiv.org/abs/1903.08008
"""
# tf.get_static_value returns None iff a constant value (as a numpy
# array) is not efficiently computable. Therefore, we try constant_value then
# check for None.
icn_const_ = tf.get_static_value(
ps.convert_to_shape_tensor(independent_chain_ndims))
if icn_const_ is not None:
independent_chain_ndims = icn_const_
if icn_const_ < 1:
raise ValueError(
'Argument `independent_chain_ndims` must be `>= 1`, found: {}'.format(
independent_chain_ndims))
def single_state(s):
return _potential_scale_reduction_single_state(
s, independent_chain_ndims, split_chains, validate_args)
with tf.name_scope('potential_scale_reduction' if name is None else name):
return tf.nest.map_structure(single_state, chains_states)
def _potential_scale_reduction_single_state(state, independent_chain_ndims,
split_chains, validate_args):
"""potential_scale_reduction for one single state `Tensor`."""
# casting integers to floats for floating-point division
# check to see if the `state` is a numpy object for the numpy test suite
if dtype_util.as_numpy_dtype(state.dtype) is np.int64:
state = tf.cast(state, tf.float64)
elif dtype_util.is_integer(state.dtype):
state = tf.cast(state, tf.float32)
with tf.name_scope('potential_scale_reduction_single_state'):
# We assume exactly one leading dimension indexes e.g. correlated samples
# from each Markov chain.
state = tf.convert_to_tensor(state, name='state')
n_samples_ = tf.compat.dimension_value(state.shape[0])
if n_samples_ is not None: # If available statically.
if split_chains and n_samples_ < 4:
raise ValueError(
'Must provide at least 4 samples when splitting chains. '
'Found {}'.format(n_samples_))
if not split_chains and n_samples_ < 2:
raise ValueError(
'Must provide at least 2 samples. Found {}'.format(n_samples_))
elif validate_args:
if split_chains:
assertions = [assert_util.assert_greater(
ps.shape(state)[0], 4,
message='Must provide at least 4 samples when splitting chains.')]
with tf.control_dependencies(assertions):
state = tf.identity(state)
else:
assertions = [assert_util.assert_greater(
ps.shape(state)[0], 2,
message='Must provide at least 2 samples.')]
with tf.control_dependencies(assertions):
state = tf.identity(state)
# Define so it's not a magic number.
# Warning! `if split_chains` logic assumes this is 1!
sample_ndims = 1
if split_chains:
# Split the sample dimension in half, doubling the number of
# independent chains.
# For odd number of samples, keep all but the last sample.
state_shape = ps.shape(state)
n_samples = state_shape[0]
state = state[:n_samples - n_samples % 2]
# Suppose state = [0, 1, 2, 3, 4, 5]
# Step 1: reshape into [[0, 1, 2], [3, 4, 5]]
# E.g. reshape states of shape [a, b] into [2, a//2, b].
state = tf.reshape(
state,
ps.concat([[2, n_samples // 2], state_shape[1:]], axis=0)
)
# Step 2: Put the size `2` dimension in the right place to be treated as a
# chain, changing [[0, 1, 2], [3, 4, 5]] into [[0, 3], [1, 4], [2, 5]],
# reshaping [2, a//2, b] into [a//2, 2, b].
state = tf.transpose(
a=state,
perm=ps.concat(
[[1, 0], ps.range(2, ps.rank(state))], axis=0))
# We're treating the new dim as indexing 2 chains, so increment.
independent_chain_ndims += 1
sample_axis = ps.range(0, sample_ndims)
chain_axis = ps.range(sample_ndims,
sample_ndims + independent_chain_ndims)
sample_and_chain_axis = ps.range(
0, sample_ndims + independent_chain_ndims)
n = _axis_size(state, sample_axis)
m = _axis_size(state, chain_axis)
# In the language of Brooks and Gelman (1998),
# B / n is the between chain variance, the variance of the chain means.
# W is the within sequence variance, the mean of the chain variances.
b_div_n = _reduce_variance(
tf.reduce_mean(state, axis=sample_axis, keepdims=True),
sample_and_chain_axis,
biased=False)
w = tf.reduce_mean(
_reduce_variance(state, sample_axis, keepdims=True, biased=False),
axis=sample_and_chain_axis)
# sigma^2_+ is an estimate of the true variance, which would be unbiased if
# each chain was drawn from the target. c.f. "law of total variance."
sigma_2_plus = ((n - 1) / n) * w + b_div_n
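    # The line below matches the corrected potential scale reduction described in
    # Brooks and Gelman (1998), cited in the docstring above:
    #   R-hat = ((m + 1) / m) * sigma^2_+ / W - (n - 1) / (m * n)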
return ((m + 1.) / m) * sigma_2_plus / w - (n - 1.) / (m * n)
# TODO(b/72873233) Move some variant of this to tfd.sample_stats.
def _reduce_variance(x, axis=None, biased=True, keepdims=False):
with tf.name_scope('reduce_variance'):
x = tf.convert_to_tensor(x, name='x')
mean = tf.reduce_mean(x, axis=axis, keepdims=True)
biased_var = tf.reduce_mean(
tf.math.squared_difference(x, mean), axis=axis, keepdims=keepdims)
if biased:
return biased_var
n = _axis_size(x, axis)
return (n / (n - 1.)) * biased_var
def _axis_size(x, axis=None):
"""Get number of elements of `x` in `axis`, as type `x.dtype`."""
if axis is None:
return ps.cast(ps.size(x), x.dtype)
return ps.cast(
ps.reduce_prod(
ps.gather(ps.shape(x), axis)), x.dtype)
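# --- Illustrative usage sketch (added for clarity; not part of the original
# module). It goes through the public wrapper `tfp.mcmc.potential_scale_reduction`
# defined earlier in this module, which dispatches to
# `_potential_scale_reduction_single_state` above. Assumes TensorFlow
# Probability is installed; chains have shape [n_samples, n_chains].
if __name__ == '__main__':
    import tensorflow_probability as tfp

    chains = np.random.randn(1000, 4).astype(np.float32)  # 1000 draws, 4 chains
    rhat = tfp.mcmc.potential_scale_reduction(
        chains, independent_chain_ndims=1, split_chains=True)
    print(rhat)  # values close to 1.0 indicate the chains have mixed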
| [
"tensorflow.compat.v2.control_dependencies",
"tensorflow.compat.v2.reduce_mean",
"tensorflow.compat.v2.nest.map_structure",
"tensorflow.compat.v2.cast",
"tensorflow_probability.python.internal.prefer_static.size",
"tensorflow.compat.v2.get_static_value",
"tensorflow.compat.v2.maximum",
"tensorflow.compat.v2.convert_to_tensor",
"tensorflow.compat.v2.compat.dimension_value",
"tensorflow_probability.python.internal.prefer_static.concat",
"tensorflow.python.util.nest.map_structure_up_to",
"tensorflow_probability.python.internal.nest_util.broadcast_structure",
"tensorflow.compat.v2.reduce_sum",
"tensorflow.compat.v2.math.squared_difference",
"tensorflow_probability.python.internal.prefer_static.convert_to_shape_tensor",
"tensorflow.compat.v2.rank",
"tensorflow_probability.python.internal.dtype_util.as_numpy_dtype",
"tensorflow_probability.python.internal.prefer_static.shape",
"tensorflow.compat.v2.cumsum",
"tensorflow_probability.python.internal.prefer_static.range",
"tensorflow.compat.v2.name_scope",
"tensorflow.compat.v2.reshape",
"tensorflow_probability.python.stats.auto_correlation",
"tensorflow_probability.python.internal.prefer_static.rank",
"tensorflow_probability.python.internal.assert_util.assert_greater",
"tensorflow_probability.python.internal.tensorshape_util.rank",
"tensorflow.compat.v2.identity",
"tensorflow_probability.python.internal.dtype_util.is_integer"
] | [((8267, 8323), 'tensorflow_probability.python.internal.nest_util.broadcast_structure', 'nest_util.broadcast_structure', (['states', 'filter_beyond_lag'], {}), '(states, filter_beyond_lag)\n', (8296, 8323), False, 'from tensorflow_probability.python.internal import nest_util\n'), ((8345, 8400), 'tensorflow_probability.python.internal.nest_util.broadcast_structure', 'nest_util.broadcast_structure', (['states', 'filter_threshold'], {}), '(states, filter_threshold)\n', (8374, 8400), False, 'from tensorflow_probability.python.internal import nest_util\n'), ((8434, 8501), 'tensorflow_probability.python.internal.nest_util.broadcast_structure', 'nest_util.broadcast_structure', (['states', 'filter_beyond_positive_pairs'], {}), '(states, filter_beyond_positive_pairs)\n', (8463, 8501), False, 'from tensorflow_probability.python.internal import nest_util\n'), ((8201, 8244), 'tensorflow_probability.python.internal.nest_util.broadcast_structure', 'nest_util.broadcast_structure', (['states', 'None'], {}), '(states, None)\n', (8230, 8244), False, 'from tensorflow_probability.python.internal import nest_util\n'), ((8670, 8734), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (["('effective_sample_size' if name is None else name)"], {}), "('effective_sample_size' if name is None else name)\n", (8683, 8734), True, 'import tensorflow.compat.v2 as tf\n'), ((8747, 8890), 'tensorflow.python.util.nest.map_structure_up_to', 'nest.map_structure_up_to', (['states', 'single_state', 'states', 'filter_beyond_lag', 'filter_threshold', 'filter_beyond_positive_pairs', 'cross_chain_dims'], {}), '(states, single_state, states, filter_beyond_lag,\n filter_threshold, filter_beyond_positive_pairs, cross_chain_dims)\n', (8771, 8890), False, 'from tensorflow.python.util import nest\n'), ((9295, 9346), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['"""effective_sample_size_single_state"""'], {}), "('effective_sample_size_single_state')\n", (9308, 9346), True, 'import tensorflow.compat.v2 as tf\n'), ((9362, 9405), 'tensorflow.compat.v2.convert_to_tensor', 'tf.convert_to_tensor', (['states'], {'name': '"""states"""'}), "(states, name='states')\n", (9382, 9405), True, 'import tensorflow.compat.v2 as tf\n'), ((9512, 9599), 'tensorflow_probability.python.stats.auto_correlation', 'stats.auto_correlation', (['states'], {'axis': '(0)', 'max_lags': 'filter_beyond_lag', 'normalize': '(False)'}), '(states, axis=0, max_lags=filter_beyond_lag,\n normalize=False)\n', (9534, 9599), False, 'from tensorflow_probability.python import stats\n'), ((12656, 12688), 'tensorflow.compat.v2.reshape', 'tf.reshape', (['nk_factor', 'new_shape'], {}), '(nk_factor, new_shape)\n', (12666, 12688), True, 'import tensorflow.compat.v2 as tf\n'), ((20111, 20162), 'tensorflow_probability.python.internal.prefer_static.convert_to_shape_tensor', 'ps.convert_to_shape_tensor', (['independent_chain_ndims'], {}), '(independent_chain_ndims)\n', (20137, 20162), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((20550, 20618), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (["('potential_scale_reduction' if name is None else name)"], {}), "('potential_scale_reduction' if name is None else name)\n", (20563, 20618), True, 'import tensorflow.compat.v2 as tf\n'), ((20631, 20681), 'tensorflow.compat.v2.nest.map_structure', 'tf.nest.map_structure', (['single_state', 'chains_states'], {}), '(single_state, chains_states)\n', (20652, 20681), True, 'import tensorflow.compat.v2 as tf\n'), ((21038, 21076), 
'tensorflow_probability.python.internal.dtype_util.as_numpy_dtype', 'dtype_util.as_numpy_dtype', (['state.dtype'], {}), '(state.dtype)\n', (21063, 21076), False, 'from tensorflow_probability.python.internal import dtype_util\n'), ((21102, 21128), 'tensorflow.compat.v2.cast', 'tf.cast', (['state', 'tf.float64'], {}), '(state, tf.float64)\n', (21109, 21128), True, 'import tensorflow.compat.v2 as tf\n'), ((21136, 21170), 'tensorflow_probability.python.internal.dtype_util.is_integer', 'dtype_util.is_integer', (['state.dtype'], {}), '(state.dtype)\n', (21157, 21170), False, 'from tensorflow_probability.python.internal import dtype_util\n'), ((21218, 21273), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['"""potential_scale_reduction_single_state"""'], {}), "('potential_scale_reduction_single_state')\n", (21231, 21273), True, 'import tensorflow.compat.v2 as tf\n'), ((21395, 21436), 'tensorflow.compat.v2.convert_to_tensor', 'tf.convert_to_tensor', (['state'], {'name': '"""state"""'}), "(state, name='state')\n", (21415, 21436), True, 'import tensorflow.compat.v2 as tf\n'), ((21455, 21496), 'tensorflow.compat.v2.compat.dimension_value', 'tf.compat.dimension_value', (['state.shape[0]'], {}), '(state.shape[0])\n', (21480, 21496), True, 'import tensorflow.compat.v2 as tf\n'), ((23595, 23620), 'tensorflow_probability.python.internal.prefer_static.range', 'ps.range', (['(0)', 'sample_ndims'], {}), '(0, sample_ndims)\n', (23603, 23620), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((23638, 23700), 'tensorflow_probability.python.internal.prefer_static.range', 'ps.range', (['sample_ndims', '(sample_ndims + independent_chain_ndims)'], {}), '(sample_ndims, sample_ndims + independent_chain_ndims)\n', (23646, 23700), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((23755, 23806), 'tensorflow_probability.python.internal.prefer_static.range', 'ps.range', (['(0)', '(sample_ndims + independent_chain_ndims)'], {}), '(0, sample_ndims + independent_chain_ndims)\n', (23763, 23806), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((24789, 24821), 'tensorflow.compat.v2.name_scope', 'tf.name_scope', (['"""reduce_variance"""'], {}), "('reduce_variance')\n", (24802, 24821), True, 'import tensorflow.compat.v2 as tf\n'), ((24831, 24864), 'tensorflow.compat.v2.convert_to_tensor', 'tf.convert_to_tensor', (['x'], {'name': '"""x"""'}), "(x, name='x')\n", (24851, 24864), True, 'import tensorflow.compat.v2 as tf\n'), ((24876, 24919), 'tensorflow.compat.v2.reduce_mean', 'tf.reduce_mean', (['x'], {'axis': 'axis', 'keepdims': '(True)'}), '(x, axis=axis, keepdims=True)\n', (24890, 24919), True, 'import tensorflow.compat.v2 as tf\n'), ((9754, 9785), 'tensorflow.compat.v2.get_static_value', 'tf.get_static_value', (['num_chains'], {}), '(num_chains)\n', (9773, 9785), True, 'import tensorflow.compat.v2 as tf\n'), ((12375, 12413), 'tensorflow_probability.python.internal.tensorshape_util.rank', 'tensorshape_util.rank', (['auto_corr.shape'], {}), '(auto_corr.shape)\n', (12396, 12413), False, 'from tensorflow_probability.python.internal import tensorshape_util\n'), ((13369, 13386), 'tensorflow.compat.v2.cast', 'tf.cast', (['mask', 'dt'], {}), '(mask, dt)\n', (13376, 13386), True, 'import tensorflow.compat.v2 as tf\n'), ((13436, 13459), 'tensorflow.compat.v2.cumsum', 'tf.cumsum', (['mask'], {'axis': '(0)'}), '(mask, axis=0)\n', (13445, 13459), True, 'import tensorflow.compat.v2 as tf\n'), ((13509, 13536), 
'tensorflow.compat.v2.maximum', 'tf.maximum', (['(1.0 - mask)', '(0.0)'], {}), '(1.0 - mask, 0.0)\n', (13519, 13536), True, 'import tensorflow.compat.v2 as tf\n'), ((21184, 21210), 'tensorflow.compat.v2.cast', 'tf.cast', (['state', 'tf.float32'], {}), '(state, tf.float32)\n', (21191, 21210), True, 'import tensorflow.compat.v2 as tf\n'), ((22750, 22765), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['state'], {}), '(state)\n', (22758, 22765), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((24136, 24190), 'tensorflow.compat.v2.reduce_mean', 'tf.reduce_mean', (['state'], {'axis': 'sample_axis', 'keepdims': '(True)'}), '(state, axis=sample_axis, keepdims=True)\n', (24150, 24190), True, 'import tensorflow.compat.v2 as tf\n'), ((24961, 24996), 'tensorflow.compat.v2.math.squared_difference', 'tf.math.squared_difference', (['x', 'mean'], {}), '(x, mean)\n', (24987, 24996), True, 'import tensorflow.compat.v2 as tf\n'), ((25272, 25282), 'tensorflow_probability.python.internal.prefer_static.size', 'ps.size', (['x'], {}), '(x)\n', (25279, 25282), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((10145, 10180), 'tensorflow.compat.v2.control_dependencies', 'tf.control_dependencies', (['assertions'], {}), '(assertions)\n', (10168, 10180), True, 'import tensorflow.compat.v2 as tf\n'), ((11350, 11399), 'tensorflow.compat.v2.reduce_mean', 'tf.reduce_mean', (['auto_cov[0]', '(cross_chain_dims - 1)'], {}), '(auto_cov[0], cross_chain_dims - 1)\n', (11364, 11399), True, 'import tensorflow.compat.v2 as tf\n'), ((11638, 11680), 'tensorflow.compat.v2.reduce_mean', 'tf.reduce_mean', (['auto_cov', 'cross_chain_dims'], {}), '(auto_cov, cross_chain_dims)\n', (11652, 11680), True, 'import tensorflow.compat.v2 as tf\n'), ((13792, 13865), 'tensorflow.compat.v2.convert_to_tensor', 'tf.convert_to_tensor', (['filter_threshold'], {'dtype': 'dt', 'name': '"""filter_threshold"""'}), "(filter_threshold, dtype=dt, name='filter_threshold')\n", (13812, 13865), True, 'import tensorflow.compat.v2 as tf\n'), ((14403, 14426), 'tensorflow.compat.v2.cast', 'tf.cast', (['mask'], {'dtype': 'dt'}), '(mask, dtype=dt)\n', (14410, 14426), True, 'import tensorflow.compat.v2 as tf\n'), ((14477, 14500), 'tensorflow.compat.v2.cumsum', 'tf.cumsum', (['mask'], {'axis': '(0)'}), '(mask, axis=0)\n', (14486, 14500), True, 'import tensorflow.compat.v2 as tf\n'), ((14551, 14578), 'tensorflow.compat.v2.maximum', 'tf.maximum', (['(1.0 - mask)', '(0.0)'], {}), '(1.0 - mask, 0.0)\n', (14561, 14578), True, 'import tensorflow.compat.v2 as tf\n'), ((23059, 23116), 'tensorflow_probability.python.internal.prefer_static.concat', 'ps.concat', (['[[2, n_samples // 2], state_shape[1:]]'], {'axis': '(0)'}), '([[2, n_samples // 2], state_shape[1:]], axis=0)\n', (23068, 23116), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((25353, 25364), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['x'], {}), '(x)\n', (25361, 25364), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((11046, 11061), 'tensorflow_probability.python.internal.prefer_static.rank', 'ps.rank', (['states'], {}), '(states)\n', (11053, 11061), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((11148, 11178), 'tensorflow.compat.v2.reduce_mean', 'tf.reduce_mean', (['states'], {'axis': '(0)'}), '(states, axis=0)\n', (11162, 11178), True, 'import tensorflow.compat.v2 as tf\n'), 
((12815, 12826), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['x'], {}), '(x)\n', (12823, 12826), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((13021, 13045), 'tensorflow.compat.v2.reshape', 'tf.reshape', (['x', 'new_shape'], {}), '(x, new_shape)\n', (13031, 13045), True, 'import tensorflow.compat.v2 as tf\n'), ((14649, 14690), 'tensorflow.compat.v2.reduce_sum', 'tf.reduce_sum', (['weighted_auto_corr'], {'axis': '(0)'}), '(weighted_auto_corr, axis=0)\n', (14662, 14690), True, 'import tensorflow.compat.v2 as tf\n'), ((10076, 10132), 'tensorflow_probability.python.internal.assert_util.assert_greater', 'assert_util.assert_greater', (['num_chains', '(1.0)'], {'message': 'msg'}), '(num_chains, 1.0, message=msg)\n', (10102, 10132), False, 'from tensorflow_probability.python.internal import assert_util\n'), ((12459, 12497), 'tensorflow_probability.python.internal.tensorshape_util.rank', 'tensorshape_util.rank', (['auto_corr.shape'], {}), '(auto_corr.shape)\n', (12480, 12497), False, 'from tensorflow_probability.python.internal import tensorshape_util\n'), ((22110, 22145), 'tensorflow.compat.v2.control_dependencies', 'tf.control_dependencies', (['assertions'], {}), '(assertions)\n', (22133, 22145), True, 'import tensorflow.compat.v2 as tf\n'), ((22165, 22183), 'tensorflow.compat.v2.identity', 'tf.identity', (['state'], {}), '(state)\n', (22176, 22183), True, 'import tensorflow.compat.v2 as tf\n'), ((22351, 22386), 'tensorflow.compat.v2.control_dependencies', 'tf.control_dependencies', (['assertions'], {}), '(assertions)\n', (22374, 22386), True, 'import tensorflow.compat.v2 as tf\n'), ((22406, 22424), 'tensorflow.compat.v2.identity', 'tf.identity', (['state'], {}), '(state)\n', (22417, 22424), True, 'import tensorflow.compat.v2 as tf\n'), ((12966, 12977), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['x'], {}), '(x)\n', (12974, 12977), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((12579, 12597), 'tensorflow.compat.v2.rank', 'tf.rank', (['auto_corr'], {}), '(auto_corr)\n', (12586, 12597), True, 'import tensorflow.compat.v2 as tf\n'), ((21995, 22010), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['state'], {}), '(state)\n', (22003, 22010), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((22258, 22273), 'tensorflow_probability.python.internal.prefer_static.shape', 'ps.shape', (['state'], {}), '(state)\n', (22266, 22273), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n'), ((23442, 23456), 'tensorflow_probability.python.internal.prefer_static.rank', 'ps.rank', (['state'], {}), '(state)\n', (23449, 23456), True, 'from tensorflow_probability.python.internal import prefer_static as ps\n')] |
from torch import nn as nn
from .base_model import BaseModel
from ..nn.conv2d import DenseConv2d
from ..nn.linear import DenseLinear
__all__ = ["Conv2", "conv2", "Conv4", "conv4"]
class Conv2(BaseModel):
def __init__(self):
super(Conv2, self).__init__()
self.features = nn.Sequential(DenseConv2d(1, 32, kernel_size=5, padding=2), # 32x28x28
nn.ReLU(inplace=True),
nn.MaxPool2d(2, stride=2), # 32x14x14
DenseConv2d(32, 64, kernel_size=5, padding=2), # 64x14x14
nn.ReLU(inplace=True),
nn.MaxPool2d(2, stride=2)) # 64x7x7
self.classifier = nn.Sequential(DenseLinear(64 * 7 * 7, 2048),
nn.ReLU(inplace=True),
DenseLinear(2048, 62))
self.collect_prunable_layers()
def forward(self, inp):
out = self.features(inp)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
class Conv4(BaseModel):
def __init__(self):
super(Conv4, self).__init__()
self.features = nn.Sequential(DenseConv2d(3, 32, kernel_size=3, padding=1),
nn.BatchNorm2d(32),
nn.MaxPool2d(2),
DenseConv2d(32, 32, kernel_size=3, padding=1),
nn.BatchNorm2d(32),
nn.MaxPool2d(2),
DenseConv2d(32, 32, kernel_size=3, padding=2),
nn.BatchNorm2d(32),
nn.MaxPool2d(2),
DenseConv2d(32, 32, kernel_size=3, padding=2),
nn.BatchNorm2d(32),
nn.MaxPool2d(2))
self.classifier = DenseLinear(in_features=32 * 6 * 6, out_features=2)
def forward(self, inp):
out = self.features(inp)
out = out.view(out.size(0), -1)
out = self.classifier(out)
return out
def conv2() -> Conv2:
return Conv2()
def conv4() -> Conv4:
return Conv4()
# TODO: define pretrain etc.
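# --- Illustrative usage sketch (added for clarity; not part of the original
# module). Runs a forward pass of conv2() on a dummy single-channel 28x28
# batch, matching the 32x28x28 / 64x7x7 shape comments above. Assumes the
# package's relative imports (BaseModel, DenseConv2d, DenseLinear) resolve,
# e.g. when run with `python -m <package>.<this_module>`.
if __name__ == "__main__":
    import torch

    model = conv2()
    dummy = torch.randn(4, 1, 28, 28)  # batch of 4 grayscale 28x28 images
    logits = model(dummy)
    print(logits.shape)  # expected: torch.Size([4, 62])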
| [
"torch.nn.BatchNorm2d",
"torch.nn.ReLU",
"torch.nn.MaxPool2d"
] | [((404, 425), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (411, 425), True, 'from torch import nn as nn\n'), ((465, 490), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (477, 490), True, 'from torch import nn as nn\n'), ((639, 660), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (646, 660), True, 'from torch import nn as nn\n'), ((700, 725), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {'stride': '(2)'}), '(2, stride=2)\n', (712, 725), True, 'from torch import nn as nn\n'), ((849, 870), 'torch.nn.ReLU', 'nn.ReLU', ([], {'inplace': '(True)'}), '(inplace=True)\n', (856, 870), True, 'from torch import nn as nn\n'), ((1340, 1358), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (1354, 1358), True, 'from torch import nn as nn\n'), ((1398, 1413), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (1410, 1413), True, 'from torch import nn as nn\n'), ((1538, 1556), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (1552, 1556), True, 'from torch import nn as nn\n'), ((1596, 1611), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (1608, 1611), True, 'from torch import nn as nn\n'), ((1736, 1754), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (1750, 1754), True, 'from torch import nn as nn\n'), ((1794, 1809), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (1806, 1809), True, 'from torch import nn as nn\n'), ((1934, 1952), 'torch.nn.BatchNorm2d', 'nn.BatchNorm2d', (['(32)'], {}), '(32)\n', (1948, 1952), True, 'from torch import nn as nn\n'), ((1992, 2007), 'torch.nn.MaxPool2d', 'nn.MaxPool2d', (['(2)'], {}), '(2)\n', (2004, 2007), True, 'from torch import nn as nn\n')] |
import bpy
from bpy.app.handlers import persistent
bl_info = {
"name": "Playback Once",
"author": "<NAME>",
"version": (1, 0, 0),
"blender": (2, 67, 3),
"location": "",
"description": "Playback once.",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"category": "Animation"}
@persistent
def stopPlaybackAtEnd(scene):
if scene.frame_current >= scene.frame_end:
bpy.ops.screen.animation_cancel()
def register():
bpy.app.handlers.frame_change_pre.append(stopPlaybackAtEnd)
def unregister():
bpy.app.handlers.frame_change_pre.remove(stopPlaybackAtEnd)
if __name__ == "__main__":
register()
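# Note (added for clarity; not part of the add-on): after register(), the
# frame_change_pre handler runs on every frame change, so playback started
# normally cancels itself once the scene reaches frame_end. Inside Blender's
# Python console this could be exercised with, e.g.:
#
#   bpy.ops.screen.animation_play()  # plays once, then stops at frame_end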
| [
"bpy.ops.screen.animation_cancel",
"bpy.app.handlers.frame_change_pre.remove",
"bpy.app.handlers.frame_change_pre.append"
] | [((471, 530), 'bpy.app.handlers.frame_change_pre.append', 'bpy.app.handlers.frame_change_pre.append', (['stopPlaybackAtEnd'], {}), '(stopPlaybackAtEnd)\n', (511, 530), False, 'import bpy\n'), ((554, 613), 'bpy.app.handlers.frame_change_pre.remove', 'bpy.app.handlers.frame_change_pre.remove', (['stopPlaybackAtEnd'], {}), '(stopPlaybackAtEnd)\n', (594, 613), False, 'import bpy\n'), ((416, 449), 'bpy.ops.screen.animation_cancel', 'bpy.ops.screen.animation_cancel', ([], {}), '()\n', (447, 449), False, 'import bpy\n')] |
# pylint:disable=unused-variable
# pylint:disable=unused-argument
# pylint:disable=redefined-outer-name
from typing import AsyncIterator
import pytest
from aioresponses import aioresponses
from faker import Faker
from hypothesis import HealthCheck, given, settings
from hypothesis import strategies as st
from models_library.clusters import ClusterID
from models_library.projects import ProjectID
from models_library.projects_pipeline import ComputationTask
from models_library.projects_state import RunningState
from models_library.users import UserID
from simcore_service_webserver import director_v2_api
from simcore_service_webserver.director_v2_models import (
ClusterCreate,
ClusterPatch,
ClusterPing,
)
@pytest.fixture()
async def mocked_director_v2(
director_v2_service_mock: aioresponses,
) -> AsyncIterator[aioresponses]:
yield director_v2_service_mock
@pytest.fixture
def user_id(faker: Faker) -> UserID:
return UserID(faker.pyint(min_value=1))
@pytest.fixture
def project_id(faker: Faker) -> ProjectID:
return ProjectID(faker.uuid4())
@pytest.fixture
def cluster_id(faker: Faker) -> ClusterID:
return ClusterID(faker.pyint(min_value=0))
async def test_create_pipeline(
mocked_director_v2, client, user_id: UserID, project_id: ProjectID
):
task_out = await director_v2_api.create_or_update_pipeline(
client.app, user_id, project_id
)
assert task_out
assert isinstance(task_out, dict)
assert task_out["state"] == RunningState.NOT_STARTED
async def test_get_computation_task(
mocked_director_v2,
client,
user_id: UserID,
project_id: ProjectID,
):
task_out = await director_v2_api.get_computation_task(
client.app, user_id, project_id
)
assert task_out
assert isinstance(task_out, ComputationTask)
assert task_out.state == RunningState.NOT_STARTED
async def test_delete_pipeline(
mocked_director_v2, client, user_id: UserID, project_id: ProjectID
):
await director_v2_api.delete_pipeline(client.app, user_id, project_id)
@settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
@given(cluster_create=st.builds(ClusterCreate))
async def test_create_cluster(
mocked_director_v2, client, user_id: UserID, cluster_create
):
created_cluster = await director_v2_api.create_cluster(
client.app, user_id=user_id, new_cluster=cluster_create
)
assert created_cluster is not None
assert isinstance(created_cluster, dict)
assert "id" in created_cluster
async def test_list_clusters(mocked_director_v2, client, user_id: UserID):
list_of_clusters = await director_v2_api.list_clusters(client.app, user_id=user_id)
assert isinstance(list_of_clusters, list)
assert len(list_of_clusters) > 0
async def test_get_cluster(
mocked_director_v2, client, user_id: UserID, cluster_id: ClusterID
):
cluster = await director_v2_api.get_cluster(
client.app, user_id=user_id, cluster_id=cluster_id
)
assert isinstance(cluster, dict)
assert cluster["id"] == cluster_id
async def test_get_cluster_details(
mocked_director_v2, client, user_id: UserID, cluster_id: ClusterID
):
cluster_details = await director_v2_api.get_cluster_details(
client.app, user_id=user_id, cluster_id=cluster_id
)
assert isinstance(cluster_details, dict)
@settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
@given(cluster_patch=st.from_type(ClusterPatch))
async def test_update_cluster(
mocked_director_v2, client, user_id: UserID, cluster_id: ClusterID, cluster_patch
):
print(f"--> updating cluster with {cluster_patch=}")
updated_cluster = await director_v2_api.update_cluster(
client.app, user_id=user_id, cluster_id=cluster_id, cluster_patch=cluster_patch
)
assert isinstance(updated_cluster, dict)
assert updated_cluster["id"] == cluster_id
async def test_delete_cluster(
mocked_director_v2, client, user_id: UserID, cluster_id: ClusterID
):
await director_v2_api.delete_cluster(
client.app, user_id=user_id, cluster_id=cluster_id
)
@settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
@given(cluster_ping=st.builds(ClusterPing))
async def test_ping_cluster(mocked_director_v2, client, cluster_ping: ClusterPing):
await director_v2_api.ping_cluster(client.app, cluster_ping=cluster_ping)
async def test_ping_specific_cluster(
mocked_director_v2, client, user_id: UserID, cluster_id: ClusterID
):
await director_v2_api.ping_specific_cluster(
client.app, user_id=user_id, cluster_id=cluster_id
)
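# Note (added for clarity; not part of the original test module): the
# @settings(suppress_health_check=[HealthCheck.function_scoped_fixture])
# decorators above are needed because hypothesis re-runs each test body with
# many generated examples while the function-scoped fixtures (client,
# mocked_director_v2) are created only once per test; the suppression states
# that reusing them across examples is intentional. A generated payload can
# also be inspected outside a test run with, e.g.:
#
#   st.builds(ClusterCreate).example()  # draws one valid ClusterCreate instance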
| [
"hypothesis.strategies.builds",
"simcore_service_webserver.director_v2_api.get_cluster_details",
"hypothesis.strategies.from_type",
"simcore_service_webserver.director_v2_api.delete_cluster",
"simcore_service_webserver.director_v2_api.delete_pipeline",
"simcore_service_webserver.director_v2_api.ping_cluster",
"simcore_service_webserver.director_v2_api.create_cluster",
"simcore_service_webserver.director_v2_api.get_cluster",
"simcore_service_webserver.director_v2_api.update_cluster",
"hypothesis.settings",
"simcore_service_webserver.director_v2_api.ping_specific_cluster",
"pytest.fixture",
"simcore_service_webserver.director_v2_api.list_clusters",
"simcore_service_webserver.director_v2_api.get_computation_task",
"simcore_service_webserver.director_v2_api.create_or_update_pipeline"
] | [((728, 744), 'pytest.fixture', 'pytest.fixture', ([], {}), '()\n', (742, 744), False, 'import pytest\n'), ((2065, 2134), 'hypothesis.settings', 'settings', ([], {'suppress_health_check': '[HealthCheck.function_scoped_fixture]'}), '(suppress_health_check=[HealthCheck.function_scoped_fixture])\n', (2073, 2134), False, 'from hypothesis import HealthCheck, given, settings\n'), ((3362, 3431), 'hypothesis.settings', 'settings', ([], {'suppress_health_check': '[HealthCheck.function_scoped_fixture]'}), '(suppress_health_check=[HealthCheck.function_scoped_fixture])\n', (3370, 3431), False, 'from hypothesis import HealthCheck, given, settings\n'), ((4121, 4190), 'hypothesis.settings', 'settings', ([], {'suppress_health_check': '[HealthCheck.function_scoped_fixture]'}), '(suppress_health_check=[HealthCheck.function_scoped_fixture])\n', (4129, 4190), False, 'from hypothesis import HealthCheck, given, settings\n'), ((1321, 1395), 'simcore_service_webserver.director_v2_api.create_or_update_pipeline', 'director_v2_api.create_or_update_pipeline', (['client.app', 'user_id', 'project_id'], {}), '(client.app, user_id, project_id)\n', (1362, 1395), False, 'from simcore_service_webserver import director_v2_api\n'), ((1672, 1741), 'simcore_service_webserver.director_v2_api.get_computation_task', 'director_v2_api.get_computation_task', (['client.app', 'user_id', 'project_id'], {}), '(client.app, user_id, project_id)\n', (1708, 1741), False, 'from simcore_service_webserver import director_v2_api\n'), ((1997, 2061), 'simcore_service_webserver.director_v2_api.delete_pipeline', 'director_v2_api.delete_pipeline', (['client.app', 'user_id', 'project_id'], {}), '(client.app, user_id, project_id)\n', (2028, 2061), False, 'from simcore_service_webserver import director_v2_api\n'), ((2309, 2401), 'simcore_service_webserver.director_v2_api.create_cluster', 'director_v2_api.create_cluster', (['client.app'], {'user_id': 'user_id', 'new_cluster': 'cluster_create'}), '(client.app, user_id=user_id, new_cluster=\n cluster_create)\n', (2339, 2401), False, 'from simcore_service_webserver import director_v2_api\n'), ((2157, 2181), 'hypothesis.strategies.builds', 'st.builds', (['ClusterCreate'], {}), '(ClusterCreate)\n', (2166, 2181), True, 'from hypothesis import strategies as st\n'), ((2636, 2694), 'simcore_service_webserver.director_v2_api.list_clusters', 'director_v2_api.list_clusters', (['client.app'], {'user_id': 'user_id'}), '(client.app, user_id=user_id)\n', (2665, 2694), False, 'from simcore_service_webserver import director_v2_api\n'), ((2902, 2981), 'simcore_service_webserver.director_v2_api.get_cluster', 'director_v2_api.get_cluster', (['client.app'], {'user_id': 'user_id', 'cluster_id': 'cluster_id'}), '(client.app, user_id=user_id, cluster_id=cluster_id)\n', (2929, 2981), False, 'from simcore_service_webserver import director_v2_api\n'), ((3212, 3304), 'simcore_service_webserver.director_v2_api.get_cluster_details', 'director_v2_api.get_cluster_details', (['client.app'], {'user_id': 'user_id', 'cluster_id': 'cluster_id'}), '(client.app, user_id=user_id, cluster_id\n =cluster_id)\n', (3247, 3304), False, 'from simcore_service_webserver import director_v2_api\n'), ((3686, 3802), 'simcore_service_webserver.director_v2_api.update_cluster', 'director_v2_api.update_cluster', (['client.app'], {'user_id': 'user_id', 'cluster_id': 'cluster_id', 'cluster_patch': 'cluster_patch'}), '(client.app, user_id=user_id, cluster_id=\n cluster_id, cluster_patch=cluster_patch)\n', (3716, 3802), False, 'from simcore_service_webserver 
import director_v2_api\n'), ((3453, 3479), 'hypothesis.strategies.from_type', 'st.from_type', (['ClusterPatch'], {}), '(ClusterPatch)\n', (3465, 3479), True, 'from hypothesis import strategies as st\n'), ((4021, 4108), 'simcore_service_webserver.director_v2_api.delete_cluster', 'director_v2_api.delete_cluster', (['client.app'], {'user_id': 'user_id', 'cluster_id': 'cluster_id'}), '(client.app, user_id=user_id, cluster_id=\n cluster_id)\n', (4051, 4108), False, 'from simcore_service_webserver import director_v2_api\n'), ((4329, 4396), 'simcore_service_webserver.director_v2_api.ping_cluster', 'director_v2_api.ping_cluster', (['client.app'], {'cluster_ping': 'cluster_ping'}), '(client.app, cluster_ping=cluster_ping)\n', (4357, 4396), False, 'from simcore_service_webserver import director_v2_api\n'), ((4211, 4233), 'hypothesis.strategies.builds', 'st.builds', (['ClusterPing'], {}), '(ClusterPing)\n', (4220, 4233), True, 'from hypothesis import strategies as st\n'), ((4521, 4614), 'simcore_service_webserver.director_v2_api.ping_specific_cluster', 'director_v2_api.ping_specific_cluster', (['client.app'], {'user_id': 'user_id', 'cluster_id': 'cluster_id'}), '(client.app, user_id=user_id,\n cluster_id=cluster_id)\n', (4558, 4614), False, 'from simcore_service_webserver import director_v2_api\n')] |
import numpy as np
def random_augmentation(img, mask):
#you can add any augmentations you need
return img, mask
def batch_generator(image, mask,
batch_size=1,
crop_size=0,
patch_size=256,
bbox= None,
augmentation=False):
'''
    image: nparray, must have 3 dimensions (bands, height, width)
    mask: nparray, 2 dimensions, same size as image
    batch_size: int, number of images in a batch
    patch_size: int, size of the (square) patch returned
    crop_size: int, how many pixels should be cropped off the mask
    bbox: None or tuple of 4 ints (min_y, max_y, min_x, max_x); data is selected from within the bbox
    augmentation: turn data augmentation on/off. The augmentation function is random_augmentation() above
    returns a batch of image and mask patches; the image is turned to 'channels last' as required by unet
'''
if np.ndim(mask) != 2 or np.ndim(image) != 3:
raise ValueError('image must have 3 dims and mask 2 dims')
if mask.shape != image.shape[1:]:
raise ValueError('image and mask shape is different')
im_max = float(np.max(image))
mask_max = 1.0
#select subimage
if bbox is not None:
# check bbox
        if bbox[0] < 0 or bbox[2] < 0 \
            or bbox[1] > mask.shape[0] or bbox[3] > mask.shape[1] \
or bbox[0] + patch_size > bbox[1] or bbox[2] + patch_size > bbox[3] \
or patch_size <= 0:
raise ValueError("Incorrect bbox or patch size")
img_ = image[:, bbox[0] : bbox[1], bbox[2]:bbox[3]]
mask_ = mask[bbox[0] : bbox[1], bbox[2]:bbox[3]]
else:
img_ = image
mask_ = mask
    while True:
x = []
y = []
        for i in range(batch_size):
random_x = np.random.randint(0, mask_.shape[1] - patch_size)
random_y = np.random.randint(0, mask_.shape[0] - patch_size)
img_patch = img_[:,
random_y : random_y + patch_size,
random_x : random_x + patch_size] / im_max
# transform the image from channels-first (rasterio format) to channels-last (default tensorflow format)
img_patch = np.moveaxis(img_patch, 0, 2)
mask_patch = mask_[random_y : random_y + patch_size,
random_x : random_x + patch_size] / mask_max
if augmentation:
img_patch, mask_patch = random_augmentation(img_patch, mask_patch)
# mask is cropped as it may be useful for some convnets that have output size less than input
if crop_size > 0:
mask_patch = mask_patch[crop_size : -crop_size,
crop_size : -crop_size]
mask_patch = np.expand_dims(mask_patch, 2)
x.append(img_patch)
y.append(mask_patch)
yield (np.array(x), np.array(y))
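# --- Illustrative usage sketch (added for clarity; not part of the original
# module). Pulls one batch of patches from random data; shapes follow the
# docstring above (image is channels-first, mask is 2-D).
if __name__ == "__main__":
    rng = np.random.default_rng(0)
    image = rng.random((3, 512, 512)).astype(np.float32)        # 3-band image
    mask = (rng.random((512, 512)) > 0.5).astype(np.float32)   # binary mask
    gen = batch_generator(image, mask, batch_size=4, patch_size=256)
    x, y = next(gen)
    print(x.shape, y.shape)  # expected: (4, 256, 256, 3) (4, 256, 256, 1)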
| [
"numpy.ndim",
"numpy.max",
"numpy.array",
"numpy.random.randint",
"numpy.expand_dims",
"numpy.moveaxis"
] | [((1181, 1194), 'numpy.max', 'np.max', (['image'], {}), '(image)\n', (1187, 1194), True, 'import numpy as np\n'), ((950, 963), 'numpy.ndim', 'np.ndim', (['mask'], {}), '(mask)\n', (957, 963), True, 'import numpy as np\n'), ((972, 986), 'numpy.ndim', 'np.ndim', (['image'], {}), '(image)\n', (979, 986), True, 'import numpy as np\n'), ((1843, 1892), 'numpy.random.randint', 'np.random.randint', (['(0)', '(mask_.shape[1] - patch_size)'], {}), '(0, mask_.shape[1] - patch_size)\n', (1860, 1892), True, 'import numpy as np\n'), ((1916, 1965), 'numpy.random.randint', 'np.random.randint', (['(0)', '(mask_.shape[0] - patch_size)'], {}), '(0, mask_.shape[0] - patch_size)\n', (1933, 1965), True, 'import numpy as np\n'), ((2266, 2294), 'numpy.moveaxis', 'np.moveaxis', (['img_patch', '(0)', '(2)'], {}), '(img_patch, 0, 2)\n', (2277, 2294), True, 'import numpy as np\n'), ((2882, 2911), 'numpy.expand_dims', 'np.expand_dims', (['mask_patch', '(2)'], {}), '(mask_patch, 2)\n', (2896, 2911), True, 'import numpy as np\n'), ((2992, 3003), 'numpy.array', 'np.array', (['x'], {}), '(x)\n', (3000, 3003), True, 'import numpy as np\n'), ((3005, 3016), 'numpy.array', 'np.array', (['y'], {}), '(y)\n', (3013, 3016), True, 'import numpy as np\n')] |
# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
from collections import OrderedDict
# Django
from django.core.exceptions import PermissionDenied
from django.db.models.fields import PositiveIntegerField, BooleanField
from django.db.models.fields.related import ForeignKey
from django.http import Http404
from django.utils.encoding import force_text, smart_text
from django.utils.translation import ugettext_lazy as _
# Django REST Framework
from rest_framework import exceptions
from rest_framework import metadata
from rest_framework import serializers
from rest_framework.relations import RelatedField, ManyRelatedField
from rest_framework.fields import JSONField as DRFJSONField
from rest_framework.request import clone_request
# AWX
from awx.main.fields import JSONField, ImplicitRoleField
from awx.main.models import InventorySource, NotificationTemplate
from awx.main.scheduler.kubernetes import PodManager
class Metadata(metadata.SimpleMetadata):
def get_field_info(self, field):
field_info = OrderedDict()
field_info['type'] = self.label_lookup[field]
field_info['required'] = getattr(field, 'required', False)
text_attrs = [
'read_only', 'label', 'help_text',
'min_length', 'max_length',
'min_value', 'max_value',
'category', 'category_slug',
'defined_in_file'
]
for attr in text_attrs:
value = getattr(field, attr, None)
if value is not None and value != '':
field_info[attr] = force_text(value, strings_only=True)
placeholder = getattr(field, 'placeholder', serializers.empty)
if placeholder is not serializers.empty:
field_info['placeholder'] = placeholder
serializer = getattr(field, 'parent', None)
if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
# Update help text for common fields.
field_help_text = {
'id': _('Database ID for this {}.'),
'name': _('Name of this {}.'),
'description': _('Optional description of this {}.'),
'type': _('Data type for this {}.'),
'url': _('URL for this {}.'),
'related': _('Data structure with URLs of related resources.'),
'summary_fields': _('Data structure with name/description for related resources.'),
'created': _('Timestamp when this {} was created.'),
'modified': _('Timestamp when this {} was last modified.'),
}
if field.field_name in field_help_text:
opts = serializer.Meta.model._meta.concrete_model._meta
verbose_name = smart_text(opts.verbose_name)
field_info['help_text'] = field_help_text[field.field_name].format(verbose_name)
if field.field_name == 'type':
field_info['filterable'] = True
else:
for model_field in serializer.Meta.model._meta.fields:
if field.field_name == model_field.name:
if getattr(model_field, '__accepts_json__', None):
field_info['type'] = 'json'
field_info['filterable'] = True
break
else:
field_info['filterable'] = False
# Indicate if a field has a default value.
# FIXME: Still isn't showing all default values?
try:
default = field.get_default()
if field.field_name == 'TOWER_URL_BASE' and default == 'https://towerhost':
default = '{}://{}'.format(self.request.scheme, self.request.get_host())
field_info['default'] = default
except serializers.SkipField:
pass
if getattr(field, 'child', None):
field_info['child'] = self.get_field_info(field.child)
elif getattr(field, 'fields', None):
field_info['children'] = self.get_serializer_info(field)
if not isinstance(field, (RelatedField, ManyRelatedField)) and hasattr(field, 'choices'):
field_info['choices'] = [(choice_value, choice_name) for choice_value, choice_name in field.choices.items()]
# Indicate if a field is write-only.
if getattr(field, 'write_only', False):
field_info['write_only'] = True
# Special handling of inventory source_region choices that vary based on
# selected inventory source.
if field.field_name == 'source_regions':
for cp in ('azure_rm', 'ec2', 'gce'):
get_regions = getattr(InventorySource, 'get_%s_region_choices' % cp)
field_info['%s_region_choices' % cp] = get_regions()
# Special handling of group_by choices for EC2.
if field.field_name == 'group_by':
for cp in ('ec2',):
get_group_by_choices = getattr(InventorySource, 'get_%s_group_by_choices' % cp)
field_info['%s_group_by_choices' % cp] = get_group_by_choices()
# Special handling of notification configuration where the required properties
# are conditional on the type selected.
if field.field_name == 'notification_configuration':
for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
field_info[notification_type_name] = notification_type_class.init_parameters
# Special handling of notification messages where the required properties
# are conditional on the type selected.
try:
view_model = field.context['view'].model
except (AttributeError, KeyError):
view_model = None
if view_model == NotificationTemplate and field.field_name == 'messages':
for (notification_type_name, notification_tr_name, notification_type_class) in NotificationTemplate.NOTIFICATION_TYPES:
field_info[notification_type_name] = notification_type_class.default_messages
# Update type of fields returned...
model_field = None
if serializer and hasattr(serializer, 'Meta') and hasattr(serializer.Meta, 'model'):
try:
model_field = serializer.Meta.model._meta.get_field(field.field_name)
except Exception:
pass
if field.field_name == 'type':
field_info['type'] = 'choice'
elif field.field_name in ('url', 'custom_virtualenv', 'token'):
field_info['type'] = 'string'
elif field.field_name in ('related', 'summary_fields'):
field_info['type'] = 'object'
elif isinstance(field, PositiveIntegerField):
field_info['type'] = 'integer'
elif field.field_name in ('created', 'modified'):
field_info['type'] = 'datetime'
elif (
RelatedField in field.__class__.__bases__ or
isinstance(model_field, ForeignKey)
):
field_info['type'] = 'id'
elif (
isinstance(field, JSONField) or
isinstance(model_field, JSONField) or
isinstance(field, DRFJSONField) or
isinstance(getattr(field, 'model_field', None), JSONField) or
field.field_name == 'credential_passwords'
):
field_info['type'] = 'json'
elif (
isinstance(field, ManyRelatedField) and
field.field_name == 'credentials'
# launch-time credentials
):
field_info['type'] = 'list_of_ids'
elif isinstance(model_field, BooleanField):
field_info['type'] = 'boolean'
return field_info
def get_serializer_info(self, serializer, method=None):
filterer = getattr(serializer, 'filter_field_metadata', lambda fields, method: fields)
return filterer(
super(Metadata, self).get_serializer_info(serializer),
method
)
def determine_actions(self, request, view):
# Add field information for GET requests (so field names/labels are
# available even when we can't POST/PUT).
actions = {}
for method in {'GET', 'PUT', 'POST'} & set(view.allowed_methods):
view.request = clone_request(request, method)
obj = None
try:
# Test global permissions
if hasattr(view, 'check_permissions'):
view.check_permissions(view.request)
# Test object permissions
if method == 'PUT' and hasattr(view, 'get_object'):
obj = view.get_object()
except (exceptions.APIException, PermissionDenied, Http404):
continue
else:
# If user has appropriate permissions for the view, include
# appropriate metadata about the fields that should be supplied.
serializer = view.get_serializer(instance=obj)
actions[method] = self.get_serializer_info(serializer, method=method)
finally:
view.request = request
for field, meta in list(actions[method].items()):
if not isinstance(meta, dict):
continue
if field == "pod_spec_override":
meta['default'] = PodManager().pod_definition
# Add type choices if available from the serializer.
if field == 'type' and hasattr(serializer, 'get_type_choices'):
meta['choices'] = serializer.get_type_choices()
# For GET method, remove meta attributes that aren't relevant
# when reading a field and remove write-only fields.
if method == 'GET':
attrs_to_remove = ('required', 'read_only', 'default', 'min_length', 'max_length', 'placeholder')
for attr in attrs_to_remove:
meta.pop(attr, None)
meta.get('child', {}).pop(attr, None)
if meta.pop('write_only', False):
actions['GET'].pop(field)
# For PUT/POST methods, remove read-only fields.
if method in ('PUT', 'POST'):
# This value should always be False for PUT/POST, so don't
# show it (file-based read-only settings can't be updated)
meta.pop('defined_in_file', False)
if meta.pop('read_only', False):
if field == 'id' and hasattr(view, 'attach'):
continue
actions[method].pop(field)
return actions
def determine_metadata(self, request, view):
# store request on self so we can use it to generate field defaults
# (such as TOWER_URL_BASE)
self.request = request
try:
setattr(view, '_request', request)
metadata = super(Metadata, self).determine_metadata(request, view)
finally:
delattr(view, '_request')
# Add type(s) handled by this view/serializer.
if hasattr(view, 'get_serializer'):
serializer = view.get_serializer()
if hasattr(serializer, 'get_types'):
metadata['types'] = serializer.get_types()
# Add search fields if available from the view.
if getattr(view, 'search_fields', None):
metadata['search_fields'] = view.search_fields
# Add related search fields if available from the view.
if getattr(view, 'related_search_fields', None):
metadata['related_search_fields'] = view.related_search_fields
# include role names in metadata
roles = []
model = getattr(view, 'model', None)
if model:
for field in model._meta.get_fields():
if type(field) is ImplicitRoleField:
roles.append(field.name)
if len(roles) > 0:
metadata['object_roles'] = roles
from rest_framework import generics
if isinstance(view, generics.ListAPIView) and hasattr(view, 'paginator'):
metadata['max_page_size'] = view.paginator.max_page_size
return metadata
class RoleMetadata(Metadata):
def determine_metadata(self, request, view):
metadata = super(RoleMetadata, self).determine_metadata(request, view)
if 'actions' in metadata:
metadata['actions'].pop('POST')
metadata['actions']['POST'] = {
"id": {"type": "integer", "label": "ID", "help_text": "Database ID for this role."},
"disassociate": {"type": "integer", "label": "Disassociate", "help_text": "Provide to remove this role."},
}
return metadata
class SublistAttachDetatchMetadata(Metadata):
def determine_actions(self, request, view):
actions = super(SublistAttachDetatchMetadata, self).determine_actions(request, view)
method = 'POST'
if method in actions:
for field in list(actions[method].keys()):
if field == 'id':
continue
actions[method].pop(field)
return actions
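# Usage note (added for clarity; not part of the original module): these
# classes plug into Django REST Framework views through the standard
# `metadata_class` hook, which backs the OPTIONS response. A hypothetical
# view (names below are illustrative only) would wire it up roughly as:
#
#   from rest_framework.generics import ListCreateAPIView
#
#   class SomeResourceList(ListCreateAPIView):
#       metadata_class = Metadata
#       serializer_class = SomeResourceSerializer
#
# DRF then calls Metadata.determine_metadata() / determine_actions() when an
# OPTIONS request arrives, producing the per-field info assembled above.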
| [
"collections.OrderedDict",
"django.utils.translation.ugettext_lazy",
"awx.main.scheduler.kubernetes.PodManager",
"django.utils.encoding.force_text",
"rest_framework.request.clone_request",
"django.utils.encoding.smart_text"
] | [((1028, 1041), 'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (1039, 1041), False, 'from collections import OrderedDict\n'), ((8352, 8382), 'rest_framework.request.clone_request', 'clone_request', (['request', 'method'], {}), '(request, method)\n', (8365, 8382), False, 'from rest_framework.request import clone_request\n'), ((1558, 1594), 'django.utils.encoding.force_text', 'force_text', (['value'], {'strings_only': '(True)'}), '(value, strings_only=True)\n', (1568, 1594), False, 'from django.utils.encoding import force_text, smart_text\n'), ((2018, 2047), 'django.utils.translation.ugettext_lazy', '_', (['"""Database ID for this {}."""'], {}), "('Database ID for this {}.')\n", (2019, 2047), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2073, 2094), 'django.utils.translation.ugettext_lazy', '_', (['"""Name of this {}."""'], {}), "('Name of this {}.')\n", (2074, 2094), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2127, 2164), 'django.utils.translation.ugettext_lazy', '_', (['"""Optional description of this {}."""'], {}), "('Optional description of this {}.')\n", (2128, 2164), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2190, 2217), 'django.utils.translation.ugettext_lazy', '_', (['"""Data type for this {}."""'], {}), "('Data type for this {}.')\n", (2191, 2217), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2242, 2263), 'django.utils.translation.ugettext_lazy', '_', (['"""URL for this {}."""'], {}), "('URL for this {}.')\n", (2243, 2263), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2292, 2343), 'django.utils.translation.ugettext_lazy', '_', (['"""Data structure with URLs of related resources."""'], {}), "('Data structure with URLs of related resources.')\n", (2293, 2343), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2379, 2443), 'django.utils.translation.ugettext_lazy', '_', (['"""Data structure with name/description for related resources."""'], {}), "('Data structure with name/description for related resources.')\n", (2380, 2443), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2472, 2512), 'django.utils.translation.ugettext_lazy', '_', (['"""Timestamp when this {} was created."""'], {}), "('Timestamp when this {} was created.')\n", (2473, 2512), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2542, 2588), 'django.utils.translation.ugettext_lazy', '_', (['"""Timestamp when this {} was last modified."""'], {}), "('Timestamp when this {} was last modified.')\n", (2543, 2588), True, 'from django.utils.translation import ugettext_lazy as _\n'), ((2759, 2788), 'django.utils.encoding.smart_text', 'smart_text', (['opts.verbose_name'], {}), '(opts.verbose_name)\n', (2769, 2788), False, 'from django.utils.encoding import force_text, smart_text\n'), ((9440, 9452), 'awx.main.scheduler.kubernetes.PodManager', 'PodManager', ([], {}), '()\n', (9450, 9452), False, 'from awx.main.scheduler.kubernetes import PodManager\n')] |
import json
from washer.worker.actions import AppendStdout, AppendStderr
from washer.worker.actions import CreateNamedLog, AppendToLog
from washer.worker.actions import SetProperty
from washer.worker.commands import washertask
def pipenv_graph2deps(rawgraph):
graph = json.loads(rawgraph)
def build_entry(data):
if 'required_version' in data:
spec = data['key'] + data['required_version']
else:
spec = data['key']
return {'installer': 'pipenv',
'spec': spec,
'source': 'pypi',
'name': data['package_name'],
'version': data['installed_version']}
def extract_dependencies(entries):
for entry in entries:
if 'package' in entry:
package = entry['package']
dependencies = entry.get('dependencies', [])
yield build_entry(package)
yield from extract_dependencies(dependencies)
else:
yield build_entry(entry)
yield from extract_dependencies(graph)
@washertask
def pip_install(repopath, path=".", **kwargs):
import invoke
c = invoke.Context()
with c.cd(repopath):
with c.cd(path):
res = c.run("pipenv install .")
deps = c.run("pipenv graph --json")
yield AppendStdout(res.stdout)
yield AppendStderr(res.stderr)
yield SetProperty("dependencies", list(pipenv_graph2deps(deps.stdout)))
return True
@washertask
def requirement_file(repopath, requirement="requirements.txt",
path=".", **kwargs):
import invoke
c = invoke.Context()
with c.cd(repopath):
with c.cd(path):
res = c.run("pipenv install -r %s" % requirement)
deps = c.run("pipenv graph --json")
yield AppendStdout(res.stdout)
yield AppendStderr(res.stderr)
yield SetProperty("dependencies", list(pipenv_graph2deps(deps.stdout)))
return True
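# --- Illustrative sketch (added for clarity; not part of the original
# module): what pipenv_graph2deps() yields for a minimal, hand-written
# payload shaped like `pipenv graph --json` output. All package names and
# versions below are made up for illustration.
if __name__ == "__main__":
    sample = json.dumps([
        {
            "package": {
                "key": "requests",
                "package_name": "requests",
                "installed_version": "2.31.0",
            },
            "dependencies": [
                {
                    "key": "idna",
                    "required_version": ">=2.5",
                    "package_name": "idna",
                    "installed_version": "3.4",
                }
            ],
        }
    ])
    for dep in pipenv_graph2deps(sample):
        print(dep)
    # -> {'installer': 'pipenv', 'spec': 'requests', 'source': 'pypi', ...}
    #    {'installer': 'pipenv', 'spec': 'idna>=2.5', 'source': 'pypi', ...}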
| [
"json.loads",
"invoke.Context",
"washer.worker.actions.AppendStderr",
"washer.worker.actions.AppendStdout"
] | [((275, 295), 'json.loads', 'json.loads', (['rawgraph'], {}), '(rawgraph)\n', (285, 295), False, 'import json\n'), ((1174, 1190), 'invoke.Context', 'invoke.Context', ([], {}), '()\n', (1188, 1190), False, 'import invoke\n'), ((1643, 1659), 'invoke.Context', 'invoke.Context', ([], {}), '()\n', (1657, 1659), False, 'import invoke\n'), ((1345, 1369), 'washer.worker.actions.AppendStdout', 'AppendStdout', (['res.stdout'], {}), '(res.stdout)\n', (1357, 1369), False, 'from washer.worker.actions import AppendStdout, AppendStderr\n'), ((1380, 1404), 'washer.worker.actions.AppendStderr', 'AppendStderr', (['res.stderr'], {}), '(res.stderr)\n', (1392, 1404), False, 'from washer.worker.actions import AppendStdout, AppendStderr\n'), ((1832, 1856), 'washer.worker.actions.AppendStdout', 'AppendStdout', (['res.stdout'], {}), '(res.stdout)\n', (1844, 1856), False, 'from washer.worker.actions import AppendStdout, AppendStderr\n'), ((1867, 1891), 'washer.worker.actions.AppendStderr', 'AppendStderr', (['res.stderr'], {}), '(res.stderr)\n', (1879, 1891), False, 'from washer.worker.actions import AppendStdout, AppendStderr\n')] |
import numpy as np
import pickle
from collections import defaultdict
from parsing import parser
from analysis import training
def main():
    parse = parser.Parser()
    train_digits = parse.parse_file('data/pendigits-train')
test_digits = parse.parse_file('data/pendigits-test')
centroids = training.get_digit_kmeans_centroids(
train_digits, 256 - 3)
training.set_digit_observations(
train_digits, centroids, 256)
training.set_digit_observations(
test_digits, centroids, 256)
train_sequences = defaultdict(list)
test_sequences = []
n_test_sequences = len(test_digits)
test_expected_labels = np.ndarray(shape=(n_test_sequences,))
for digit in train_digits:
train_sequences[digit.label].append(digit.np_array_observations)
for i, digit in enumerate(test_digits):
test_sequences.append(digit.np_array_observations)
test_expected_labels[i] = digit.label
with open('train_sequences', 'wb') as f:
pickle.dump(train_sequences, f)
with open('test_sequences', 'wb') as f:
pickle.dump(test_sequences, f)
with open('test_expected_labels', 'wb') as f:
pickle.dump(test_expected_labels, f)
if __name__ == '__main__':
main()
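# Follow-up sketch (added for clarity; not part of the original script): the
# pickled files written by main() can be loaded back later, e.g. by a
# training script:
#
#   with open('train_sequences', 'rb') as f:
#       train_sequences = pickle.load(f)   # dict: label -> list of observation arrays
#   with open('test_expected_labels', 'rb') as f:
#       test_expected_labels = pickle.load(f)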
| [
"pickle.dump",
"parsing.parser.Parser",
"analysis.training.get_digit_kmeans_centroids",
"numpy.ndarray",
"collections.defaultdict",
"analysis.training.set_digit_observations"
] | [((152, 167), 'parsing.parser.Parser', 'parser.Parser', ([], {}), '()\n', (165, 167), False, 'from parsing import parser\n'), ((306, 364), 'analysis.training.get_digit_kmeans_centroids', 'training.get_digit_kmeans_centroids', (['train_digits', '(256 - 3)'], {}), '(train_digits, 256 - 3)\n', (341, 364), False, 'from analysis import training\n'), ((379, 440), 'analysis.training.set_digit_observations', 'training.set_digit_observations', (['train_digits', 'centroids', '(256)'], {}), '(train_digits, centroids, 256)\n', (410, 440), False, 'from analysis import training\n'), ((454, 514), 'analysis.training.set_digit_observations', 'training.set_digit_observations', (['test_digits', 'centroids', '(256)'], {}), '(test_digits, centroids, 256)\n', (485, 514), False, 'from analysis import training\n'), ((551, 568), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (562, 568), False, 'from collections import defaultdict\n'), ((660, 697), 'numpy.ndarray', 'np.ndarray', ([], {'shape': '(n_test_sequences,)'}), '(shape=(n_test_sequences,))\n', (670, 697), True, 'import numpy as np\n'), ((1009, 1040), 'pickle.dump', 'pickle.dump', (['train_sequences', 'f'], {}), '(train_sequences, f)\n', (1020, 1040), False, 'import pickle\n'), ((1094, 1124), 'pickle.dump', 'pickle.dump', (['test_sequences', 'f'], {}), '(test_sequences, f)\n', (1105, 1124), False, 'import pickle\n'), ((1184, 1220), 'pickle.dump', 'pickle.dump', (['test_expected_labels', 'f'], {}), '(test_expected_labels, f)\n', (1195, 1220), False, 'import pickle\n')] |
#
# Copyright (c) Contributors to the Open 3D Engine Project.
# For complete copyright and license terms please see the LICENSE at the root of this distribution.
#
# SPDX-License-Identifier: Apache-2.0 OR MIT
#
#
import abc
import importlib
import os
import pkgutil
import re
import time
from typing import Dict, List, Tuple
VERBOSE = False
class Commit(abc.ABC):
"""An interface for accessing details about a commit"""
@abc.abstractmethod
def get_files(self) -> List[str]:
"""Returns a list of local files added/modified by the commit"""
pass
@abc.abstractmethod
def get_removed_files(self) -> List[str]:
"""Returns a list of local files removed by the commit"""
pass
@abc.abstractmethod
def get_file_diff(self, str) -> str:
"""
Given a file name, returns a string in unified diff format
that represents the changes made to that file for this commit.
Most validators will only pay attention to added lines (with + in front)
"""
pass
@abc.abstractmethod
def get_description(self) -> str:
"""Returns the description of the commit"""
pass
@abc.abstractmethod
def get_author(self) -> str:
"""Returns the author of the commit"""
pass
def validate_commit(commit: Commit, out_errors: List[str] = None, ignore_validators: List[str] = None) -> bool:
"""Validates a commit against all validators
:param commit: The commit to validate
:param out_errors: if not None, will populate with the list of errors given by the validators
:param ignore_validators: Optional list of CommitValidator classes to ignore, by class name
:return: True if there are no validation errors, and False otherwise
"""
failed_count = 0
passed_count = 0
start_time = time.time()
# Find all the validators in the validators package (recursively)
validator_classes = []
validators_dir = os.path.join(os.path.dirname(__file__), 'validators')
for _, module_name, is_package in pkgutil.iter_modules([validators_dir]):
if not is_package:
module = importlib.import_module('commit_validation.validators.' + module_name)
validator = module.get_validator()
if ignore_validators and validator.__name__ in ignore_validators:
print(f"Disabled validation for '{validator.__name__}'")
else:
validator_classes.append(validator)
error_summary = {}
# Process validators
for validator_class in validator_classes:
validator = validator_class()
validator_name = validator.__class__.__name__
error_list = []
passed = validator.run(commit, errors = error_list)
if passed:
passed_count += 1
print(f'{validator.__class__.__name__} PASSED')
else:
failed_count += 1
print(f'{validator.__class__.__name__} FAILED')
error_summary[validator_name] = error_list
end_time = time.time()
if failed_count:
print("VALIDATION FAILURE SUMMARY")
for val_name in error_summary.keys():
errors = error_summary[val_name]
            if errors:
                if out_errors is not None:
                    # make the failures available to the caller, as documented
                    out_errors.extend(errors)
for error_message in errors:
first_line = True
for line in error_message.splitlines():
if first_line:
first_line = False
print(f'VALIDATOR_FAILED: {val_name} {line}')
else:
print(f' {line}') # extra detail lines do not need machine parsing
stats_strs = []
if failed_count > 0:
stats_strs.append(f'{failed_count} failed')
if passed_count > 0:
stats_strs.append(f'{passed_count} passed')
stats_str = ', '.join(stats_strs) + f' in {end_time - start_time:.2f}s'
print()
print(stats_str)
return failed_count == 0
def IsFileSkipped(file_name) -> bool:
if os.path.splitext(file_name)[1].lower() not in SOURCE_AND_SCRIPT_FILE_EXTENSIONS:
skipped = True
for pattern in SOURCE_AND_SCRIPT_FILE_PATTERNS:
if pattern.match(file_name):
skipped = False
break
return skipped
return False
class CommitValidator(abc.ABC):
"""A commit validator"""
@abc.abstractmethod
def run(self, commit: Commit, errors: List[str]) -> bool:
"""Validates a commit
:param commit: The commit to validate
:param errors: List of errors generated, append them to this list
:return: True if the commit is valid, and False otherwise
"""
pass
SOURCE_FILE_EXTENSIONS: Tuple[str, ...] = (
'.c', '.cc', '.cpp', '.cxx', '.h', '.hpp', '.hxx', '.inl', '.m', '.mm', '.cs', '.java'
)
"""File extensions for compiled source code"""
SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = (
'.py', '.lua', '.bat', '.cmd', '.sh', '.js'
)
"""File extensions for interpreted code"""
BUILD_FILE_EXTENSIONS: Tuple[str, ...] = (
'.cmake',
)
"""File extensions for build files"""
SOURCE_AND_SCRIPT_FILE_EXTENSIONS: Tuple[str, ...] = SOURCE_FILE_EXTENSIONS + SCRIPT_FILE_EXTENSIONS + BUILD_FILE_EXTENSIONS
"""File extensions for both compiled and interpreted code"""
BUILD_FILE_PATTERNS: Tuple[re.Pattern, ...] = (
re.compile(r'.*CMakeLists\.txt'),
re.compile(r'.*Jenkinsfile')
)
"""File patterns for build files"""
SOURCE_AND_SCRIPT_FILE_PATTERNS: Tuple[re.Pattern, ...] = BUILD_FILE_PATTERNS
EXCLUDED_VALIDATION_PATTERNS = [
'*/.git/*',
'*/3rdParty/*',
'*/__pycache__/*',
'*/External/*',
'build',
'Cache',
'*/Code/Framework/AzCore/azgnmx/azgnmx/*',
'Code/Tools/CryFXC',
'Code/Tools/HLSLCrossCompiler',
'Code/Tools/HLSLCrossCompilerMETAL',
'Docs',
'python/runtime',
'restricted/*/Tools/*RemoteControl',
'Tools/3dsmax',
'*/user/Cache/*',
'*/user/log/*',
]
| [
"importlib.import_module",
"re.compile",
"os.path.splitext",
"os.path.dirname",
"time.time",
"pkgutil.iter_modules"
] | [((1834, 1845), 'time.time', 'time.time', ([], {}), '()\n', (1843, 1845), False, 'import time\n'), ((2057, 2095), 'pkgutil.iter_modules', 'pkgutil.iter_modules', (['[validators_dir]'], {}), '([validators_dir])\n', (2077, 2095), False, 'import pkgutil\n'), ((3045, 3056), 'time.time', 'time.time', ([], {}), '()\n', (3054, 3056), False, 'import time\n'), ((5377, 5409), 're.compile', 're.compile', (['""".*CMakeLists\\\\.txt"""'], {}), "('.*CMakeLists\\\\.txt')\n", (5387, 5409), False, 'import re\n'), ((5415, 5442), 're.compile', 're.compile', (['""".*Jenkinsfile"""'], {}), "('.*Jenkinsfile')\n", (5425, 5442), False, 'import re\n'), ((1978, 2003), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (1993, 2003), False, 'import os\n'), ((2145, 2215), 'importlib.import_module', 'importlib.import_module', (["('commit_validation.validators.' + module_name)"], {}), "('commit_validation.validators.' + module_name)\n", (2168, 2215), False, 'import importlib\n'), ((4029, 4056), 'os.path.splitext', 'os.path.splitext', (['file_name'], {}), '(file_name)\n', (4045, 4056), False, 'import os\n')] |
import json
import re
from copy import copy
from logging import Formatter
from .profile import used_memory
from ..helper import colored
class ColorFormatter(Formatter):
"""Format the log into colored logs based on the log-level. """
MAPPING = {
'DEBUG': dict(color='white', on_color=None), # white
        'INFO': dict(color='white', on_color=None),  # white
'WARNING': dict(color='yellow', on_color='on_grey'), # yellow
'ERROR': dict(color='red', on_color=None), # 31 for red
'CRITICAL': dict(color='white', on_color='on_red'), # white on red bg
        'SUCCESS': dict(color='green', on_color=None),  # green
} #: log-level to color mapping
def format(self, record):
cr = copy(record)
seq = self.MAPPING.get(cr.levelname, self.MAPPING['INFO']) # default white
cr.msg = colored(cr.msg, **seq)
return super().format(cr)
class PlainFormatter(Formatter):
"""Remove all control chars from the log and format it as plain text
Also restrict the max-length of msg to 512
"""
def format(self, record):
cr = copy(record)
if isinstance(cr.msg, str):
cr.msg = re.sub(r'\u001b\[.*?[@-~]', '', str(cr.msg))[:512]
return super().format(cr)
class JsonFormatter(Formatter):
"""Format the log message as a JSON object so that it can be later used/parsed in browser with javascript. """
KEYS = {'created', 'filename', 'funcName', 'levelname', 'lineno', 'msg',
'module', 'name', 'pathname', 'process', 'thread', 'processName',
'threadName', 'log_id'} #: keys to extract from the log
def format(self, record):
cr = copy(record)
cr.msg = re.sub(r'\u001b\[.*?[@-~]', '', str(cr.msg))
return json.dumps(
{k: getattr(cr, k) for k in self.KEYS if hasattr(cr, k)},
sort_keys=True)
class ProfileFormatter(Formatter):
"""Format the log message as JSON object and add the current used memory into it"""
def format(self, record):
cr = copy(record)
if isinstance(cr.msg, dict):
cr.msg.update({k: getattr(cr, k) for k in ['created', 'module', 'process', 'thread']})
cr.msg['memory'] = used_memory(unit=1)
return json.dumps(cr.msg, sort_keys=True)
else:
return ''
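# --- Usage sketch (not part of the original module) ---------------------------
# Shows how one of these formatters is normally attached to a handler. The
# logger name 'example' and the format string are placeholders.
def _example_colored_logger():
    import logging
    handler = logging.StreamHandler()
    handler.setFormatter(ColorFormatter('%(asctime)s %(levelname)s %(message)s'))
    logger = logging.getLogger('example')
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)
    return logger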
| [
"copy.copy",
"json.dumps"
] | [((749, 761), 'copy.copy', 'copy', (['record'], {}), '(record)\n', (753, 761), False, 'from copy import copy\n'), ((1128, 1140), 'copy.copy', 'copy', (['record'], {}), '(record)\n', (1132, 1140), False, 'from copy import copy\n'), ((1701, 1713), 'copy.copy', 'copy', (['record'], {}), '(record)\n', (1705, 1713), False, 'from copy import copy\n'), ((2070, 2082), 'copy.copy', 'copy', (['record'], {}), '(record)\n', (2074, 2082), False, 'from copy import copy\n'), ((2289, 2323), 'json.dumps', 'json.dumps', (['cr.msg'], {'sort_keys': '(True)'}), '(cr.msg, sort_keys=True)\n', (2299, 2323), False, 'import json\n')] |
import sys
sys.setrecursionlimit(10000000)
input = lambda: sys.stdin.readline().rstrip()
n, x = map(int, input().split())
a = list(map(int, input().split()))
aa = list(filter(lambda b: b != x, a))
print(*aa)
| [
"sys.stdin.readline",
"sys.setrecursionlimit"
] | [((11, 42), 'sys.setrecursionlimit', 'sys.setrecursionlimit', (['(10000000)'], {}), '(10000000)\n', (32, 42), False, 'import sys\n'), ((58, 78), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (76, 78), False, 'import sys\n')] |
import re
from abc import ABC, abstractmethod
from typing import Any, Dict, Generator
class IncorrectVenueImplementation(Exception):
pass
# class AbstractVenue(metaclass=ABC):
class AbstractVenue(ABC):
def __init__(self):
self.url = ""
self.name = ""
self.city = ""
self.country = ""
self.pricepat_monetary = re.compile("[0-9.,]+.€")
self.pricepat_plain = re.compile("[0-9.,]+")
def get_venue_name(self) -> str:
return self.name
def get_city(self) -> str:
return self.city
def get_country(self) -> str:
return self.country
def event_sqlentity(self) -> Dict[str, str]:
return {"name": self.name,
"city": self.city,
"country": self.country}
def parse_price(self, info_tag: str) -> str:
prices_with_mon = self.pricepat_monetary.findall(info_tag)
prices = []
for price in prices_with_mon:
parsed_price = self.pricepat_plain.findall(price)
if len(parsed_price) == 0:
continue
prices.append("".join(parsed_price))
if len(prices) == 0:
return "0€"
elif len(prices) == 2:
in_advance, from_door = prices[0], prices[1]
return f"{in_advance}€/{from_door}€"
return "{}€".format("".join(prices))
# FIXME Proper class type checking
def __eq__(self, other):
return hasattr(other, "url") \
and other.url == self.url
@abstractmethod
def parse_events(self, data: Any) \
-> Generator[Dict[str, Any], None, None]:
pass
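# --- Example (not part of the original module) -------------------------------
# Sketch of a concrete venue; the venue details and the assumed shape of
# `data` (an iterable of dicts with "name", "date" and "info" keys) are
# placeholders, since the real scrapers/parsers are defined elsewhere.
class ExampleClub(AbstractVenue):
    def __init__(self):
        super().__init__()
        self.url = "https://example.com/events"
        self.name = "Example Club"
        self.city = "Helsinki"
        self.country = "Finland"

    def parse_events(self, data: Any) \
            -> Generator[Dict[str, Any], None, None]:
        # A real implementation would parse HTML/JSON here.
        for event in data:
            yield {
                "venue": self.event_sqlentity(),
                "name": event.get("name", ""),
                "date": event.get("date", ""),
                "price": self.parse_price(event.get("info", "")),
            }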
| [
"re.compile"
] | [((362, 386), 're.compile', 're.compile', (['"""[0-9.,]+.€"""'], {}), "('[0-9.,]+.€')\n", (372, 386), False, 'import re\n'), ((417, 439), 're.compile', 're.compile', (['"""[0-9.,]+"""'], {}), "('[0-9.,]+')\n", (427, 439), False, 'import re\n')] |
# Copyright 2018 Johns Hopkins University. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import namedtuple
import tensorflow as tf
from tensorflow.python.framework import sparse_tensor as sparse_tensor_lib
from tensorflow.python.ops import parsing_ops
class Pipeline(object):
def __init__(self, tfrecord_file, feature_map, batch_size=32,
num_threads=4, prefetch_buffer_size=1,
static_max_length=None, shuffle_buffer_size=10000,
shuffle=True, num_epochs=None, one_shot=False):
self._feature_map = feature_map
self._batch_size = batch_size
self._static_max_length = static_max_length
# Initialize the dataset
dataset = tf.data.TFRecordDataset(tfrecord_file)
# Maybe randomize
if shuffle:
dataset = dataset.shuffle(shuffle_buffer_size)
# Maybe repeat
if num_epochs is None:
dataset = dataset.repeat() # repeat indefinitely
elif num_epochs > 1:
dataset = dataset.repeat(count=num_epochs)
dataset = dataset.batch(batch_size)
dataset = dataset.map(self.parse_example,
num_parallel_calls=num_threads)
# Pre-fetch a batch for faster processing
dataset = dataset.prefetch(prefetch_buffer_size)
# Get the iterator
if one_shot:
self._iterator = dataset.make_one_shot_iterator()
else:
self._iterator = dataset.make_initializable_iterator()
self._init_op = self._iterator.initializer
# Get outputs
self._outputs = self._iterator.get_next()
# Map to features
index = 0
result = {}
for key in sorted(self._feature_map.keys()):
result[key] = self._outputs[index]
index += 1
self._result = result
def pad(self, t):
s = tf.shape(t)
paddings = [[0, 0], [0, self._static_max_length - s[1]]]
x = tf.pad(t, paddings, 'CONSTANT', constant_values=0)
x = tf.reshape(x, [s[0], self._static_max_length])
assert x.get_shape().as_list()[1] is self._static_max_length
return x
def parse_example(self, serialized):
parsed = parsing_ops.parse_example(serialized, self._feature_map)
result = []
for key in sorted(self._feature_map.keys()):
val = parsed[key]
if isinstance(val, sparse_tensor_lib.SparseTensor):
dense_tensor = tf.sparse_tensor_to_dense(val)
if self._static_max_length is not None:
dense_tensor = self.pad(dense_tensor)
result.append(dense_tensor)
else:
result.append(val)
return tuple(result)
@property
def iterator(self):
return self._iterator
@property
def init_op(self):
return self._init_op
@property
def batch(self):
return self._result
# namedtuple for bucket_info object (used in Pipeline)
# func: a mapping from examples to tf.int64 keys
# pads: a set of tf shapes that correspond to padded examples
bucket_info = namedtuple("bucket_info", "func pads")
def int64_feature(value):
""" Takes a single int (e.g. 3) and converts it to a tf Feature """
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def int64_list_feature(sequence):
""" Sequence of ints (e.g [1,2,3]) to TF feature """
return tf.train.Feature(int64_list=tf.train.Int64List(value=sequence))
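# --- Usage sketch (not part of the original module) ---------------------------
# Illustrates a feature_map accepted by Pipeline/parse_example. The feature
# names and the tfrecord file name are placeholders; the spec uses the TF1-era
# API that the rest of this module targets.
def _example_pipeline(tfrecord_file="train.tfrecord"):
    feature_map = {
        "label": tf.FixedLenFeature([], tf.int64),
        "tokens": tf.VarLenFeature(tf.int64),
    }
    pipeline = Pipeline(tfrecord_file, feature_map, batch_size=16,
                        static_max_length=128, one_shot=True)
    # 'tokens' arrives as a SparseTensor and is densified/padded to
    # static_max_length by parse_example/pad above.
    return pipeline.batch  # dict: feature name -> batched tensor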
| [
"tensorflow.data.TFRecordDataset",
"collections.namedtuple",
"tensorflow.shape",
"tensorflow.pad",
"tensorflow.python.ops.parsing_ops.parse_example",
"tensorflow.train.Int64List",
"tensorflow.reshape",
"tensorflow.sparse_tensor_to_dense"
] | [((3882, 3920), 'collections.namedtuple', 'namedtuple', (['"""bucket_info"""', '"""func pads"""'], {}), "('bucket_info', 'func pads')\n", (3892, 3920), False, 'from collections import namedtuple\n'), ((1446, 1484), 'tensorflow.data.TFRecordDataset', 'tf.data.TFRecordDataset', (['tfrecord_file'], {}), '(tfrecord_file)\n', (1469, 1484), True, 'import tensorflow as tf\n'), ((2630, 2641), 'tensorflow.shape', 'tf.shape', (['t'], {}), '(t)\n', (2638, 2641), True, 'import tensorflow as tf\n'), ((2719, 2769), 'tensorflow.pad', 'tf.pad', (['t', 'paddings', '"""CONSTANT"""'], {'constant_values': '(0)'}), "(t, paddings, 'CONSTANT', constant_values=0)\n", (2725, 2769), True, 'import tensorflow as tf\n'), ((2782, 2828), 'tensorflow.reshape', 'tf.reshape', (['x', '[s[0], self._static_max_length]'], {}), '(x, [s[0], self._static_max_length])\n', (2792, 2828), True, 'import tensorflow as tf\n'), ((2974, 3030), 'tensorflow.python.ops.parsing_ops.parse_example', 'parsing_ops.parse_example', (['serialized', 'self._feature_map'], {}), '(serialized, self._feature_map)\n', (2999, 3030), False, 'from tensorflow.python.ops import parsing_ops\n'), ((4060, 4093), 'tensorflow.train.Int64List', 'tf.train.Int64List', ([], {'value': '[value]'}), '(value=[value])\n', (4078, 4093), True, 'import tensorflow as tf\n'), ((4227, 4261), 'tensorflow.train.Int64List', 'tf.train.Int64List', ([], {'value': 'sequence'}), '(value=sequence)\n', (4245, 4261), True, 'import tensorflow as tf\n'), ((3229, 3259), 'tensorflow.sparse_tensor_to_dense', 'tf.sparse_tensor_to_dense', (['val'], {}), '(val)\n', (3254, 3259), True, 'import tensorflow as tf\n')] |
from py65.devices import mpu6502
from py65.utils.devices import make_instruction_decorator
class MPU(mpu6502.MPU):
def __init__(self, *args, **kwargs):
mpu6502.MPU.__init__(self, *args, **kwargs)
self.name = '65C02'
self.waiting = False
def step(self):
if self.waiting:
self.processorCycles += 1
else:
mpu6502.MPU.step(self)
return self
# Make copies of the lists
instruct = mpu6502.MPU.instruct[:]
cycletime = mpu6502.MPU.cycletime[:]
extracycles = mpu6502.MPU.extracycles[:]
disassemble = mpu6502.MPU.disassemble[:]
instruction = make_instruction_decorator(instruct, disassemble,
cycletime, extracycles)
# addressing modes
def ZeroPageIndirectAddr(self):
return self.WordAt( 255 & (self.ByteAt(self.pc)))
def AccumulatorAddr(self):
return self.a
# operations
def opRMB(self, x, mask):
address = x()
self.memory[address] &= mask
def opSMB(self, x, mask):
address = x()
self.memory[address] |= mask
def opSTZ(self, x):
self.memory[x()] = 0x00
def opTSB(self, x):
address = x()
m = self.memory[address]
self.p &= ~self.ZERO
z = m & self.a
if z != 0:
self.p |= self.ZERO
self.memory[address] = m | self.a
def opTRB(self, x):
address = x()
m = self.memory[address]
self.p &= ~self.ZERO
z = m & self.a
if z != 0:
self.p |= self.ZERO
self.memory[address] = m & ~self.a
# instructions
@instruction(name="RMB0", mode="zpg", cycles=5)
def inst_0x07(self):
self.opRMB(self.ZeroPageAddr, 0xFE)
self.pc += 1
@instruction(name="ORA", mode="zpi", cycles=5)
def inst_0x12(self):
self.opORA(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="RMB1", mode="zpg", cycles=5)
def inst_0x17(self):
self.opRMB(self.ZeroPageAddr, 0xFD)
self.pc += 1
@instruction(name="RMB2", mode="zpg", cycles=5)
def inst_0x27(self):
self.opRMB(self.ZeroPageAddr, 0xFB)
self.pc += 1
@instruction(name="AND", mode="zpi", cycles=5)
def inst_0x32(self):
self.opAND(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="BIT", mode="zpx", cycles=4)
def inst_0x34(self):
self.opBIT(self.ZeroPageXAddr)
self.pc += 1
@instruction(name="RMB3", mode="zpg", cycles=5)
def inst_0x37(self):
self.opRMB(self.ZeroPageAddr, 0xF7)
self.pc += 1
@instruction(name="BIT", mode="abx", cycles=4)
def inst_0x3c(self):
self.opBIT(self.AbsoluteXAddr)
self.pc += 2
@instruction(name="RMB4", mode="zpg", cycles=5)
def inst_0x47(self):
self.opRMB(self.ZeroPageAddr, 0xEF)
self.pc += 1
@instruction(name="EOR", mode="zpi", cycles=5)
def inst_0x52(self):
self.opEOR(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="RMB5", mode="zpg", cycles=5)
def inst_0x57(self):
self.opRMB(self.ZeroPageAddr, 0xDF)
self.pc += 1
@instruction(name="PHY", mode="imp", cycles=3)
def inst_0x5a(self):
self.stPush(self.y)
@instruction(name="STZ", mode="imp", cycles=3)
def inst_0x64(self):
self.opSTZ(self.ZeroPageAddr)
self.pc += 1
@instruction(name="RMB6", mode="zpg", cycles=5)
def inst_0x67(self):
self.opRMB(self.ZeroPageAddr, 0xBF)
self.pc += 1
@instruction(name="ADC", mode="zpi", cycles=5)
def inst_0x72(self):
self.opADC(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="STZ", mode="zpx", cycles=4)
def inst_0x74(self):
self.opSTZ(self.ZeroPageXAddr)
self.pc += 1
@instruction(name="PHY", mode="imp", cycles=4)
def inst_0x7a(self):
self.y = self.stPop()
self.FlagsNZ(self.y)
@instruction(name="RMB7", mode="zpg", cycles=5)
def inst_0x77(self):
self.opRMB(self.ZeroPageAddr, 0x7F)
self.pc += 1
@instruction(name="SMB0", mode="zpg", cycles=5)
def inst_0x87(self):
self.opSMB(self.ZeroPageAddr, 0x01)
self.pc += 1
@instruction(name="STA", mode="zpi", cycles=5)
def inst_0x92(self):
self.opSTA(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="SMB1", mode="zpg", cycles=5)
def inst_0x97(self):
self.opSMB(self.ZeroPageAddr, 0x02)
self.pc += 1
@instruction(name="STZ", mode="abs", cycles=4)
def inst_0x9c(self):
self.opSTZ(self.AbsoluteAddr)
self.pc += 2
@instruction(name="STZ", mode="abx", cycles=5)
def inst_0x9e(self):
self.opSTZ(self.AbsoluteXAddr)
self.pc += 2
@instruction(name="SMB2", mode="zpg", cycles=5)
def inst_0xa7(self):
self.opSMB(self.ZeroPageAddr, 0x04)
self.pc += 1
@instruction(name="LDA", mode="zpi", cycles=5)
def inst_0xb2(self):
self.opLDA(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="SMB3", mode="zpg", cycles=5)
def inst_0xb7(self):
self.opSMB(self.ZeroPageAddr, 0x08)
self.pc += 1
@instruction(name="SMB4", mode="zpg", cycles=5)
def inst_0xc7(self):
self.opSMB(self.ZeroPageAddr, 0x10)
self.pc += 1
@instruction(name="SMB5", mode="zpg", cycles=5)
def inst_0xd7(self):
self.opSMB(self.ZeroPageAddr, 0x20)
self.pc += 1
@instruction(name="PHX", mode="imp", cycles=3)
def inst_0xda(self):
self.stPush(self.x)
@instruction(name="SMB6", mode="zpg", cycles=5)
def inst_0xe7(self):
self.opSMB(self.ZeroPageAddr, 0x40)
self.pc += 1
@instruction(name="SMB7", mode="zpg", cycles=5)
def inst_0xf7(self):
self.opSMB(self.ZeroPageAddr, 0x80)
self.pc += 1
@instruction(name="PLX", mode="imp", cycles=4)
def inst_0xfa(self):
self.x = self.stPop()
self.FlagsNZ(self.x)
@instruction(name="TSB", mode="zpg", cycles=5)
def inst_0x04(self):
self.opTSB(self.ZeroPageAddr)
self.pc += 1
@instruction(name="TSB", mode="abs", cycles=6)
def inst_0x0c(self):
self.opTSB(self.AbsoluteAddr)
self.pc += 2
@instruction(name="TRB", mode="zpg", cycles=5)
def inst_0x14(self):
self.opTRB(self.ZeroPageAddr)
self.pc += 1
@instruction(name="INC", mode="acc", cycles=2)
def inst_0x1a(self):
self.opINCR(None)
@instruction(name="TRB", mode="abs", cycles=6)
def inst_0x1c(self):
self.opTRB(self.AbsoluteAddr)
self.pc += 2
@instruction(name="DEC", mode="acc", cycles=2)
def inst_0x3a(self):
self.opDECR(None)
@instruction(name="BRA", mode="rel", cycles=1, extracycles=1)
def inst_0x80(self):
self.BranchRelAddr()
@instruction(name="WAI", mode='imp', cycles=3)
def inst_0xCB(self):
self.waiting = True
@instruction(name="CMP", mode='zpi', cycles=6) # Don't know cycles
def inst_0xD2(self):
self.opCPY(self.ZeroPageIndirectAddr)
self.pc += 1
@instruction(name="SBC", mode="zpi", cycles=5)
def inst_0xf2(self):
self.opSBC(self.ZeroPageIndirectAddr)
self.pc += 1
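# --- Usage sketch (not part of the original module) ---------------------------
# Drives the 65C02 core the same way as the base py65 6502 MPU; assumes the
# base class provides a default 64K memory map when constructed without args.
def _example_wai():
    mpu = MPU()
    mpu.memory[0x0200] = 0xCB  # WAI
    mpu.pc = 0x0200
    mpu.step()                 # executes WAI and sets mpu.waiting
    mpu.step()                 # while waiting, step() only burns a cycle
    return mpu.waiting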
| [
"py65.devices.mpu6502.MPU.step",
"py65.utils.devices.make_instruction_decorator",
"py65.devices.mpu6502.MPU.__init__"
] | [((646, 719), 'py65.utils.devices.make_instruction_decorator', 'make_instruction_decorator', (['instruct', 'disassemble', 'cycletime', 'extracycles'], {}), '(instruct, disassemble, cycletime, extracycles)\n', (672, 719), False, 'from py65.utils.devices import make_instruction_decorator\n'), ((166, 209), 'py65.devices.mpu6502.MPU.__init__', 'mpu6502.MPU.__init__', (['self', '*args'], {}), '(self, *args, **kwargs)\n', (186, 209), False, 'from py65.devices import mpu6502\n'), ((377, 399), 'py65.devices.mpu6502.MPU.step', 'mpu6502.MPU.step', (['self'], {}), '(self)\n', (393, 399), False, 'from py65.devices import mpu6502\n')] |
import pandas as pd
import shutil
import os
import io
from ms_mint.Mint import Mint
from pathlib import Path as P
from ms_mint.io import (
ms_file_to_df,
mzml_to_pandas_df_pyteomics,
convert_ms_file_to_feather,
convert_ms_file_to_parquet,
MZMLB_AVAILABLE,
)
from paths import (
TEST_MZML,
TEST_MZXML,
TEST_PARQUET,
TEST_MZMLB_POS,
TEST_MZML_POS,
TEST_MZML_NEG,
)
def test__ms_file_to_df__mzML():
result = ms_file_to_df(TEST_MZML)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__ms_file_to_df__mzML_timeunit_minutes():
result = ms_file_to_df(TEST_MZML, time_unit="minutes")
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__ms_file_to_df__mzXML():
result = ms_file_to_df(TEST_MZXML)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__mzml_to_pandas_df_pyteomics_pos():
result = mzml_to_pandas_df_pyteomics(TEST_MZML_POS)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
assert all(result.polarity == "+"), f'Polarity should be "+"\n{result}'
def test__mzml_to_pandas_df_pyteomics_neg():
result = mzml_to_pandas_df_pyteomics(TEST_MZML_NEG)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
assert all(result.polarity == "-"), f'Polarity should be "-"\n{result}'
def test__read_parquet():
result = ms_file_to_df(TEST_PARQUET)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__write_read_hdf(tmpdir):
df = ms_file_to_df(TEST_PARQUET)
fn = P(tmpdir) / "file.hdf"
df.to_hdf(fn, key="data")
result = ms_file_to_df(fn)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
def test__read_mzMLb(tmpdir):
if not MZMLB_AVAILABLE:
return None
result = ms_file_to_df(TEST_MZMLB_POS)
expected_cols = [
"scan_id",
"ms_level",
"polarity",
"scan_time_min",
"mz",
"intensity",
]
assert isinstance(result, pd.DataFrame), f"{type(result)} is not a dataframe"
assert expected_cols == result.columns.to_list(), result.columns
# assert all(result.polarity == '+'), f'Polarity should be "+"\n{result}'
def test__convert_ms_file_to_feather(tmpdir):
print(tmpdir)
shutil.copy(TEST_MZML, tmpdir)
fn = P(tmpdir) / P(TEST_MZML).name
fn_out = fn.with_suffix(".feather")
print(fn, fn_out)
convert_ms_file_to_feather(fn)
assert fn_out.is_file(), f"File not generated {fn_out}"
df = ms_file_to_df(fn)
df_fea = ms_file_to_df(fn_out)
assert df_fea.equals(df), "DataFrames not equal"
def test__convert_ms_file_to_parquet(tmpdir):
print(tmpdir)
shutil.copy(TEST_MZML, tmpdir)
fn = P(tmpdir) / P(TEST_MZML).name
fn_out = fn.with_suffix(".parquet")
print(fn, fn_out)
convert_ms_file_to_parquet(fn)
assert fn_out.is_file(), f"File not generated {fn_out}"
df = ms_file_to_df(fn)
df_fea = ms_file_to_df(fn_out)
assert df_fea.equals(df), "DataFrames not equal"
def test__export_to_excel(tmp_path):
filename = os.path.join(tmp_path, "output.xlsx")
mint = Mint(verbose=True)
mint.ms_files = "tests/data/test.mzXML"
mint.run()
mint.export(filename)
assert os.path.isfile(filename)
def test__export_to_excel_without_fn():
mint = Mint(verbose=True)
mint.ms_files = TEST_MZXML
mint.targets = pd.DataFrame(
{
"peak_label": ["A"],
"mz_mean": [200],
"mz_width": [10],
"intensity_threshold": [0],
"rt_min": [0],
"rt_max": [10],
"targets_filename": ["unknown"],
}
)
mint.run()
buffer = mint.export()
assert isinstance(buffer, io.BytesIO)
df = pd.read_excel(buffer, sheet_name="Results")
assert len(df) == 1, len(df)
assert df.loc[0, "peak_label"] == "A", df.loc[0, "peak_label"]
assert df.loc[0, "ms_file"] == P(TEST_MZXML).name, df.loc[0, "ms_file"]
| [
"pathlib.Path",
"os.path.join",
"ms_mint.io.mzml_to_pandas_df_pyteomics",
"os.path.isfile",
"ms_mint.Mint.Mint",
"shutil.copy",
"pandas.read_excel",
"pandas.DataFrame",
"ms_mint.io.convert_ms_file_to_parquet",
"ms_mint.io.ms_file_to_df",
"ms_mint.io.convert_ms_file_to_feather"
] | [((459, 483), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['TEST_MZML'], {}), '(TEST_MZML)\n', (472, 483), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((847, 892), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['TEST_MZML'], {'time_unit': '"""minutes"""'}), "(TEST_MZML, time_unit='minutes')\n", (860, 892), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((1240, 1265), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['TEST_MZXML'], {}), '(TEST_MZXML)\n', (1253, 1265), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((1624, 1666), 'ms_mint.io.mzml_to_pandas_df_pyteomics', 'mzml_to_pandas_df_pyteomics', (['TEST_MZML_POS'], {}), '(TEST_MZML_POS)\n', (1651, 1666), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((2101, 2143), 'ms_mint.io.mzml_to_pandas_df_pyteomics', 'mzml_to_pandas_df_pyteomics', (['TEST_MZML_NEG'], {}), '(TEST_MZML_NEG)\n', (2128, 2143), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((2559, 2586), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['TEST_PARQUET'], {}), '(TEST_PARQUET)\n', (2572, 2586), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((2930, 2957), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['TEST_PARQUET'], {}), '(TEST_PARQUET)\n', (2943, 2957), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((3033, 3050), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['fn'], {}), '(fn)\n', (3046, 3050), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((3442, 3471), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['TEST_MZMLB_POS'], {}), '(TEST_MZMLB_POS)\n', (3455, 3471), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((3918, 3948), 'shutil.copy', 'shutil.copy', (['TEST_MZML', 'tmpdir'], {}), '(TEST_MZML, tmpdir)\n', (3929, 3948), False, 'import shutil\n'), ((4054, 4084), 'ms_mint.io.convert_ms_file_to_feather', 'convert_ms_file_to_feather', (['fn'], {}), '(fn)\n', (4080, 4084), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((4154, 4171), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['fn'], {}), '(fn)\n', (4167, 4171), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((4185, 4206), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['fn_out'], {}), '(fn_out)\n', (4198, 4206), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((4330, 4360), 'shutil.copy', 'shutil.copy', (['TEST_MZML', 'tmpdir'], {}), '(TEST_MZML, tmpdir)\n', (4341, 4360), False, 
'import shutil\n'), ((4466, 4496), 'ms_mint.io.convert_ms_file_to_parquet', 'convert_ms_file_to_parquet', (['fn'], {}), '(fn)\n', (4492, 4496), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((4566, 4583), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['fn'], {}), '(fn)\n', (4579, 4583), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((4597, 4618), 'ms_mint.io.ms_file_to_df', 'ms_file_to_df', (['fn_out'], {}), '(fn_out)\n', (4610, 4618), False, 'from ms_mint.io import ms_file_to_df, mzml_to_pandas_df_pyteomics, convert_ms_file_to_feather, convert_ms_file_to_parquet, MZMLB_AVAILABLE\n'), ((4726, 4763), 'os.path.join', 'os.path.join', (['tmp_path', '"""output.xlsx"""'], {}), "(tmp_path, 'output.xlsx')\n", (4738, 4763), False, 'import os\n'), ((4775, 4793), 'ms_mint.Mint.Mint', 'Mint', ([], {'verbose': '(True)'}), '(verbose=True)\n', (4779, 4793), False, 'from ms_mint.Mint import Mint\n'), ((4890, 4914), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (4904, 4914), False, 'import os\n'), ((4968, 4986), 'ms_mint.Mint.Mint', 'Mint', ([], {'verbose': '(True)'}), '(verbose=True)\n', (4972, 4986), False, 'from ms_mint.Mint import Mint\n'), ((5037, 5208), 'pandas.DataFrame', 'pd.DataFrame', (["{'peak_label': ['A'], 'mz_mean': [200], 'mz_width': [10],\n 'intensity_threshold': [0], 'rt_min': [0], 'rt_max': [10],\n 'targets_filename': ['unknown']}"], {}), "({'peak_label': ['A'], 'mz_mean': [200], 'mz_width': [10],\n 'intensity_threshold': [0], 'rt_min': [0], 'rt_max': [10],\n 'targets_filename': ['unknown']})\n", (5049, 5208), True, 'import pandas as pd\n'), ((5403, 5446), 'pandas.read_excel', 'pd.read_excel', (['buffer'], {'sheet_name': '"""Results"""'}), "(buffer, sheet_name='Results')\n", (5416, 5446), True, 'import pandas as pd\n'), ((2967, 2976), 'pathlib.Path', 'P', (['tmpdir'], {}), '(tmpdir)\n', (2968, 2976), True, 'from pathlib import Path as P\n'), ((3958, 3967), 'pathlib.Path', 'P', (['tmpdir'], {}), '(tmpdir)\n', (3959, 3967), True, 'from pathlib import Path as P\n'), ((4370, 4379), 'pathlib.Path', 'P', (['tmpdir'], {}), '(tmpdir)\n', (4371, 4379), True, 'from pathlib import Path as P\n'), ((3970, 3982), 'pathlib.Path', 'P', (['TEST_MZML'], {}), '(TEST_MZML)\n', (3971, 3982), True, 'from pathlib import Path as P\n'), ((4382, 4394), 'pathlib.Path', 'P', (['TEST_MZML'], {}), '(TEST_MZML)\n', (4383, 4394), True, 'from pathlib import Path as P\n'), ((5582, 5595), 'pathlib.Path', 'P', (['TEST_MZXML'], {}), '(TEST_MZXML)\n', (5583, 5595), True, 'from pathlib import Path as P\n')] |
#!/usr/bin/env python3
#
# MIT License
#
# Copyright (c) 2020-2022 EntySec
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import json
import os
from hatsploit.core.cli.badges import Badges
from hatsploit.lib.config import Config
from hatsploit.lib.storage import LocalStorage
class DB:
badges = Badges()
config = Config()
local_storage = LocalStorage()
def disconnect_payload_database(self, name):
if self.local_storage.get("connected_payload_databases"):
if name in self.local_storage.get("connected_payload_databases"):
self.local_storage.delete_element("connected_payload_databases", name)
self.local_storage.delete_element("payloads", name)
return
self.badges.print_error("No such payload database connected!")
def disconnect_module_database(self, name):
if self.local_storage.get("connected_module_databases"):
if name in self.local_storage.get("connected_module_databases"):
self.local_storage.delete_element("connected_module_databases", name)
self.local_storage.delete_element("modules", name)
return
self.badges.print_error("No such module database connected!")
def disconnect_plugin_database(self, name):
if self.local_storage.get("connected_plugin_databases"):
if name in self.local_storage.get("connected_plugin_databases"):
self.local_storage.delete_element("connected_plugin_databases", name)
self.local_storage.delete_element("plugins", name)
return
self.badges.print_error("No such plugin database connected!")
def connect_payload_database(self, name, path):
if self.local_storage.get("connected_payload_databases"):
if name in self.local_storage.get("connected_payload_databases"):
self.badges.print_error("Payload database already connected!")
return
if not os.path.exists(path) or not str.endswith(path, "json"):
self.badges.print_error("Not a payload database!")
return
try:
database = json.load(open(path))
except Exception:
self.badges.print_error("Failed to connect payload database!")
return
if '__database__' not in database:
self.badges.print_error("No __database__ section found!")
return
if database['__database__']['type'] != "payloads":
self.badges.print_error("Not a payload database!")
return
del database['__database__']
payloads = {
name: database
}
data = {
name: {
'path': path
}
}
if not self.local_storage.get("connected_payload_databases"):
self.local_storage.set("connected_payload_databases", {})
self.local_storage.update("connected_payload_databases", data)
if self.local_storage.get("payloads"):
self.local_storage.update("payloads", payloads)
else:
self.local_storage.set("payloads", payloads)
def connect_module_database(self, name, path):
if self.local_storage.get("connected_module_databases"):
if name in self.local_storage.get("connected_module_databases"):
self.badges.print_error("Module database already connected!")
return
if not os.path.exists(path) or not str.endswith(path, "json"):
self.badges.print_error("Not a module database!")
return
try:
database = json.load(open(path))
except Exception:
self.badges.print_error("Failed to connect module database!")
return
if '__database__' not in database:
self.badges.print_error("No __database__ section found!")
return
if database['__database__']['type'] != "modules":
self.badges.print_error("Not a module database!")
return
del database['__database__']
modules = {
name: database
}
data = {
name: {
'path': path
}
}
if not self.local_storage.get("connected_module_databases"):
self.local_storage.set("connected_module_databases", {})
self.local_storage.update("connected_module_databases", data)
if self.local_storage.get("modules"):
self.local_storage.update("modules", modules)
else:
self.local_storage.set("modules", modules)
def connect_plugin_database(self, name, path):
if self.local_storage.get("connected_plugin_databases"):
if name in self.local_storage.get("connected_plugin_databases"):
self.badges.print_error("Plugin database already connected!")
return
if not os.path.exists(path) or not str.endswith(path, "json"):
self.badges.print_error("Not a database!")
return
try:
database = json.load(open(path))
except Exception:
self.badges.print_error("Failed to connect plugin database!")
return
if '__database__' not in database:
self.badges.print_error("No __database__ section found!")
return
if database['__database__']['type'] != "plugins":
self.badges.print_error("Not a plugin database!")
return
del database['__database__']
plugins = {
name: database
}
data = {
name: {
'path': path
}
}
if not self.local_storage.get("connected_plugin_databases"):
self.local_storage.set("connected_plugin_databases", {})
self.local_storage.update("connected_plugin_databases", data)
if self.local_storage.get("plugins"):
self.local_storage.update("plugins", plugins)
else:
self.local_storage.set("plugins", plugins)
| [
"os.path.exists",
"hatsploit.core.cli.badges.Badges",
"hatsploit.lib.storage.LocalStorage",
"hatsploit.lib.config.Config"
] | [((1316, 1324), 'hatsploit.core.cli.badges.Badges', 'Badges', ([], {}), '()\n', (1322, 1324), False, 'from hatsploit.core.cli.badges import Badges\n'), ((1338, 1346), 'hatsploit.lib.config.Config', 'Config', ([], {}), '()\n', (1344, 1346), False, 'from hatsploit.lib.config import Config\n'), ((1367, 1381), 'hatsploit.lib.storage.LocalStorage', 'LocalStorage', ([], {}), '()\n', (1379, 1381), False, 'from hatsploit.lib.storage import LocalStorage\n'), ((3013, 3033), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (3027, 3033), False, 'import os\n'), ((4491, 4511), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (4505, 4511), False, 'import os\n'), ((5956, 5976), 'os.path.exists', 'os.path.exists', (['path'], {}), '(path)\n', (5970, 5976), False, 'import os\n')] |
from bluesky.plans import scan
from bluesky.simulators import (print_summary, print_summary_wrapper,
summarize_plan,
check_limits,
plot_raster_path)
import pytest
from bluesky.plans import grid_scan
def test_print_summary(hw):
det = hw.det
motor = hw.motor
print_summary(scan([det], motor, -1, 1, 10)) # old name
summarize_plan(scan([det], motor, -1, 1, 10)) # new name
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
def test_old_module_name(hw):
det = hw.det
motor = hw.motor
motor1 = hw.motor1
motor2 = hw.motor2
from bluesky.plan_tools import (print_summary, print_summary_wrapper,
plot_raster_path)
with pytest.warns(UserWarning):
print_summary(scan([det], motor, -1, 1, 10))
with pytest.warns(UserWarning):
list(print_summary_wrapper(scan([det], motor, -1, 1, 10)))
with pytest.warns(UserWarning):
plan = grid_scan([det], motor1, -5, 5, 10, motor2, -7, 7, 15, True)
plot_raster_path(plan, 'motor1', 'motor2', probe_size=.3)
def test_check_limits(RE, hw):
det = hw.det
motor = hw.motor
# The motor object does not currently implement limits.
# Use an assert to help us out if this changes in the future.
assert not hasattr(motor, 'limits')
# # check_limits should warn if it can't find check_value
# TODO: Is there _any_ object to test?
# with pytest.warns(UserWarning):
# check_limits(scan([det], motor, -1, 1, 3))
# monkey-patch some limits
motor.limits = (-2, 2)
# check_limits should do nothing here
check_limits(scan([det], motor, -1, 1, 3))
# check_limits should error if limits are exceeded only if object raises
# this object does not raise
check_limits(scan([det], motor, -3, 3, 3))
# check_limits should raise if limits are equal only if object raises
# this object does not raise
motor.limits = (2, 2)
check_limits(scan([det], motor, -1, 1, 3))
def test_check_limits_needs_RE():
with pytest.raises(RuntimeError) as ctx:
check_limits([])
assert str(ctx.value) == "Bluesky event loop not running"
def test_plot_raster_path(hw):
det = hw.det
motor1 = hw.motor1
motor2 = hw.motor2
plan = grid_scan([det], motor1, -5, 5, 10, motor2, -7, 7, 15, True)
plot_raster_path(plan, 'motor1', 'motor2', probe_size=.3)
| [
"bluesky.plans.scan",
"bluesky.simulators.check_limits",
"bluesky.plans.grid_scan",
"pytest.raises",
"bluesky.plan_tools.plot_raster_path",
"pytest.warns"
] | [((2359, 2419), 'bluesky.plans.grid_scan', 'grid_scan', (['[det]', 'motor1', '(-5)', '(5)', '(10)', 'motor2', '(-7)', '(7)', '(15)', '(True)'], {}), '([det], motor1, -5, 5, 10, motor2, -7, 7, 15, True)\n', (2368, 2419), False, 'from bluesky.plans import grid_scan\n'), ((2424, 2482), 'bluesky.plan_tools.plot_raster_path', 'plot_raster_path', (['plan', '"""motor1"""', '"""motor2"""'], {'probe_size': '(0.3)'}), "(plan, 'motor1', 'motor2', probe_size=0.3)\n", (2440, 2482), False, 'from bluesky.plan_tools import print_summary, print_summary_wrapper, plot_raster_path\n'), ((381, 410), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-1)', '(1)', '(10)'], {}), '([det], motor, -1, 1, 10)\n', (385, 410), False, 'from bluesky.plans import scan\n'), ((443, 472), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-1)', '(1)', '(10)'], {}), '([det], motor, -1, 1, 10)\n', (447, 472), False, 'from bluesky.plans import scan\n'), ((802, 827), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (814, 827), False, 'import pytest\n'), ((891, 916), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (903, 916), False, 'import pytest\n'), ((994, 1019), 'pytest.warns', 'pytest.warns', (['UserWarning'], {}), '(UserWarning)\n', (1006, 1019), False, 'import pytest\n'), ((1036, 1096), 'bluesky.plans.grid_scan', 'grid_scan', (['[det]', 'motor1', '(-5)', '(5)', '(10)', 'motor2', '(-7)', '(7)', '(15)', '(True)'], {}), '([det], motor1, -5, 5, 10, motor2, -7, 7, 15, True)\n', (1045, 1096), False, 'from bluesky.plans import grid_scan\n'), ((1105, 1163), 'bluesky.plan_tools.plot_raster_path', 'plot_raster_path', (['plan', '"""motor1"""', '"""motor2"""'], {'probe_size': '(0.3)'}), "(plan, 'motor1', 'motor2', probe_size=0.3)\n", (1121, 1163), False, 'from bluesky.plan_tools import print_summary, print_summary_wrapper, plot_raster_path\n'), ((1715, 1743), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-1)', '(1)', '(3)'], {}), '([det], motor, -1, 1, 3)\n', (1719, 1743), False, 'from bluesky.plans import scan\n'), ((1873, 1901), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-3)', '(3)', '(3)'], {}), '([det], motor, -3, 3, 3)\n', (1877, 1901), False, 'from bluesky.plans import scan\n'), ((2054, 2082), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-1)', '(1)', '(3)'], {}), '([det], motor, -1, 1, 3)\n', (2058, 2082), False, 'from bluesky.plans import scan\n'), ((2129, 2156), 'pytest.raises', 'pytest.raises', (['RuntimeError'], {}), '(RuntimeError)\n', (2142, 2156), False, 'import pytest\n'), ((2173, 2189), 'bluesky.simulators.check_limits', 'check_limits', (['[]'], {}), '([])\n', (2185, 2189), False, 'from bluesky.simulators import print_summary, print_summary_wrapper, summarize_plan, check_limits, plot_raster_path\n'), ((517, 546), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-1)', '(1)', '(10)'], {}), '([det], motor, -1, 1, 10)\n', (521, 546), False, 'from bluesky.plans import scan\n'), ((851, 880), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-1)', '(1)', '(10)'], {}), '([det], motor, -1, 1, 10)\n', (855, 880), False, 'from bluesky.plans import scan\n'), ((953, 982), 'bluesky.plans.scan', 'scan', (['[det]', 'motor', '(-1)', '(1)', '(10)'], {}), '([det], motor, -1, 1, 10)\n', (957, 982), False, 'from bluesky.plans import scan\n')] |
import warnings
import pytest
from leapp.libraries.actor.systemfacts import get_selinux_status
from leapp.models import SELinuxFacts
no_selinux = False
try:
import selinux
except ImportError:
no_selinux = True
warnings.warn(
        'Tests which use `selinux` will be skipped'
' due to library unavailability.', ImportWarning)
reason_to_skip_msg = "Selinux is not available"
# FIXME: create valid tests...
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_enabled_enforcing(monkeypatch):
"""
Test case SELinux is enabled in enforcing mode
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 1)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', lambda: [0, 1])
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': True,
'enabled': True,
'runtime_mode': 'enforcing',
'static_mode': 'enforcing'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_enabled_permissive(monkeypatch):
"""
Test case SELinux is enabled in permissive mode
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', lambda: [0, 0])
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 1)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': True,
'enabled': True,
'runtime_mode': 'permissive',
'static_mode': 'permissive'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_disabled(monkeypatch):
"""
Test case SELinux is disabled
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', lambda: [0, 0])
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': False,
'enabled': False,
'runtime_mode': 'permissive',
'static_mode': 'permissive'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
class MockNoConfigFileOSError(object):
def __init__(self):
raise OSError
@pytest.mark.skipif(no_selinux, reason=reason_to_skip_msg)
def test_selinux_disabled_no_config_file(monkeypatch):
"""
Test case SELinux is disabled
"""
monkeypatch.setattr(selinux, 'is_selinux_mls_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'security_getenforce', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getenforcemode', MockNoConfigFileOSError)
monkeypatch.setattr(selinux, 'is_selinux_enabled', lambda: 0)
monkeypatch.setattr(selinux, 'selinux_getpolicytype', lambda: [0, 'targeted'])
expected_data = {'policy': 'targeted',
'mls_enabled': False,
'enabled': False,
'runtime_mode': 'permissive',
'static_mode': 'disabled'}
assert SELinuxFacts(**expected_data) == get_selinux_status()
| [
"warnings.warn",
"leapp.libraries.actor.systemfacts.get_selinux_status",
"leapp.models.SELinuxFacts",
"pytest.mark.skipif"
] | [((436, 493), 'pytest.mark.skipif', 'pytest.mark.skipif', (['no_selinux'], {'reason': 'reason_to_skip_msg'}), '(no_selinux, reason=reason_to_skip_msg)\n', (454, 493), False, 'import pytest\n'), ((1261, 1318), 'pytest.mark.skipif', 'pytest.mark.skipif', (['no_selinux'], {'reason': 'reason_to_skip_msg'}), '(no_selinux, reason=reason_to_skip_msg)\n', (1279, 1318), False, 'import pytest\n'), ((2090, 2147), 'pytest.mark.skipif', 'pytest.mark.skipif', (['no_selinux'], {'reason': 'reason_to_skip_msg'}), '(no_selinux, reason=reason_to_skip_msg)\n', (2108, 2147), False, 'import pytest\n'), ((2980, 3037), 'pytest.mark.skipif', 'pytest.mark.skipif', (['no_selinux'], {'reason': 'reason_to_skip_msg'}), '(no_selinux, reason=reason_to_skip_msg)\n', (2998, 3037), False, 'import pytest\n'), ((225, 340), 'warnings.warn', 'warnings.warn', (['"""Tests which uses `selinux` will be skipped due to library unavailability."""', 'ImportWarning'], {}), "(\n 'Tests which uses `selinux` will be skipped due to library unavailability.'\n , ImportWarning)\n", (238, 340), False, 'import warnings\n'), ((1204, 1233), 'leapp.models.SELinuxFacts', 'SELinuxFacts', ([], {}), '(**expected_data)\n', (1216, 1233), False, 'from leapp.models import SELinuxFacts\n'), ((1237, 1257), 'leapp.libraries.actor.systemfacts.get_selinux_status', 'get_selinux_status', ([], {}), '()\n', (1255, 1257), False, 'from leapp.libraries.actor.systemfacts import get_selinux_status\n'), ((2033, 2062), 'leapp.models.SELinuxFacts', 'SELinuxFacts', ([], {}), '(**expected_data)\n', (2045, 2062), False, 'from leapp.models import SELinuxFacts\n'), ((2066, 2086), 'leapp.libraries.actor.systemfacts.get_selinux_status', 'get_selinux_status', ([], {}), '()\n', (2084, 2086), False, 'from leapp.libraries.actor.systemfacts import get_selinux_status\n'), ((2836, 2865), 'leapp.models.SELinuxFacts', 'SELinuxFacts', ([], {}), '(**expected_data)\n', (2848, 2865), False, 'from leapp.models import SELinuxFacts\n'), ((2869, 2889), 'leapp.libraries.actor.systemfacts.get_selinux_status', 'get_selinux_status', ([], {}), '()\n', (2887, 2889), False, 'from leapp.libraries.actor.systemfacts import get_selinux_status\n'), ((3749, 3778), 'leapp.models.SELinuxFacts', 'SELinuxFacts', ([], {}), '(**expected_data)\n', (3761, 3778), False, 'from leapp.models import SELinuxFacts\n'), ((3782, 3802), 'leapp.libraries.actor.systemfacts.get_selinux_status', 'get_selinux_status', ([], {}), '()\n', (3800, 3802), False, 'from leapp.libraries.actor.systemfacts import get_selinux_status\n')] |
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-IPxlatCfg
GUID : 3e5ac668-af52-4c15-b99b-a3e7a6616ebd
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1001, version=0)
class Microsoft_Windows_IPxlatCfg_1001_0(Etw):
pattern = Struct(
"ErrorString" / CString,
"ErrorCode" / Int32ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1002, version=0)
class Microsoft_Windows_IPxlatCfg_1002_0(Etw):
pattern = Struct(
"ErrorString" / CString,
"ErrorCode" / Int32ul,
"InterfaceLuid" / Int64ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1003, version=0)
class Microsoft_Windows_IPxlatCfg_1003_0(Etw):
pattern = Struct(
"InfoString" / CString
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1005, version=0)
class Microsoft_Windows_IPxlatCfg_1005_0(Etw):
pattern = Struct(
"IPv4Address" / Int32ul,
"IPv4Prefix" / Int32ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1006, version=0)
class Microsoft_Windows_IPxlatCfg_1006_0(Etw):
pattern = Struct(
"InfoString" / CString,
"InterfaceLuid" / Int64ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1007, version=0)
class Microsoft_Windows_IPxlatCfg_1007_0(Etw):
pattern = Struct(
"InterfaceLuid" / Int64ul,
"PrefixLength" / Int32ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1008, version=0)
class Microsoft_Windows_IPxlatCfg_1008_0(Etw):
pattern = Struct(
"InterfaceLuid" / Int64ul,
"IPv4Address" / Int32ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1009, version=0)
class Microsoft_Windows_IPxlatCfg_1009_0(Etw):
pattern = Struct(
"InterfaceLuid" / Int64ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1010, version=0)
class Microsoft_Windows_IPxlatCfg_1010_0(Etw):
pattern = Struct(
"InterfaceLuid" / Int64ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1011, version=0)
class Microsoft_Windows_IPxlatCfg_1011_0(Etw):
pattern = Struct(
"InfoString" / CString,
"MTU" / Int32ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1101, version=0)
class Microsoft_Windows_IPxlatCfg_1101_0(Etw):
pattern = Struct(
"InterfaceLuid" / Int64ul,
"Metric" / Int32ul,
"RemotePrefixLength" / Int32ul,
"LocalPrefixLength" / Int32ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1102, version=0)
class Microsoft_Windows_IPxlatCfg_1102_0(Etw):
pattern = Struct(
"InterfaceLuid" / Int64ul,
"Metric" / Int32ul,
"RemotePrefixLength" / Int32ul,
"LocalPrefixLength" / Int32ul
)
@declare(guid=guid("3e5ac668-af52-4c15-b99b-a3e7a6616ebd"), event_id=1103, version=0)
class Microsoft_Windows_IPxlatCfg_1103_0(Etw):
pattern = Struct(
"InterfaceLuid" / Int64ul,
"PrefixLength" / Int32ul
)
| [
"construct.Struct",
"etl.parsers.etw.core.guid"
] | [((511, 565), 'construct.Struct', 'Struct', (["('ErrorString' / CString)", "('ErrorCode' / Int32ul)"], {}), "('ErrorString' / CString, 'ErrorCode' / Int32ul)\n", (517, 565), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((737, 822), 'construct.Struct', 'Struct', (["('ErrorString' / CString)", "('ErrorCode' / Int32ul)", "('InterfaceLuid' / Int64ul)"], {}), "('ErrorString' / CString, 'ErrorCode' / Int32ul, 'InterfaceLuid' /\n Int64ul)\n", (743, 822), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((998, 1028), 'construct.Struct', 'Struct', (["('InfoString' / CString)"], {}), "('InfoString' / CString)\n", (1004, 1028), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((1192, 1247), 'construct.Struct', 'Struct', (["('IPv4Address' / Int32ul)", "('IPv4Prefix' / Int32ul)"], {}), "('IPv4Address' / Int32ul, 'IPv4Prefix' / Int32ul)\n", (1198, 1247), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((1419, 1476), 'construct.Struct', 'Struct', (["('InfoString' / CString)", "('InterfaceLuid' / Int64ul)"], {}), "('InfoString' / CString, 'InterfaceLuid' / Int64ul)\n", (1425, 1476), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((1648, 1707), 'construct.Struct', 'Struct', (["('InterfaceLuid' / Int64ul)", "('PrefixLength' / Int32ul)"], {}), "('InterfaceLuid' / Int64ul, 'PrefixLength' / Int32ul)\n", (1654, 1707), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((1879, 1937), 'construct.Struct', 'Struct', (["('InterfaceLuid' / Int64ul)", "('IPv4Address' / Int32ul)"], {}), "('InterfaceLuid' / Int64ul, 'IPv4Address' / Int32ul)\n", (1885, 1937), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((2109, 2142), 'construct.Struct', 'Struct', (["('InterfaceLuid' / Int64ul)"], {}), "('InterfaceLuid' / Int64ul)\n", (2115, 2142), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((2306, 2339), 'construct.Struct', 'Struct', (["('InterfaceLuid' / Int64ul)"], {}), "('InterfaceLuid' / Int64ul)\n", (2312, 2339), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((2503, 2550), 'construct.Struct', 'Struct', (["('InfoString' / CString)", "('MTU' / Int32ul)"], {}), "('InfoString' / CString, 'MTU' / Int32ul)\n", (2509, 2550), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((2722, 2842), 'construct.Struct', 'Struct', (["('InterfaceLuid' / Int64ul)", "('Metric' / Int32ul)", "('RemotePrefixLength' / Int32ul)", "('LocalPrefixLength' / Int32ul)"], {}), "('InterfaceLuid' / Int64ul, 'Metric' / Int32ul, 'RemotePrefixLength' /\n Int32ul, 'LocalPrefixLength' / Int32ul)\n", (2728, 2842), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((3026, 3146), 'construct.Struct', 
'Struct', (["('InterfaceLuid' / Int64ul)", "('Metric' / Int32ul)", "('RemotePrefixLength' / Int32ul)", "('LocalPrefixLength' / Int32ul)"], {}), "('InterfaceLuid' / Int64ul, 'Metric' / Int32ul, 'RemotePrefixLength' /\n Int32ul, 'LocalPrefixLength' / Int32ul)\n", (3032, 3146), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((3330, 3389), 'construct.Struct', 'Struct', (["('InterfaceLuid' / Int64ul)", "('PrefixLength' / Int32ul)"], {}), "('InterfaceLuid' / Int64ul, 'PrefixLength' / Int32ul)\n", (3336, 3389), False, 'from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct\n'), ((378, 422), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (382, 422), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((604, 648), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (608, 648), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((865, 909), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (869, 909), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((1059, 1103), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (1063, 1103), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((1286, 1330), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (1290, 1330), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((1515, 1559), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (1519, 1559), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((1746, 1790), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (1750, 1790), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((1976, 2020), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (1980, 2020), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((2173, 2217), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (2177, 2217), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((2370, 2414), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (2374, 2414), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((2589, 2633), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (2593, 2633), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((2893, 2937), 'etl.parsers.etw.core.guid', 'guid', (['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (2897, 2937), False, 'from etl.parsers.etw.core import Etw, declare, guid\n'), ((3197, 3241), 'etl.parsers.etw.core.guid', 'guid', 
(['"""3e5ac668-af52-4c15-b99b-a3e7a6616ebd"""'], {}), "('3e5ac668-af52-4c15-b99b-a3e7a6616ebd')\n", (3201, 3241), False, 'from etl.parsers.etw.core import Etw, declare, guid\n')] |
import uuid
from config import USR_ORG_MONGO_COLLECTION, USR_MONGO_COLLECTION
import db
from models.response import post_error
import logging
log = logging.getLogger('file')
class OrgUtils:
def __init__(self):
pass
#orgId generation
@staticmethod
def generate_org_id():
"""UUID generation for org registeration"""
return(uuid.uuid4().hex)
@staticmethod
def validate_org(org_code):
"""Validating Org
Org should be registered and active on Anuvaad system.
"""
try:
#connecting to mongo instance/collection
collections = db.get_db()[USR_ORG_MONGO_COLLECTION]
#searching for active org record
result = collections.find({"code": org_code}, {"_id": 0, "active": 1})
if result.count() == 0:
return post_error("Invalid Organization", "No such registered organization with the given Org Id", None)
for value in result:
if value["active"] == False:
return post_error("Invalid Organization", "Organization is currently inactive", None)
except Exception as e:
log.exception(f"Db connection exception : {e}")
return post_error("Database connection exception", "An error occurred while connecting to the database:{}".format(str(e)), None)
@staticmethod
def validate_org_upsert(i,org):
"""Org validation on upsert
deactivation of org allowed only once all the users in the corresponding org is inactive.
"""
if "code" not in org or not org["code"]:
return post_error("Data Missing", "code not found", None)
if "active" not in org:
return post_error("Data Missing", "active not found", None)
code = str(org["code"]).upper()
active = org["active"]
if not isinstance(active,bool):
return post_error("Invalid format", "active should be bool", None), 400
if active == False:
try:
#connecting to mongo instance/collection
collections = db.get_db()[USR_MONGO_COLLECTION]
#searching for active users in the org
result = collections.find({"orgID": code,"is_active":True})
if result.count()!=0:
log.info("Deactivation request for org failed, {} active users with the orgID".format(str(result.count())))
return post_error("Deactivation Failed","There exist active users in {} hence this action cannot be performed".format(code),None)
except Exception as e:
log.exception(f"Db connection exception : {e}")
return post_error("Database connection exception", "An error occurred while connecting to the database:{}".format(str(e)), None) | [
"logging.getLogger",
"db.get_db",
"models.response.post_error",
"uuid.uuid4"
] | [((150, 175), 'logging.getLogger', 'logging.getLogger', (['"""file"""'], {}), "('file')\n", (167, 175), False, 'import logging\n'), ((366, 378), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (376, 378), False, 'import uuid\n'), ((1643, 1693), 'models.response.post_error', 'post_error', (['"""Data Missing"""', '"""code not found"""', 'None'], {}), "('Data Missing', 'code not found', None)\n", (1653, 1693), False, 'from models.response import post_error\n'), ((1745, 1797), 'models.response.post_error', 'post_error', (['"""Data Missing"""', '"""active not found"""', 'None'], {}), "('Data Missing', 'active not found', None)\n", (1755, 1797), False, 'from models.response import post_error\n'), ((631, 642), 'db.get_db', 'db.get_db', ([], {}), '()\n', (640, 642), False, 'import db\n'), ((856, 957), 'models.response.post_error', 'post_error', (['"""Invalid Organization"""', '"""No such registered organization with the given Org Id"""', 'None'], {}), "('Invalid Organization',\n 'No such registered organization with the given Org Id', None)\n", (866, 957), False, 'from models.response import post_error\n'), ((1930, 1989), 'models.response.post_error', 'post_error', (['"""Invalid format"""', '"""active should be bool"""', 'None'], {}), "('Invalid format', 'active should be bool', None)\n", (1940, 1989), False, 'from models.response import post_error\n'), ((1059, 1137), 'models.response.post_error', 'post_error', (['"""Invalid Organization"""', '"""Organization is currently inactive"""', 'None'], {}), "('Invalid Organization', 'Organization is currently inactive', None)\n", (1069, 1137), False, 'from models.response import post_error\n'), ((2127, 2138), 'db.get_db', 'db.get_db', ([], {}), '()\n', (2136, 2138), False, 'import db\n')] |
# validated: 2017-02-19 DS c5e3a8a9b642 roborio/java/navx_frc/src/com/kauailabs/navx/frc/RegisterIO.java
#----------------------------------------------------------------------------
# Copyright (c) <NAME> 2015. All Rights Reserved.
#
# Created in support of Team 2465 (Kauaibots). Go Purple Wave!
#
# Open Source Software - may be modified and shared by FRC teams. Any
# modifications to this code must be accompanied by the \License.txt file
# in the root directory of the project
#----------------------------------------------------------------------------
from ._impl import AHRSProtocol, IMUProtocol, IMURegisters
from wpilib.timer import Timer
import logging
logger = logging.getLogger('navx')
__all__ = ['RegisterIO']
IO_TIMEOUT_SECONDS = 1.0
DELAY_OVERHEAD_SECONDS = 0.004
class _BoardId:
type = 0
hw_rev = 0
fw_ver_major = 0
fw_ver_minor = 0
fw_revision = 0
unique_id = [0]*12
class _BoardState:
op_status = 0
sensor_status = 0
cal_status = 0
selftest_status = 0
capability_flags = 0
update_rate_hz = 0
accel_fsr_g = 0
gyro_fsr_dps = 0
class RegisterIO:
def __init__(self, io_provider, update_rate_hz, notify_sink, board_capabilities):
"""
:param board_capabilities: must have the following callable attributes:
_isOmniMountSupported, _isBoardYawResetSupported,
_isDisplacementSupported
:param notify_sink: must have the following callable attributes:
_setYawPitchRoll, _setAHRSData, _setAHRSPosData,
_setRawData, _setBoardID, _setBoardState, _yawResetComplete
"""
self.io_provider = io_provider
self.update_rate_hz = update_rate_hz
self.board_capabilities = board_capabilities
self.notify_sink = notify_sink
self.raw_data_update = IMUProtocol.GyroUpdate()
self.ahrspos_update = AHRSProtocol.AHRSPosUpdate()
self.board_state = _BoardState()
self.board_id = _BoardId()
self.last_update_time = 0
self.byte_count = 0
self.update_count = 0
self.last_sensor_timestamp = 0
self._stop = False
def stop(self):
self._stop = True
def shutdown(self):
self.io_provider.shutdown()
def run(self):
logger.info("NavX io thread starting")
try:
self.io_provider.init()
# initial device configuration
self.setUpdateRateHz(self.update_rate_hz)
if not self.getConfiguration():
logger.warning("-- Did not get configuration data")
else:
logger.info("-- Board is %s (rev %s)",
IMURegisters.model_type(self.board_id.type),
self.board_id.hw_rev)
logger.info("-- Firmware %s.%s", self.board_id.fw_ver_major,
self.board_id.fw_ver_minor)
log_error = True
# Calculate delay to match configured update rate
# Note: some additional time is removed from the
# 1/update_rate value to ensure samples are not
# dropped, esp. at higher update rates.
update_rate = 1.0/(self.update_rate_hz & 0xFF)
if update_rate > DELAY_OVERHEAD_SECONDS:
update_rate -= DELAY_OVERHEAD_SECONDS
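            # Worked example (illustrative): at a 50 Hz update rate the
            # nominal period is 1/50 = 0.020 s; subtracting the 0.004 s
            # overhead leaves roughly 0.016 s of delay between reads.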
logger.info("-- Update rate: %shz (%.4fs)",
self.update_rate_hz, update_rate)
# IO Loop
while not self._stop:
if self.board_state.update_rate_hz != self.update_rate_hz:
self.setUpdateRateHz(self.update_rate_hz)
try:
self.getCurrentData()
except IOError:
if log_error:
logger.exception("Error getting data")
log_error = False
else:
log_error = True
Timer.delay(update_rate)
except Exception:
logger.exception("Unhandled exception in NavX thread")
finally:
logger.info("NavX i/o thread exiting")
def getConfiguration(self):
success = False
retry_count = 0
while retry_count < 5 and not success:
try:
config = self.io_provider.read(IMURegisters.NAVX_REG_WHOAMI,
IMURegisters.NAVX_REG_SENSOR_STATUS_H+1)
except IOError as e:
logger.warning("Error reading configuration data, retrying (%s)", e)
success = False
Timer.delay(0.5)
else:
board_id = self.board_id
board_id.hw_rev = config[IMURegisters.NAVX_REG_HW_REV]
board_id.fw_ver_major = config[IMURegisters.NAVX_REG_FW_VER_MAJOR]
board_id.fw_ver_minor = config[IMURegisters.NAVX_REG_FW_VER_MINOR]
board_id.type = config[IMURegisters.NAVX_REG_WHOAMI]
self.notify_sink._setBoardID(board_id)
board_state = self.board_state
board_state.cal_status = config[IMURegisters.NAVX_REG_CAL_STATUS]
board_state.op_status = config[IMURegisters.NAVX_REG_OP_STATUS]
board_state.selftest_status = config[IMURegisters.NAVX_REG_SELFTEST_STATUS]
board_state.sensor_status = AHRSProtocol.decodeBinaryUint16(config,IMURegisters.NAVX_REG_SENSOR_STATUS_L)
board_state.gyro_fsr_dps = AHRSProtocol.decodeBinaryUint16(config,IMURegisters.NAVX_REG_GYRO_FSR_DPS_L)
board_state.accel_fsr_g = config[IMURegisters.NAVX_REG_ACCEL_FSR_G]
board_state.update_rate_hz = config[IMURegisters.NAVX_REG_UPDATE_RATE_HZ]
board_state.capability_flags = AHRSProtocol.decodeBinaryUint16(config,IMURegisters.NAVX_REG_CAPABILITY_FLAGS_L)
self.notify_sink._setBoardState(board_state)
success = True
retry_count += 1
return success
def getCurrentData(self):
first_address = IMURegisters.NAVX_REG_UPDATE_RATE_HZ
displacement_registers = self.board_capabilities._isDisplacementSupported()
# If firmware supports displacement data, acquire it - otherwise implement
# similar (but potentially less accurate) calculations on this processor.
if displacement_registers:
read_count = IMURegisters.NAVX_REG_LAST + 1 - first_address
else:
read_count = IMURegisters.NAVX_REG_QUAT_OFFSET_Z_H + 1 - first_address
curr_data = self.io_provider.read(first_address, read_count)
sensor_timestamp = AHRSProtocol.decodeBinaryUint32(curr_data, IMURegisters.NAVX_REG_TIMESTAMP_L_L-first_address)
if sensor_timestamp == self.last_sensor_timestamp:
return
self.last_sensor_timestamp = sensor_timestamp
ahrspos_update = self.ahrspos_update
ahrspos_update.op_status = curr_data[IMURegisters.NAVX_REG_OP_STATUS - first_address]
ahrspos_update.selftest_status = curr_data[IMURegisters.NAVX_REG_SELFTEST_STATUS - first_address]
        ahrspos_update.cal_status = curr_data[IMURegisters.NAVX_REG_CAL_STATUS - first_address]
ahrspos_update.sensor_status = curr_data[IMURegisters.NAVX_REG_SENSOR_STATUS_L - first_address]
ahrspos_update.yaw = AHRSProtocol.decodeProtocolSignedHundredthsFloat(curr_data, IMURegisters.NAVX_REG_YAW_L-first_address)
ahrspos_update.pitch = AHRSProtocol.decodeProtocolSignedHundredthsFloat(curr_data, IMURegisters.NAVX_REG_PITCH_L-first_address)
ahrspos_update.roll = AHRSProtocol.decodeProtocolSignedHundredthsFloat(curr_data, IMURegisters.NAVX_REG_ROLL_L-first_address)
ahrspos_update.compass_heading = AHRSProtocol.decodeProtocolUnsignedHundredthsFloat(curr_data, IMURegisters.NAVX_REG_HEADING_L-first_address)
ahrspos_update.mpu_temp_c = AHRSProtocol.decodeProtocolSignedHundredthsFloat(curr_data, IMURegisters.NAVX_REG_MPU_TEMP_C_L - first_address)
ahrspos_update.world_linear_accel_x = AHRSProtocol.decodeProtocolSignedThousandthsFloat(curr_data, IMURegisters.NAVX_REG_LINEAR_ACC_X_L-first_address)
ahrspos_update.world_linear_accel_y = AHRSProtocol.decodeProtocolSignedThousandthsFloat(curr_data, IMURegisters.NAVX_REG_LINEAR_ACC_Y_L-first_address)
ahrspos_update.world_linear_accel_z = AHRSProtocol.decodeProtocolSignedThousandthsFloat(curr_data, IMURegisters.NAVX_REG_LINEAR_ACC_Z_L-first_address)
ahrspos_update.altitude = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_ALTITUDE_D_L - first_address)
ahrspos_update.baro_pressure = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_PRESSURE_DL - first_address)
ahrspos_update.fused_heading = AHRSProtocol.decodeProtocolUnsignedHundredthsFloat(curr_data, IMURegisters.NAVX_REG_FUSED_HEADING_L-first_address)
ahrspos_update.quaternionW = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_QUAT_W_L-first_address)/ 32768.0
ahrspos_update.quaternionX = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_QUAT_X_L-first_address)/ 32768.0
ahrspos_update.quaternionY = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_QUAT_Y_L-first_address)/ 32768.0
ahrspos_update.quaternionZ = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_QUAT_Z_L-first_address)/ 32768.0
if displacement_registers:
ahrspos_update.vel_x = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_VEL_X_I_L-first_address)
ahrspos_update.vel_y = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_VEL_Y_I_L-first_address)
ahrspos_update.vel_z = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_VEL_Z_I_L-first_address)
ahrspos_update.disp_x = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_DISP_X_I_L-first_address)
ahrspos_update.disp_y = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_DISP_Y_I_L-first_address)
ahrspos_update.disp_z = AHRSProtocol.decodeProtocol1616Float(curr_data, IMURegisters.NAVX_REG_DISP_Z_I_L-first_address)
self.notify_sink._setAHRSPosData(ahrspos_update, sensor_timestamp)
else:
self.notify_sink._setAHRSData(ahrspos_update, sensor_timestamp)
board_state = self.board_state
board_state.cal_status = curr_data[IMURegisters.NAVX_REG_CAL_STATUS-first_address]
board_state.op_status = curr_data[IMURegisters.NAVX_REG_OP_STATUS-first_address]
board_state.selftest_status = curr_data[IMURegisters.NAVX_REG_SELFTEST_STATUS-first_address]
board_state.sensor_status = AHRSProtocol.decodeBinaryUint16(curr_data,IMURegisters.NAVX_REG_SENSOR_STATUS_L-first_address)
board_state.update_rate_hz = curr_data[IMURegisters.NAVX_REG_UPDATE_RATE_HZ-first_address]
        board_state.gyro_fsr_dps = AHRSProtocol.decodeBinaryUint16(curr_data,IMURegisters.NAVX_REG_GYRO_FSR_DPS_L-first_address)
        board_state.accel_fsr_g = curr_data[IMURegisters.NAVX_REG_ACCEL_FSR_G-first_address]
        board_state.capability_flags = AHRSProtocol.decodeBinaryUint16(curr_data,IMURegisters.NAVX_REG_CAPABILITY_FLAGS_L-first_address)
self.notify_sink._setBoardState(board_state)
raw_data_update = self.raw_data_update
raw_data_update.raw_gyro_x = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_GYRO_X_L-first_address)
raw_data_update.raw_gyro_y = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_GYRO_Y_L-first_address)
raw_data_update.raw_gyro_z = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_GYRO_Z_L-first_address)
raw_data_update.raw_accel_x = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_ACC_X_L-first_address)
raw_data_update.raw_accel_y = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_ACC_Y_L-first_address)
raw_data_update.raw_accel_z = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_ACC_Z_L-first_address)
raw_data_update.cal_mag_x = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_MAG_X_L-first_address)
raw_data_update.cal_mag_y = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_MAG_Y_L-first_address)
raw_data_update.cal_mag_z = AHRSProtocol.decodeBinaryInt16(curr_data, IMURegisters.NAVX_REG_MAG_Z_L-first_address)
        raw_data_update.mpu_temp_c = ahrspos_update.mpu_temp_c
self.notify_sink._setRawData(raw_data_update, sensor_timestamp)
self.last_update_time = Timer.getFPGATimestamp()
self.byte_count += len(curr_data)
self.update_count += 1
def isConnected(self):
time_since_last_update = Timer.getFPGATimestamp() - self.last_update_time
return time_since_last_update <= IO_TIMEOUT_SECONDS
def getByteCount(self):
return self.byte_count
def getUpdateCount(self):
return self.update_count
def setUpdateRateHz(self, update_rate_hz):
self.io_provider.write(IMURegisters.NAVX_REG_UPDATE_RATE_HZ, update_rate_hz)
def zeroYaw(self):
self.io_provider.write( IMURegisters.NAVX_REG_INTEGRATION_CTL,
AHRSProtocol.NAVX_INTEGRATION_CTL_RESET_YAW )
self.notify_sink._yawResetComplete()
def zeroDisplacement(self):
self.io_provider.write( IMURegisters.NAVX_REG_INTEGRATION_CTL,
(AHRSProtocol.NAVX_INTEGRATION_CTL_RESET_DISP_X |
AHRSProtocol.NAVX_INTEGRATION_CTL_RESET_DISP_Y |
AHRSProtocol.NAVX_INTEGRATION_CTL_RESET_DISP_Z ) )
| [
"logging.getLogger",
"wpilib.timer.Timer.getFPGATimestamp",
"wpilib.timer.Timer.delay"
] | [((679, 704), 'logging.getLogger', 'logging.getLogger', (['"""navx"""'], {}), "('navx')\n", (696, 704), False, 'import logging\n'), ((13452, 13476), 'wpilib.timer.Timer.getFPGATimestamp', 'Timer.getFPGATimestamp', ([], {}), '()\n', (13474, 13476), False, 'from wpilib.timer import Timer\n'), ((13619, 13643), 'wpilib.timer.Timer.getFPGATimestamp', 'Timer.getFPGATimestamp', ([], {}), '()\n', (13641, 13643), False, 'from wpilib.timer import Timer\n'), ((4207, 4231), 'wpilib.timer.Timer.delay', 'Timer.delay', (['update_rate'], {}), '(update_rate)\n', (4218, 4231), False, 'from wpilib.timer import Timer\n'), ((4890, 4906), 'wpilib.timer.Timer.delay', 'Timer.delay', (['(0.5)'], {}), '(0.5)\n', (4901, 4906), False, 'from wpilib.timer import Timer\n')] |
#pylint: disable=invalid-name
#pylint: disable=too-many-instance-attributes
#pylint: disable=too-many-return-statements
#pylint: disable=too-many-statements
"""
Class structure and methods for an oscilloscope channel.
The idea is to collect all the relevant information from all the Rigol
scope waveforms into a single structure that can be handled in a uniform
and consistent manner.
Specifically this lets one just use
channel.times : numpy array of signal times
channel.volts : numpy array of signal voltages
or the stringification method to describe a channel
print(channel)
"""
from enum import Enum
import numpy as np
class UnitEnum(Enum):
"""Enumerated units for scopes without them."""
w = 0
a = 1
v = 2
u = 3
def best_scale(number):
"""Scale and units for a number with proper prefix."""
absnr = abs(number)
if absnr == 0:
return 1, ' '
if absnr < 0.99999999e-9:
return 1e12, 'p'
if absnr < 0.99999999e-6:
return 1e9, 'n'
if absnr < 0.99999999e-3:
return 1e6, 'µ'
if absnr < 0.99999999:
return 1e3, 'm'
if absnr < 0.99999999e3:
return 1, ' '
if absnr < 0.99999999e6:
return 1e-3, 'k'
    if absnr < 0.99999999e9:
return 1e-6, 'M'
return 1e-9, 'G'
def engineering_string(number, n_digits):
"""Format number with proper prefix."""
scale, prefix = best_scale(number)
fformat = "%%.%df %%s" % n_digits
s = fformat % (number * scale, prefix)
return s
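# Examples of the helpers above (return values shown are illustrative but
# follow directly from the thresholds and format string):
#
#     best_scale(0.0032)             -> (1e3, 'm')
#     engineering_string(0.0032, 2)  -> '3.20 m'
#     engineering_string(4.7e6, 1)   -> '4.7 M'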
def _channel_bytes(channel_number, w):
"""
Return right series of bytes for a channel for 1000Z scopes.
Waveform points are interleaved stored in memory when two or more
channels are saved. This unweaves them.
Args:
channel_number: the number of enabled channels before this one
w: original waveform object
Returns
byte array for specified channel
"""
offset = 0
if w.header.stride == 2: # byte pattern CHx CHy
# use odd bytes when this is the second enabled channel
if any([w.header.ch[i].enabled for i in range(channel_number-1)]):
offset = 1
elif w.header.stride == 4: # byte pattern CH4 CH3 CH2 CH1
offset = 4 - channel_number
data = np.frombuffer(w.data.raw, dtype=np.uint8)
raw_bytes = data[offset::w.header.stride]
return raw_bytes
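# Illustration of the de-interleaving above (hypothetical buffer): with
# stride == 2 and raw bytes [a1, b1, a2, b2, ...] for enabled channels A and
# B, data[0::2] recovers A's samples and data[1::2] recovers B's; with
# stride == 4 each sample is stored as CH4 CH3 CH2 CH1, so the offset is
# counted from the end (4 - channel_number).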
class Channel():
"""Base class for a single channel."""
def __init__(self, w, channel_number, scope, selected='1234'):
"""
Initialize a Channel Object.
Args:
w: Wfm object
channel_number: 1, 2, 3, or 4
scope: string describing scope
selected: string with channels chosen by user
Returns:
Channel object
"""
self.channel_number = channel_number
self.name = "CH %d" % channel_number
self.waveform = w
self.seconds_per_point = w.header.seconds_per_point
self.firmware = 'unknown'
self.unit = UnitEnum.v
self.points = 0
self.raw = None
self.volts = None
self.times = None
self.coupling = 'unknown'
self.roll_stop = 0
self.time_offset = 0
self.time_scale = 1
self.enabled = False
self.enabled_and_selected = False
self.volt_scale = 1
self.volt_offset = 0
self.y_scale = 1
self.y_offset = 0
self.volt_per_division = 1
self.probe_value = 1
self.inverted = False
# determine if this channel is one of those chosen by user
chosen = selected.find(str(channel_number)) != -1
if channel_number <= len(w.header.ch):
channel = w.header.ch[channel_number-1]
self.enabled = channel.enabled
self.enabled_and_selected = channel.enabled and chosen
self.volt_scale = channel.volt_scale
self.volt_offset = channel.volt_offset
self.y_scale = channel.volt_scale
self.y_offset = channel.volt_offset
self.volt_per_division = channel.volt_per_division
self.probe_value = channel.probe_value
self.unit = channel.unit
self.inverted = channel.inverted
if scope == 'wfm1000c':
self.ds1000c(w, channel_number)
elif scope == 'wfm1000d':
self.ds1000d(w, channel_number)
elif scope == 'wfm1000e':
self.ds1000e(w, channel_number)
elif scope == 'wfm1000z':
self.ds1000z(w, channel_number)
elif scope == 'wfm2000':
self.ds2000(w, channel_number)
elif scope == 'wfm4000':
self.ds4000(w, channel_number)
elif scope == 'wfm6000':
self.ds6000(w, channel_number)
def __str__(self):
"""Describe this channel."""
s = " Channel %d:\n" % self.channel_number
s += " Coupling = %8s\n" % self.coupling.rjust(7, ' ')
s += " Scale = %10sV/div\n" % engineering_string(self.volt_per_division, 2)
s += " Offset = %10sV\n" % engineering_string(self.volt_offset, 2)
s += " Probe = %7gX\n" % self.probe_value
s += " Inverted = %8s\n\n" % self.inverted
s += " Time Base = %10ss/div\n" % engineering_string(self.time_scale, 3)
s += " Offset = %10ss\n" % engineering_string(self.time_offset, 3)
s += " Delta = %10ss/point\n" % engineering_string(self.seconds_per_point, 3)
s += " Points = %8d\n\n" % self.points
if self.enabled_and_selected:
s += " Count = [%9d,%9d,%9d ... %9d,%9d]\n" % (
1, 2, 3, self.points-1, self.points)
s += " Raw = [%9d,%9d,%9d ... %9d,%9d]\n" % (
self.raw[0], self.raw[1], self.raw[2], self.raw[-2], self.raw[-1])
t = [engineering_string(self.times[i], 3) +
"s" for i in [0, 1, 2, -2, -1]]
s += " Times = [%9s,%9s,%9s ... %9s,%9s]\n" % (
t[0], t[1], t[2], t[-2], t[-1])
v = [engineering_string(self.volts[i], 2) +
"V" for i in [0, 1, 2, -2, -1]]
s += " Volts = [%9s,%9s,%9s ... %9s,%9s]\n" % (
v[0], v[1], v[2], v[-2], v[-1])
return s
def calc_times_and_volts(self):
"""Calculate the times and voltages for this channel."""
if self.enabled_and_selected:
self.volts = self.y_scale * (127.0 - self.raw) - self.y_offset
h = self.points * self.seconds_per_point / 2
self.times = np.linspace(-h, h, self.points) + self.time_offset
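        # Worked example (illustrative numbers): with y_scale = 0.04 V/count,
        # y_offset = 0 and a raw value of 27, the sample maps to
        # 0.04 * (127 - 27) = 4.0 V; the time axis spans the capture
        # symmetrically around zero and is then shifted by time_offset.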
def ds1000c(self, w, channel_number):
"""Interpret waveform data for 1000CD series scopes."""
self.time_scale = 1.0e-12 * w.header.time_scale
self.time_offset = 1.0e-12 * w.header.time_offset
if channel_number == 1:
if self.enabled_and_selected:
self.points = len(w.data.ch1)
self.raw = np.frombuffer(w.data.ch1, dtype=np.uint8)
if channel_number == 2:
if self.enabled_and_selected:
self.points = len(w.data.ch2)
self.raw = np.frombuffer(w.data.ch2, dtype=np.uint8)
self.calc_times_and_volts()
def ds1000d(self, w, channel_number):
"""Interpret waveform data for 1000CD series scopes."""
self.time_scale = 1.0e-12 * w.header.time_scale
self.time_offset = 1.0e-12 * w.header.time_offset
if channel_number == 1:
if self.enabled_and_selected:
self.points = len(w.data.ch1)
self.raw = np.frombuffer(w.data.ch1, dtype=np.uint8)
if channel_number == 2:
if self.enabled_and_selected:
self.points = len(w.data.ch2)
self.raw = np.frombuffer(w.data.ch2, dtype=np.uint8)
self.calc_times_and_volts()
def ds1000e(self, w, channel_number):
"""Interpret waveform data for 1000D and 1000E series scopes."""
self.roll_stop = w.header.roll_stop
if channel_number == 1:
self.time_offset = w.header.ch1_time_offset
self.time_scale = w.header.ch1_time_scale
if self.enabled_and_selected:
self.points = len(w.data.ch1)
self.raw = np.frombuffer(w.data.ch1, dtype=np.uint8)
elif channel_number == 2:
self.time_offset = w.header.ch2_time_offset
self.time_scale = w.header.ch2_time_scale
if self.enabled_and_selected:
self.points = len(w.data.ch2)
self.raw = np.frombuffer(w.data.ch2, dtype=np.uint8)
self.calc_times_and_volts()
def ds1000z(self, w, channel_number):
"""Interpret waveform for the Rigol DS1000Z series."""
self.time_scale = w.header.time_scale
self.time_offset = w.header.time_offset
self.points = w.header.points
self.stride = w.header.stride
self.firmware = w.preheader.firmware_version
self.probe = w.header.ch[channel_number-1].probe_value
self.coupling = w.header.ch[channel_number-1].coupling.name.upper()
self.y_scale = w.header.ch[channel_number-1].y_scale
self.y_offset = w.header.ch[channel_number-1].y_offset
if self.enabled_and_selected:
self.raw = _channel_bytes(channel_number, w)
self.points = len(self.raw)
self.calc_times_and_volts()
def ds2000(self, w, channel_number):
"""Interpret waveform for the Rigol DS2000 series."""
self.time_offset = w.header.time_offset
self.time_scale = w.header.time_scale
self.points = w.header.storage_depth
self.firmware = w.header.firmware_version
self.unit = UnitEnum(w.header.ch[channel_number-1].unit_actual)
self.coupling = w.header.ch[channel_number-1].coupling.name.upper()
self.y_scale = -self.volt_scale
self.y_offset = self.volt_offset
if self.enabled_and_selected:
if channel_number == 1:
self.raw = np.frombuffer(w.header.raw_1, dtype=np.uint8)
if channel_number == 2:
self.raw = np.frombuffer(w.header.raw_2, dtype=np.uint8)
if channel_number == 3:
self.raw = np.frombuffer(w.header.raw_3, dtype=np.uint8)
if channel_number == 4:
self.raw = np.frombuffer(w.header.raw_4, dtype=np.uint8)
self.calc_times_and_volts()
def ds4000(self, w, channel_number):
"""Interpret waveform for the Rigol DS4000 series."""
self.time_offset = w.header.time_offset
self.time_scale = w.header.time_scale
self.points = w.header.points
self.firmware = w.header.firmware_version
self.coupling = w.header.ch[channel_number-1].coupling.name.upper()
self.y_scale = -self.volt_scale
self.y_offset = self.volt_offset
if self.enabled_and_selected:
if channel_number == 1:
self.raw = np.frombuffer(w.header.raw_1, dtype=np.uint8)
if channel_number == 2:
self.raw = np.frombuffer(w.header.raw_2, dtype=np.uint8)
if channel_number == 3:
self.raw = np.frombuffer(w.header.raw_3, dtype=np.uint8)
if channel_number == 4:
self.raw = np.frombuffer(w.header.raw_4, dtype=np.uint8)
self.calc_times_and_volts()
def ds6000(self, w, channel_number):
"""Interpret waveform for the Rigol DS6000 series."""
self.time_offset = w.header.time_offset
self.time_scale = w.header.time_scale
self.points = w.header.points
self.firmware = w.header.firmware_version
self.coupling = w.header.ch[channel_number-1].coupling.name.upper()
self.unit = w.header.ch[channel_number-1].unit
if self.enabled_and_selected:
if channel_number == 1:
self.raw = np.array(w.header.raw_1, dtype=np.uint8)
if channel_number == 2:
self.raw = np.array(w.header.raw_2, dtype=np.uint8)
if channel_number == 3:
self.raw = np.array(w.header.raw_3, dtype=np.uint8)
if channel_number == 4:
self.raw = np.array(w.header.raw_4, dtype=np.uint8)
self.calc_times_and_volts()
| [
"numpy.array",
"numpy.frombuffer",
"numpy.linspace"
] | [((2293, 2334), 'numpy.frombuffer', 'np.frombuffer', (['w.data.raw'], {'dtype': 'np.uint8'}), '(w.data.raw, dtype=np.uint8)\n', (2306, 2334), True, 'import numpy as np\n'), ((6716, 6747), 'numpy.linspace', 'np.linspace', (['(-h)', 'h', 'self.points'], {}), '(-h, h, self.points)\n', (6727, 6747), True, 'import numpy as np\n'), ((7136, 7177), 'numpy.frombuffer', 'np.frombuffer', (['w.data.ch1'], {'dtype': 'np.uint8'}), '(w.data.ch1, dtype=np.uint8)\n', (7149, 7177), True, 'import numpy as np\n'), ((7326, 7367), 'numpy.frombuffer', 'np.frombuffer', (['w.data.ch2'], {'dtype': 'np.uint8'}), '(w.data.ch2, dtype=np.uint8)\n', (7339, 7367), True, 'import numpy as np\n'), ((7774, 7815), 'numpy.frombuffer', 'np.frombuffer', (['w.data.ch1'], {'dtype': 'np.uint8'}), '(w.data.ch1, dtype=np.uint8)\n', (7787, 7815), True, 'import numpy as np\n'), ((7964, 8005), 'numpy.frombuffer', 'np.frombuffer', (['w.data.ch2'], {'dtype': 'np.uint8'}), '(w.data.ch2, dtype=np.uint8)\n', (7977, 8005), True, 'import numpy as np\n'), ((8462, 8503), 'numpy.frombuffer', 'np.frombuffer', (['w.data.ch1'], {'dtype': 'np.uint8'}), '(w.data.ch1, dtype=np.uint8)\n', (8475, 8503), True, 'import numpy as np\n'), ((10232, 10277), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_1'], {'dtype': 'np.uint8'}), '(w.header.raw_1, dtype=np.uint8)\n', (10245, 10277), True, 'import numpy as np\n'), ((10342, 10387), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_2'], {'dtype': 'np.uint8'}), '(w.header.raw_2, dtype=np.uint8)\n', (10355, 10387), True, 'import numpy as np\n'), ((10452, 10497), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_3'], {'dtype': 'np.uint8'}), '(w.header.raw_3, dtype=np.uint8)\n', (10465, 10497), True, 'import numpy as np\n'), ((10562, 10607), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_4'], {'dtype': 'np.uint8'}), '(w.header.raw_4, dtype=np.uint8)\n', (10575, 10607), True, 'import numpy as np\n'), ((11191, 11236), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_1'], {'dtype': 'np.uint8'}), '(w.header.raw_1, dtype=np.uint8)\n', (11204, 11236), True, 'import numpy as np\n'), ((11301, 11346), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_2'], {'dtype': 'np.uint8'}), '(w.header.raw_2, dtype=np.uint8)\n', (11314, 11346), True, 'import numpy as np\n'), ((11411, 11456), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_3'], {'dtype': 'np.uint8'}), '(w.header.raw_3, dtype=np.uint8)\n', (11424, 11456), True, 'import numpy as np\n'), ((11521, 11566), 'numpy.frombuffer', 'np.frombuffer', (['w.header.raw_4'], {'dtype': 'np.uint8'}), '(w.header.raw_4, dtype=np.uint8)\n', (11534, 11566), True, 'import numpy as np\n'), ((12124, 12164), 'numpy.array', 'np.array', (['w.header.raw_1'], {'dtype': 'np.uint8'}), '(w.header.raw_1, dtype=np.uint8)\n', (12132, 12164), True, 'import numpy as np\n'), ((12229, 12269), 'numpy.array', 'np.array', (['w.header.raw_2'], {'dtype': 'np.uint8'}), '(w.header.raw_2, dtype=np.uint8)\n', (12237, 12269), True, 'import numpy as np\n'), ((12334, 12374), 'numpy.array', 'np.array', (['w.header.raw_3'], {'dtype': 'np.uint8'}), '(w.header.raw_3, dtype=np.uint8)\n', (12342, 12374), True, 'import numpy as np\n'), ((12439, 12479), 'numpy.array', 'np.array', (['w.header.raw_4'], {'dtype': 'np.uint8'}), '(w.header.raw_4, dtype=np.uint8)\n', (12447, 12479), True, 'import numpy as np\n'), ((8764, 8805), 'numpy.frombuffer', 'np.frombuffer', (['w.data.ch2'], {'dtype': 'np.uint8'}), '(w.data.ch2, dtype=np.uint8)\n', (8777, 8805), True, 'import numpy as np\n')] |
import os
import sys
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.syncstep import SyncStep
from core.models import Service
from hpc.models import ServiceProvider, ContentProvider, CDNPrefix, OriginServer
from util.logger import Logger, logging
# hpclibrary will be in steps/..
parentdir = os.path.join(os.path.dirname(__file__),"..")
sys.path.insert(0,parentdir)
from hpclib import HpcLibrary
logger = Logger(level=logging.INFO)
class SyncOriginServer(SyncStep, HpcLibrary):
provides=[OriginServer]
requested_interval=0
def __init__(self, **args):
SyncStep.__init__(self, **args)
HpcLibrary.__init__(self)
def fetch_pending(self, deleted):
#self.consistency_check()
return SyncStep.fetch_pending(self, deleted)
def consistency_check(self):
# set to true if something changed
result=False
# sanity check to make sure our PS objects have CMI objects behind them
all_ors_ids = [x["origin_server_id"] for x in self.client.onev.ListAll("OriginServer")]
for ors in OriginServer.objects.all():
if (ors.origin_server_id is not None) and (ors.origin_server_id not in all_ors_ids):
# we have an origin server ID, but it doesn't exist in the CMI
# something went wrong
# start over
logger.info("origin server %s was not found on CMI" % ors.origin_server_id)
ors.origin_server_id=None
ors.save()
result = True
return result
def sync_record(self, ors):
logger.info("sync'ing origin server %s" % str(ors))
if (not ors.contentProvider) or (not ors.contentProvider.content_provider_id):
return
cpid = ors.contentProvider.content_provider_id
        # validation requires the URL to start with http://
url = ors.url
if not url.startswith("http://"):
url = "http://" + url
ors_dict = {"authenticated_content": ors.authenticated, "zone_redirects": ors.redirects, "content_provider_id": cpid, "url": url, "service_type": "HyperCache", "caching_type": "Optimistic", "description": ors.description}
        # print(ors_dict)
if not ors.origin_server_id:
id = self.client.onev.Create("OriginServer", ors_dict)
ors.origin_server_id = id
else:
self.client.onev.Update("OriginServer", ors.origin_server_id, ors_dict)
# ... something breaks (analytics) if the URL starts with http://, so we
# change it in cob after we added it via onev.
url = url[7:]
self.client.cob.UpdateContent(ors.origin_server_id, {"url": url})
ors.silent = True
ors.save()
def delete_record(self, m):
if m.origin_server_id is not None:
self.client.onev.Delete("OriginServer", m.origin_server_id)
| [
"hpclib.HpcLibrary.__init__",
"observer.syncstep.SyncStep.__init__",
"sys.path.insert",
"observer.syncstep.SyncStep.fetch_pending",
"hpc.models.OriginServer.objects.all",
"os.path.dirname",
"util.logger.Logger"
] | [((383, 412), 'sys.path.insert', 'sys.path.insert', (['(0)', 'parentdir'], {}), '(0, parentdir)\n', (398, 412), False, 'import sys\n'), ((453, 479), 'util.logger.Logger', 'Logger', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (459, 479), False, 'from util.logger import Logger, logging\n'), ((351, 376), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (366, 376), False, 'import os\n'), ((621, 652), 'observer.syncstep.SyncStep.__init__', 'SyncStep.__init__', (['self'], {}), '(self, **args)\n', (638, 652), False, 'from observer.syncstep import SyncStep\n'), ((661, 686), 'hpclib.HpcLibrary.__init__', 'HpcLibrary.__init__', (['self'], {}), '(self)\n', (680, 686), False, 'from hpclib import HpcLibrary\n'), ((776, 813), 'observer.syncstep.SyncStep.fetch_pending', 'SyncStep.fetch_pending', (['self', 'deleted'], {}), '(self, deleted)\n', (798, 813), False, 'from observer.syncstep import SyncStep\n'), ((1108, 1134), 'hpc.models.OriginServer.objects.all', 'OriginServer.objects.all', ([], {}), '()\n', (1132, 1134), False, 'from hpc.models import ServiceProvider, ContentProvider, CDNPrefix, OriginServer\n')] |
#!/usr/bin/env python3
from PIL import Image
def tranform(r, g, b):
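    # Channel remap implemented below (descriptive comment): the returned
    # red is the original red halved, the returned green is the original
    # blue, and the returned blue is the original green halved.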
tmp = b
b = g // 2
g = tmp
r = r // 2
return r, g, b
def main():
im = Image.open('blue-flames.jpg')
input_pixels = im.getdata()
output_pixels = tuple(tranform(*pixel) for pixel in input_pixels)
im.putdata(output_pixels)
im.save('green-flames.png')
if __name__ == '__main__':
main()
| [
"PIL.Image.open"
] | [((166, 195), 'PIL.Image.open', 'Image.open', (['"""blue-flames.jpg"""'], {}), "('blue-flames.jpg')\n", (176, 195), False, 'from PIL import Image\n')] |
from setuptools import setup
setup(
name='firetv',
version='1.0.7',
description='Communicate with an Amazon Fire TV device via ADB over a network.',
url='https://github.com/happyleavesaoc/python-firetv/',
license='MIT',
author='happyleaves',
author_email='<EMAIL>',
packages=['firetv'],
install_requires=['pycryptodome', 'rsa', 'adb-homeassistant', 'pure-python-adb-homeassistant'],
extras_require={
'firetv-server': ['Flask>=0.10.1', 'PyYAML>=3.12']
},
entry_points={
'console_scripts': [
'firetv-server = firetv.__main__:main'
]
},
classifiers=[
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 3'
]
)
| [
"setuptools.setup"
] | [((30, 747), 'setuptools.setup', 'setup', ([], {'name': '"""firetv"""', 'version': '"""1.0.7"""', 'description': '"""Communicate with an Amazon Fire TV device via ADB over a network."""', 'url': '"""https://github.com/happyleavesaoc/python-firetv/"""', 'license': '"""MIT"""', 'author': '"""happyleaves"""', 'author_email': '"""<EMAIL>"""', 'packages': "['firetv']", 'install_requires': "['pycryptodome', 'rsa', 'adb-homeassistant', 'pure-python-adb-homeassistant']", 'extras_require': "{'firetv-server': ['Flask>=0.10.1', 'PyYAML>=3.12']}", 'entry_points': "{'console_scripts': ['firetv-server = firetv.__main__:main']}", 'classifiers': "['License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3']"}), "(name='firetv', version='1.0.7', description=\n 'Communicate with an Amazon Fire TV device via ADB over a network.',\n url='https://github.com/happyleavesaoc/python-firetv/', license='MIT',\n author='happyleaves', author_email='<EMAIL>', packages=['firetv'],\n install_requires=['pycryptodome', 'rsa', 'adb-homeassistant',\n 'pure-python-adb-homeassistant'], extras_require={'firetv-server': [\n 'Flask>=0.10.1', 'PyYAML>=3.12']}, entry_points={'console_scripts': [\n 'firetv-server = firetv.__main__:main']}, classifiers=[\n 'License :: OSI Approved :: MIT License',\n 'Operating System :: OS Independent',\n 'Programming Language :: Python :: 2',\n 'Programming Language :: Python :: 3'])\n", (35, 747), False, 'from setuptools import setup\n')] |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('scrapyproject', '0002_auto_20170208_1738'),
]
operations = [
migrations.AlterField(
model_name='project',
name='link_generator',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='project',
name='scraper_function',
field=models.TextField(blank=True),
),
migrations.AlterField(
model_name='project',
name='settings',
field=models.TextField(blank=True),
),
]
| [
"django.db.models.TextField"
] | [((367, 395), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (383, 395), False, 'from django.db import migrations, models\n'), ((528, 556), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (544, 556), False, 'from django.db import migrations, models\n'), ((681, 709), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (697, 709), False, 'from django.db import migrations, models\n')] |
from django import forms
from django.utils.translation import gettext_lazy as _
COURSE_QUANTITY_CHOICES = [(i, str(i)) for i in range(1, 21)]
class CartAddCourseForm(forms.Form):
quantity = forms.TypedChoiceField(
choices=COURSE_QUANTITY_CHOICES, coerce=int, label=_("Quantité")
)
override = forms.BooleanField(
required=False, initial=False, widget=forms.HiddenInput
)
| [
"django.forms.BooleanField",
"django.utils.translation.gettext_lazy"
] | [((316, 391), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)', 'initial': '(False)', 'widget': 'forms.HiddenInput'}), '(required=False, initial=False, widget=forms.HiddenInput)\n', (334, 391), False, 'from django import forms\n'), ((281, 294), 'django.utils.translation.gettext_lazy', '_', (['"""Quantité"""'], {}), "('Quantité')\n", (282, 294), True, 'from django.utils.translation import gettext_lazy as _\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
from unittest import mock
from keystoneauth1 import exceptions as ks_exc
import netaddr
from neutron_lib.api.definitions import qos
from neutron_lib.callbacks import events
from neutron_lib import constants as lib_constants
from neutron_lib import context
from neutron_lib import exceptions as lib_exc
from neutron_lib.exceptions import placement as pl_exc
from neutron_lib.exceptions import qos as qos_exc
from neutron_lib.objects import utils as obj_utils
from neutron_lib.plugins import constants as plugins_constants
from neutron_lib.plugins import directory
from neutron_lib.services.qos import constants as qos_consts
from neutron_lib.utils import net as net_utils
import os_resource_classes as orc
from oslo_config import cfg
from oslo_utils import uuidutils
import webob.exc
from neutron.exceptions import qos as neutron_qos_exc
from neutron.extensions import qos_pps_minimum_rule_alias
from neutron.extensions import qos_rules_alias
from neutron import manager
from neutron.objects import network as network_object
from neutron.objects import ports as ports_object
from neutron.objects.qos import policy as policy_object
from neutron.objects.qos import rule as rule_object
from neutron.services.qos import qos_plugin
from neutron.tests.unit.db import test_db_base_plugin_v2
from neutron.tests.unit.services.qos import base
DB_PLUGIN_KLASS = 'neutron.db.db_base_plugin_v2.NeutronDbPluginV2'
SERVICE_PLUGIN_KLASS = 'neutron.services.qos.qos_plugin.QoSPlugin'
class TestQosPlugin(base.BaseQosTestCase):
def setUp(self):
super(TestQosPlugin, self).setUp()
self.setup_coreplugin(load_plugins=False)
mock.patch('neutron.objects.db.api.create_object').start()
mock.patch('neutron.objects.db.api.update_object').start()
mock.patch('neutron.objects.db.api.delete_object').start()
mock.patch('neutron.objects.db.api.get_object').start()
_mock_qos_load_attr = mock.patch(
'neutron.objects.qos.policy.QosPolicy.obj_load_attr')
self.mock_qos_load_attr = _mock_qos_load_attr.start()
# We don't use real models as per mocks above. We also need to mock-out
# methods that work with real data types
mock.patch(
'neutron.objects.base.NeutronDbObject.modify_fields_from_db'
).start()
mock.patch.object(policy_object.QosPolicy, 'unset_default').start()
mock.patch.object(policy_object.QosPolicy, 'set_default').start()
cfg.CONF.set_override("core_plugin", DB_PLUGIN_KLASS)
cfg.CONF.set_override("service_plugins", ["qos"])
manager.init()
self.qos_plugin = directory.get_plugin(plugins_constants.QOS)
self.qos_plugin.driver_manager = mock.Mock()
self.rpc_push = mock.patch('neutron.api.rpc.handlers.resources_rpc'
'.ResourcesPushRpcApi.push').start()
self.ctxt = context.Context('fake_user', 'fake_tenant')
self.admin_ctxt = context.get_admin_context()
self.policy_data = {
'policy': {'id': uuidutils.generate_uuid(),
'project_id': uuidutils.generate_uuid(),
'name': 'test-policy',
'description': 'Test policy description',
'shared': True,
'is_default': False}}
self.rule_data = {
'bandwidth_limit_rule': {'id': uuidutils.generate_uuid(),
'max_kbps': 100,
'max_burst_kbps': 150},
'dscp_marking_rule': {'id': uuidutils.generate_uuid(),
'dscp_mark': 16},
'minimum_bandwidth_rule': {
'id': uuidutils.generate_uuid(),
'min_kbps': 10},
'packet_rate_limit_rule': {
'id': uuidutils.generate_uuid(),
'max_kpps': 20,
'max_burst_kpps': 130},
'minimum_packet_rate_rule': {
'id': uuidutils.generate_uuid(),
'min_kpps': 10,
'direction': 'any'},
}
self.policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
self.rule = rule_object.QosBandwidthLimitRule(
self.ctxt, **self.rule_data['bandwidth_limit_rule'])
self.dscp_rule = rule_object.QosDscpMarkingRule(
self.ctxt, **self.rule_data['dscp_marking_rule'])
self.min_bw_rule = rule_object.QosMinimumBandwidthRule(
self.ctxt, **self.rule_data['minimum_bandwidth_rule'])
self.pps_rule = rule_object.QosPacketRateLimitRule(
self.ctxt, **self.rule_data['packet_rate_limit_rule'])
self.min_pps_rule = rule_object.QosMinimumPacketRateRule(
self.ctxt, **self.rule_data['minimum_packet_rate_rule'])
def _validate_driver_params(self, method_name, ctxt):
call_args = self.qos_plugin.driver_manager.call.call_args[0]
self.assertTrue(self.qos_plugin.driver_manager.call.called)
self.assertEqual(call_args[0], method_name)
self.assertEqual(call_args[1], ctxt)
self.assertIsInstance(call_args[2], policy_object.QosPolicy)
def _create_and_extend_port(self, min_bw_rules, min_pps_rules=None,
physical_network='public',
has_qos_policy=True, has_net_qos_policy=False,
request_groups_uuids=None):
network_id = uuidutils.generate_uuid()
self.port_data = {
'port': {'id': uuidutils.generate_uuid(),
'network_id': network_id}
}
if has_qos_policy:
self.port_data['port']['qos_policy_id'] = self.policy.id
elif has_net_qos_policy:
self.port_data['port']['qos_network_policy_id'] = self.policy.id
self.port = ports_object.Port(
self.ctxt, **self.port_data['port'])
port_res = {"binding:vnic_type": "normal"}
segment_mock = mock.MagicMock(network_id=network_id,
physical_network=physical_network)
min_pps_rules = min_pps_rules if min_pps_rules else []
with mock.patch('neutron.objects.network.NetworkSegment.get_objects',
return_value=[segment_mock]), \
mock.patch(
'neutron.objects.qos.rule.QosMinimumBandwidthRule.'
'get_objects',
return_value=min_bw_rules), \
mock.patch(
'neutron.objects.qos.rule.QosMinimumPacketRateRule.'
'get_objects',
return_value=min_pps_rules), \
mock.patch(
'uuid.uuid5',
return_value='fake_uuid',
side_effect=request_groups_uuids):
return qos_plugin.QoSPlugin._extend_port_resource_request(
port_res, self.port)
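    # Shape note (grounded in the assertions below): the extended port dict
    # may carry a 'resource_request' with Placement-style 'request_groups'
    # (each having an 'id', 'required' traits and 'resources' amounts) plus
    # a 'same_subtree' list of the group ids.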
def _create_and_extend_ports(self, min_bw_rules, min_pps_rules=None,
physical_network='public',
request_groups_uuids=None):
network_id = uuidutils.generate_uuid()
ports_res = [
{
"resource_request": {
"port_id": uuidutils.generate_uuid(),
"qos_id": self.policy.id,
"network_id": network_id,
"vnic_type": "normal",
}
},
{
"resource_request": {
"port_id": uuidutils.generate_uuid(),
"qos_id": self.policy.id,
"network_id": network_id,
"vnic_type": "normal",
}
},
]
segment_mock = mock.MagicMock(network_id=network_id,
physical_network=physical_network)
min_pps_rules = min_pps_rules if min_pps_rules else []
with mock.patch('neutron.objects.network.NetworkSegment.get_objects',
return_value=[segment_mock]), \
mock.patch(
'neutron.objects.qos.rule.QosMinimumBandwidthRule.'
'get_objects',
return_value=min_bw_rules), \
mock.patch(
'neutron.objects.qos.rule.QosMinimumPacketRateRule.'
'get_objects',
return_value=min_pps_rules), \
mock.patch(
'uuid.uuid5',
return_value='fake_uuid',
side_effect=request_groups_uuids):
return qos_plugin.QoSPlugin._extend_port_resource_request_bulk(
ports_res, None)
def test__extend_port_resource_request_min_bw_rule(self):
self.min_bw_rule.direction = lib_constants.EGRESS_DIRECTION
port = self._create_and_extend_port([self.min_bw_rule])
self.assertEqual(
1,
len(port['resource_request']['request_groups'])
)
self.assertEqual(
'fake_uuid',
port['resource_request']['request_groups'][0]['id']
)
self.assertEqual(
['CUSTOM_PHYSNET_PUBLIC', 'CUSTOM_VNIC_TYPE_NORMAL'],
port['resource_request']['request_groups'][0]['required']
)
self.assertEqual(
{orc.NET_BW_EGR_KILOBIT_PER_SEC: 10},
port['resource_request']['request_groups'][0]['resources'],
)
self.assertEqual(
['fake_uuid'],
port['resource_request']['same_subtree'],
)
def test__extend_port_resource_request_min_pps_rule(self):
port = self._create_and_extend_port([], [self.min_pps_rule])
self.assertEqual(
1,
len(port['resource_request']['request_groups'])
)
self.assertEqual(
'fake_uuid',
port['resource_request']['request_groups'][0]['id']
)
self.assertEqual(
['CUSTOM_VNIC_TYPE_NORMAL'],
port['resource_request']['request_groups'][0]['required']
)
self.assertEqual(
{orc.NET_PACKET_RATE_KILOPACKET_PER_SEC: 10},
port['resource_request']['request_groups'][0]['resources'],
)
self.assertEqual(
['fake_uuid'],
port['resource_request']['same_subtree'],
)
def test__extend_port_resource_request_min_bw_and_pps_rule(self):
self.min_bw_rule.direction = lib_constants.EGRESS_DIRECTION
self.min_pps_rule.direction = lib_constants.EGRESS_DIRECTION
request_groups_uuids = ['fake_uuid0', 'fake_uuid1']
min_bw_rule_ingress_data = {
'id': uuidutils.generate_uuid(),
'min_kbps': 20,
'direction': lib_constants.INGRESS_DIRECTION}
min_pps_rule_ingress_data = {
'id': uuidutils.generate_uuid(),
'min_kpps': 20,
'direction': lib_constants.INGRESS_DIRECTION}
min_bw_rule_ingress = rule_object.QosMinimumBandwidthRule(
self.ctxt, **min_bw_rule_ingress_data)
min_pps_rule_ingress = rule_object.QosMinimumPacketRateRule(
self.ctxt, **min_pps_rule_ingress_data)
port = self._create_and_extend_port(
[self.min_bw_rule, min_bw_rule_ingress],
[self.min_pps_rule, min_pps_rule_ingress],
request_groups_uuids=request_groups_uuids)
self.assertEqual(
2,
len(port['resource_request']['request_groups'])
)
self.assertIn(
{
'id': 'fake_uuid0',
'required':
['CUSTOM_PHYSNET_PUBLIC', 'CUSTOM_VNIC_TYPE_NORMAL'],
'resources': {
orc.NET_BW_EGR_KILOBIT_PER_SEC: 10,
orc.NET_BW_IGR_KILOBIT_PER_SEC: 20},
},
port['resource_request']['request_groups']
)
self.assertIn(
{
'id': 'fake_uuid1',
'required': ['CUSTOM_VNIC_TYPE_NORMAL'],
'resources': {
orc.NET_PACKET_RATE_EGR_KILOPACKET_PER_SEC: 10,
orc.NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC: 20,
},
},
port['resource_request']['request_groups']
)
self.assertEqual(
['fake_uuid0', 'fake_uuid1'],
port['resource_request']['same_subtree'],
)
def test__extend_port_resource_request_non_min_bw_or_min_pps_rule(self):
port = self._create_and_extend_port([], [])
self.assertIsNone(port.get('resource_request'))
def test__extend_port_resource_request_min_bw_non_provider_net(self):
self.min_bw_rule.direction = lib_constants.EGRESS_DIRECTION
port = self._create_and_extend_port([self.min_bw_rule],
physical_network=None)
self.assertIsNone(port.get('resource_request'))
def test__extend_port_resource_request_mix_rules_non_provider_net(self):
self.min_bw_rule.direction = lib_constants.EGRESS_DIRECTION
port = self._create_and_extend_port([self.min_bw_rule],
[self.min_pps_rule],
physical_network=None)
self.assertEqual(
1,
len(port['resource_request']['request_groups'])
)
self.assertEqual(
'fake_uuid',
port['resource_request']['request_groups'][0]['id']
)
self.assertEqual(
['CUSTOM_VNIC_TYPE_NORMAL'],
port['resource_request']['request_groups'][0]['required']
)
self.assertEqual(
{orc.NET_PACKET_RATE_KILOPACKET_PER_SEC: 10},
port['resource_request']['request_groups'][0]['resources'],
)
self.assertEqual(
['fake_uuid'],
port['resource_request']['same_subtree'],
)
def test__extend_port_resource_request_bulk_min_bw_rule(self):
self.min_bw_rule.direction = lib_constants.EGRESS_DIRECTION
ports = self._create_and_extend_ports([self.min_bw_rule])
for port in ports:
self.assertEqual(
1,
len(port['resource_request']['request_groups'])
)
self.assertEqual(
'fake_uuid',
port['resource_request']['request_groups'][0]['id']
)
self.assertEqual(
['CUSTOM_PHYSNET_PUBLIC', 'CUSTOM_VNIC_TYPE_NORMAL'],
port['resource_request']['request_groups'][0]['required']
)
self.assertEqual(
{orc.NET_BW_EGR_KILOBIT_PER_SEC: 10},
port['resource_request']['request_groups'][0]['resources'],
)
self.assertEqual(
['fake_uuid'],
port['resource_request']['same_subtree'],
)
def test__extend_port_resource_request_bulk_min_pps_rule(self):
ports = self._create_and_extend_ports([], [self.min_pps_rule])
for port in ports:
self.assertEqual(
1,
len(port['resource_request']['request_groups'])
)
self.assertEqual(
'fake_uuid',
port['resource_request']['request_groups'][0]['id']
)
self.assertEqual(
['CUSTOM_VNIC_TYPE_NORMAL'],
port['resource_request']['request_groups'][0]['required']
)
self.assertEqual(
{orc.NET_PACKET_RATE_KILOPACKET_PER_SEC: 10},
port['resource_request']['request_groups'][0]['resources'],
)
self.assertEqual(
['fake_uuid'],
port['resource_request']['same_subtree'],
)
def test__extend_port_resource_request_bulk_min_bw_and_pps_rule(self):
self.min_bw_rule.direction = lib_constants.EGRESS_DIRECTION
self.min_pps_rule.direction = lib_constants.EGRESS_DIRECTION
request_groups_uuids = ['fake_uuid0', 'fake_uuid1'] * 2
min_bw_rule_ingress_data = {
'id': uuidutils.generate_uuid(),
'min_kbps': 20,
'direction': lib_constants.INGRESS_DIRECTION}
min_pps_rule_ingress_data = {
'id': uuidutils.generate_uuid(),
'min_kpps': 20,
'direction': lib_constants.INGRESS_DIRECTION}
min_bw_rule_ingress = rule_object.QosMinimumBandwidthRule(
self.ctxt, **min_bw_rule_ingress_data)
min_pps_rule_ingress = rule_object.QosMinimumPacketRateRule(
self.ctxt, **min_pps_rule_ingress_data)
ports = self._create_and_extend_ports(
[self.min_bw_rule, min_bw_rule_ingress],
[self.min_pps_rule, min_pps_rule_ingress],
request_groups_uuids=request_groups_uuids)
for port in ports:
self.assertEqual(
2,
len(port['resource_request']['request_groups'])
)
self.assertIn(
{
'id': 'fake_uuid0',
'required':
['CUSTOM_PHYSNET_PUBLIC', 'CUSTOM_VNIC_TYPE_NORMAL'],
'resources': {
orc.NET_BW_EGR_KILOBIT_PER_SEC: 10,
orc.NET_BW_IGR_KILOBIT_PER_SEC: 20},
},
port['resource_request']['request_groups']
)
self.assertIn(
{
'id': 'fake_uuid1',
'required': ['CUSTOM_VNIC_TYPE_NORMAL'],
'resources': {
orc.NET_PACKET_RATE_EGR_KILOPACKET_PER_SEC: 10,
orc.NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC: 20,
},
},
port['resource_request']['request_groups']
)
self.assertEqual(
['fake_uuid0', 'fake_uuid1'],
port['resource_request']['same_subtree'],
)
def test__extend_port_resource_request_no_qos_policy(self):
port = self._create_and_extend_port([], physical_network='public',
has_qos_policy=False)
self.assertIsNone(port.get('resource_request'))
def test__extend_port_resource_request_min_bw_inherited_policy(
self):
self.min_bw_rule.direction = lib_constants.EGRESS_DIRECTION
self.min_bw_rule.qos_policy_id = self.policy.id
port = self._create_and_extend_port([self.min_bw_rule],
has_net_qos_policy=True)
self.assertEqual(
1,
len(port['resource_request']['request_groups'])
)
self.assertEqual(
'fake_uuid',
port['resource_request']['request_groups'][0]['id']
)
self.assertEqual(
['CUSTOM_PHYSNET_PUBLIC', 'CUSTOM_VNIC_TYPE_NORMAL'],
port['resource_request']['request_groups'][0]['required']
)
self.assertEqual(
{orc.NET_BW_EGR_KILOBIT_PER_SEC: 10},
port['resource_request']['request_groups'][0]['resources'],
)
self.assertEqual(
['fake_uuid'],
port['resource_request']['same_subtree'],
)
def test_get_ports_with_policy(self):
network_ports = [
mock.MagicMock(qos_policy_id=None),
mock.MagicMock(qos_policy_id=uuidutils.generate_uuid()),
mock.MagicMock(qos_policy_id=None)
]
ports = [
mock.MagicMock(qos_policy_id=self.policy.id),
]
expected_network_ports = [
port for port in network_ports if port.qos_policy_id is None]
expected_ports = ports + expected_network_ports
with mock.patch(
'neutron.objects.ports.Port.get_objects',
side_effect=[network_ports, ports]
), mock.patch.object(
self.policy, "get_bound_networks"
), mock.patch.object(
self.policy, "get_bound_ports"
):
policy_ports = self.qos_plugin._get_ports_with_policy(
self.ctxt, self.policy)
self.assertEqual(
len(expected_ports), len(policy_ports))
for port in expected_ports:
self.assertIn(port, policy_ports)
def _test_validate_update_port_callback(self, policy_id=None,
original_policy_id=None):
port_id = uuidutils.generate_uuid()
kwargs = {
"port": {
"id": port_id,
qos_consts.QOS_POLICY_ID: policy_id
},
"original_port": {
"id": port_id,
qos_consts.QOS_POLICY_ID: original_policy_id
}
}
port_mock = mock.MagicMock(id=port_id, qos_policy_id=policy_id)
policy_mock = mock.MagicMock(id=policy_id)
admin_ctxt = mock.Mock()
with mock.patch(
'neutron.objects.ports.Port.get_object',
return_value=port_mock
) as get_port, mock.patch(
'neutron.objects.qos.policy.QosPolicy.get_object',
return_value=policy_mock
) as get_policy, mock.patch.object(
self.qos_plugin, "validate_policy_for_port"
) as validate_policy_for_port, mock.patch.object(
self.ctxt, "elevated", return_value=admin_ctxt
):
self.qos_plugin._validate_update_port_callback(
"PORT", "precommit_update", "test_plugin",
payload=events.DBEventPayload(
self.ctxt, desired_state=kwargs['port'],
states=(kwargs['original_port'],)))
if policy_id is None or policy_id == original_policy_id:
get_port.assert_not_called()
get_policy.assert_not_called()
validate_policy_for_port.assert_not_called()
else:
get_port.assert_called_once_with(self.ctxt, id=port_id)
get_policy.assert_called_once_with(admin_ctxt, id=policy_id)
validate_policy_for_port.assert_called_once_with(
self.ctxt, policy_mock, port_mock)
def test_validate_update_port_callback_policy_changed(self):
self._test_validate_update_port_callback(
policy_id=uuidutils.generate_uuid())
def test_validate_update_port_callback_policy_not_changed(self):
policy_id = uuidutils.generate_uuid()
self._test_validate_update_port_callback(
policy_id=policy_id, original_policy_id=policy_id)
def test_validate_update_port_callback_policy_removed(self):
self._test_validate_update_port_callback(
policy_id=None, original_policy_id=uuidutils.generate_uuid())
def _test_validate_update_network_callback(self, policy_id=None,
original_policy_id=None):
network_id = uuidutils.generate_uuid()
kwargs = {
"context": self.ctxt,
"network": {
"id": network_id,
qos_consts.QOS_POLICY_ID: policy_id
},
"original_network": {
"id": network_id,
qos_consts.QOS_POLICY_ID: original_policy_id
}
}
port_mock_with_own_policy = mock.MagicMock(
id=uuidutils.generate_uuid(),
qos_policy_id=uuidutils.generate_uuid())
port_mock_without_own_policy = mock.MagicMock(
id=uuidutils.generate_uuid(), qos_policy_id=None)
ports = [port_mock_with_own_policy, port_mock_without_own_policy]
policy_mock = mock.MagicMock(id=policy_id)
admin_ctxt = mock.Mock()
with mock.patch(
'neutron.objects.ports.Port.get_objects',
return_value=ports
) as get_ports, mock.patch(
'neutron.objects.qos.policy.QosPolicy.get_object',
return_value=policy_mock
) as get_policy, mock.patch.object(
self.qos_plugin, "validate_policy_for_network"
) as validate_policy_for_network, mock.patch.object(
self.qos_plugin, "validate_policy_for_ports"
) as validate_policy_for_ports, mock.patch.object(
self.ctxt, "elevated", return_value=admin_ctxt
):
self.qos_plugin._validate_update_network_callback(
"NETWORK", "precommit_update", "test_plugin",
payload=events.DBEventPayload(
self.ctxt, desired_state=kwargs['network'],
states=(kwargs['original_network'],)))
if policy_id is None or policy_id == original_policy_id:
get_policy.assert_not_called()
validate_policy_for_network.assert_not_called()
get_ports.assert_not_called()
validate_policy_for_ports.assert_not_called()
else:
get_policy.assert_called_once_with(admin_ctxt, id=policy_id)
get_ports.assert_called_once_with(self.ctxt,
network_id=network_id)
validate_policy_for_ports.assert_called_once_with(
self.ctxt, policy_mock, [port_mock_without_own_policy])
def test_validate_update_network_callback_policy_changed(self):
self._test_validate_update_network_callback(
policy_id=uuidutils.generate_uuid())
def test_validate_update_network_callback_policy_not_changed(self):
policy_id = uuidutils.generate_uuid()
self._test_validate_update_network_callback(
policy_id=policy_id, original_policy_id=policy_id)
def test_validate_update_network_callback_policy_removed(self):
self._test_validate_update_network_callback(
policy_id=None, original_policy_id=uuidutils.generate_uuid())
def test_validate_policy_for_port_rule_not_valid(self):
port = {'id': uuidutils.generate_uuid()}
with mock.patch.object(
self.qos_plugin.driver_manager, "validate_rule_for_port",
return_value=False
):
self.policy.rules = [self.rule]
self.assertRaises(
qos_exc.QosRuleNotSupported,
self.qos_plugin.validate_policy_for_port,
self.ctxt, self.policy, port)
def test_validate_policy_for_port_all_rules_valid(self):
port = {'id': uuidutils.generate_uuid()}
with mock.patch.object(
self.qos_plugin.driver_manager, "validate_rule_for_port",
return_value=True
):
self.policy.rules = [self.rule]
try:
self.qos_plugin.validate_policy_for_port(
self.ctxt, self.policy, port)
except qos_exc.QosRuleNotSupported:
self.fail("QosRuleNotSupported exception unexpectedly raised")
def test_validate_policy_for_network(self):
network = uuidutils.generate_uuid()
with mock.patch.object(
self.qos_plugin.driver_manager, "validate_rule_for_network",
return_value=True
):
self.policy.rules = [self.rule]
try:
self.qos_plugin.validate_policy_for_network(
self.ctxt, self.policy, network_id=network)
except qos_exc.QosRuleNotSupportedByNetwork:
self.fail("QosRuleNotSupportedByNetwork "
"exception unexpectedly raised")
def test_create_min_bw_rule_on_bound_port(self):
policy = self._get_policy()
policy.rules = [self.min_bw_rule]
segment = network_object.NetworkSegment(
physical_network='fake physnet')
net = network_object.Network(
self.ctxt,
segments=[segment])
port = ports_object.Port(
self.ctxt,
id=uuidutils.generate_uuid(),
network_id=uuidutils.generate_uuid(),
device_owner='compute:fake-zone')
with mock.patch(
'neutron.objects.qos.policy.QosPolicy.get_object',
return_value=policy), \
mock.patch(
'neutron.objects.network.Network.get_object',
return_value=net), \
mock.patch.object(
self.qos_plugin,
'_get_ports_with_policy',
return_value=[port]):
self.assertRaises(
NotImplementedError,
self.qos_plugin.create_policy_minimum_bandwidth_rule,
self.ctxt, policy.id, self.rule_data)
def test_create_min_bw_rule_on_unbound_port(self):
policy = self._get_policy()
policy.rules = [self.min_bw_rule]
segment = network_object.NetworkSegment(
physical_network='fake physnet')
net = network_object.Network(
self.ctxt,
segments=[segment])
port = ports_object.Port(
self.ctxt,
id=uuidutils.generate_uuid(),
network_id=uuidutils.generate_uuid(),
device_owner='')
with mock.patch(
'neutron.objects.qos.policy.QosPolicy.get_object',
return_value=policy), \
mock.patch(
'neutron.objects.network.Network.get_object',
return_value=net), \
mock.patch.object(
self.qos_plugin,
'_get_ports_with_policy',
return_value=[port]):
try:
self.qos_plugin.create_policy_minimum_bandwidth_rule(
self.ctxt, policy.id, self.rule_data)
except NotImplementedError:
self.fail()
@mock.patch(
'neutron.objects.rbac_db.RbacNeutronDbObjectMixin'
'.create_rbac_policy')
@mock.patch('neutron.objects.qos.policy.QosPolicy')
def test_add_policy(self, mock_qos_policy, mock_create_rbac_policy):
mock_manager = mock.Mock()
mock_manager.attach_mock(mock_qos_policy, 'QosPolicy')
mock_manager.attach_mock(self.qos_plugin.driver_manager, 'driver')
mock_manager.reset_mock()
self.qos_plugin.create_policy(self.ctxt, self.policy_data)
policy_mock_call = mock.call.QosPolicy().create()
create_precommit_mock_call = mock.call.driver.call(
'create_policy_precommit', self.ctxt, mock.ANY)
create_mock_call = mock.call.driver.call(
'create_policy', self.ctxt, mock.ANY)
self.assertTrue(
mock_manager.mock_calls.index(policy_mock_call) <
mock_manager.mock_calls.index(create_precommit_mock_call) <
mock_manager.mock_calls.index(create_mock_call))
def test_add_policy_with_extra_tenant_keyword(self, *mocks):
policy_id = uuidutils.generate_uuid()
project_id = uuidutils.generate_uuid()
tenant_policy = {
'policy': {'id': policy_id,
'project_id': project_id,
'tenant_id': project_id,
'name': 'test-policy',
'description': 'Test policy description',
'shared': True,
'is_default': False}}
policy_details = {'id': policy_id,
'project_id': project_id,
'name': 'test-policy',
'description': 'Test policy description',
'shared': True,
'is_default': False}
with mock.patch('neutron.objects.qos.policy.QosPolicy') as QosMocked:
self.qos_plugin.create_policy(self.ctxt, tenant_policy)
QosMocked.assert_called_once_with(self.ctxt, **policy_details)
@mock.patch.object(policy_object.QosPolicy, "get_object")
@mock.patch(
'neutron.objects.rbac_db.RbacNeutronDbObjectMixin'
'.create_rbac_policy')
@mock.patch.object(policy_object.QosPolicy, 'update')
def test_update_policy(self, mock_qos_policy_update,
mock_create_rbac_policy, mock_qos_policy_get):
mock_qos_policy_get.return_value = self.policy
mock_manager = mock.Mock()
mock_manager.attach_mock(mock_qos_policy_update, 'update')
mock_manager.attach_mock(self.qos_plugin.driver_manager, 'driver')
mock_manager.reset_mock()
fields = obj_utils.get_updatable_fields(
policy_object.QosPolicy, self.policy_data['policy'])
self.qos_plugin.update_policy(
self.ctxt, self.policy.id, {'policy': fields})
self._validate_driver_params('update_policy', self.ctxt)
policy_update_mock_call = mock.call.update()
update_precommit_mock_call = mock.call.driver.call(
'update_policy_precommit', self.ctxt, mock.ANY)
update_mock_call = mock.call.driver.call(
'update_policy', self.ctxt, mock.ANY)
self.assertTrue(
mock_manager.mock_calls.index(policy_update_mock_call) <
mock_manager.mock_calls.index(update_precommit_mock_call) <
mock_manager.mock_calls.index(update_mock_call))
@mock.patch('neutron.objects.db.api.get_object', return_value=None)
@mock.patch.object(policy_object.QosPolicy, 'delete')
def test_delete_policy(self, mock_qos_policy_delete, mock_api_get_policy):
mock_manager = mock.Mock()
mock_manager.attach_mock(mock_qos_policy_delete, 'delete')
mock_manager.attach_mock(self.qos_plugin.driver_manager, 'driver')
mock_manager.reset_mock()
self.qos_plugin.delete_policy(self.ctxt, self.policy.id)
self._validate_driver_params('delete_policy', self.ctxt)
policy_delete_mock_call = mock.call.delete()
delete_precommit_mock_call = mock.call.driver.call(
'delete_policy_precommit', self.ctxt, mock.ANY)
delete_mock_call = mock.call.driver.call(
'delete_policy', self.ctxt, mock.ANY)
self.assertTrue(
mock_manager.mock_calls.index(policy_delete_mock_call) <
mock_manager.mock_calls.index(delete_precommit_mock_call) <
mock_manager.mock_calls.index(delete_mock_call))
@mock.patch.object(policy_object.QosPolicy, "get_object")
@mock.patch.object(rule_object.QosBandwidthLimitRule, 'create')
def test_create_policy_rule(self, mock_qos_rule_create,
mock_qos_policy_get):
_policy = copy.copy(self.policy)
setattr(_policy, "rules", [])
mock_qos_policy_get.return_value = _policy
mock_manager = mock.Mock()
mock_manager.attach_mock(mock_qos_rule_create, 'create')
mock_manager.attach_mock(self.qos_plugin.driver_manager, 'driver')
mock_manager.reset_mock()
with mock.patch('neutron.objects.qos.qos_policy_validator'
'.check_bandwidth_rule_conflict',
return_value=None), \
mock.patch(
'neutron.objects.qos.qos_policy_validator'
'.check_min_pps_rule_conflict', return_value=None):
self.qos_plugin.create_policy_bandwidth_limit_rule(
self.ctxt, self.policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
rule_create_mock_call = mock.call.create()
update_precommit_mock_call = mock.call.driver.call(
'update_policy_precommit', self.ctxt, mock.ANY)
update_mock_call = mock.call.driver.call(
'update_policy', self.ctxt, mock.ANY)
self.assertTrue(
mock_manager.mock_calls.index(rule_create_mock_call) <
mock_manager.mock_calls.index(update_precommit_mock_call) <
mock_manager.mock_calls.index(update_mock_call))
def test_create_policy_rule_check_rule_min_less_than_max(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.qos_plugin.create_policy_minimum_bandwidth_rule(
self.ctxt, _policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
self.mock_qos_load_attr.assert_called_once_with('rules')
mock_qos_get_obj.assert_called_once_with(self.ctxt, id=_policy.id)
def test_create_policy_rule_check_rule_max_more_than_min(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_bw_rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.qos_plugin.create_policy_bandwidth_limit_rule(
self.ctxt, _policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
self.mock_qos_load_attr.assert_called_once_with('rules')
mock_qos_get_obj.assert_called_once_with(self.ctxt, id=_policy.id)
def test_create_policy_rule_check_rule_bwlimit_less_than_minbw(self):
_policy = self._get_policy()
self.rule_data['bandwidth_limit_rule']['max_kbps'] = 1
setattr(_policy, "rules", [self.min_bw_rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(qos_exc.QoSRuleParameterConflict,
self.qos_plugin.create_policy_bandwidth_limit_rule,
self.ctxt, self.policy.id, self.rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt, id=_policy.id)
def test_create_policy_rule_check_rule_minbw_gr_than_bwlimit(self):
_policy = self._get_policy()
self.rule_data['minimum_bandwidth_rule']['min_kbps'] = 1000000
setattr(_policy, "rules", [self.rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(qos_exc.QoSRuleParameterConflict,
self.qos_plugin.create_policy_minimum_bandwidth_rule,
self.ctxt, self.policy.id, self.rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt, id=_policy.id)
def test_create_policy_rule_duplicates(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.rule])
new_rule_data = {
'bandwidth_limit_rule': {
'max_kbps': 5000,
'direction': self.rule.direction
}
}
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(
qos_exc.QoSRulesConflict,
self.qos_plugin.create_policy_bandwidth_limit_rule,
self.ctxt, _policy.id, new_rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt, id=_policy.id)
@mock.patch.object(rule_object.QosBandwidthLimitRule, 'update')
def test_update_policy_rule(self, mock_qos_rule_update):
mock_manager = mock.Mock()
mock_manager.attach_mock(mock_qos_rule_update, 'update')
mock_manager.attach_mock(self.qos_plugin.driver_manager, 'driver')
mock_manager.reset_mock()
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
setattr(_policy, "rules", [self.rule])
with mock.patch('neutron.objects.qos.rule.get_rules',
return_value=[self.rule]), mock.patch(
'neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.rule_data['bandwidth_limit_rule']['max_kbps'] = 1
self.qos_plugin.update_policy_bandwidth_limit_rule(
self.ctxt, self.rule.id, self.policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
rule_update_mock_call = mock.call.update()
update_precommit_mock_call = mock.call.driver.call(
'update_policy_precommit', self.ctxt, mock.ANY)
update_mock_call = mock.call.driver.call(
'update_policy', self.ctxt, mock.ANY)
self.assertTrue(
mock_manager.mock_calls.index(rule_update_mock_call) <
mock_manager.mock_calls.index(update_precommit_mock_call) <
mock_manager.mock_calls.index(update_mock_call))
def test_update_policy_rule_check_rule_min_less_than_max(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.qos_plugin.update_policy_bandwidth_limit_rule(
self.ctxt, self.rule.id, self.policy.id, self.rule_data)
self.mock_qos_load_attr.assert_called_once_with('rules')
self._validate_driver_params('update_policy', self.ctxt)
rules = [self.rule, self.min_bw_rule]
setattr(_policy, "rules", rules)
self.mock_qos_load_attr.reset_mock()
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.qos_plugin.update_policy_minimum_bandwidth_rule(
self.ctxt, self.min_bw_rule.id,
self.policy.id, self.rule_data)
self.mock_qos_load_attr.assert_called_once_with('rules')
self._validate_driver_params('update_policy', self.ctxt)
def test_update_policy_rule_check_rule_bwlimit_less_than_minbw(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.qos_plugin.update_policy_bandwidth_limit_rule(
self.ctxt, self.rule.id, self.policy.id, self.rule_data)
self.mock_qos_load_attr.assert_called_once_with('rules')
self._validate_driver_params('update_policy', self.ctxt)
self.rule_data['minimum_bandwidth_rule']['min_kbps'] = 1000
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.assertRaises(
qos_exc.QoSRuleParameterConflict,
self.qos_plugin.update_policy_minimum_bandwidth_rule,
self.ctxt, self.min_bw_rule.id,
self.policy.id, self.rule_data)
def test_update_policy_rule_check_rule_minbw_gr_than_bwlimit(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_bw_rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.qos_plugin.update_policy_minimum_bandwidth_rule(
self.ctxt, self.min_bw_rule.id, self.policy.id,
self.rule_data)
self.mock_qos_load_attr.assert_called_once_with('rules')
self._validate_driver_params('update_policy', self.ctxt)
self.rule_data['bandwidth_limit_rule']['max_kbps'] = 1
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.assertRaises(
qos_exc.QoSRuleParameterConflict,
self.qos_plugin.update_policy_bandwidth_limit_rule,
self.ctxt, self.rule.id,
self.policy.id, self.rule_data)
def _get_policy(self):
return policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
def test_update_policy_rule_bad_policy(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [])
self.assertRaises(
qos_exc.QosRuleNotFound,
self.qos_plugin.update_policy_bandwidth_limit_rule,
self.ctxt, self.rule.id, self.policy.id,
self.rule_data)
@mock.patch.object(rule_object.QosBandwidthLimitRule, 'delete')
def test_delete_policy_rule(self, mock_qos_rule_delete):
mock_manager = mock.Mock()
mock_manager.attach_mock(mock_qos_rule_delete, 'delete')
mock_manager.attach_mock(self.qos_plugin.driver_manager, 'driver')
mock_manager.reset_mock()
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [self.rule])
self.qos_plugin.delete_policy_bandwidth_limit_rule(
self.ctxt, self.rule.id, _policy.id)
self._validate_driver_params('update_policy', self.ctxt)
rule_delete_mock_call = mock.call.delete()
update_precommit_mock_call = mock.call.driver.call(
'update_policy_precommit', self.ctxt, mock.ANY)
update_mock_call = mock.call.driver.call(
'update_policy', self.ctxt, mock.ANY)
self.assertTrue(
mock_manager.mock_calls.index(rule_delete_mock_call) <
mock_manager.mock_calls.index(update_precommit_mock_call) <
mock_manager.mock_calls.index(update_mock_call))
def test_delete_policy_rule_bad_policy(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [])
self.assertRaises(
qos_exc.QosRuleNotFound,
self.qos_plugin.delete_policy_bandwidth_limit_rule,
self.ctxt, self.rule.id, _policy.id)
def test_get_policy_bandwidth_limit_rule(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosBandwidthLimitRule.'
'get_object') as get_object_mock:
self.qos_plugin.get_policy_bandwidth_limit_rule(
self.ctxt, self.rule.id, self.policy.id)
get_object_mock.assert_called_once_with(self.ctxt,
id=self.rule.id)
def test_get_policy_bandwidth_limit_rules_for_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosBandwidthLimitRule.'
'get_objects') as get_objects_mock:
self.qos_plugin.get_policy_bandwidth_limit_rules(
self.ctxt, self.policy.id)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY, qos_policy_id=self.policy.id)
def test_get_policy_bandwidth_limit_rules_for_policy_with_filters(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosBandwidthLimitRule.'
'get_objects') as get_objects_mock:
filters = {'filter': 'filter_id'}
self.qos_plugin.get_policy_bandwidth_limit_rules(
self.ctxt, self.policy.id, filters=filters)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY,
qos_policy_id=self.policy.id,
filter='filter_id')
def test_get_policy_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy,
self.ctxt, self.policy.id)
def test_get_policy_bandwidth_limit_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_bandwidth_limit_rule,
self.ctxt, self.rule.id, self.policy.id)
def test_get_policy_bandwidth_limit_rules_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_bandwidth_limit_rules,
self.ctxt, self.policy.id)
def test_create_policy_dscp_marking_rule(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [self.dscp_rule])
self.qos_plugin.create_policy_dscp_marking_rule(
self.ctxt, self.policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
def test_update_policy_dscp_marking_rule(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [self.dscp_rule])
self.qos_plugin.update_policy_dscp_marking_rule(
self.ctxt, self.dscp_rule.id, self.policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
def test_delete_policy_dscp_marking_rule(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [self.dscp_rule])
self.qos_plugin.delete_policy_dscp_marking_rule(
self.ctxt, self.dscp_rule.id, self.policy.id)
self._validate_driver_params('update_policy', self.ctxt)
def test_get_policy_dscp_marking_rules(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosDscpMarkingRule.'
'get_objects') as get_objects_mock:
self.qos_plugin.get_policy_dscp_marking_rules(
self.ctxt, self.policy.id)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY, qos_policy_id=self.policy.id)
def test_get_policy_dscp_marking_rules_for_policy_with_filters(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosDscpMarkingRule.'
'get_objects') as get_objects_mock:
filters = {'filter': 'filter_id'}
self.qos_plugin.get_policy_dscp_marking_rules(
self.ctxt, self.policy.id, filters=filters)
get_objects_mock.assert_called_once_with(
self.ctxt, qos_policy_id=self.policy.id,
_pager=mock.ANY, filter='filter_id')
def test_get_policy_dscp_marking_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_dscp_marking_rule,
self.ctxt, self.dscp_rule.id, self.policy.id)
def test_get_policy_dscp_marking_rules_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_dscp_marking_rules,
self.ctxt, self.policy.id)
def test_get_policy_minimum_bandwidth_rule(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosMinimumBandwidthRule.'
'get_object') as get_object_mock:
self.qos_plugin.get_policy_minimum_bandwidth_rule(
self.ctxt, self.rule.id, self.policy.id)
get_object_mock.assert_called_once_with(self.ctxt,
id=self.rule.id)
def test_get_policy_minimum_bandwidth_rules_for_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosMinimumBandwidthRule.'
'get_objects') as get_objects_mock:
self.qos_plugin.get_policy_minimum_bandwidth_rules(
self.ctxt, self.policy.id)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY, qos_policy_id=self.policy.id)
def test_get_policy_minimum_bandwidth_rules_for_policy_with_filters(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosMinimumBandwidthRule.'
'get_objects') as get_objects_mock:
filters = {'filter': 'filter_id'}
self.qos_plugin.get_policy_minimum_bandwidth_rules(
self.ctxt, self.policy.id, filters=filters)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY,
qos_policy_id=self.policy.id,
filter='filter_id')
def test_get_policy_minimum_bandwidth_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_minimum_bandwidth_rule,
self.ctxt, self.rule.id, self.policy.id)
def test_get_policy_minimum_bandwidth_rules_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_minimum_bandwidth_rules,
self.ctxt, self.policy.id)
def test_create_policy_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.create_policy_bandwidth_limit_rule,
self.ctxt, self.policy.id, self.rule_data)
def test_update_policy_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.update_policy_bandwidth_limit_rule,
self.ctxt, self.rule.id, self.policy.id, self.rule_data)
def test_delete_policy_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.delete_policy_bandwidth_limit_rule,
self.ctxt, self.rule.id, self.policy.id)
def test_verify_bad_method_call(self):
self.assertRaises(AttributeError, getattr, self.qos_plugin,
'create_policy_bandwidth_limit_rules')
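
    # get_rule_type() is admin-only and returns the per-driver parameter
    # details reported by supported_rule_type_details().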
def test_get_rule_type(self):
admin_ctxt = context.get_admin_context()
drivers_details = [{
'name': 'fake-driver',
'supported_parameters': [{
'parameter_name': 'max_kbps',
'parameter_type': lib_constants.VALUES_TYPE_RANGE,
'parameter_range': {'start': 0, 'end': 100}
}]
}]
with mock.patch.object(
qos_plugin.QoSPlugin, "supported_rule_type_details",
return_value=drivers_details
):
rule_type_details = self.qos_plugin.get_rule_type(
admin_ctxt, qos_consts.RULE_TYPE_BANDWIDTH_LIMIT)
self.assertEqual(
qos_consts.RULE_TYPE_BANDWIDTH_LIMIT,
rule_type_details['type'])
self.assertEqual(
drivers_details, rule_type_details['drivers'])
def test_get_rule_type_as_user(self):
self.assertRaises(
lib_exc.NotAuthorized,
self.qos_plugin.get_rule_type,
self.ctxt, qos_consts.RULE_TYPE_BANDWIDTH_LIMIT)
def test_get_rule_types(self):
filters = {'type': 'type_id'}
with mock.patch.object(qos_plugin.QoSPlugin, 'supported_rule_types',
return_value=qos_consts.VALID_RULE_TYPES):
types = self.qos_plugin.get_rule_types(self.ctxt, filters=filters)
self.assertEqual(sorted(qos_consts.VALID_RULE_TYPES),
sorted(type_['type'] for type_ in types))
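
    # For create/update/delete of a rule, the object-level call must always
    # precede the driver 'update_policy' notification.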
@mock.patch('neutron.objects.ports.Port')
@mock.patch('neutron.objects.qos.policy.QosPolicy')
def test_rule_notification_and_driver_ordering(self, qos_policy_mock,
port_mock):
rule_cls_mock = mock.Mock()
rule_cls_mock.rule_type = 'fake'
rule_actions = {'create': [self.ctxt, rule_cls_mock,
self.policy.id, {'fake_rule': {}}],
'update': [self.ctxt, rule_cls_mock,
self.rule.id,
self.policy.id, {'fake_rule': {}}],
'delete': [self.ctxt, rule_cls_mock,
self.rule.id, self.policy.id]}
mock_manager = mock.Mock()
mock_manager.attach_mock(qos_policy_mock, 'QosPolicy')
mock_manager.attach_mock(port_mock, 'Port')
mock_manager.attach_mock(rule_cls_mock, 'RuleCls')
mock_manager.attach_mock(self.qos_plugin.driver_manager, 'driver')
for action, arguments in rule_actions.items():
mock_manager.reset_mock()
method = getattr(self.qos_plugin, "%s_policy_rule" % action)
method(*arguments)
# some actions get rule from policy
get_rule_mock_call = getattr(
mock.call.QosPolicy.get_policy_obj().get_rule_by_id(),
action)()
# some actions construct rule from class reference
rule_mock_call = getattr(mock.call.RuleCls(), action)()
driver_mock_call = mock.call.driver.call('update_policy',
self.ctxt, mock.ANY)
if rule_mock_call in mock_manager.mock_calls:
action_index = mock_manager.mock_calls.index(rule_mock_call)
else:
action_index = mock_manager.mock_calls.index(
get_rule_mock_call)
self.assertLess(
action_index, mock_manager.mock_calls.index(driver_mock_call))
def test_create_policy_packet_rate_limit_rule(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [self.pps_rule])
self.qos_plugin.create_policy_packet_rate_limit_rule(
self.ctxt, self.policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
def test_create_policy_pps_rule_duplicates(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.pps_rule])
new_rule_data = {
'packet_rate_limit_rule': {
'max_kpps': 400,
'direction': self.pps_rule.direction
}
}
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(
qos_exc.QoSRulesConflict,
self.qos_plugin.create_policy_packet_rate_limit_rule,
self.ctxt, _policy.id, new_rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt, id=_policy.id)
def test_update_policy_packet_rate_limit_rule(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [self.pps_rule])
self.qos_plugin.update_policy_packet_rate_limit_rule(
self.ctxt, self.pps_rule.id, self.policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
def test_update_policy_pps_rule_bad_policy(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [])
self.assertRaises(
qos_exc.QosRuleNotFound,
self.qos_plugin.update_policy_packet_rate_limit_rule,
self.ctxt, self.pps_rule.id, self.policy.id,
self.rule_data)
def test_delete_policy_packet_rate_limit_rule(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [self.pps_rule])
self.qos_plugin.delete_policy_packet_rate_limit_rule(
self.ctxt, self.pps_rule.id, self.policy.id)
self._validate_driver_params('update_policy', self.ctxt)
def test_delete_policy_pps_rule_bad_policy(self):
_policy = policy_object.QosPolicy(
self.ctxt, **self.policy_data['policy'])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
setattr(_policy, "rules", [])
self.assertRaises(
qos_exc.QosRuleNotFound,
self.qos_plugin.delete_policy_packet_rate_limit_rule,
self.ctxt, self.pps_rule.id, _policy.id)
def test_get_policy_packet_rate_limit_rule(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosPacketRateLimitRule.'
'get_object') as get_object_mock:
self.qos_plugin.get_policy_packet_rate_limit_rule(
self.ctxt, self.pps_rule.id, self.policy.id)
get_object_mock.assert_called_once_with(self.ctxt,
id=self.pps_rule.id)
def test_get_policy_packet_rate_limit_rules_for_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosPacketRateLimitRule.'
'get_objects') as get_objects_mock:
self.qos_plugin.get_policy_packet_rate_limit_rules(
self.ctxt, self.policy.id)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY, qos_policy_id=self.policy.id)
def test_get_policy_packet_rate_limit_rules_for_policy_with_filters(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosPacketRateLimitRule.'
'get_objects') as get_objects_mock:
filters = {'filter': 'filter_id'}
self.qos_plugin.get_policy_packet_rate_limit_rules(
self.ctxt, self.policy.id, filters=filters)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY,
qos_policy_id=self.policy.id,
filter='filter_id')
def test_get_policy_packet_rate_limit_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_packet_rate_limit_rule,
self.ctxt, self.pps_rule.id, self.policy.id)
def test_get_policy_packet_rate_limit_rules_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_packet_rate_limit_rules,
self.ctxt, self.policy.id)
def test_create_policy_pps_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.create_policy_packet_rate_limit_rule,
self.ctxt, self.policy.id, self.rule_data)
def test_update_policy_pps_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.update_policy_packet_rate_limit_rule,
self.ctxt, self.pps_rule.id, self.policy.id, self.rule_data)
def test_delete_policy_pps_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.delete_policy_packet_rate_limit_rule,
self.ctxt, self.pps_rule.id, self.policy.id)
def test_get_pps_rule_type(self):
admin_ctxt = context.get_admin_context()
drivers_details = [{
'name': 'fake-driver',
'supported_parameters': [{
'parameter_name': 'max_kpps',
'parameter_type': lib_constants.VALUES_TYPE_RANGE,
'parameter_range': {'start': 0, 'end': 100}
}]
}]
with mock.patch.object(
qos_plugin.QoSPlugin, "supported_rule_type_details",
return_value=drivers_details
):
rule_type_details = self.qos_plugin.get_rule_type(
admin_ctxt, qos_consts.RULE_TYPE_PACKET_RATE_LIMIT)
self.assertEqual(
qos_consts.RULE_TYPE_PACKET_RATE_LIMIT,
rule_type_details['type'])
self.assertEqual(
drivers_details, rule_type_details['drivers'])
def test_get_pps_rule_type_as_user(self):
self.assertRaises(
lib_exc.NotAuthorized,
self.qos_plugin.get_rule_type,
self.ctxt, qos_consts.RULE_TYPE_PACKET_RATE_LIMIT)
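
    # Mirrors the minimum bandwidth cases above: creating a minimum packet
    # rate rule raises NotImplementedError when the policy is bound to a
    # compute port and succeeds when the port is unbound.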
def test_create_min_pps_rule_on_bound_port(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_pps_rule])
segment = network_object.NetworkSegment(
physical_network='fake physnet')
net = network_object.Network(
self.ctxt,
segments=[segment])
port = ports_object.Port(
self.ctxt,
id=uuidutils.generate_uuid(),
network_id=uuidutils.generate_uuid(),
device_owner='compute:fake-zone')
with mock.patch(
'neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy), \
mock.patch(
'neutron.objects.network.Network.get_object',
return_value=net), \
mock.patch.object(
self.qos_plugin,
'_get_ports_with_policy',
return_value=[port]):
self.assertRaises(
NotImplementedError,
self.qos_plugin.create_policy_minimum_packet_rate_rule,
self.ctxt, _policy.id, self.rule_data)
def test_create_min_pps_rule_on_unbound_port(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_pps_rule])
segment = network_object.NetworkSegment(
physical_network='fake physnet')
net = network_object.Network(
self.ctxt,
segments=[segment])
port = ports_object.Port(
self.ctxt,
id=uuidutils.generate_uuid(),
network_id=uuidutils.generate_uuid(),
device_owner='')
with mock.patch(
'neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy), \
mock.patch(
'neutron.objects.network.Network.get_object',
return_value=net), \
mock.patch.object(
self.qos_plugin,
'_get_ports_with_policy',
return_value=[port]):
try:
self.qos_plugin.create_policy_minimum_packet_rate_rule(
self.ctxt, _policy.id, self.rule_data)
except NotImplementedError:
self.fail()
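
    # A minimum packet rate rule with direction 'any' conflicts with any
    # existing directional (ingress/egress) rule on the same policy, and
    # vice versa.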
def test_create_policy_rule_check_rule_min_pps_direction_conflict(self):
_policy = self._get_policy()
self.rule_data['minimum_packet_rate_rule']['direction'] = 'any'
setattr(_policy, "rules", [self.min_pps_rule])
rules = [
{
'minimum_packet_rate_rule': {
'id': uuidutils.generate_uuid(),
'min_kpps': 10,
'direction': 'ingress'
}
},
{
'minimum_packet_rate_rule': {
'id': uuidutils.generate_uuid(),
'min_kpps': 10,
'direction': 'egress'
}
},
]
for new_rule_data in rules:
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(qos_exc.QoSRuleParameterConflict,
self.qos_plugin.create_policy_minimum_packet_rate_rule,
self.ctxt, self.policy.id, new_rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt,
id=_policy.id)
for rule_data in rules:
min_pps_rule = rule_object.QosMinimumPacketRateRule(
self.ctxt, **rule_data['minimum_packet_rate_rule'])
setattr(_policy, "rules", [min_pps_rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(qos_exc.QoSRuleParameterConflict,
self.qos_plugin.create_policy_minimum_packet_rate_rule,
self.ctxt, self.policy.id, self.rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt,
id=_policy.id)
def test_create_policy_min_pps_rule(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_pps_rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.qos_plugin.create_policy_minimum_packet_rate_rule(
self.ctxt, self.policy.id, self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
def test_create_policy_min_pps_rule_duplicates(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_pps_rule])
new_rule_data = {
'minimum_packet_rate_rule': {
'id': uuidutils.generate_uuid(),
'min_kpps': 1234,
'direction': self.min_pps_rule.direction,
},
}
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(
qos_exc.QoSRulesConflict,
self.qos_plugin.create_policy_minimum_packet_rate_rule,
self.ctxt, _policy.id, new_rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt, id=_policy.id)
def test_create_policy_min_pps_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.create_policy_minimum_packet_rate_rule,
self.ctxt, self.policy.id, self.rule_data)
def test_update_policy_min_pps_rule(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_pps_rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.qos_plugin.update_policy_minimum_packet_rate_rule(
self.ctxt, self.min_pps_rule.id, self.policy.id,
self.rule_data)
self._validate_driver_params('update_policy', self.ctxt)
def test_update_policy_rule_check_rule_min_pps_direction_conflict(self):
_policy = self._get_policy()
rules_data = [
{
'minimum_packet_rate_rule': {
'id': uuidutils.generate_uuid(),
'min_kpps': 10,
'direction': 'ingress'
}
},
{
'minimum_packet_rate_rule': {
'id': uuidutils.generate_uuid(),
'min_kpps': 10,
'direction': 'egress'
}
},
]
self.rule_data['minimum_packet_rate_rule']['direction'] = 'any'
for rule_data in rules_data:
rules = [
rule_object.QosMinimumPacketRateRule(
self.ctxt, **rules_data[0]['minimum_packet_rate_rule']),
rule_object.QosMinimumPacketRateRule(
self.ctxt, **rules_data[1]['minimum_packet_rate_rule']),
]
setattr(_policy, 'rules', rules)
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy) as mock_qos_get_obj:
self.assertRaises(qos_exc.QoSRuleParameterConflict,
self.qos_plugin.update_policy_minimum_packet_rate_rule,
self.ctxt, rule_data['minimum_packet_rate_rule']['id'],
self.policy.id, self.rule_data)
mock_qos_get_obj.assert_called_once_with(self.ctxt,
id=_policy.id)
def test_update_policy_min_pps_rule_bad_policy(self):
_policy = self._get_policy()
setattr(_policy, "rules", [])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.assertRaises(
qos_exc.QosRuleNotFound,
self.qos_plugin.update_policy_minimum_packet_rate_rule,
self.ctxt, self.min_pps_rule.id, self.policy.id,
self.rule_data)
def test_update_policy_min_pps_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.update_policy_minimum_packet_rate_rule,
self.ctxt, self.min_pps_rule.id, self.policy.id,
self.rule_data)
def test_delete_policy_min_pps_rule(self):
_policy = self._get_policy()
setattr(_policy, "rules", [self.min_pps_rule])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.qos_plugin.delete_policy_minimum_packet_rate_rule(
self.ctxt, self.min_pps_rule.id, self.policy.id)
self._validate_driver_params('update_policy', self.ctxt)
def test_delete_policy_min_pps_rule_bad_policy(self):
_policy = self._get_policy()
setattr(_policy, "rules", [])
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=_policy):
self.assertRaises(
qos_exc.QosRuleNotFound,
self.qos_plugin.delete_policy_minimum_packet_rate_rule,
self.ctxt, self.min_pps_rule.id, _policy.id)
def test_delete_policy_min_pps_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.delete_policy_minimum_packet_rate_rule,
self.ctxt, self.min_pps_rule.id, self.policy.id)
def test_get_policy_min_pps_rule(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosMinimumPacketRateRule.'
'get_object') as get_object_mock:
self.qos_plugin.get_policy_minimum_packet_rate_rule(
self.ctxt, self.min_pps_rule.id, self.policy.id)
get_object_mock.assert_called_once_with(
self.ctxt, id=self.min_pps_rule.id)
def test_get_policy_min_pps_rules_for_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosMinimumPacketRateRule.'
'get_objects') as get_objects_mock:
self.qos_plugin.get_policy_minimum_packet_rate_rules(
self.ctxt, self.policy.id)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY, qos_policy_id=self.policy.id)
def test_get_policy_min_pps_rules_for_policy_with_filters(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=self.policy):
with mock.patch('neutron.objects.qos.rule.'
'QosMinimumPacketRateRule.'
'get_objects') as get_objects_mock:
filters = {'filter': 'filter_id'}
self.qos_plugin.get_policy_minimum_packet_rate_rules(
self.ctxt, self.policy.id, filters=filters)
get_objects_mock.assert_called_once_with(
self.ctxt, _pager=mock.ANY,
qos_policy_id=self.policy.id,
filter='filter_id')
def test_get_policy_min_pps_rule_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_minimum_packet_rate_rule,
self.ctxt, self.min_pps_rule.id, self.policy.id)
def test_get_policy_min_pps_rules_for_nonexistent_policy(self):
with mock.patch('neutron.objects.qos.policy.QosPolicy.get_object',
return_value=None):
self.assertRaises(
qos_exc.QosPolicyNotFound,
self.qos_plugin.get_policy_minimum_packet_rate_rules,
self.ctxt, self.policy.id)
def test_get_min_pps_rule_type(self):
admin_ctxt = context.get_admin_context()
drivers_details = [{
'name': 'fake-driver',
'supported_parameters': [{
'parameter_name': 'min_kpps',
'parameter_type': lib_constants.VALUES_TYPE_RANGE,
'parameter_range': {'start': 0, 'end': 100}
}]
}]
with mock.patch.object(
qos_plugin.QoSPlugin, "supported_rule_type_details",
return_value=drivers_details
):
rule_type_details = self.qos_plugin.get_rule_type(
admin_ctxt, qos_consts.RULE_TYPE_MINIMUM_PACKET_RATE)
self.assertEqual(
qos_consts.RULE_TYPE_MINIMUM_PACKET_RATE,
rule_type_details['type'])
self.assertEqual(
drivers_details, rule_type_details['drivers'])
def test_get_min_pps_rule_type_as_user(self):
self.assertRaises(
lib_exc.NotAuthorized,
self.qos_plugin.get_rule_type,
self.ctxt, qos_consts.RULE_TYPE_MINIMUM_PACKET_RATE)
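

# Minimal extension managers that expose only the alias rule resources
# needed by the TestQoSRuleAlias* cases below.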
class QoSRuleAliasTestExtensionManager(object):
def get_resources(self):
return qos_rules_alias.Qos_rules_alias.get_resources()
def get_actions(self):
return []
def get_request_extensions(self):
return []


class QoSRuleAliasMinimumPacketRateTestExtensionManager(object):
def get_resources(self):
return qos_pps_minimum_rule_alias.Qos_pps_minimum_rule_alias.\
get_resources()
def get_actions(self):
return []
def get_request_extensions(self):
return []
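

# Exercise the alias-<rule-type>-rules API resources through the WSGI layer
# and check that they delegate to the QoSPlugin policy rule methods.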
class TestQoSRuleAlias(test_db_base_plugin_v2.NeutronDbPluginV2TestCase):
def setUp(self):
# Remove MissingAuthPlugin exception from logs
self.patch_notifier = mock.patch(
'neutron.notifiers.batch_notifier.BatchNotifier._notify')
self.patch_notifier.start()
plugin = 'ml2'
service_plugins = {'qos_plugin_name': SERVICE_PLUGIN_KLASS}
ext_mgr = QoSRuleAliasTestExtensionManager()
super(TestQoSRuleAlias, self).setUp(plugin=plugin, ext_mgr=ext_mgr,
service_plugins=service_plugins)
self.qos_plugin = directory.get_plugin(plugins_constants.QOS)
self.ctxt = context.Context('fake_user', 'fake_tenant')
self.rule_objects = {
'bandwidth_limit': rule_object.QosBandwidthLimitRule,
'dscp_marking': rule_object.QosDscpMarkingRule,
'minimum_bandwidth': rule_object.QosMinimumBandwidthRule
}
self.qos_policy_id = uuidutils.generate_uuid()
self.rule_data = {
'bandwidth_limit_rule': {'max_kbps': 100,
'max_burst_kbps': 150},
'dscp_marking_rule': {'dscp_mark': 16},
'minimum_bandwidth_rule': {'min_kbps': 10}
}
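
    # The helpers below issue REST requests against the alias resources and
    # deserialize the response, raising HTTPClientError on any 4xx/5xx
    # status.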
def _update_rule(self, rule_type, rule_id, **kwargs):
data = {'alias_%s_rule' % rule_type: kwargs}
resource = '%s/alias-%s-rules' % (qos.ALIAS,
rule_type.replace('_', '-'))
request = self.new_update_request(resource, data, rule_id, self.fmt)
res = request.get_response(self.ext_api)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(code=res.status_int)
return self.deserialize(self.fmt, res)
def _show_rule(self, rule_type, rule_id):
resource = '%s/alias-%s-rules' % (qos.ALIAS,
rule_type.replace('_', '-'))
request = self.new_show_request(resource, rule_id, self.fmt)
res = request.get_response(self.ext_api)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(code=res.status_int)
return self.deserialize(self.fmt, res)
def _delete_rule(self, rule_type, rule_id):
resource = '%s/alias-%s-rules' % (qos.ALIAS,
rule_type.replace('_', '-'))
request = self.new_delete_request(resource, rule_id, self.fmt)
res = request.get_response(self.ext_api)
if res.status_int >= webob.exc.HTTPClientError.code:
raise webob.exc.HTTPClientError(code=res.status_int)
@mock.patch.object(qos_plugin.QoSPlugin, "update_policy_rule")
def test_update_rule(self, update_policy_rule_mock):
calls = []
for rule_type, rule_object_class in self.rule_objects.items():
rule_id = uuidutils.generate_uuid()
rule_data_name = '%s_rule' % rule_type
data = self.rule_data[rule_data_name]
rule = rule_object_class(self.ctxt, id=rule_id,
qos_policy_id=self.qos_policy_id,
**data)
with mock.patch(
'neutron.objects.qos.rule.QosRule.get_object',
return_value=rule
), mock.patch.object(self.qos_plugin, 'get_policy_rule',
return_value=rule.to_dict()):
self._update_rule(rule_type, rule_id, **data)
calls.append(mock.call(mock.ANY, rule_object_class, rule_id,
self.qos_policy_id, {rule_data_name: data}))
update_policy_rule_mock.assert_has_calls(calls, any_order=True)
@mock.patch.object(qos_plugin.QoSPlugin, "get_policy_rule")
def test_show_rule(self, get_policy_rule_mock):
calls = []
for rule_type, rule_object_class in self.rule_objects.items():
rule_id = uuidutils.generate_uuid()
rule_data_name = '%s_rule' % rule_type
data = self.rule_data[rule_data_name]
rule = rule_object_class(self.ctxt, id=rule_id,
qos_policy_id=self.qos_policy_id,
**data)
with mock.patch('neutron.objects.qos.rule.QosRule.get_object',
return_value=rule):
self._show_rule(rule_type, rule_id)
calls.append(mock.call(mock.ANY, rule_object_class, rule_id,
self.qos_policy_id))
get_policy_rule_mock.assert_has_calls(calls, any_order=True)
@mock.patch.object(qos_plugin.QoSPlugin, "delete_policy_rule")
def test_delete_rule(self, delete_policy_rule_mock):
calls = []
for rule_type, rule_object_class in self.rule_objects.items():
rule_id = uuidutils.generate_uuid()
rule_data_name = '%s_rule' % rule_type
data = self.rule_data[rule_data_name]
rule = rule_object_class(self.ctxt, id=rule_id,
qos_policy_id=self.qos_policy_id,
**data)
with mock.patch(
'neutron.objects.qos.rule.QosRule.get_object',
return_value=rule
), mock.patch.object(self.qos_plugin, 'get_policy_rule',
return_value=rule.to_dict()):
self._delete_rule(rule_type, rule_id)
calls.append(mock.call(mock.ANY, rule_object_class, rule_id,
self.qos_policy_id))
delete_policy_rule_mock.assert_has_calls(calls, any_order=True)
def test_show_non_existing_rule(self):
for rule_type, rule_object_class in self.rule_objects.items():
rule_id = uuidutils.generate_uuid()
with mock.patch('neutron.objects.qos.rule.QosRule.get_object',
return_value=None):
resource = '%s/alias-%s-rules' % (qos.ALIAS,
rule_type.replace('_', '-'))
request = self.new_show_request(resource, rule_id, self.fmt)
res = request.get_response(self.ext_api)
self.assertEqual(webob.exc.HTTPNotFound.code, res.status_int)
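

# Run the same alias API tests, but only against the minimum packet rate
# alias extension.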
class TestQoSRuleAliasMinimumPacketRate(TestQoSRuleAlias):
def setUp(self):
# Remove MissingAuthPlugin exception from logs
self.patch_notifier = mock.patch(
'neutron.notifiers.batch_notifier.BatchNotifier._notify')
self.patch_notifier.start()
plugin = 'ml2'
service_plugins = {'qos_plugin_name': SERVICE_PLUGIN_KLASS}
ext_mgr = QoSRuleAliasMinimumPacketRateTestExtensionManager()
super(TestQoSRuleAlias, self).setUp(plugin=plugin, ext_mgr=ext_mgr,
service_plugins=service_plugins)
self.qos_plugin = directory.get_plugin(plugins_constants.QOS)
self.ctxt = context.Context('fake_user', 'fake_tenant')
self.rule_objects = {
'minimum_packet_rate': rule_object.QosMinimumPacketRateRule
}
self.qos_policy_id = uuidutils.generate_uuid()
self.rule_data = {
'minimum_packet_rate_rule': {'min_kpps': 10, 'direction': 'any'}
}
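

# Unlike the mock-based tests above, these cases run against a real
# database-backed core plugin and create actual policy, rule, port and
# network objects.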
class TestQosPluginDB(base.BaseQosTestCase):
PORT_ID = 'f02f160e-1612-11ec-b2b8-bf60ab98186c'
QOS_MIN_BW_RULE_ID = '8bf8eb46-160e-11ec-8024-9f96be32099d'
# uuid -v5 f02f160e-1612-11ec-b2b8-bf60ab98186c
# 8bf8eb46-160e-11ec-8024-9f96be32099d
MIN_BW_REQUEST_GROUP_UUID = 'c8bc1b27-59a1-5135-aa33-aeecad6093f4'
MIN_BW_RP = 'd7bea120-1626-11ec-9148-c32debfcf0f6'
QOS_MIN_PPS_RULE_ID = '6ac5db7e-1626-11ec-8c7f-0b70dbb8a8eb'
# uuid -v5 f02f160e-1612-11ec-b2b8-bf60ab98186c
# 6ac5db7e-1626-11ec-8c7f-0b70dbb8a8eb
MIN_PPS_REQUEST_GROUP_UUID = '995008f4-f120-547a-b051-428b89076067'
MIN_PPS_RP = 'e16161f4-1626-11ec-a5a2-1fc9396e27cc'
def setUp(self):
super(TestQosPluginDB, self).setUp()
self.setup_coreplugin(load_plugins=False)
cfg.CONF.set_override("core_plugin", DB_PLUGIN_KLASS)
cfg.CONF.set_override("service_plugins", ["qos"])
manager.init()
self.qos_plugin = directory.get_plugin(plugins_constants.QOS)
self.qos_plugin.driver_manager = mock.Mock()
self.rpc_push = mock.patch('neutron.api.rpc.handlers.resources_rpc'
'.ResourcesPushRpcApi.push').start()
self.context = context.get_admin_context()
self.project_id = uuidutils.generate_uuid()
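
    # Helpers that persist real QoS policy, rule, port and network objects
    # used by the DB-backed tests below.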
def _make_qos_policy(self):
qos_policy = policy_object.QosPolicy(
self.context, project_id=self.project_id, shared=False,
is_default=False)
qos_policy.create()
return qos_policy
def _make_qos_minbw_rule(self, policy_id, direction='ingress',
min_kbps=1000, rule_id=None):
rule_id = rule_id if rule_id else uuidutils.generate_uuid()
qos_rule = rule_object.QosMinimumBandwidthRule(
self.context, project_id=self.project_id,
qos_policy_id=policy_id, direction=direction, min_kbps=min_kbps,
id=rule_id)
qos_rule.create()
return qos_rule
def _make_qos_minpps_rule(self, policy_id, direction='ingress',
min_kpps=1000, rule_id=None):
rule_id = rule_id if rule_id else uuidutils.generate_uuid()
qos_rule = rule_object.QosMinimumPacketRateRule(
self.context, project_id=self.project_id,
qos_policy_id=policy_id, direction=direction, min_kpps=min_kpps,
id=rule_id)
qos_rule.create()
return qos_rule
def _make_port(self, network_id, qos_policy_id=None, port_id=None,
qos_network_policy_id=None, device_owner=None):
port_id = port_id if port_id else uuidutils.generate_uuid()
base_mac = ['aa', 'bb', 'cc', 'dd', 'ee', 'ff']
mac = netaddr.EUI(next(net_utils.random_mac_generator(base_mac)))
device_owner = device_owner if device_owner else '3'
port = ports_object.Port(
self.context, network_id=network_id, device_owner=device_owner,
project_id=self.project_id, admin_state_up=True, status='DOWN',
device_id='2', qos_policy_id=qos_policy_id,
qos_network_policy_id=qos_network_policy_id, mac_address=mac,
id=port_id)
port.create()
return port
def _make_network(self, qos_policy_id=None):
network = network_object.Network(self.context,
qos_policy_id=qos_policy_id)
network.create()
return network
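
    # The precommit_create network callback should validate the network's
    # QoS policy only when the network actually has one attached.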
def _test_validate_create_network_callback(self, network_qos=False):
net_qos_obj = self._make_qos_policy()
net_qos_id = net_qos_obj.id if network_qos else None
network = self._make_network(qos_policy_id=net_qos_id)
kwargs = {"context": self.context,
"network": network}
with mock.patch.object(self.qos_plugin,
'validate_policy_for_network') \
as mock_validate_policy:
self.qos_plugin._validate_create_network_callback(
"NETWORK", "precommit_create", "test_plugin",
payload=events.DBEventPayload(
self.context, resource_id=kwargs['network']['id'],))
qos_policy = None
if network_qos:
qos_policy = net_qos_obj
if qos_policy:
mock_validate_policy.assert_called_once_with(
self.context, qos_policy, network.id)
else:
mock_validate_policy.assert_not_called()
def test_validate_create_network_callback(self):
self._test_validate_create_network_callback(network_qos=True)
def test_validate_create_network_callback_no_qos(self):
self._test_validate_create_network_callback(network_qos=False)
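
    # For port creation, a port-level policy takes precedence over the
    # network-level one when deciding which policy to validate.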
def _test_validate_create_port_callback(self, port_qos=False,
network_qos=False):
net_qos_obj = self._make_qos_policy()
port_qos_obj = self._make_qos_policy()
net_qos_id = net_qos_obj.id if network_qos else None
port_qos_id = port_qos_obj.id if port_qos else None
network = self._make_network(qos_policy_id=net_qos_id)
port = self._make_port(network.id, qos_policy_id=port_qos_id)
kwargs = {"context": self.context,
"port": {"id": port.id}}
with mock.patch.object(self.qos_plugin, 'validate_policy_for_port') \
as mock_validate_policy:
self.qos_plugin._validate_create_port_callback(
"PORT", "precommit_create", "test_plugin",
payload=events.DBEventPayload(
self.context,
resource_id=kwargs['port']['id'],))
qos_policy = None
if port_qos:
qos_policy = port_qos_obj
elif network_qos:
qos_policy = net_qos_obj
if qos_policy:
mock_validate_policy.assert_called_once_with(
self.context, qos_policy, port)
else:
mock_validate_policy.assert_not_called()
def test_validate_create_port_callback_policy_on_port(self):
self._test_validate_create_port_callback(port_qos=True)
def test_validate_create_port_callback_policy_on_port_and_network(self):
self._test_validate_create_port_callback(port_qos=True,
network_qos=True)
def test_validate_create_port_callback_policy_on_network(self):
self._test_validate_create_port_callback(network_qos=True)
def test_validate_create_port_callback_no_policy(self):
self._test_validate_create_port_callback()
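
    # Build the event payload for a port update that switches the QoS
    # policy from qos1 to qos2, optionally with a network-level policy.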
def _prepare_for_port_placement_allocation_change(self, qos1, qos2,
qos_network_policy=None):
qos1_id = qos1.id if qos1 else None
qos2_id = qos2.id if qos2 else None
qos_network_policy_id = (
qos_network_policy.id if qos_network_policy else None)
network = self._make_network(qos_policy_id=qos_network_policy_id)
port = self._make_port(
network.id, qos_policy_id=qos1_id, port_id=TestQosPluginDB.PORT_ID)
return {"context": self.context,
"original_port": {
"id": port.id,
"device_owner": "compute:uu:id",
"qos_policy_id": qos1_id,
"qos_network_policy_id": qos_network_policy_id},
"port": {"id": port.id, "qos_policy_id": qos2_id}}
def test_check_port_for_placement_allocation_change_no_qos_change(self):
qos1_obj = self._make_qos_policy()
kwargs = self._prepare_for_port_placement_allocation_change(
qos1=qos1_obj, qos2=qos1_obj)
context = kwargs['context']
original_port = kwargs['original_port']
port = kwargs['port']
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
self.qos_plugin._check_port_for_placement_allocation_change(
'PORT', 'before_update', 'test_plugin',
payload=events.DBEventPayload(
context, states=(original_port, port)))
mock_alloc_change.assert_not_called()
def test_check_port_for_placement_allocation_change(self):
qos1_obj = self._make_qos_policy()
qos2_obj = self._make_qos_policy()
kwargs = self._prepare_for_port_placement_allocation_change(
qos1=qos1_obj, qos2=qos2_obj)
context = kwargs['context']
original_port = kwargs['original_port']
port = kwargs['port']
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
self.qos_plugin._check_port_for_placement_allocation_change(
'PORT', 'before_update', 'test_plugin',
payload=events.DBEventPayload(
context, states=(original_port, port)))
mock_alloc_change.assert_called_once_with(
qos1_obj, qos2_obj, kwargs['original_port'], port)
def test_check_port_for_placement_allocation_change_no_new_policy(self):
qos1_obj = self._make_qos_policy()
kwargs = self._prepare_for_port_placement_allocation_change(
qos1=qos1_obj, qos2=None)
context = kwargs['context']
original_port = kwargs['original_port']
port = kwargs['port']
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
self.qos_plugin._check_port_for_placement_allocation_change(
'PORT', 'before_update', 'test_plugin',
payload=events.DBEventPayload(
context, states=(original_port, port)))
mock_alloc_change.assert_called_once_with(
qos1_obj, None, kwargs['original_port'], port)
def test_check_port_for_placement_allocation_change_no_qos_update(self):
qos1_obj = self._make_qos_policy()
kwargs = self._prepare_for_port_placement_allocation_change(
qos1=qos1_obj, qos2=None)
kwargs['port'].pop('qos_policy_id')
context = kwargs['context']
original_port = kwargs['original_port']
port = kwargs['port']
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
self.qos_plugin._check_port_for_placement_allocation_change(
'PORT', 'before_update', 'test_plugin',
payload=events.DBEventPayload(
context, states=(original_port, port)))
mock_alloc_change.assert_not_called()
def test_check_port_for_placement_allocation_change_qos_network_policy(
self):
qos_network = self._make_qos_policy()
desired_qos = self._make_qos_policy()
kwargs = self._prepare_for_port_placement_allocation_change(
qos1=None, qos2=desired_qos, qos_network_policy=qos_network)
context = kwargs['context']
original_port = kwargs['original_port']
port = kwargs['port']
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
self.qos_plugin._check_port_for_placement_allocation_change(
'PORT', 'before_update', 'test_plugin',
payload=events.DBEventPayload(
context, states=(original_port, port)))
mock_alloc_change.assert_called_once_with(
qos_network, desired_qos, kwargs['original_port'], port)
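
    # Network-level callback: when the network QoS policy is unchanged, no
    # ports are re-examined and no allocation change is made.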
def test_check_network_for_placement_allocation_change_no_qos_change(self):
qos1 = self._make_qos_policy()
original_network = self._make_network(qos1.id)
network = original_network
ml2plugin_mock = mock.MagicMock()
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
self.qos_plugin._check_network_for_placement_allocation_change(
'network', 'after_update', ml2plugin_mock,
payload=events.DBEventPayload(
self.context, states=(original_network, network)))
mock_alloc_change.assert_not_called()
ml2plugin_mock._make_port_dict.assert_not_called()
def test_check_network_for_placement_allocation_change_no_ports_to_update(
self):
original_qos = self._make_qos_policy()
qos = self._make_qos_policy()
port_qos = self._make_qos_policy()
original_network = self._make_network(original_qos.id)
network = self._make_network(qos.id)
# Port which is not compute bound
self._make_port(network_id=network.id, qos_policy_id=None,
device_owner='uu:id')
# Port with overwritten QoS policy
self._make_port(network_id=network.id, qos_policy_id=port_qos.id,
device_owner='compute:uu:id')
ml2plugin_mock = mock.MagicMock()
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
self.qos_plugin._check_network_for_placement_allocation_change(
'network', 'after_update', ml2plugin_mock,
payload=events.DBEventPayload(
self.context, states=(original_network, network)))
mock_alloc_change.assert_not_called()
ml2plugin_mock._make_port_dict.assert_not_called()
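
    # Removing the network QoS policy should drop the placement allocation of
    # the compute-bound port that inherited it; the fake
    # _change_placement_allocation side effect clears binding:profile to
    # emulate the deallocation.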
def test_check_network_for_placement_allocation_change_remove_qos(self):
original_qos = self._make_qos_policy()
original_network = self._make_network(original_qos.id)
network = self._make_network()
ml2plugin_mock = mock.MagicMock()
def fake_make_port_dict(port):
return {
'id': port.id,
'device_owner': port.device_owner,
'qos_policy_id': port.qos_policy_id,
'qos_network_policy_id': port.qos_network_policy_id,
}
ml2plugin_mock._make_port_dict.side_effect = fake_make_port_dict
port1 = self._make_port(
network_id=network.id, qos_policy_id=None,
device_owner='compute:uu:id')
port1_binding = ports_object.PortBinding(
self.context, port_id=port1.id, host='fake_host1',
vnic_type='fake_vnic_type', vif_type='fake_vif_type',
profile={'allocation': 'fake_allocation'})
port1_binding.create()
port1.bindings = [port1_binding]
port1.update()
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
def fake_change_placement_allocation(orig_policy, policy,
orig_port, port):
port['binding:profile'] = {}
mock_alloc_change.side_effect = fake_change_placement_allocation
self.qos_plugin._check_network_for_placement_allocation_change(
'network', 'after_update', ml2plugin_mock,
payload=events.DBEventPayload(
self.context, states=(original_network, network)))
self.assertEqual(ml2plugin_mock._make_port_dict.call_count, 1)
mock_alloc_change_calls = [
mock.call(
original_qos,
None,
{'id': port1.id,
'device_owner': 'compute:uu:id',
'qos_policy_id': None,
'qos_network_policy_id': None},
mock.ANY),
]
mock_alloc_change.assert_has_calls(mock_alloc_change_calls,
any_order=True)
port1.update()
self.assertDictEqual(port1.bindings[0].profile, {})
def test_check_network_for_placement_allocation_change(self):
original_qos = self._make_qos_policy()
qos = self._make_qos_policy()
original_network = self._make_network(original_qos.id)
network = self._make_network(qos.id)
ml2plugin_mock = mock.MagicMock()
def fake_make_port_dict(port):
return {
'id': port.id,
'device_owner': port.device_owner,
'qos_policy_id': port.qos_policy_id,
'qos_network_policy_id': port.qos_network_policy_id,
}
ml2plugin_mock._make_port_dict.side_effect = fake_make_port_dict
port1 = self._make_port(
network_id=network.id, qos_policy_id=None,
device_owner='compute:uu:id')
port1_binding = ports_object.PortBinding(
self.context, port_id=port1.id, host='fake_host1',
vnic_type='fake_vnic_type', vif_type='fake_vif_type', profile={})
port1_binding.create()
port1.bindings = [port1_binding]
port1.update()
port2 = self._make_port(
network_id=network.id, qos_policy_id=None,
device_owner='compute:uu:id')
port2_binding = ports_object.PortBinding(
self.context, port_id=port2.id, host='fake_host2',
vnic_type='fake_vnic_type', vif_type='fake_vif_type', profile={})
port2_binding.create()
port2.bindings = [port2_binding]
port2.update()
with mock.patch.object(
self.qos_plugin,
'_change_placement_allocation') as mock_alloc_change:
def fake_change_placement_allocation(orig_policy, policy,
orig_port, port):
port['binding:profile'] = {'allocation': 'fake_allocation'}
mock_alloc_change.side_effect = fake_change_placement_allocation
self.qos_plugin._check_network_for_placement_allocation_change(
'network', 'after_update', ml2plugin_mock,
payload=events.DBEventPayload(
self.context, states=(original_network, network)))
self.assertEqual(ml2plugin_mock._make_port_dict.call_count, 2)
mock_alloc_change_calls = [
mock.call(
original_qos,
qos,
{'id': port1.id,
'device_owner': 'compute:uu:id',
'qos_policy_id': None,
'qos_network_policy_id': qos.id},
mock.ANY),
mock.call(
original_qos,
qos,
{'id': port2.id,
'device_owner': 'compute:uu:id',
'qos_policy_id': None,
'qos_network_policy_id': qos.id},
mock.ANY)]
mock_alloc_change.assert_has_calls(mock_alloc_change_calls,
any_order=True)
port1.update()
port2.update()
self.assertDictEqual(
port1.bindings[0].profile, {'allocation': 'fake_allocation'})
self.assertDictEqual(
port2.bindings[0].profile, {'allocation': 'fake_allocation'})
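
    # Helper for the _change_placement_allocation tests: attaches minimum
    # bandwidth and/or minimum packet rate rules to the original and desired
    # policies and seeds the original port's binding:profile with the
    # matching placement allocation (plus SR-IOV PCI details when
    # is_sriov=True).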
def _prepare_port_for_placement_allocation(self, original_qos,
desired_qos=None,
qos_network_policy=None,
original_min_kbps=None,
desired_min_kbps=None,
original_min_kpps=None,
desired_min_kpps=None,
is_sriov=False):
kwargs = self._prepare_for_port_placement_allocation_change(
original_qos, desired_qos, qos_network_policy=qos_network_policy)
orig_port = kwargs['original_port']
qos = original_qos or qos_network_policy
qos.rules = []
allocation = {}
if original_min_kbps:
qos.rules += [self._make_qos_minbw_rule(
qos.id, min_kbps=original_min_kbps,
rule_id=TestQosPluginDB.QOS_MIN_BW_RULE_ID)]
allocation.update(
{TestQosPluginDB.MIN_BW_REQUEST_GROUP_UUID:
TestQosPluginDB.MIN_BW_RP})
if original_min_kpps:
qos.rules += [self._make_qos_minpps_rule(
qos.id, min_kpps=original_min_kpps,
rule_id=TestQosPluginDB.QOS_MIN_PPS_RULE_ID)]
allocation.update(
{TestQosPluginDB.MIN_PPS_REQUEST_GROUP_UUID:
TestQosPluginDB.MIN_PPS_RP})
if desired_qos:
desired_qos.rules = []
if desired_min_kbps:
desired_qos.rules += [self._make_qos_minbw_rule(
desired_qos.id, min_kbps=desired_min_kbps)]
if desired_min_kpps:
desired_qos.rules += [self._make_qos_minpps_rule(
desired_qos.id, min_kpps=desired_min_kpps)]
binding_prof = {}
if is_sriov:
binding_prof = {
'pci_slot': '0000:42:41.0',
'pci_vendor_info': '8086:107ed',
'physical_network': 'sriov_phy'
}
binding_prof.update({'allocation': allocation})
orig_port.update(
{'binding:profile': binding_prof,
'device_id': 'uu:id'}
)
return orig_port, kwargs['port']
def _assert_pci_info(self, port):
self.assertIn('pci_slot', port['binding:profile'])
self.assertIn('pci_vendor_info', port['binding:profile'])
self.assertIn('physical_network', port['binding:profile'])
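
    # Raising min_kbps from 1000 to 2000 results in a +1000 ingress bandwidth
    # diff against the min-bw resource provider; the SR-IOV PCI details in
    # the binding profile must be preserved.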
def test_change_placement_allocation_increase(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kbps=1000, desired_min_kbps=2000,
is_sriov=True)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, qos2, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={self.MIN_BW_RP: {'NET_BW_IGR_KILOBIT_PER_SEC': 1000}})
self._assert_pci_info(port)
def test_change_placement_allocation_increase_min_pps(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kpps=1000, desired_min_kpps=2000,
is_sriov=True)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, qos2, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={self.MIN_PPS_RP: {
'NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC': 1000}})
self._assert_pci_info(port)
def test_change_placement_allocation_increase_min_pps_and_min_bw(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kbps=1000, desired_min_kbps=2000,
original_min_kpps=500, desired_min_kpps=1000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, qos2, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={
self.MIN_PPS_RP: {
'NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC': 500},
self.MIN_BW_RP: {'NET_BW_IGR_KILOBIT_PER_SEC': 1000}})
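
    # Flipping the rule direction to egress releases the ingress amounts
    # (negative diffs) and claims the corresponding egress amounts.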
def test_change_placement_allocation_change_direction_min_pps_and_min_bw(
self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kbps=1000, desired_min_kbps=2000,
original_min_kpps=500, desired_min_kpps=1000)
for rule in qos2.rules:
rule.direction = 'egress'
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, qos2, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={
self.MIN_PPS_RP: {
'NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC': -500,
'NET_PACKET_RATE_EGR_KILOPACKET_PER_SEC': 1000},
self.MIN_BW_RP: {
'NET_BW_IGR_KILOBIT_PER_SEC': -1000,
'NET_BW_EGR_KILOBIT_PER_SEC': 2000}})
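
    # Changing a minimum packet rate rule to 'any' direction is not
    # supported, so NotImplementedError is raised and no placement update is
    # attempted.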
def test_change_placement_allocation_change_dir_min_pps_ingress_to_any(
self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kpps=1000, desired_min_kpps=1000)
for rule in qos2.rules:
rule.direction = 'any'
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.assertRaises(NotImplementedError,
self.qos_plugin._change_placement_allocation, qos1, qos2,
orig_port, port)
mock_update_qos_alloc.assert_not_called()
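
    # The original policy carries no placement-backed rule (its allocation is
    # empty), so the new min-bw rule is dataplane-only enforcement and no
    # allocation update is sent to placement.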
def test_change_placement_allocation_min_bw_dataplane_enforcement(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, desired_min_kbps=1000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(qos1, qos2, orig_port,
port)
mock_update_qos_alloc.assert_not_called()
def test_change_placement_allocation_min_bw_dataplane_enforcement_with_pps(
self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, desired_min_kbps=1000, original_min_kpps=500,
desired_min_kpps=1000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(qos1, qos2, orig_port,
port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={
self.MIN_PPS_RP: {
'NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC': 500}})
def test_change_placement_allocation_decrease(self):
original_qos = self._make_qos_policy()
desired_qos = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
original_qos, desired_qos, original_min_kbps=2000,
desired_min_kbps=1000, is_sriov=True)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
original_qos, desired_qos, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={self.MIN_BW_RP: {'NET_BW_IGR_KILOBIT_PER_SEC': -1000}})
self._assert_pci_info(port)
def test_change_placement_allocation_decrease_min_pps(self):
original_qos = self._make_qos_policy()
desired_qos = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
original_qos, desired_qos, original_min_kpps=2000,
desired_min_kpps=1000, is_sriov=True)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
original_qos, desired_qos, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={self.MIN_PPS_RP: {
'NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC': -1000}})
self._assert_pci_info(port)
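
    # Without an original policy there is no recorded allocation to adjust,
    # so placement is not contacted.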
def test_change_placement_allocation_no_original_qos(self):
qos1 = None
qos2 = self._make_qos_policy()
rule2_obj = self._make_qos_minbw_rule(qos2.id, min_kbps=1000)
qos2.rules = [rule2_obj]
orig_port = {'id': 'u:u', 'device_id': 'i:d', 'binding:profile': {}}
port = {}
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(qos1, qos2, orig_port,
port)
mock_update_qos_alloc.assert_not_called()
def test_change_placement_allocation_no_original_allocation(self):
qos1 = self._make_qos_policy()
rule1_obj = self._make_qos_minbw_rule(qos1.id, min_kbps=500)
qos1.rules = [rule1_obj]
qos2 = self._make_qos_policy()
rule2_obj = self._make_qos_minbw_rule(qos2.id, min_kbps=1000)
qos2.rules = [rule2_obj]
orig_port = {'id': 'u:u', 'device_id': 'i:d', 'binding:profile': {}}
port = {}
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(qos1, qos2, orig_port,
port)
mock_update_qos_alloc.assert_not_called()
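
    # Removing the policy altogether must release both the bandwidth and the
    # packet rate allocations (negative diffs for both resource providers).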
def test_change_placement_allocation_new_policy_empty(self):
qos1 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(qos1,
original_min_kbps=1000, original_min_kpps=2000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, None, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={
self.MIN_BW_RP: {'NET_BW_IGR_KILOBIT_PER_SEC': -1000},
self.MIN_PPS_RP: {
'NET_PACKET_RATE_IGR_KILOPACKET_PER_SEC': -2000}})
def test_change_placement_allocation_no_min_bw(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
bw_limit_rule1 = rule_object.QosDscpMarkingRule(dscp_mark=16)
bw_limit_rule2 = rule_object.QosDscpMarkingRule(dscp_mark=18)
qos1.rules = [bw_limit_rule1]
qos2.rules = [bw_limit_rule2]
orig_port = {
'binding:profile': {'allocation': {
self.MIN_BW_REQUEST_GROUP_UUID: self.MIN_BW_RP}},
'device_id': 'uu:id',
'id': '9416c220-160a-11ec-ba3d-474633eb825c',
}
port = {}
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, None, orig_port, port)
mock_update_qos_alloc.assert_not_called()
def test_change_placement_allocation_old_rule_not_min_bw(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
bw_limit_rule = rule_object.QosDscpMarkingRule(dscp_mark=16)
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, desired_min_kbps=2000)
qos1.rules = [bw_limit_rule]
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(qos1, qos2, orig_port,
port)
mock_update_qos_alloc.assert_not_called()
def test_change_placement_allocation_new_rule_not_min_bw(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
bw_limit_rule = rule_object.QosDscpMarkingRule(dscp_mark=16)
qos2.rules = [bw_limit_rule]
orig_port, port = self._prepare_port_for_placement_allocation(qos1,
original_min_kbps=1000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, qos2, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={self.MIN_BW_RP: {'NET_BW_IGR_KILOBIT_PER_SEC': -1000}})
def test_change_placement_allocation_equal_minkbps_and_minkpps(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kbps=1000, desired_min_kbps=1000,
original_min_kpps=1000, desired_min_kpps=1000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos1, qos2, orig_port, port)
mock_update_qos_alloc.assert_not_called()
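
    # A placement "concurrent_update" conflict is translated into
    # QosPlacementAllocationUpdateConflict.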
def test_change_placement_allocation_update_conflict(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kbps=1000, desired_min_kbps=2000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
mock_update_qos_alloc.side_effect = ks_exc.Conflict(
response={'errors': [{'code': 'placement.concurrent_update'}]}
)
self.assertRaises(
neutron_qos_exc.QosPlacementAllocationUpdateConflict,
self.qos_plugin._change_placement_allocation,
qos1, qos2, orig_port, port)
def test_change_placement_allocation_update_generation_conflict(self):
qos1 = self._make_qos_policy()
qos2 = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
qos1, qos2, original_min_kbps=1000, desired_min_kbps=2000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
mock_update_qos_alloc.side_effect = (
pl_exc.PlacementAllocationGenerationConflict(
consumer=self.MIN_BW_RP))
self.assertRaises(
pl_exc.PlacementAllocationGenerationConflict,
self.qos_plugin._change_placement_allocation,
qos1, qos2, orig_port, port)
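
    # When the port has no policy of its own, the network's QoS policy acts
    # as the original policy when computing the allocation diff.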
def test_change_placement_allocation_qos_network_policy(self):
qos_network = self._make_qos_policy()
desired_qos = self._make_qos_policy()
orig_port, port = self._prepare_port_for_placement_allocation(
None, desired_qos, qos_network_policy=qos_network,
original_min_kbps=1000, desired_min_kbps=2000)
with mock.patch.object(self.qos_plugin._placement_client,
'update_qos_allocation') as mock_update_qos_alloc:
self.qos_plugin._change_placement_allocation(
qos_network, desired_qos, orig_port, port)
mock_update_qos_alloc.assert_called_once_with(
consumer_uuid='uu:id',
alloc_diff={self.MIN_BW_RP: {'NET_BW_IGR_KILOBIT_PER_SEC': 1000}})
| [
"neutron.objects.ports.Port",
"neutron.manager.init",
"neutron.objects.network.Network",
"unittest.mock.call.create",
"unittest.mock.call.update",
"neutron.objects.network.NetworkSegment",
"unittest.mock.call.QosPolicy",
"copy.copy",
"unittest.mock.patch",
"neutron.objects.qos.rule.QosPacketRateLimitRule",
"neutron.objects.qos.policy.QosPolicy",
"oslo_utils.uuidutils.generate_uuid",
"unittest.mock.call",
"unittest.mock.call.QosPolicy.get_policy_obj",
"neutron.objects.qos.rule.QosBandwidthLimitRule",
"neutron.objects.qos.rule.QosMinimumBandwidthRule",
"neutron_lib.exceptions.placement.PlacementAllocationGenerationConflict",
"unittest.mock.call.RuleCls",
"oslo_config.cfg.CONF.set_override",
"neutron.objects.qos.rule.QosMinimumPacketRateRule",
"unittest.mock.call.delete",
"keystoneauth1.exceptions.Conflict",
"unittest.mock.Mock",
"neutron.extensions.qos_rules_alias.Qos_rules_alias.get_resources",
"neutron.extensions.qos_pps_minimum_rule_alias.Qos_pps_minimum_rule_alias.get_resources",
"unittest.mock.MagicMock",
"neutron_lib.utils.net.random_mac_generator",
"neutron_lib.callbacks.events.DBEventPayload",
"neutron_lib.objects.utils.get_updatable_fields",
"neutron.services.qos.qos_plugin.QoSPlugin._extend_port_resource_request_bulk",
"neutron.objects.qos.rule.QosDscpMarkingRule",
"neutron_lib.context.get_admin_context",
"unittest.mock.call.driver.call",
"neutron_lib.plugins.directory.get_plugin",
"neutron.objects.ports.PortBinding",
"neutron.services.qos.qos_plugin.QoSPlugin._extend_port_resource_request",
"unittest.mock.patch.object",
"neutron_lib.context.Context"
] | [((30625, 30711), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.rbac_db.RbacNeutronDbObjectMixin.create_rbac_policy"""'], {}), "(\n 'neutron.objects.rbac_db.RbacNeutronDbObjectMixin.create_rbac_policy')\n", (30635, 30711), False, 'from unittest import mock\n'), ((30732, 30782), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy"""'], {}), "('neutron.objects.qos.policy.QosPolicy')\n", (30742, 30782), False, 'from unittest import mock\n'), ((32673, 32729), 'unittest.mock.patch.object', 'mock.patch.object', (['policy_object.QosPolicy', '"""get_object"""'], {}), "(policy_object.QosPolicy, 'get_object')\n", (32690, 32729), False, 'from unittest import mock\n'), ((32735, 32821), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.rbac_db.RbacNeutronDbObjectMixin.create_rbac_policy"""'], {}), "(\n 'neutron.objects.rbac_db.RbacNeutronDbObjectMixin.create_rbac_policy')\n", (32745, 32821), False, 'from unittest import mock\n'), ((32842, 32894), 'unittest.mock.patch.object', 'mock.patch.object', (['policy_object.QosPolicy', '"""update"""'], {}), "(policy_object.QosPolicy, 'update')\n", (32859, 32894), False, 'from unittest import mock\n'), ((34077, 34143), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.db.api.get_object"""'], {'return_value': 'None'}), "('neutron.objects.db.api.get_object', return_value=None)\n", (34087, 34143), False, 'from unittest import mock\n'), ((34149, 34201), 'unittest.mock.patch.object', 'mock.patch.object', (['policy_object.QosPolicy', '"""delete"""'], {}), "(policy_object.QosPolicy, 'delete')\n", (34166, 34201), False, 'from unittest import mock\n'), ((35130, 35186), 'unittest.mock.patch.object', 'mock.patch.object', (['policy_object.QosPolicy', '"""get_object"""'], {}), "(policy_object.QosPolicy, 'get_object')\n", (35147, 35186), False, 'from unittest import mock\n'), ((35192, 35254), 'unittest.mock.patch.object', 'mock.patch.object', (['rule_object.QosBandwidthLimitRule', '"""create"""'], {}), "(rule_object.QosBandwidthLimitRule, 'create')\n", (35209, 35254), False, 'from unittest import mock\n'), ((40048, 40110), 'unittest.mock.patch.object', 'mock.patch.object', (['rule_object.QosBandwidthLimitRule', '"""update"""'], {}), "(rule_object.QosBandwidthLimitRule, 'update')\n", (40065, 40110), False, 'from unittest import mock\n'), ((45336, 45398), 'unittest.mock.patch.object', 'mock.patch.object', (['rule_object.QosBandwidthLimitRule', '"""delete"""'], {}), "(rule_object.QosBandwidthLimitRule, 'delete')\n", (45353, 45398), False, 'from unittest import mock\n'), ((59494, 59534), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.ports.Port"""'], {}), "('neutron.objects.ports.Port')\n", (59504, 59534), False, 'from unittest import mock\n'), ((59540, 59590), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy"""'], {}), "('neutron.objects.qos.policy.QosPolicy')\n", (59550, 59590), False, 'from unittest import mock\n'), ((87381, 87442), 'unittest.mock.patch.object', 'mock.patch.object', (['qos_plugin.QoSPlugin', '"""update_policy_rule"""'], {}), "(qos_plugin.QoSPlugin, 'update_policy_rule')\n", (87398, 87442), False, 'from unittest import mock\n'), ((88466, 88524), 'unittest.mock.patch.object', 'mock.patch.object', (['qos_plugin.QoSPlugin', '"""get_policy_rule"""'], {}), "(qos_plugin.QoSPlugin, 'get_policy_rule')\n", (88483, 88524), False, 'from unittest import mock\n'), ((89371, 89432), 'unittest.mock.patch.object', 'mock.patch.object', (['qos_plugin.QoSPlugin', 
'"""delete_policy_rule"""'], {}), "(qos_plugin.QoSPlugin, 'delete_policy_rule')\n", (89388, 89432), False, 'from unittest import mock\n'), ((2511, 2575), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.obj_load_attr"""'], {}), "('neutron.objects.qos.policy.QosPolicy.obj_load_attr')\n", (2521, 2575), False, 'from unittest import mock\n'), ((3050, 3103), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""core_plugin"""', 'DB_PLUGIN_KLASS'], {}), "('core_plugin', DB_PLUGIN_KLASS)\n", (3071, 3103), False, 'from oslo_config import cfg\n'), ((3112, 3161), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""service_plugins"""', "['qos']"], {}), "('service_plugins', ['qos'])\n", (3133, 3161), False, 'from oslo_config import cfg\n'), ((3171, 3185), 'neutron.manager.init', 'manager.init', ([], {}), '()\n', (3183, 3185), False, 'from neutron import manager\n'), ((3212, 3255), 'neutron_lib.plugins.directory.get_plugin', 'directory.get_plugin', (['plugins_constants.QOS'], {}), '(plugins_constants.QOS)\n', (3232, 3255), False, 'from neutron_lib.plugins import directory\n'), ((3298, 3309), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (3307, 3309), False, 'from unittest import mock\n'), ((3480, 3523), 'neutron_lib.context.Context', 'context.Context', (['"""fake_user"""', '"""fake_tenant"""'], {}), "('fake_user', 'fake_tenant')\n", (3495, 3523), False, 'from neutron_lib import context\n'), ((3550, 3577), 'neutron_lib.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (3575, 3577), False, 'from neutron_lib import context\n'), ((4731, 4795), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (4754, 4795), True, 'from neutron.objects.qos import policy as policy_object\n'), ((4830, 4921), 'neutron.objects.qos.rule.QosBandwidthLimitRule', 'rule_object.QosBandwidthLimitRule', (['self.ctxt'], {}), "(self.ctxt, **self.rule_data[\n 'bandwidth_limit_rule'])\n", (4863, 4921), True, 'from neutron.objects.qos import rule as rule_object\n'), ((4956, 5041), 'neutron.objects.qos.rule.QosDscpMarkingRule', 'rule_object.QosDscpMarkingRule', (['self.ctxt'], {}), "(self.ctxt, **self.rule_data['dscp_marking_rule']\n )\n", (4986, 5041), True, 'from neutron.objects.qos import rule as rule_object\n'), ((5078, 5173), 'neutron.objects.qos.rule.QosMinimumBandwidthRule', 'rule_object.QosMinimumBandwidthRule', (['self.ctxt'], {}), "(self.ctxt, **self.rule_data[\n 'minimum_bandwidth_rule'])\n", (5113, 5173), True, 'from neutron.objects.qos import rule as rule_object\n'), ((5207, 5301), 'neutron.objects.qos.rule.QosPacketRateLimitRule', 'rule_object.QosPacketRateLimitRule', (['self.ctxt'], {}), "(self.ctxt, **self.rule_data[\n 'packet_rate_limit_rule'])\n", (5241, 5301), True, 'from neutron.objects.qos import rule as rule_object\n'), ((5339, 5437), 'neutron.objects.qos.rule.QosMinimumPacketRateRule', 'rule_object.QosMinimumPacketRateRule', (['self.ctxt'], {}), "(self.ctxt, **self.rule_data[\n 'minimum_packet_rate_rule'])\n", (5375, 5437), True, 'from neutron.objects.qos import rule as rule_object\n'), ((6100, 6125), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (6123, 6125), False, 'from oslo_utils import uuidutils\n'), ((6493, 6547), 'neutron.objects.ports.Port', 'ports_object.Port', (['self.ctxt'], {}), "(self.ctxt, **self.port_data['port'])\n", (6510, 6547), True, 'from neutron.objects import ports as ports_object\n'), 
((6636, 6708), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'network_id': 'network_id', 'physical_network': 'physical_network'}), '(network_id=network_id, physical_network=physical_network)\n', (6650, 6708), False, 'from unittest import mock\n'), ((7804, 7829), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (7827, 7829), False, 'from oslo_utils import uuidutils\n'), ((8443, 8515), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'network_id': 'network_id', 'physical_network': 'physical_network'}), '(network_id=network_id, physical_network=physical_network)\n', (8457, 8515), False, 'from unittest import mock\n'), ((11710, 11784), 'neutron.objects.qos.rule.QosMinimumBandwidthRule', 'rule_object.QosMinimumBandwidthRule', (['self.ctxt'], {}), '(self.ctxt, **min_bw_rule_ingress_data)\n', (11745, 11784), True, 'from neutron.objects.qos import rule as rule_object\n'), ((11829, 11905), 'neutron.objects.qos.rule.QosMinimumPacketRateRule', 'rule_object.QosMinimumPacketRateRule', (['self.ctxt'], {}), '(self.ctxt, **min_pps_rule_ingress_data)\n', (11865, 11905), True, 'from neutron.objects.qos import rule as rule_object\n'), ((17244, 17318), 'neutron.objects.qos.rule.QosMinimumBandwidthRule', 'rule_object.QosMinimumBandwidthRule', (['self.ctxt'], {}), '(self.ctxt, **min_bw_rule_ingress_data)\n', (17279, 17318), True, 'from neutron.objects.qos import rule as rule_object\n'), ((17363, 17439), 'neutron.objects.qos.rule.QosMinimumPacketRateRule', 'rule_object.QosMinimumPacketRateRule', (['self.ctxt'], {}), '(self.ctxt, **min_pps_rule_ingress_data)\n', (17399, 17439), True, 'from neutron.objects.qos import rule as rule_object\n'), ((21358, 21383), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (21381, 21383), False, 'from oslo_utils import uuidutils\n'), ((21690, 21741), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'id': 'port_id', 'qos_policy_id': 'policy_id'}), '(id=port_id, qos_policy_id=policy_id)\n', (21704, 21741), False, 'from unittest import mock\n'), ((21764, 21792), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'id': 'policy_id'}), '(id=policy_id)\n', (21778, 21792), False, 'from unittest import mock\n'), ((21814, 21825), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (21823, 21825), False, 'from unittest import mock\n'), ((23350, 23375), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (23373, 23375), False, 'from oslo_utils import uuidutils\n'), ((23843, 23868), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (23866, 23868), False, 'from oslo_utils import uuidutils\n'), ((24561, 24589), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'id': 'policy_id'}), '(id=policy_id)\n', (24575, 24589), False, 'from unittest import mock\n'), ((24611, 24622), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (24620, 24622), False, 'from unittest import mock\n'), ((26438, 26463), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (26461, 26463), False, 'from oslo_utils import uuidutils\n'), ((27871, 27896), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (27894, 27896), False, 'from oslo_utils import uuidutils\n'), ((28553, 28615), 'neutron.objects.network.NetworkSegment', 'network_object.NetworkSegment', ([], {'physical_network': '"""fake physnet"""'}), "(physical_network='fake physnet')\n", (28582, 28615), True, 'from neutron.objects import network as network_object\n'), 
((28643, 28696), 'neutron.objects.network.Network', 'network_object.Network', (['self.ctxt'], {'segments': '[segment]'}), '(self.ctxt, segments=[segment])\n', (28665, 28696), True, 'from neutron.objects import network as network_object\n'), ((29660, 29722), 'neutron.objects.network.NetworkSegment', 'network_object.NetworkSegment', ([], {'physical_network': '"""fake physnet"""'}), "(physical_network='fake physnet')\n", (29689, 29722), True, 'from neutron.objects import network as network_object\n'), ((29750, 29803), 'neutron.objects.network.Network', 'network_object.Network', (['self.ctxt'], {'segments': '[segment]'}), '(self.ctxt, segments=[segment])\n', (29772, 29803), True, 'from neutron.objects import network as network_object\n'), ((30879, 30890), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (30888, 30890), False, 'from unittest import mock\n'), ((31226, 31295), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""create_policy_precommit"""', 'self.ctxt', 'mock.ANY'], {}), "('create_policy_precommit', self.ctxt, mock.ANY)\n", (31247, 31295), False, 'from unittest import mock\n'), ((31336, 31395), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""create_policy"""', 'self.ctxt', 'mock.ANY'], {}), "('create_policy', self.ctxt, mock.ANY)\n", (31357, 31395), False, 'from unittest import mock\n'), ((31715, 31740), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (31738, 31740), False, 'from oslo_utils import uuidutils\n'), ((31762, 31787), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (31785, 31787), False, 'from oslo_utils import uuidutils\n'), ((33104, 33115), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (33113, 33115), False, 'from unittest import mock\n'), ((33310, 33398), 'neutron_lib.objects.utils.get_updatable_fields', 'obj_utils.get_updatable_fields', (['policy_object.QosPolicy', "self.policy_data['policy']"], {}), "(policy_object.QosPolicy, self.policy_data[\n 'policy'])\n", (33340, 33398), True, 'from neutron_lib.objects import utils as obj_utils\n'), ((33605, 33623), 'unittest.mock.call.update', 'mock.call.update', ([], {}), '()\n', (33621, 33623), False, 'from unittest import mock\n'), ((33661, 33730), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy_precommit"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy_precommit', self.ctxt, mock.ANY)\n", (33682, 33730), False, 'from unittest import mock\n'), ((33771, 33830), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy', self.ctxt, mock.ANY)\n", (33792, 33830), False, 'from unittest import mock\n'), ((34304, 34315), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (34313, 34315), False, 'from unittest import mock\n'), ((34658, 34676), 'unittest.mock.call.delete', 'mock.call.delete', ([], {}), '()\n', (34674, 34676), False, 'from unittest import mock\n'), ((34714, 34783), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""delete_policy_precommit"""', 'self.ctxt', 'mock.ANY'], {}), "('delete_policy_precommit', self.ctxt, mock.ANY)\n", (34735, 34783), False, 'from unittest import mock\n'), ((34824, 34883), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""delete_policy"""', 'self.ctxt', 'mock.ANY'], {}), "('delete_policy', self.ctxt, mock.ANY)\n", (34845, 34883), False, 'from unittest import mock\n'), ((35387, 35409), 'copy.copy', 'copy.copy', (['self.policy'], {}), 
'(self.policy)\n', (35396, 35409), False, 'import copy\n'), ((35522, 35533), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (35531, 35533), False, 'from unittest import mock\n'), ((40195, 40206), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (40204, 40206), False, 'from unittest import mock\n'), ((40400, 40464), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (40423, 40464), True, 'from neutron.objects.qos import policy as policy_object\n'), ((44712, 44776), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (44735, 44776), True, 'from neutron.objects.qos import policy as policy_object\n'), ((44859, 44923), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (44882, 44923), True, 'from neutron.objects.qos import policy as policy_object\n'), ((45483, 45494), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (45492, 45494), False, 'from unittest import mock\n'), ((45688, 45752), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (45711, 45752), True, 'from neutron.objects.qos import policy as policy_object\n'), ((46735, 46799), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (46758, 46799), True, 'from neutron.objects.qos import policy as policy_object\n'), ((50293, 50357), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (50316, 50357), True, 'from neutron.objects.qos import policy as policy_object\n'), ((50809, 50873), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (50832, 50873), True, 'from neutron.objects.qos import policy as policy_object\n'), ((51344, 51408), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (51367, 51408), True, 'from neutron.objects.qos import policy as policy_object\n'), ((58010, 58037), 'neutron_lib.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (58035, 58037), False, 'from neutron_lib import context\n'), ((59752, 59763), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (59761, 59763), False, 'from unittest import mock\n'), ((60270, 60281), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (60279, 60281), False, 'from unittest import mock\n'), ((61634, 61698), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (61657, 61698), True, 'from neutron.objects.qos import policy as policy_object\n'), ((62896, 62960), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (62919, 62960), True, 'from neutron.objects.qos import policy as policy_object\n'), ((63436, 63500), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (63459, 63500), True, 'from neutron.objects.qos import policy as policy_object\n'), ((63989, 64053), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), 
"(self.ctxt, **self.policy_data['policy'])\n", (64012, 64053), True, 'from neutron.objects.qos import policy as policy_object\n'), ((64513, 64577), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.ctxt'], {}), "(self.ctxt, **self.policy_data['policy'])\n", (64536, 64577), True, 'from neutron.objects.qos import policy as policy_object\n'), ((68988, 69015), 'neutron_lib.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (69013, 69015), False, 'from neutron_lib import context\n'), ((70200, 70262), 'neutron.objects.network.NetworkSegment', 'network_object.NetworkSegment', ([], {'physical_network': '"""fake physnet"""'}), "(physical_network='fake physnet')\n", (70229, 70262), True, 'from neutron.objects import network as network_object\n'), ((70290, 70343), 'neutron.objects.network.Network', 'network_object.Network', (['self.ctxt'], {'segments': '[segment]'}), '(self.ctxt, segments=[segment])\n', (70312, 70343), True, 'from neutron.objects import network as network_object\n'), ((71326, 71388), 'neutron.objects.network.NetworkSegment', 'network_object.NetworkSegment', ([], {'physical_network': '"""fake physnet"""'}), "(physical_network='fake physnet')\n", (71355, 71388), True, 'from neutron.objects import network as network_object\n'), ((71416, 71469), 'neutron.objects.network.Network', 'network_object.Network', (['self.ctxt'], {'segments': '[segment]'}), '(self.ctxt, segments=[segment])\n', (71438, 71469), True, 'from neutron.objects import network as network_object\n'), ((83074, 83101), 'neutron_lib.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (83099, 83101), False, 'from neutron_lib import context\n'), ((84226, 84273), 'neutron.extensions.qos_rules_alias.Qos_rules_alias.get_resources', 'qos_rules_alias.Qos_rules_alias.get_resources', ([], {}), '()\n', (84271, 84273), False, 'from neutron.extensions import qos_rules_alias\n'), ((84489, 84558), 'neutron.extensions.qos_pps_minimum_rule_alias.Qos_pps_minimum_rule_alias.get_resources', 'qos_pps_minimum_rule_alias.Qos_pps_minimum_rule_alias.get_resources', ([], {}), '()\n', (84556, 84558), False, 'from neutron.extensions import qos_pps_minimum_rule_alias\n'), ((84859, 84927), 'unittest.mock.patch', 'mock.patch', (['"""neutron.notifiers.batch_notifier.BatchNotifier._notify"""'], {}), "('neutron.notifiers.batch_notifier.BatchNotifier._notify')\n", (84869, 84927), False, 'from unittest import mock\n'), ((85300, 85343), 'neutron_lib.plugins.directory.get_plugin', 'directory.get_plugin', (['plugins_constants.QOS'], {}), '(plugins_constants.QOS)\n', (85320, 85343), False, 'from neutron_lib.plugins import directory\n'), ((85365, 85408), 'neutron_lib.context.Context', 'context.Context', (['"""fake_user"""', '"""fake_tenant"""'], {}), "('fake_user', 'fake_tenant')\n", (85380, 85408), False, 'from neutron_lib import context\n'), ((85674, 85699), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (85697, 85699), False, 'from oslo_utils import uuidutils\n'), ((91223, 91291), 'unittest.mock.patch', 'mock.patch', (['"""neutron.notifiers.batch_notifier.BatchNotifier._notify"""'], {}), "('neutron.notifiers.batch_notifier.BatchNotifier._notify')\n", (91233, 91291), False, 'from unittest import mock\n'), ((91681, 91724), 'neutron_lib.plugins.directory.get_plugin', 'directory.get_plugin', (['plugins_constants.QOS'], {}), '(plugins_constants.QOS)\n', (91701, 91724), False, 'from neutron_lib.plugins import directory\n'), ((91746, 91789), 
'neutron_lib.context.Context', 'context.Context', (['"""fake_user"""', '"""fake_tenant"""'], {}), "('fake_user', 'fake_tenant')\n", (91761, 91789), False, 'from neutron_lib import context\n'), ((91932, 91957), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (91955, 91957), False, 'from oslo_utils import uuidutils\n'), ((92873, 92926), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""core_plugin"""', 'DB_PLUGIN_KLASS'], {}), "('core_plugin', DB_PLUGIN_KLASS)\n", (92894, 92926), False, 'from oslo_config import cfg\n'), ((92935, 92984), 'oslo_config.cfg.CONF.set_override', 'cfg.CONF.set_override', (['"""service_plugins"""', "['qos']"], {}), "('service_plugins', ['qos'])\n", (92956, 92984), False, 'from oslo_config import cfg\n'), ((92994, 93008), 'neutron.manager.init', 'manager.init', ([], {}), '()\n', (93006, 93008), False, 'from neutron import manager\n'), ((93035, 93078), 'neutron_lib.plugins.directory.get_plugin', 'directory.get_plugin', (['plugins_constants.QOS'], {}), '(plugins_constants.QOS)\n', (93055, 93078), False, 'from neutron_lib.plugins import directory\n'), ((93120, 93131), 'unittest.mock.Mock', 'mock.Mock', ([], {}), '()\n', (93129, 93131), False, 'from unittest import mock\n'), ((93303, 93330), 'neutron_lib.context.get_admin_context', 'context.get_admin_context', ([], {}), '()\n', (93328, 93330), False, 'from neutron_lib import context\n'), ((93357, 93382), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (93380, 93382), False, 'from oslo_utils import uuidutils\n'), ((93437, 93539), 'neutron.objects.qos.policy.QosPolicy', 'policy_object.QosPolicy', (['self.context'], {'project_id': 'self.project_id', 'shared': '(False)', 'is_default': '(False)'}), '(self.context, project_id=self.project_id, shared=\n False, is_default=False)\n', (93460, 93539), True, 'from neutron.objects.qos import policy as policy_object\n'), ((93828, 93992), 'neutron.objects.qos.rule.QosMinimumBandwidthRule', 'rule_object.QosMinimumBandwidthRule', (['self.context'], {'project_id': 'self.project_id', 'qos_policy_id': 'policy_id', 'direction': 'direction', 'min_kbps': 'min_kbps', 'id': 'rule_id'}), '(self.context, project_id=self.\n project_id, qos_policy_id=policy_id, direction=direction, min_kbps=\n min_kbps, id=rule_id)\n', (93863, 93992), True, 'from neutron.objects.qos import rule as rule_object\n'), ((94286, 94451), 'neutron.objects.qos.rule.QosMinimumPacketRateRule', 'rule_object.QosMinimumPacketRateRule', (['self.context'], {'project_id': 'self.project_id', 'qos_policy_id': 'policy_id', 'direction': 'direction', 'min_kpps': 'min_kpps', 'id': 'rule_id'}), '(self.context, project_id=self.\n project_id, qos_policy_id=policy_id, direction=direction, min_kpps=\n min_kpps, id=rule_id)\n', (94322, 94451), True, 'from neutron.objects.qos import rule as rule_object\n'), ((94942, 95219), 'neutron.objects.ports.Port', 'ports_object.Port', (['self.context'], {'network_id': 'network_id', 'device_owner': 'device_owner', 'project_id': 'self.project_id', 'admin_state_up': '(True)', 'status': '"""DOWN"""', 'device_id': '"""2"""', 'qos_policy_id': 'qos_policy_id', 'qos_network_policy_id': 'qos_network_policy_id', 'mac_address': 'mac', 'id': 'port_id'}), "(self.context, network_id=network_id, device_owner=\n device_owner, project_id=self.project_id, admin_state_up=True, status=\n 'DOWN', device_id='2', qos_policy_id=qos_policy_id,\n qos_network_policy_id=qos_network_policy_id, mac_address=mac, id=port_id)\n", (94959, 95219), True, 
'from neutron.objects import ports as ports_object\n'), ((95377, 95442), 'neutron.objects.network.Network', 'network_object.Network', (['self.context'], {'qos_policy_id': 'qos_policy_id'}), '(self.context, qos_policy_id=qos_policy_id)\n', (95399, 95442), True, 'from neutron.objects import network as network_object\n'), ((103983, 103999), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (103997, 103999), False, 'from unittest import mock\n'), ((105180, 105196), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (105194, 105196), False, 'from unittest import mock\n'), ((105943, 105959), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (105957, 105959), False, 'from unittest import mock\n'), ((106467, 106648), 'neutron.objects.ports.PortBinding', 'ports_object.PortBinding', (['self.context'], {'port_id': 'port1.id', 'host': '"""fake_host1"""', 'vnic_type': '"""fake_vnic_type"""', 'vif_type': '"""fake_vif_type"""', 'profile': "{'allocation': 'fake_allocation'}"}), "(self.context, port_id=port1.id, host='fake_host1',\n vnic_type='fake_vnic_type', vif_type='fake_vif_type', profile={\n 'allocation': 'fake_allocation'})\n", (106491, 106648), True, 'from neutron.objects import ports as ports_object\n'), ((108317, 108333), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (108331, 108333), False, 'from unittest import mock\n'), ((108841, 108986), 'neutron.objects.ports.PortBinding', 'ports_object.PortBinding', (['self.context'], {'port_id': 'port1.id', 'host': '"""fake_host1"""', 'vnic_type': '"""fake_vnic_type"""', 'vif_type': '"""fake_vif_type"""', 'profile': '{}'}), "(self.context, port_id=port1.id, host='fake_host1',\n vnic_type='fake_vnic_type', vif_type='fake_vif_type', profile={})\n", (108865, 108986), True, 'from neutron.objects import ports as ports_object\n'), ((109258, 109403), 'neutron.objects.ports.PortBinding', 'ports_object.PortBinding', (['self.context'], {'port_id': 'port2.id', 'host': '"""fake_host2"""', 'vnic_type': '"""fake_vnic_type"""', 'vif_type': '"""fake_vif_type"""', 'profile': '{}'}), "(self.context, port_id=port2.id, host='fake_host2',\n vnic_type='fake_vnic_type', vif_type='fake_vif_type', profile={})\n", (109282, 109403), True, 'from neutron.objects import ports as ports_object\n'), ((123346, 123390), 'neutron.objects.qos.rule.QosDscpMarkingRule', 'rule_object.QosDscpMarkingRule', ([], {'dscp_mark': '(16)'}), '(dscp_mark=16)\n', (123376, 123390), True, 'from neutron.objects.qos import rule as rule_object\n'), ((123416, 123460), 'neutron.objects.qos.rule.QosDscpMarkingRule', 'rule_object.QosDscpMarkingRule', ([], {'dscp_mark': '(18)'}), '(dscp_mark=18)\n', (123446, 123460), True, 'from neutron.objects.qos import rule as rule_object\n'), ((124251, 124295), 'neutron.objects.qos.rule.QosDscpMarkingRule', 'rule_object.QosDscpMarkingRule', ([], {'dscp_mark': '(16)'}), '(dscp_mark=16)\n', (124281, 124295), True, 'from neutron.objects.qos import rule as rule_object\n'), ((124908, 124952), 'neutron.objects.qos.rule.QosDscpMarkingRule', 'rule_object.QosDscpMarkingRule', ([], {'dscp_mark': '(16)'}), '(dscp_mark=16)\n', (124938, 124952), True, 'from neutron.objects.qos import rule as rule_object\n'), ((6824, 6921), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.network.NetworkSegment.get_objects"""'], {'return_value': '[segment_mock]'}), "('neutron.objects.network.NetworkSegment.get_objects',\n return_value=[segment_mock])\n", (6834, 6921), False, 'from unittest import mock\n'), ((6961, 7066), 'unittest.mock.patch', 
'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects"""'], {'return_value': 'min_bw_rules'}), "('neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects',\n return_value=min_bw_rules)\n", (6971, 7066), False, 'from unittest import mock\n'), ((7146, 7253), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects"""'], {'return_value': 'min_pps_rules'}), "('neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects',\n return_value=min_pps_rules)\n", (7156, 7253), False, 'from unittest import mock\n'), ((7333, 7422), 'unittest.mock.patch', 'mock.patch', (['"""uuid.uuid5"""'], {'return_value': '"""fake_uuid"""', 'side_effect': 'request_groups_uuids'}), "('uuid.uuid5', return_value='fake_uuid', side_effect=\n request_groups_uuids)\n", (7343, 7422), False, 'from unittest import mock\n'), ((7499, 7570), 'neutron.services.qos.qos_plugin.QoSPlugin._extend_port_resource_request', 'qos_plugin.QoSPlugin._extend_port_resource_request', (['port_res', 'self.port'], {}), '(port_res, self.port)\n', (7549, 7570), False, 'from neutron.services.qos import qos_plugin\n'), ((8631, 8728), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.network.NetworkSegment.get_objects"""'], {'return_value': '[segment_mock]'}), "('neutron.objects.network.NetworkSegment.get_objects',\n return_value=[segment_mock])\n", (8641, 8728), False, 'from unittest import mock\n'), ((8768, 8873), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects"""'], {'return_value': 'min_bw_rules'}), "('neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects',\n return_value=min_bw_rules)\n", (8778, 8873), False, 'from unittest import mock\n'), ((8953, 9060), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects"""'], {'return_value': 'min_pps_rules'}), "('neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects',\n return_value=min_pps_rules)\n", (8963, 9060), False, 'from unittest import mock\n'), ((9140, 9229), 'unittest.mock.patch', 'mock.patch', (['"""uuid.uuid5"""'], {'return_value': '"""fake_uuid"""', 'side_effect': 'request_groups_uuids'}), "('uuid.uuid5', return_value='fake_uuid', side_effect=\n request_groups_uuids)\n", (9150, 9229), False, 'from unittest import mock\n'), ((9306, 9378), 'neutron.services.qos.qos_plugin.QoSPlugin._extend_port_resource_request_bulk', 'qos_plugin.QoSPlugin._extend_port_resource_request_bulk', (['ports_res', 'None'], {}), '(ports_res, None)\n', (9361, 9378), False, 'from neutron.services.qos import qos_plugin\n'), ((11398, 11423), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (11421, 11423), False, 'from oslo_utils import uuidutils\n'), ((11567, 11592), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (11590, 11592), False, 'from oslo_utils import uuidutils\n'), ((16932, 16957), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (16955, 16957), False, 'from oslo_utils import uuidutils\n'), ((17101, 17126), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (17124, 17126), False, 'from oslo_utils import uuidutils\n'), ((20221, 20255), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'qos_policy_id': 'None'}), '(qos_policy_id=None)\n', (20235, 20255), False, 'from unittest import mock\n'), ((20338, 20372), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'qos_policy_id': 
'None'}), '(qos_policy_id=None)\n', (20352, 20372), False, 'from unittest import mock\n'), ((20413, 20457), 'unittest.mock.MagicMock', 'mock.MagicMock', ([], {'qos_policy_id': 'self.policy.id'}), '(qos_policy_id=self.policy.id)\n', (20427, 20457), False, 'from unittest import mock\n'), ((20647, 20740), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.ports.Port.get_objects"""'], {'side_effect': '[network_ports, ports]'}), "('neutron.objects.ports.Port.get_objects', side_effect=[\n network_ports, ports])\n", (20657, 20740), False, 'from unittest import mock\n'), ((20771, 20823), 'unittest.mock.patch.object', 'mock.patch.object', (['self.policy', '"""get_bound_networks"""'], {}), "(self.policy, 'get_bound_networks')\n", (20788, 20823), False, 'from unittest import mock\n'), ((20847, 20896), 'unittest.mock.patch.object', 'mock.patch.object', (['self.policy', '"""get_bound_ports"""'], {}), "(self.policy, 'get_bound_ports')\n", (20864, 20896), False, 'from unittest import mock\n'), ((21839, 21914), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.ports.Port.get_object"""'], {'return_value': 'port_mock'}), "('neutron.objects.ports.Port.get_object', return_value=port_mock)\n", (21849, 21914), False, 'from unittest import mock\n'), ((21962, 22054), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'policy_mock'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n policy_mock)\n", (21972, 22054), False, 'from unittest import mock\n'), ((22099, 22161), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""validate_policy_for_port"""'], {}), "(self.qos_plugin, 'validate_policy_for_port')\n", (22116, 22161), False, 'from unittest import mock\n'), ((22213, 22278), 'unittest.mock.patch.object', 'mock.patch.object', (['self.ctxt', '"""elevated"""'], {'return_value': 'admin_ctxt'}), "(self.ctxt, 'elevated', return_value=admin_ctxt)\n", (22230, 22278), False, 'from unittest import mock\n'), ((24636, 24708), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.ports.Port.get_objects"""'], {'return_value': 'ports'}), "('neutron.objects.ports.Port.get_objects', return_value=ports)\n", (24646, 24708), False, 'from unittest import mock\n'), ((24757, 24849), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'policy_mock'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n policy_mock)\n", (24767, 24849), False, 'from unittest import mock\n'), ((24894, 24959), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""validate_policy_for_network"""'], {}), "(self.qos_plugin, 'validate_policy_for_network')\n", (24911, 24959), False, 'from unittest import mock\n'), ((25014, 25077), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""validate_policy_for_ports"""'], {}), "(self.qos_plugin, 'validate_policy_for_ports')\n", (25031, 25077), False, 'from unittest import mock\n'), ((25130, 25195), 'unittest.mock.patch.object', 'mock.patch.object', (['self.ctxt', '"""elevated"""'], {'return_value': 'admin_ctxt'}), "(self.ctxt, 'elevated', return_value=admin_ctxt)\n", (25147, 25195), False, 'from unittest import mock\n'), ((26859, 26884), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (26882, 26884), False, 'from oslo_utils import uuidutils\n'), ((26899, 26998), 'unittest.mock.patch.object', 'mock.patch.object', 
(['self.qos_plugin.driver_manager', '"""validate_rule_for_port"""'], {'return_value': '(False)'}), "(self.qos_plugin.driver_manager, 'validate_rule_for_port',\n return_value=False)\n", (26916, 26998), False, 'from unittest import mock\n'), ((27338, 27363), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (27361, 27363), False, 'from oslo_utils import uuidutils\n'), ((27378, 27476), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin.driver_manager', '"""validate_rule_for_port"""'], {'return_value': '(True)'}), "(self.qos_plugin.driver_manager, 'validate_rule_for_port',\n return_value=True)\n", (27395, 27476), False, 'from unittest import mock\n'), ((27910, 28011), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin.driver_manager', '"""validate_rule_for_network"""'], {'return_value': '(True)'}), "(self.qos_plugin.driver_manager,\n 'validate_rule_for_network', return_value=True)\n", (27927, 28011), False, 'from unittest import mock\n'), ((28930, 29017), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n policy)\n", (28940, 29017), False, 'from unittest import mock\n'), ((29061, 29135), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.network.Network.get_object"""'], {'return_value': 'net'}), "('neutron.objects.network.Network.get_object', return_value=net)\n", (29071, 29135), False, 'from unittest import mock\n'), ((29184, 29270), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_get_ports_with_policy"""'], {'return_value': '[port]'}), "(self.qos_plugin, '_get_ports_with_policy', return_value=[\n port])\n", (29201, 29270), False, 'from unittest import mock\n'), ((30020, 30107), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n policy)\n", (30030, 30107), False, 'from unittest import mock\n'), ((30151, 30225), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.network.Network.get_object"""'], {'return_value': 'net'}), "('neutron.objects.network.Network.get_object', return_value=net)\n", (30161, 30225), False, 'from unittest import mock\n'), ((30274, 30360), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_get_ports_with_policy"""'], {'return_value': '[port]'}), "(self.qos_plugin, '_get_ports_with_policy', return_value=[\n port])\n", (30291, 30360), False, 'from unittest import mock\n'), ((32462, 32512), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy"""'], {}), "('neutron.objects.qos.policy.QosPolicy')\n", (32472, 32512), False, 'from unittest import mock\n'), ((35721, 35833), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.qos_policy_validator.check_bandwidth_rule_conflict"""'], {'return_value': 'None'}), "(\n 'neutron.objects.qos.qos_policy_validator.check_bandwidth_rule_conflict',\n return_value=None)\n", (35731, 35833), False, 'from unittest import mock\n'), ((35895, 36005), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.qos_policy_validator.check_min_pps_rule_conflict"""'], {'return_value': 'None'}), "(\n 'neutron.objects.qos.qos_policy_validator.check_min_pps_rule_conflict',\n return_value=None)\n", (35905, 36005), False, 'from unittest import mock\n'), ((36270, 36288), 'unittest.mock.call.create', 
'mock.call.create', ([], {}), '()\n', (36286, 36288), False, 'from unittest import mock\n'), ((36330, 36399), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy_precommit"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy_precommit', self.ctxt, mock.ANY)\n", (36351, 36399), False, 'from unittest import mock\n'), ((36448, 36507), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy', self.ctxt, mock.ANY)\n", (36469, 36507), False, 'from unittest import mock\n'), ((36932, 37020), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (36942, 37020), False, 'from unittest import mock\n'), ((37572, 37660), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (37582, 37660), False, 'from unittest import mock\n'), ((38279, 38367), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (38289, 38367), False, 'from unittest import mock\n'), ((38919, 39007), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (38929, 39007), False, 'from unittest import mock\n'), ((39639, 39727), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (39649, 39727), False, 'from unittest import mock\n'), ((40538, 40612), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.get_rules"""'], {'return_value': '[self.rule]'}), "('neutron.objects.qos.rule.get_rules', return_value=[self.rule])\n", (40548, 40612), False, 'from unittest import mock\n'), ((40638, 40726), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (40648, 40726), False, 'from unittest import mock\n'), ((41066, 41084), 'unittest.mock.call.update', 'mock.call.update', ([], {}), '()\n', (41082, 41084), False, 'from unittest import mock\n'), ((41126, 41195), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy_precommit"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy_precommit', self.ctxt, mock.ANY)\n", (41147, 41195), False, 'from unittest import mock\n'), ((41244, 41303), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy', self.ctxt, mock.ANY)\n", (41265, 41303), False, 'from unittest import mock\n'), ((41728, 41816), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (41738, 41816), False, 'from unittest import mock\n'), ((42258, 42346), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 
'_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (42268, 42346), False, 'from unittest import mock\n'), ((42839, 42927), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (42849, 42927), False, 'from unittest import mock\n'), ((43304, 43392), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (43314, 43392), False, 'from unittest import mock\n'), ((43837, 43925), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (43847, 43925), False, 'from unittest import mock\n'), ((44322, 44410), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (44332, 44410), False, 'from unittest import mock\n'), ((44950, 45038), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (44960, 45038), False, 'from unittest import mock\n'), ((45779, 45867), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (45789, 45867), False, 'from unittest import mock\n'), ((46170, 46188), 'unittest.mock.call.delete', 'mock.call.delete', ([], {}), '()\n', (46186, 46188), False, 'from unittest import mock\n'), ((46230, 46299), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy_precommit"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy_precommit', self.ctxt, mock.ANY)\n", (46251, 46299), False, 'from unittest import mock\n'), ((46348, 46407), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy', self.ctxt, mock.ANY)\n", (46369, 46407), False, 'from unittest import mock\n'), ((46826, 46914), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (46836, 46914), False, 'from unittest import mock\n'), ((47236, 47328), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (47246, 47328), False, 'from unittest import mock\n'), ((47828, 47920), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (47838, 47920), False, 'from unittest import mock\n'), ((48454, 48546), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", 
(48464, 48546), False, 'from unittest import mock\n'), ((49185, 49270), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (49195, 49270), False, 'from unittest import mock\n'), ((49541, 49626), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (49551, 49626), False, 'from unittest import mock\n'), ((49933, 50018), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (49943, 50018), False, 'from unittest import mock\n'), ((50384, 50472), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (50394, 50472), False, 'from unittest import mock\n'), ((50900, 50988), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (50910, 50988), False, 'from unittest import mock\n'), ((51435, 51523), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (51445, 51523), False, 'from unittest import mock\n'), ((51856, 51948), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (51866, 51948), False, 'from unittest import mock\n'), ((52473, 52565), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (52483, 52565), False, 'from unittest import mock\n'), ((53196, 53281), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (53206, 53281), False, 'from unittest import mock\n'), ((53587, 53672), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (53597, 53672), False, 'from unittest import mock\n'), ((53941, 54033), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (53951, 54033), False, 'from unittest import mock\n'), ((54539, 54631), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (54549, 54631), False, 'from unittest import mock\n'), ((55171, 55263), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], 
{'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (55181, 55263), False, 'from unittest import mock\n'), ((55929, 56014), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (55939, 56014), False, 'from unittest import mock\n'), ((56325, 56410), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (56335, 56410), False, 'from unittest import mock\n'), ((56692, 56777), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (56702, 56777), False, 'from unittest import mock\n'), ((57075, 57160), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (57085, 57160), False, 'from unittest import mock\n'), ((57472, 57557), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (57482, 57557), False, 'from unittest import mock\n'), ((58353, 58457), 'unittest.mock.patch.object', 'mock.patch.object', (['qos_plugin.QoSPlugin', '"""supported_rule_type_details"""'], {'return_value': 'drivers_details'}), "(qos_plugin.QoSPlugin, 'supported_rule_type_details',\n return_value=drivers_details)\n", (58370, 58457), False, 'from unittest import mock\n'), ((59134, 59243), 'unittest.mock.patch.object', 'mock.patch.object', (['qos_plugin.QoSPlugin', '"""supported_rule_types"""'], {'return_value': 'qos_consts.VALID_RULE_TYPES'}), "(qos_plugin.QoSPlugin, 'supported_rule_types',\n return_value=qos_consts.VALID_RULE_TYPES)\n", (59151, 59243), False, 'from unittest import mock\n'), ((61080, 61139), 'unittest.mock.call.driver.call', 'mock.call.driver.call', (['"""update_policy"""', 'self.ctxt', 'mock.ANY'], {}), "('update_policy', self.ctxt, mock.ANY)\n", (61101, 61139), False, 'from unittest import mock\n'), ((61725, 61813), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (61735, 61813), False, 'from unittest import mock\n'), ((62415, 62503), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (62425, 62503), False, 'from unittest import mock\n'), ((62987, 63075), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (62997, 63075), False, 'from unittest import mock\n'), ((63527, 63615), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (63537, 63615), False, 'from unittest import mock\n'), 
((64080, 64168), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (64090, 64168), False, 'from unittest import mock\n'), ((64604, 64692), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (64614, 64692), False, 'from unittest import mock\n'), ((65022, 65114), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (65032, 65114), False, 'from unittest import mock\n'), ((65663, 65755), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (65673, 65755), False, 'from unittest import mock\n'), ((66294, 66386), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (66304, 66386), False, 'from unittest import mock\n'), ((67050, 67135), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (67060, 67135), False, 'from unittest import mock\n'), ((67450, 67535), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (67460, 67535), False, 'from unittest import mock\n'), ((67821, 67906), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (67831, 67906), False, 'from unittest import mock\n'), ((68210, 68295), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (68220, 68295), False, 'from unittest import mock\n'), ((68617, 68702), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (68627, 68702), False, 'from unittest import mock\n'), ((69331, 69435), 'unittest.mock.patch.object', 'mock.patch.object', (['qos_plugin.QoSPlugin', '"""supported_rule_type_details"""'], {'return_value': 'drivers_details'}), "(qos_plugin.QoSPlugin, 'supported_rule_type_details',\n return_value=drivers_details)\n", (69348, 69435), False, 'from unittest import mock\n'), ((70577, 70665), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (70587, 70665), False, 'from unittest import mock\n'), ((70709, 70783), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.network.Network.get_object"""'], {'return_value': 'net'}), 
"('neutron.objects.network.Network.get_object', return_value=net)\n", (70719, 70783), False, 'from unittest import mock\n'), ((70832, 70918), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_get_ports_with_policy"""'], {'return_value': '[port]'}), "(self.qos_plugin, '_get_ports_with_policy', return_value=[\n port])\n", (70849, 70918), False, 'from unittest import mock\n'), ((71686, 71774), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (71696, 71774), False, 'from unittest import mock\n'), ((71818, 71892), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.network.Network.get_object"""'], {'return_value': 'net'}), "('neutron.objects.network.Network.get_object', return_value=net)\n", (71828, 71892), False, 'from unittest import mock\n'), ((71941, 72027), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_get_ports_with_policy"""'], {'return_value': '[port]'}), "(self.qos_plugin, '_get_ports_with_policy', return_value=[\n port])\n", (71958, 72027), False, 'from unittest import mock\n'), ((73600, 73693), 'neutron.objects.qos.rule.QosMinimumPacketRateRule', 'rule_object.QosMinimumPacketRateRule', (['self.ctxt'], {}), "(self.ctxt, **rule_data[\n 'minimum_packet_rate_rule'])\n", (73636, 73693), True, 'from neutron.objects.qos import rule as rule_object\n'), ((74410, 74498), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (74420, 74498), False, 'from unittest import mock\n'), ((75114, 75202), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (75124, 75202), False, 'from unittest import mock\n'), ((75605, 75690), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (75615, 75690), False, 'from unittest import mock\n'), ((76069, 76157), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (76079, 76157), False, 'from unittest import mock\n'), ((78171, 78259), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (78181, 78259), False, 'from unittest import mock\n'), ((78605, 78690), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (78615, 78690), False, 'from unittest import mock\n'), ((79107, 79195), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (79117, 79195), False, 'from unittest import mock\n'), ((79565, 79653), 'unittest.mock.patch', 'mock.patch', 
(['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (79575, 79653), False, 'from unittest import mock\n'), ((79963, 80048), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (79973, 80048), False, 'from unittest import mock\n'), ((80338, 80430), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (80348, 80430), False, 'from unittest import mock\n'), ((80946, 81038), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (80956, 81038), False, 'from unittest import mock\n'), ((81571, 81663), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'self.policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n self.policy)\n", (81581, 81663), False, 'from unittest import mock\n'), ((82321, 82406), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (82331, 82406), False, 'from unittest import mock\n'), ((82717, 82802), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=None\n )\n", (82727, 82802), False, 'from unittest import mock\n'), ((83417, 83521), 'unittest.mock.patch.object', 'mock.patch.object', (['qos_plugin.QoSPlugin', '"""supported_rule_type_details"""'], {'return_value': 'drivers_details'}), "(qos_plugin.QoSPlugin, 'supported_rule_type_details',\n return_value=drivers_details)\n", (83434, 83521), False, 'from unittest import mock\n'), ((87612, 87637), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (87635, 87637), False, 'from oslo_utils import uuidutils\n'), ((88689, 88714), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (88712, 88714), False, 'from oslo_utils import uuidutils\n'), ((89602, 89627), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (89625, 89627), False, 'from oslo_utils import uuidutils\n'), ((90555, 90580), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (90578, 90580), False, 'from oslo_utils import uuidutils\n'), ((93783, 93808), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (93806, 93808), False, 'from oslo_utils import uuidutils\n'), ((94241, 94266), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (94264, 94266), False, 'from oslo_utils import uuidutils\n'), ((94710, 94735), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (94733, 94735), False, 'from oslo_utils import uuidutils\n'), ((95871, 95936), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""validate_policy_for_network"""'], {}), "(self.qos_plugin, 'validate_policy_for_network')\n", 
(95888, 95936), False, 'from unittest import mock\n'), ((97380, 97442), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""validate_policy_for_port"""'], {}), "(self.qos_plugin, 'validate_policy_for_port')\n", (97397, 97442), False, 'from unittest import mock\n'), ((99919, 99985), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (99936, 99985), False, 'from unittest import mock\n'), ((100712, 100778), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (100729, 100778), False, 'from unittest import mock\n'), ((101540, 101606), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (101557, 101606), False, 'from unittest import mock\n'), ((102408, 102474), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (102425, 102474), False, 'from unittest import mock\n'), ((103270, 103336), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (103287, 103336), False, 'from unittest import mock\n'), ((104014, 104080), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (104031, 104080), False, 'from unittest import mock\n'), ((105211, 105277), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (105228, 105277), False, 'from unittest import mock\n'), ((106786, 106852), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (106803, 106852), False, 'from unittest import mock\n'), ((107541, 107693), 'unittest.mock.call', 'mock.call', (['original_qos', 'None', "{'id': port1.id, 'device_owner': 'compute:uu:id', 'qos_policy_id': None,\n 'qos_network_policy_id': None}", 'mock.ANY'], {}), "(original_qos, None, {'id': port1.id, 'device_owner':\n 'compute:uu:id', 'qos_policy_id': None, 'qos_network_policy_id': None},\n mock.ANY)\n", (107550, 107693), False, 'from unittest import mock\n'), ((109534, 109600), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin', '"""_change_placement_allocation"""'], {}), "(self.qos_plugin, '_change_placement_allocation')\n", (109551, 109600), False, 'from unittest import mock\n'), ((110321, 110475), 'unittest.mock.call', 'mock.call', (['original_qos', 'qos', "{'id': port1.id, 'device_owner': 'compute:uu:id', 'qos_policy_id': None,\n 'qos_network_policy_id': qos.id}", 'mock.ANY'], {}), "(original_qos, qos, {'id': port1.id, 'device_owner':\n 'compute:uu:id', 'qos_policy_id': None, 'qos_network_policy_id': qos.id\n }, mock.ANY)\n", (110330, 110475), False, 'from unittest import mock\n'), ((110605, 110759), 'unittest.mock.call', 'mock.call', (['original_qos', 'qos', "{'id': port2.id, 'device_owner': 'compute:uu:id', 'qos_policy_id': None,\n 'qos_network_policy_id': qos.id}", 'mock.ANY'], {}), "(original_qos, 
qos, {'id': port2.id, 'device_owner':\n 'compute:uu:id', 'qos_policy_id': None, 'qos_network_policy_id': qos.id\n }, mock.ANY)\n", (110614, 110759), False, 'from unittest import mock\n'), ((114150, 114227), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (114167, 114227), False, 'from unittest import mock\n'), ((114904, 114981), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (114921, 114981), False, 'from unittest import mock\n'), ((115730, 115807), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (115747, 115807), False, 'from unittest import mock\n'), ((116701, 116778), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (116718, 116778), False, 'from unittest import mock\n'), ((117756, 117833), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (117773, 117833), False, 'from unittest import mock\n'), ((118371, 118448), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (118388, 118448), False, 'from unittest import mock\n'), ((119010, 119087), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (119027, 119087), False, 'from unittest import mock\n'), ((119799, 119876), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (119816, 119876), False, 'from unittest import mock\n'), ((120599, 120676), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (120616, 120676), False, 'from unittest import mock\n'), ((121408, 121485), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (121425, 121485), False, 'from unittest import mock\n'), ((122143, 122220), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (122160, 122220), False, 'from unittest import mock\n'), ((122669, 122746), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (122686, 122746), False, 'from unittest import mock\n'), ((123807, 123884), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', 
'"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (123824, 123884), False, 'from unittest import mock\n'), ((124465, 124542), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (124482, 124542), False, 'from unittest import mock\n'), ((125116, 125193), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (125133, 125193), False, 'from unittest import mock\n'), ((125876, 125953), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (125893, 125953), False, 'from unittest import mock\n'), ((126447, 126524), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (126464, 126524), False, 'from unittest import mock\n'), ((126615, 126694), 'keystoneauth1.exceptions.Conflict', 'ks_exc.Conflict', ([], {'response': "{'errors': [{'code': 'placement.concurrent_update'}]}"}), "(response={'errors': [{'code': 'placement.concurrent_update'}]})\n", (126630, 126694), True, 'from keystoneauth1 import exceptions as ks_exc\n'), ((127242, 127319), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (127259, 127319), False, 'from unittest import mock\n'), ((127428, 127497), 'neutron_lib.exceptions.placement.PlacementAllocationGenerationConflict', 'pl_exc.PlacementAllocationGenerationConflict', ([], {'consumer': 'self.MIN_BW_RP'}), '(consumer=self.MIN_BW_RP)\n', (127472, 127497), True, 'from neutron_lib.exceptions import placement as pl_exc\n'), ((128086, 128163), 'unittest.mock.patch.object', 'mock.patch.object', (['self.qos_plugin._placement_client', '"""update_qos_allocation"""'], {}), "(self.qos_plugin._placement_client, 'update_qos_allocation')\n", (128103, 128163), False, 'from unittest import mock\n'), ((2224, 2274), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.db.api.create_object"""'], {}), "('neutron.objects.db.api.create_object')\n", (2234, 2274), False, 'from unittest import mock\n'), ((2291, 2341), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.db.api.update_object"""'], {}), "('neutron.objects.db.api.update_object')\n", (2301, 2341), False, 'from unittest import mock\n'), ((2358, 2408), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.db.api.delete_object"""'], {}), "('neutron.objects.db.api.delete_object')\n", (2368, 2408), False, 'from unittest import mock\n'), ((2425, 2472), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.db.api.get_object"""'], {}), "('neutron.objects.db.api.get_object')\n", (2435, 2472), False, 'from unittest import mock\n'), ((2788, 2860), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.base.NeutronDbObject.modify_fields_from_db"""'], {}), "('neutron.objects.base.NeutronDbObject.modify_fields_from_db')\n", (2798, 2860), False, 'from unittest import mock\n'), ((2899, 2958), 'unittest.mock.patch.object', 'mock.patch.object', (['policy_object.QosPolicy', '"""unset_default"""'], {}), 
"(policy_object.QosPolicy, 'unset_default')\n", (2916, 2958), False, 'from unittest import mock\n'), ((2975, 3032), 'unittest.mock.patch.object', 'mock.patch.object', (['policy_object.QosPolicy', '"""set_default"""'], {}), "(policy_object.QosPolicy, 'set_default')\n", (2992, 3032), False, 'from unittest import mock\n'), ((3335, 3412), 'unittest.mock.patch', 'mock.patch', (['"""neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push"""'], {}), "('neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push')\n", (3345, 3412), False, 'from unittest import mock\n'), ((3637, 3662), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (3660, 3662), False, 'from oslo_utils import uuidutils\n'), ((3701, 3726), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (3724, 3726), False, 'from oslo_utils import uuidutils\n'), ((3994, 4019), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (4017, 4019), False, 'from oslo_utils import uuidutils\n'), ((4176, 4201), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (4199, 4201), False, 'from oslo_utils import uuidutils\n'), ((4317, 4342), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (4340, 4342), False, 'from oslo_utils import uuidutils\n'), ((4439, 4464), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (4462, 4464), False, 'from oslo_utils import uuidutils\n'), ((4602, 4627), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (4625, 4627), False, 'from oslo_utils import uuidutils\n'), ((6181, 6206), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (6204, 6206), False, 'from oslo_utils import uuidutils\n'), ((23233, 23258), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (23256, 23258), False, 'from oslo_utils import uuidutils\n'), ((23652, 23677), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (23675, 23677), False, 'from oslo_utils import uuidutils\n'), ((24268, 24293), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (24291, 24293), False, 'from oslo_utils import uuidutils\n'), ((24321, 24346), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (24344, 24346), False, 'from oslo_utils import uuidutils\n'), ((24418, 24443), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (24441, 24443), False, 'from oslo_utils import uuidutils\n'), ((26318, 26343), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (26341, 26343), False, 'from oslo_utils import uuidutils\n'), ((26749, 26774), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (26772, 26774), False, 'from oslo_utils import uuidutils\n'), ((28794, 28819), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (28817, 28819), False, 'from oslo_utils import uuidutils\n'), ((28844, 28869), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (28867, 28869), False, 'from oslo_utils import uuidutils\n'), ((29901, 29926), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (29924, 29926), False, 'from oslo_utils import uuidutils\n'), ((29951, 29976), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], 
{}), '()\n', (29974, 29976), False, 'from oslo_utils import uuidutils\n'), ((31158, 31179), 'unittest.mock.call.QosPolicy', 'mock.call.QosPolicy', ([], {}), '()\n', (31177, 31179), False, 'from unittest import mock\n'), ((47366, 47437), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosBandwidthLimitRule.get_object"""'], {}), "('neutron.objects.qos.rule.QosBandwidthLimitRule.get_object')\n", (47376, 47437), False, 'from unittest import mock\n'), ((47958, 48030), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosBandwidthLimitRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosBandwidthLimitRule.get_objects')\n", (47968, 48030), False, 'from unittest import mock\n'), ((48584, 48656), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosBandwidthLimitRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosBandwidthLimitRule.get_objects')\n", (48594, 48656), False, 'from unittest import mock\n'), ((51986, 52055), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosDscpMarkingRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosDscpMarkingRule.get_objects')\n", (51996, 52055), False, 'from unittest import mock\n'), ((52603, 52672), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosDscpMarkingRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosDscpMarkingRule.get_objects')\n", (52613, 52672), False, 'from unittest import mock\n'), ((54071, 54144), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumBandwidthRule.get_object"""'], {}), "('neutron.objects.qos.rule.QosMinimumBandwidthRule.get_object')\n", (54081, 54144), False, 'from unittest import mock\n'), ((54669, 54743), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects')\n", (54679, 54743), False, 'from unittest import mock\n'), ((55301, 55375), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosMinimumBandwidthRule.get_objects')\n", (55311, 55375), False, 'from unittest import mock\n'), ((65152, 65224), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosPacketRateLimitRule.get_object"""'], {}), "('neutron.objects.qos.rule.QosPacketRateLimitRule.get_object')\n", (65162, 65224), False, 'from unittest import mock\n'), ((65793, 65866), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosPacketRateLimitRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosPacketRateLimitRule.get_objects')\n", (65803, 65866), False, 'from unittest import mock\n'), ((66424, 66497), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosPacketRateLimitRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosPacketRateLimitRule.get_objects')\n", (66434, 66497), False, 'from unittest import mock\n'), ((70441, 70466), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (70464, 70466), False, 'from oslo_utils import uuidutils\n'), ((70491, 70516), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (70514, 70516), False, 'from oslo_utils import uuidutils\n'), ((71567, 71592), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (71590, 71592), False, 'from oslo_utils import uuidutils\n'), ((71617, 71642), 'oslo_utils.uuidutils.generate_uuid', 
'uuidutils.generate_uuid', ([], {}), '()\n', (71640, 71642), False, 'from oslo_utils import uuidutils\n'), ((73061, 73149), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (73071, 73149), False, 'from unittest import mock\n'), ((73777, 73865), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (73787, 73865), False, 'from unittest import mock\n'), ((74956, 74981), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (74979, 74981), False, 'from oslo_utils import uuidutils\n'), ((77157, 77254), 'neutron.objects.qos.rule.QosMinimumPacketRateRule', 'rule_object.QosMinimumPacketRateRule', (['self.ctxt'], {}), "(self.ctxt, **rules_data[0][\n 'minimum_packet_rate_rule'])\n", (77193, 77254), True, 'from neutron.objects.qos import rule as rule_object\n'), ((77288, 77385), 'neutron.objects.qos.rule.QosMinimumPacketRateRule', 'rule_object.QosMinimumPacketRateRule', (['self.ctxt'], {}), "(self.ctxt, **rules_data[1][\n 'minimum_packet_rate_rule'])\n", (77324, 77385), True, 'from neutron.objects.qos import rule as rule_object\n'), ((77479, 77567), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.policy.QosPolicy.get_object"""'], {'return_value': '_policy'}), "('neutron.objects.qos.policy.QosPolicy.get_object', return_value=\n _policy)\n", (77489, 77567), False, 'from unittest import mock\n'), ((80468, 80542), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumPacketRateRule.get_object"""'], {}), "('neutron.objects.qos.rule.QosMinimumPacketRateRule.get_object')\n", (80478, 80542), False, 'from unittest import mock\n'), ((81076, 81151), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects')\n", (81086, 81151), False, 'from unittest import mock\n'), ((81701, 81776), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects"""'], {}), "('neutron.objects.qos.rule.QosMinimumPacketRateRule.get_objects')\n", (81711, 81776), False, 'from unittest import mock\n'), ((87932, 88008), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosRule.get_object"""'], {'return_value': 'rule'}), "('neutron.objects.qos.rule.QosRule.get_object', return_value=rule)\n", (87942, 88008), False, 'from unittest import mock\n'), ((88260, 88356), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'rule_object_class', 'rule_id', 'self.qos_policy_id', '{rule_data_name: data}'], {}), '(mock.ANY, rule_object_class, rule_id, self.qos_policy_id, {\n rule_data_name: data})\n', (88269, 88356), False, 'from unittest import mock\n'), ((89009, 89085), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosRule.get_object"""'], {'return_value': 'rule'}), "('neutron.objects.qos.rule.QosRule.get_object', return_value=rule)\n", (89019, 89085), False, 'from unittest import mock\n'), ((89192, 89259), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'rule_object_class', 'rule_id', 'self.qos_policy_id'], {}), '(mock.ANY, rule_object_class, rule_id, self.qos_policy_id)\n', (89201, 89259), False, 'from unittest import mock\n'), ((89922, 89998), 'unittest.mock.patch', 
'mock.patch', (['"""neutron.objects.qos.rule.QosRule.get_object"""'], {'return_value': 'rule'}), "('neutron.objects.qos.rule.QosRule.get_object', return_value=rule)\n", (89932, 89998), False, 'from unittest import mock\n'), ((90242, 90309), 'unittest.mock.call', 'mock.call', (['mock.ANY', 'rule_object_class', 'rule_id', 'self.qos_policy_id'], {}), '(mock.ANY, rule_object_class, rule_id, self.qos_policy_id)\n', (90251, 90309), False, 'from unittest import mock\n'), ((90598, 90674), 'unittest.mock.patch', 'mock.patch', (['"""neutron.objects.qos.rule.QosRule.get_object"""'], {'return_value': 'None'}), "('neutron.objects.qos.rule.QosRule.get_object', return_value=None)\n", (90608, 90674), False, 'from unittest import mock\n'), ((93156, 93233), 'unittest.mock.patch', 'mock.patch', (['"""neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push"""'], {}), "('neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push')\n", (93166, 93233), False, 'from unittest import mock\n'), ((94823, 94863), 'neutron_lib.utils.net.random_mac_generator', 'net_utils.random_mac_generator', (['base_mac'], {}), '(base_mac)\n', (94853, 94863), True, 'from neutron_lib.utils import net as net_utils\n'), ((7936, 7961), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (7959, 7961), False, 'from oslo_utils import uuidutils\n'), ((8215, 8240), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (8238, 8240), False, 'from oslo_utils import uuidutils\n'), ((20298, 20323), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (20321, 20323), False, 'from oslo_utils import uuidutils\n'), ((22445, 22547), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.ctxt'], {'desired_state': "kwargs['port']", 'states': "(kwargs['original_port'],)"}), "(self.ctxt, desired_state=kwargs['port'], states=(\n kwargs['original_port'],))\n", (22466, 22547), False, 'from neutron_lib.callbacks import events\n'), ((25368, 25476), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.ctxt'], {'desired_state': "kwargs['network']", 'states': "(kwargs['original_network'],)"}), "(self.ctxt, desired_state=kwargs['network'], states=(\n kwargs['original_network'],))\n", (25389, 25476), False, 'from neutron_lib.callbacks import events\n'), ((61017, 61036), 'unittest.mock.call.RuleCls', 'mock.call.RuleCls', ([], {}), '()\n', (61034, 61036), False, 'from unittest import mock\n'), ((72635, 72660), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (72658, 72660), False, 'from oslo_utils import uuidutils\n'), ((72860, 72885), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (72883, 72885), False, 'from oslo_utils import uuidutils\n'), ((76636, 76661), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (76659, 76661), False, 'from oslo_utils import uuidutils\n'), ((76861, 76886), 'oslo_utils.uuidutils.generate_uuid', 'uuidutils.generate_uuid', ([], {}), '()\n', (76884, 76886), False, 'from oslo_utils import uuidutils\n'), ((96160, 96232), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.context'], {'resource_id': "kwargs['network']['id']"}), "(self.context, resource_id=kwargs['network']['id'])\n", (96181, 96232), False, 'from neutron_lib.callbacks import events\n'), ((97629, 97698), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.context'], 
{'resource_id': "kwargs['port']['id']"}), "(self.context, resource_id=kwargs['port']['id'])\n", (97650, 97698), False, 'from neutron_lib.callbacks import events\n'), ((100194, 100254), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['context'], {'states': '(original_port, port)'}), '(context, states=(original_port, port))\n', (100215, 100254), False, 'from neutron_lib.callbacks import events\n'), ((100987, 101047), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['context'], {'states': '(original_port, port)'}), '(context, states=(original_port, port))\n', (101008, 101047), False, 'from neutron_lib.callbacks import events\n'), ((101815, 101875), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['context'], {'states': '(original_port, port)'}), '(context, states=(original_port, port))\n', (101836, 101875), False, 'from neutron_lib.callbacks import events\n'), ((102683, 102743), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['context'], {'states': '(original_port, port)'}), '(context, states=(original_port, port))\n', (102704, 102743), False, 'from neutron_lib.callbacks import events\n'), ((103545, 103605), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['context'], {'states': '(original_port, port)'}), '(context, states=(original_port, port))\n', (103566, 103605), False, 'from neutron_lib.callbacks import events\n'), ((104295, 104366), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.context'], {'states': '(original_network, network)'}), '(self.context, states=(original_network, network))\n', (104316, 104366), False, 'from neutron_lib.callbacks import events\n'), ((105492, 105563), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.context'], {'states': '(original_network, network)'}), '(self.context, states=(original_network, network))\n', (105513, 105563), False, 'from neutron_lib.callbacks import events\n'), ((107327, 107398), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.context'], {'states': '(original_network, network)'}), '(self.context, states=(original_network, network))\n', (107348, 107398), False, 'from neutron_lib.callbacks import events\n'), ((110106, 110177), 'neutron_lib.callbacks.events.DBEventPayload', 'events.DBEventPayload', (['self.context'], {'states': '(original_network, network)'}), '(self.context, states=(original_network, network))\n', (110127, 110177), False, 'from neutron_lib.callbacks import events\n'), ((60836, 60872), 'unittest.mock.call.QosPolicy.get_policy_obj', 'mock.call.QosPolicy.get_policy_obj', ([], {}), '()\n', (60870, 60872), False, 'from unittest import mock\n')] |
import re
import discord
from redbot.core import commands
class Covfefe(commands.Cog):
"""
Convert almost any word into covfefe
"""
def __init__(self, bot):
self.bot = bot
async def covfefe(self, x, k="aeiouy])"):
"""
https://codegolf.stackexchange.com/a/123697
"""
try:
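            # with the default k ("aeiouy])"), the f-string below expands to the
            # regex "(.*?[aeiouy])([^aeiouy]).*?([aeiouy])": b is the text up to and
            # including the first vowel, c the consonant after it, v the next vowel;
            # e.g. "coverage" -> ("co", "v", "e") -> "cov" + "fefe" = "covfefe"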
b, c, v = re.findall(f"(.*?[{k}([^{k}.*?([{k}", x)[0]
return b + c + (("bcdfgkpstvz" + c)["pgtvkgbzdfs".find(c)] + v) * 2
except IndexError:
return None
async def red_delete_data_for_user(self, **kwargs):
"""
Nothing to delete
"""
return
@commands.command()
async def covefy(self, ctx, msg):
"""Convert almost any word into covfefe"""
newword = await self.covfefe(msg)
if newword is not None:
await ctx.send(newword)
else:
await ctx.send("I cannot covfefeify that word")
| [
"re.findall",
"redbot.core.commands.command"
] | [((661, 679), 'redbot.core.commands.command', 'commands.command', ([], {}), '()\n', (677, 679), False, 'from redbot.core import commands\n'), ((358, 398), 're.findall', 're.findall', (['f"""(.*?[{k}([^{k}.*?([{k}"""', 'x'], {}), "(f'(.*?[{k}([^{k}.*?([{k}', x)\n", (368, 398), False, 'import re\n')] |
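The trailing list of call tuples in each row (like the one just above for the covfefe cog) appears to start with a (start, end) character span into that row's source, followed by the dotted API name, the parsed arguments, and the import line that introduced the name. Below is a minimal sketch of how such a row could be consumed, under the assumption that this span interpretation holds; iter_call_sites, row_code and row_calls are hypothetical names used only for illustration, not part of any tooling shipped with this dump.

import ast

def iter_call_sites(row_code, row_calls):
    # row_calls is the call-tuple list as literal text; the tuples vary in
    # arity, so only the first two fields (span and dotted API name) are used
    for entry in ast.literal_eval(row_calls):
        (start, end), api_name = entry[0], entry[1]
        yield api_name, (start, end), row_code[start:end]

# usage sketch: list every recorded call site of one row
# for name, span, snippet in iter_call_sites(row_code, row_calls):
#     print(name, span)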
import os
import numpy as np
import tensorflow as tf
from models_gqa.model import Model
from models_gqa.config import build_cfg_from_argparse
from util.gqa_train.data_reader import DataReader
import json
# Load config
cfg = build_cfg_from_argparse()
# Start session
os.environ["CUDA_VISIBLE_DEVICES"] = str(cfg.GPU_ID)
sess = tf.Session(config=tf.ConfigProto(
gpu_options=tf.GPUOptions(allow_growth=cfg.GPU_MEM_GROWTH)))
# Data files
imdb_file = cfg.IMDB_FILE % cfg.TEST.SPLIT_VQA
scene_graph_file = cfg.SCENE_GRAPH_FILE % \
cfg.TEST.SPLIT_VQA.replace('_balanced', '').replace('_all', '')
data_reader = DataReader(
imdb_file, shuffle=False, one_pass=True, batch_size=cfg.TEST.BATCH_SIZE,
T_encoder=cfg.T_ENCODER,
vocab_question_file=cfg.VOCAB_QUESTION_FILE,
vocab_answer_file=cfg.VOCAB_ANSWER_FILE,
feature_type=cfg.FEAT_TYPE,
spatial_feature_dir=cfg.SPATIAL_FEATURE_DIR,
objects_feature_dir=cfg.OBJECTS_FEATURE_DIR,
objects_max_num=cfg.W_FEAT,
scene_graph_file=scene_graph_file,
vocab_name_file=cfg.VOCAB_NAME_FILE,
vocab_attr_file=cfg.VOCAB_ATTR_FILE,
spatial_pos_enc_dim=cfg.SPATIAL_POS_ENC_DIM,
bbox_tile_num=cfg.BBOX_TILE_NUM)
num_vocab = data_reader.batch_loader.vocab_dict.num_vocab
num_choices = data_reader.batch_loader.answer_dict.num_vocab
# Inputs and model
input_seq_batch = tf.placeholder(tf.int32, [None, None])
seq_length_batch = tf.placeholder(tf.int32, [None])
image_feat_batch = tf.placeholder(
tf.float32, [None, cfg.H_FEAT, cfg.W_FEAT, cfg.D_FEAT])
image_valid_batch = tf.placeholder(
tf.float32, [None, cfg.H_FEAT, cfg.W_FEAT])
model = Model(
input_seq_batch, seq_length_batch, image_feat_batch, image_valid_batch,
num_vocab=num_vocab, num_choices=num_choices, is_training=False)
# Load snapshot
if cfg.TEST.USE_EMA:
ema = tf.train.ExponentialMovingAverage(decay=0.9) # decay doesn't matter
var_names = {
(ema.average_name(v) if v in model.params else v.op.name): v
for v in tf.global_variables()}
else:
var_names = {v.op.name: v for v in tf.global_variables()}
snapshot_file = cfg.TEST.SNAPSHOT_FILE % (cfg.EXP_NAME, cfg.TEST.ITER)
print('loading model snapshot from %s' % snapshot_file)
snapshot_saver = tf.train.Saver(var_names)
snapshot_saver.restore(sess, snapshot_file)
print('Done')
# Write results
result_dir = cfg.TEST.RESULT_DIR % (cfg.EXP_NAME, cfg.TEST.ITER)
os.makedirs(result_dir, exist_ok=True)
# Run test
answer_correct, num_questions = 0, 0
if cfg.TEST.OUTPUT_VQA_EVAL_PRED:
output_predictions = []
answer_word_list = data_reader.batch_loader.answer_dict.word_list
pred_file = os.path.join(
result_dir, 'gqa_eval_preds_%s_%s_%08d.json' % (
cfg.TEST.SPLIT_VQA, cfg.EXP_NAME, cfg.TEST.ITER))
for n_batch, batch in enumerate(data_reader.batches()):
if 'answer_label_batch' not in batch:
batch['answer_label_batch'] = -np.ones(
len(batch['qid_list']), np.int32)
if num_questions == 0:
print('imdb has no answer labels. Using dummy labels.\n\n'
'**The final accuracy will be zero (no labels provided)**\n')
vqa_scores_value = sess.run(model.vqa_scores, feed_dict={
input_seq_batch: batch['input_seq_batch'],
seq_length_batch: batch['seq_length_batch'],
image_feat_batch: batch['image_feat_batch'],
image_valid_batch: batch['image_valid_batch']})
# compute accuracy
vqa_labels = batch['answer_label_batch']
vqa_predictions = np.argmax(vqa_scores_value, axis=1)
answer_correct += np.sum(vqa_predictions == vqa_labels)
num_questions += len(vqa_labels)
accuracy = answer_correct / num_questions
if n_batch % 20 == 0:
print('exp: %s, iter = %d, accumulated accuracy on %s = %f (%d / %d)' %
(cfg.EXP_NAME, cfg.TEST.ITER, cfg.TEST.SPLIT_VQA,
accuracy, answer_correct, num_questions))
if cfg.TEST.OUTPUT_VQA_EVAL_PRED:
output_predictions.extend([
{"questionId": qId, "prediction": answer_word_list[p]}
for qId, p in zip(batch['qid_list'], vqa_predictions)])
with open(os.path.join(
result_dir, 'vqa_results_%s.txt' % cfg.TEST.SPLIT_VQA), 'w') as f:
print('\nexp: %s, iter = %d, final accuracy on %s = %f (%d / %d)' %
(cfg.EXP_NAME, cfg.TEST.ITER, cfg.TEST.SPLIT_VQA,
accuracy, answer_correct, num_questions))
print('exp: %s, iter = %d, final accuracy on %s = %f (%d / %d)' %
(cfg.EXP_NAME, cfg.TEST.ITER, cfg.TEST.SPLIT_VQA,
accuracy, answer_correct, num_questions), file=f)
if cfg.TEST.OUTPUT_VQA_EVAL_PRED:
with open(pred_file, 'w') as f:
json.dump(output_predictions, f, indent=2)
print('prediction file written to %s' % pred_file)
| [
"os.makedirs",
"models_gqa.config.build_cfg_from_argparse",
"tensorflow.placeholder",
"tensorflow.train.Saver",
"os.path.join",
"numpy.argmax",
"tensorflow.global_variables",
"numpy.sum",
"util.gqa_train.data_reader.DataReader",
"tensorflow.train.ExponentialMovingAverage",
"tensorflow.GPUOptions",
"json.dump",
"models_gqa.model.Model"
] | [((226, 251), 'models_gqa.config.build_cfg_from_argparse', 'build_cfg_from_argparse', ([], {}), '()\n', (249, 251), False, 'from models_gqa.config import build_cfg_from_argparse\n'), ((615, 1173), 'util.gqa_train.data_reader.DataReader', 'DataReader', (['imdb_file'], {'shuffle': '(False)', 'one_pass': '(True)', 'batch_size': 'cfg.TEST.BATCH_SIZE', 'T_encoder': 'cfg.T_ENCODER', 'vocab_question_file': 'cfg.VOCAB_QUESTION_FILE', 'vocab_answer_file': 'cfg.VOCAB_ANSWER_FILE', 'feature_type': 'cfg.FEAT_TYPE', 'spatial_feature_dir': 'cfg.SPATIAL_FEATURE_DIR', 'objects_feature_dir': 'cfg.OBJECTS_FEATURE_DIR', 'objects_max_num': 'cfg.W_FEAT', 'scene_graph_file': 'scene_graph_file', 'vocab_name_file': 'cfg.VOCAB_NAME_FILE', 'vocab_attr_file': 'cfg.VOCAB_ATTR_FILE', 'spatial_pos_enc_dim': 'cfg.SPATIAL_POS_ENC_DIM', 'bbox_tile_num': 'cfg.BBOX_TILE_NUM'}), '(imdb_file, shuffle=False, one_pass=True, batch_size=cfg.TEST.\n BATCH_SIZE, T_encoder=cfg.T_ENCODER, vocab_question_file=cfg.\n VOCAB_QUESTION_FILE, vocab_answer_file=cfg.VOCAB_ANSWER_FILE,\n feature_type=cfg.FEAT_TYPE, spatial_feature_dir=cfg.SPATIAL_FEATURE_DIR,\n objects_feature_dir=cfg.OBJECTS_FEATURE_DIR, objects_max_num=cfg.W_FEAT,\n scene_graph_file=scene_graph_file, vocab_name_file=cfg.VOCAB_NAME_FILE,\n vocab_attr_file=cfg.VOCAB_ATTR_FILE, spatial_pos_enc_dim=cfg.\n SPATIAL_POS_ENC_DIM, bbox_tile_num=cfg.BBOX_TILE_NUM)\n', (625, 1173), False, 'from util.gqa_train.data_reader import DataReader\n'), ((1353, 1391), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32', '[None, None]'], {}), '(tf.int32, [None, None])\n', (1367, 1391), True, 'import tensorflow as tf\n'), ((1411, 1443), 'tensorflow.placeholder', 'tf.placeholder', (['tf.int32', '[None]'], {}), '(tf.int32, [None])\n', (1425, 1443), True, 'import tensorflow as tf\n'), ((1463, 1533), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, cfg.H_FEAT, cfg.W_FEAT, cfg.D_FEAT]'], {}), '(tf.float32, [None, cfg.H_FEAT, cfg.W_FEAT, cfg.D_FEAT])\n', (1477, 1533), True, 'import tensorflow as tf\n'), ((1559, 1617), 'tensorflow.placeholder', 'tf.placeholder', (['tf.float32', '[None, cfg.H_FEAT, cfg.W_FEAT]'], {}), '(tf.float32, [None, cfg.H_FEAT, cfg.W_FEAT])\n', (1573, 1617), True, 'import tensorflow as tf\n'), ((1631, 1781), 'models_gqa.model.Model', 'Model', (['input_seq_batch', 'seq_length_batch', 'image_feat_batch', 'image_valid_batch'], {'num_vocab': 'num_vocab', 'num_choices': 'num_choices', 'is_training': '(False)'}), '(input_seq_batch, seq_length_batch, image_feat_batch,\n image_valid_batch, num_vocab=num_vocab, num_choices=num_choices,\n is_training=False)\n', (1636, 1781), False, 'from models_gqa.model import Model\n'), ((2239, 2264), 'tensorflow.train.Saver', 'tf.train.Saver', (['var_names'], {}), '(var_names)\n', (2253, 2264), True, 'import tensorflow as tf\n'), ((2405, 2443), 'os.makedirs', 'os.makedirs', (['result_dir'], {'exist_ok': '(True)'}), '(result_dir, exist_ok=True)\n', (2416, 2443), False, 'import os\n'), ((1831, 1875), 'tensorflow.train.ExponentialMovingAverage', 'tf.train.ExponentialMovingAverage', ([], {'decay': '(0.9)'}), '(decay=0.9)\n', (1864, 1875), True, 'import tensorflow as tf\n'), ((2641, 2756), 'os.path.join', 'os.path.join', (['result_dir', "('gqa_eval_preds_%s_%s_%08d.json' % (cfg.TEST.SPLIT_VQA, cfg.EXP_NAME, cfg.\n TEST.ITER))"], {}), "(result_dir, 'gqa_eval_preds_%s_%s_%08d.json' % (cfg.TEST.\n SPLIT_VQA, cfg.EXP_NAME, cfg.TEST.ITER))\n", (2653, 2756), False, 'import os\n'), ((3515, 3550), 'numpy.argmax', 'np.argmax', 
(['vqa_scores_value'], {'axis': '(1)'}), '(vqa_scores_value, axis=1)\n', (3524, 3550), True, 'import numpy as np\n'), ((3573, 3610), 'numpy.sum', 'np.sum', (['(vqa_predictions == vqa_labels)'], {}), '(vqa_predictions == vqa_labels)\n', (3579, 3610), True, 'import numpy as np\n'), ((4142, 4209), 'os.path.join', 'os.path.join', (['result_dir', "('vqa_results_%s.txt' % cfg.TEST.SPLIT_VQA)"], {}), "(result_dir, 'vqa_results_%s.txt' % cfg.TEST.SPLIT_VQA)\n", (4154, 4209), False, 'import os\n'), ((4686, 4728), 'json.dump', 'json.dump', (['output_predictions', 'f'], {'indent': '(2)'}), '(output_predictions, f, indent=2)\n', (4695, 4728), False, 'import json\n'), ((2004, 2025), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (2023, 2025), True, 'import tensorflow as tf\n'), ((2072, 2093), 'tensorflow.global_variables', 'tf.global_variables', ([], {}), '()\n', (2091, 2093), True, 'import tensorflow as tf\n'), ((379, 425), 'tensorflow.GPUOptions', 'tf.GPUOptions', ([], {'allow_growth': 'cfg.GPU_MEM_GROWTH'}), '(allow_growth=cfg.GPU_MEM_GROWTH)\n', (392, 425), True, 'import tensorflow as tf\n')] |
#!/usr/bin/env python3
"""
example of 3D scalar field
If you get this error, ParaView doesn't know your data file format:
TypeError: TestFileReadability argument %Id: %V
"""
from pathlib import Path
import argparse
import paraview.simple as pvs
p = argparse.ArgumentParser()
p.add_argument("fn", help="data file to load with paraview OpenDataFile()")
P = p.parse_args()
fn = Path(P.fn).expanduser()
if not fn.is_file():
raise FileNotFoundError(fn)
pvs.OpenDataFile(str(fn))
| [
"argparse.ArgumentParser",
"pathlib.Path"
] | [((253, 278), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (276, 278), False, 'import argparse\n'), ((380, 390), 'pathlib.Path', 'Path', (['P.fn'], {}), '(P.fn)\n', (384, 390), False, 'from pathlib import Path\n')] |
from django import forms
from .models import *
from server.models import *
class ChoiceFieldNoValidation(forms.ChoiceField):
def validate(self, value):
pass
class SaveSearchForm(forms.ModelForm):
class Meta:
model = SavedSearch
fields = ('name',)
class SearchRowForm(forms.ModelForm):
skip_fields = [
'id',
'machine_group',
'report',
'activity',
'errors',
'warnings',
'install_log',
'puppet_errors',
'install_log_hash'
]
search_fields = []
for f in Machine._meta.fields:
if f.name not in skip_fields:
add = (f.name,f.name,)
search_fields.append(add)
search_field = ChoiceFieldNoValidation(choices=sorted(search_fields))
and_or = ChoiceFieldNoValidation(choices=AND_OR_CHOICES)
def __init__(self, *args, **kwargs):
self.search_group = kwargs.pop('search_group', None)
super(SearchRowForm, self).__init__(*args, **kwargs)
try:
search_group_count = self.search_group.searchrow_set.count()
except:
search_group_count = 0
if search_group_count == 0 and self.search_group:
self.fields['and_or'] = ChoiceFieldNoValidation(
initial='AND',
widget=forms.HiddenInput()
)
class Meta:
model = SearchRow
fields = ('search_models', 'search_field', 'and_or', 'operator','search_term',)
| [
"django.forms.HiddenInput"
] | [((1317, 1336), 'django.forms.HiddenInput', 'forms.HiddenInput', ([], {}), '()\n', (1334, 1336), False, 'from django import forms\n')] |
# In[42]:
from scipy.integrate import odeint
import numpy as np
import matplotlib.pyplot as plt
# In[43]:
# describe the model
def deriv(y, t, N, beta, gamma, delta):
S, E, I, R = y
    dSdt = -beta * S * I / N # S(t) - susceptible (those who can still catch the infection)
    dEdt = beta * S * I / N - delta * E # E(t) - exposed; drains into I at the incubation rate delta
    dIdt = delta * E - gamma * I # I(t) - infected (those with an ongoing infection)
dRdt = gamma * I
return dSdt, dEdt, dIdt, dRdt
# In[44]:
# describe the parameters
N = 2283 # total population, N = S(t) + E(t) + I(t) + R(t)
D = 4.0 # infections last four days
gamma = 1.0 / D # removal rate (how quickly the infected recover)
delta = 1.0 / 5.0 # incubation period of five days
R_0 = 2.5 # basic reproduction number
beta = R_0 * gamma # R_0 = beta/gamma: infections caused per infectious person per unit time (depends on the virus and on how we behave)
S0, E0, I0, R0 = N-1, 1, 0, 0 # initial conditions: one exposed, rest susceptible
# R_t = R_0 * S(t)/N_tot * (1 - b), where b = the effect of policy and behavioural changes
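# Worked numbers for the parameters above (added sketch, not part of the original notebook):
#   gamma = 1/4 = 0.25 per day, delta = 1/5 = 0.2 per day,
#   beta = R_0 * gamma = 2.5 * 0.25 = 0.625 new infections per infectious person per day.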
# In[45]:
t = np.linspace(0, 99, 100) # Grid of time points (in days)
y0 = S0, E0, I0, R0 # Initial conditions vector
# Integrate the SIR equations over the time grid, t.
ret = odeint(deriv, y0, t, args=(N, beta, gamma, delta))
S, E, I, R = ret.T
# In[46]:
def plotsir(t, S, E, I, R):
f, ax = plt.subplots(1,1,figsize=(10,4))
ax.plot(t, S, 'b', alpha=0.7, linewidth=2, label='Susceptible')
ax.plot(t, E, 'y', alpha=0.7, linewidth=2, label='Exposed')
ax.plot(t, I, 'r', alpha=0.7, linewidth=2, label='Infected')
ax.plot(t, R, 'g', alpha=0.7, linewidth=2, label='Recovered')
ax.set_xlabel('Time (days)')
ax.yaxis.set_tick_params(length=0)
ax.xaxis.set_tick_params(length=0)
ax.grid(b=True, which='major', c='w', lw=2, ls='-')
legend = ax.legend()
legend.get_frame().set_alpha(0.5)
for spine in ('top', 'right', 'bottom', 'left'):
ax.spines[spine].set_visible(False)
plt.savefig('Plot.png')
plt.show();
# plot the graph
# In[47]:
plotsir(t, S, E, I, R)
# In[ ]:
| [
"matplotlib.pyplot.savefig",
"scipy.integrate.odeint",
"numpy.linspace",
"matplotlib.pyplot.subplots",
"matplotlib.pyplot.show"
] | [((1045, 1068), 'numpy.linspace', 'np.linspace', (['(0)', '(99)', '(100)'], {}), '(0, 99, 100)\n', (1056, 1068), True, 'import numpy as np\n'), ((1210, 1260), 'scipy.integrate.odeint', 'odeint', (['deriv', 'y0', 't'], {'args': '(N, beta, gamma, delta)'}), '(deriv, y0, t, args=(N, beta, gamma, delta))\n', (1216, 1260), False, 'from scipy.integrate import odeint\n'), ((1332, 1367), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(1)'], {'figsize': '(10, 4)'}), '(1, 1, figsize=(10, 4))\n', (1344, 1367), True, 'import matplotlib.pyplot as plt\n'), ((1935, 1958), 'matplotlib.pyplot.savefig', 'plt.savefig', (['"""Plot.png"""'], {}), "('Plot.png')\n", (1946, 1958), True, 'import matplotlib.pyplot as plt\n'), ((1961, 1971), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1969, 1971), True, 'import matplotlib.pyplot as plt\n')] |
from yezdi.lexer.token import TokenType
from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement
class Parser:
def __init__(self, lexer):
self.lexer = lexer
self.current_token = None
self.peek_token = None
self.next_token()
self.next_token()
self.participants = {}
def next_token(self):
self.current_token, self.peek_token = self.peek_token, self.lexer.next_token()
def parse_program(self):
program = Program()
while self.current_token.type != TokenType.EOF:
statement = self.parse_statement()
if statement:
program.statements.append(statement)
self.next_token()
return program
def parse_statement(self):
if self.current_token.type == TokenType.IDENTIFIER:
return self.parse_line_statement()
elif self.current_token.type == TokenType.TITLE:
return self.parse_title()
return None
def parse_line_statement(self):
participant_literal = self.current_token.literal
if not self.peek_token.type in [TokenType.SOLID_LINE, TokenType.DASHED_LINE]:
return None
self.next_token()
participant = Participant(participant_literal)
line = LineStatement(self.current_token.type)
line.set_source(participant)
if not self.expect_peek(TokenType.IDENTIFIER):
return None
target = Participant(self.current_token.literal)
line.set_target(target)
if not self.expect_peek(TokenType.COLON):
return None
if self.expect_peek(TokenType.IDENTIFIER):
line.set_info(self.current_token.literal)
if self.peek_token.type not in [TokenType.NEWLINE, TokenType.EOF]:
return None
statement = Statement(line)
return statement
def get_participant(self, value):
if value in self.participants:
return self.participants[value]
else:
participant = Participant(value)
self.participants[value] = participant
return participant
def expect_peek(self, token_type):
if self.peek_token.type == token_type:
self.next_token()
return True
else:
return False
def parse_title(self):
if not self.expect_peek(TokenType.IDENTIFIER):
return None
title = Title(self.current_token.literal)
return Statement(title)
class ParserError(Exception):
pass
| [
"yezdi.parser.ast.Program",
"yezdi.parser.ast.Participant",
"yezdi.parser.ast.Statement",
"yezdi.parser.ast.Title",
"yezdi.parser.ast.LineStatement"
] | [((507, 516), 'yezdi.parser.ast.Program', 'Program', ([], {}), '()\n', (514, 516), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n'), ((1260, 1292), 'yezdi.parser.ast.Participant', 'Participant', (['participant_literal'], {}), '(participant_literal)\n', (1271, 1292), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n'), ((1308, 1346), 'yezdi.parser.ast.LineStatement', 'LineStatement', (['self.current_token.type'], {}), '(self.current_token.type)\n', (1321, 1346), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n'), ((1480, 1519), 'yezdi.parser.ast.Participant', 'Participant', (['self.current_token.literal'], {}), '(self.current_token.literal)\n', (1491, 1519), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n'), ((1850, 1865), 'yezdi.parser.ast.Statement', 'Statement', (['line'], {}), '(line)\n', (1859, 1865), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n'), ((2453, 2486), 'yezdi.parser.ast.Title', 'Title', (['self.current_token.literal'], {}), '(self.current_token.literal)\n', (2458, 2486), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n'), ((2502, 2518), 'yezdi.parser.ast.Statement', 'Statement', (['title'], {}), '(title)\n', (2511, 2518), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n'), ((2053, 2071), 'yezdi.parser.ast.Participant', 'Participant', (['value'], {}), '(value)\n', (2064, 2071), False, 'from yezdi.parser.ast import Program, Statement, Participant, Title, LineStatement\n')] |
# list categories in category folder
from os import walk
from os.path import abspath,join, pardir
categories_folder = abspath(join(__file__,pardir,pardir,"category"))
post_folder = abspath(join(__file__,pardir,pardir,"_posts"))
site_categories = []
for root,directories,files in walk(categories_folder):
for f in files:
site_categories.append(f.split(".md")[0])
site_categories = set(site_categories)
for root,directories,files in walk(post_folder):
for f in files:
with open(join(root,f),'r',encoding="utf-8") as fi:
lines = fi.readlines()
for l in lines:
if l.find("categories")==0:
categories = l.split(":")[1]
for c in [" ","[","]","\n"]:
categories = categories.replace(c,"")
categories=categories.split(",")
if len(set(categories)-site_categories)>0:
print(f,set(categories)-site_categories)
break
print("done") | [
"os.path.join",
"os.walk"
] | [((282, 305), 'os.walk', 'walk', (['categories_folder'], {}), '(categories_folder)\n', (286, 305), False, 'from os import walk\n'), ((464, 481), 'os.walk', 'walk', (['post_folder'], {}), '(post_folder)\n', (468, 481), False, 'from os import walk\n'), ((128, 170), 'os.path.join', 'join', (['__file__', 'pardir', 'pardir', '"""category"""'], {}), "(__file__, pardir, pardir, 'category')\n", (132, 170), False, 'from os.path import abspath, join, pardir\n'), ((191, 231), 'os.path.join', 'join', (['__file__', 'pardir', 'pardir', '"""_posts"""'], {}), "(__file__, pardir, pardir, '_posts')\n", (195, 231), False, 'from os.path import abspath, join, pardir\n'), ((521, 534), 'os.path.join', 'join', (['root', 'f'], {}), '(root, f)\n', (525, 534), False, 'from os.path import abspath, join, pardir\n')] |
"""
sentry.options.defaults
~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2014 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from __future__ import absolute_import, print_function
from sentry.logging import LoggingFormat
from sentry.options import (
FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY,
register,
)
from sentry.utils.types import Dict, String, Sequence
# Cache
# register('cache.backend', flags=FLAG_NOSTORE)
# register('cache.options', type=Dict, flags=FLAG_NOSTORE)
# System
register('system.admin-email', flags=FLAG_REQUIRED)
register('system.support-email', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('system.security-email', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('system.databases', type=Dict, flags=FLAG_NOSTORE)
# register('system.debug', default=False, flags=FLAG_NOSTORE)
register('system.rate-limit', default=0, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('system.secret-key', flags=FLAG_NOSTORE)
# Absolute URL to the sentry root directory. Should not include a trailing slash.
register('system.url-prefix', ttl=60, grace=3600, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('system.root-api-key', flags=FLAG_PRIORITIZE_DISK)
register('system.logging-format', default=LoggingFormat.HUMAN, flags=FLAG_NOSTORE)
# Redis
register(
'redis.clusters',
type=Dict,
default={
'default': {
'hosts': {
0: {
'host': '127.0.0.1',
'port': 6379,
}
},
},
},
flags=FLAG_NOSTORE | FLAG_IMMUTABLE
)
register('redis.options', type=Dict, flags=FLAG_NOSTORE)
# symbolizer specifics
register('dsym.cache-path', type=String, default='/tmp/sentry-dsym-cache')
# Mail
register('mail.backend', default='smtp', flags=FLAG_NOSTORE)
register('mail.host', default='localhost', flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.port', default=25, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.username', flags=FLAG_REQUIRED | FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('mail.password', flags=FLAG_REQUIRED | FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('mail.use-tls', default=False, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.subject-prefix', default='[Sentry] ', flags=FLAG_PRIORITIZE_DISK)
register('mail.from', default='root@localhost', flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)
register('mail.list-namespace', type=String, default='localhost', flags=FLAG_NOSTORE)
register('mail.enable-replies', default=False, flags=FLAG_PRIORITIZE_DISK)
register('mail.reply-hostname', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('mail.mailgun-api-key', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
# SMS
register('sms.twilio-account', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('sms.twilio-token', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('sms.twilio-number', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
# U2F
register('u2f.app-id', default='', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('u2f.facets', default=(), type=Sequence,
flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('auth.ip-rate-limit', default=0, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('auth.user-rate-limit', default=0, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
register('api.rate-limit.org-create', default=5, flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)
# Filestore
register('filestore.backend', default='filesystem', flags=FLAG_NOSTORE)
register('filestore.options', default={'location': '/tmp/sentry-files'}, flags=FLAG_NOSTORE)
| [
"sentry.options.register"
] | [((589, 640), 'sentry.options.register', 'register', (['"""system.admin-email"""'], {'flags': 'FLAG_REQUIRED'}), "('system.admin-email', flags=FLAG_REQUIRED)\n", (597, 640), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((641, 720), 'sentry.options.register', 'register', (['"""system.support-email"""'], {'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('system.support-email', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)\n", (649, 720), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((721, 806), 'sentry.options.register', 'register', (['"""system.security-email"""'], {'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('system.security-email', flags=FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK\n )\n", (729, 806), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((802, 861), 'sentry.options.register', 'register', (['"""system.databases"""'], {'type': 'Dict', 'flags': 'FLAG_NOSTORE'}), "('system.databases', type=Dict, flags=FLAG_NOSTORE)\n", (810, 861), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((924, 1015), 'sentry.options.register', 'register', (['"""system.rate-limit"""'], {'default': '(0)', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('system.rate-limit', default=0, flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (932, 1015), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1012, 1061), 'sentry.options.register', 'register', (['"""system.secret-key"""'], {'flags': 'FLAG_NOSTORE'}), "('system.secret-key', flags=FLAG_NOSTORE)\n", (1020, 1061), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1144, 1241), 'sentry.options.register', 'register', (['"""system.url-prefix"""'], {'ttl': '(60)', 'grace': '(3600)', 'flags': '(FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)'}), "('system.url-prefix', ttl=60, grace=3600, flags=FLAG_REQUIRED |\n FLAG_PRIORITIZE_DISK)\n", (1152, 1241), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1238, 1297), 'sentry.options.register', 'register', (['"""system.root-api-key"""'], {'flags': 'FLAG_PRIORITIZE_DISK'}), "('system.root-api-key', flags=FLAG_PRIORITIZE_DISK)\n", (1246, 1297), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1298, 1385), 'sentry.options.register', 'register', (['"""system.logging-format"""'], {'default': 'LoggingFormat.HUMAN', 'flags': 'FLAG_NOSTORE'}), "('system.logging-format', default=LoggingFormat.HUMAN, flags=\n FLAG_NOSTORE)\n", (1306, 1385), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1390, 1545), 'sentry.options.register', 'register', (['"""redis.clusters"""'], {'type': 'Dict', 'default': "{'default': {'hosts': {(0): {'host': '127.0.0.1', 'port': 6379}}}}", 'flags': '(FLAG_NOSTORE | FLAG_IMMUTABLE)'}), "('redis.clusters', type=Dict, default={'default': {'hosts': {(0): {\n 'host': '127.0.0.1', 'port': 6379}}}}, flags=FLAG_NOSTORE | 
FLAG_IMMUTABLE)\n", (1398, 1545), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1684, 1740), 'sentry.options.register', 'register', (['"""redis.options"""'], {'type': 'Dict', 'flags': 'FLAG_NOSTORE'}), "('redis.options', type=Dict, flags=FLAG_NOSTORE)\n", (1692, 1740), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1765, 1839), 'sentry.options.register', 'register', (['"""dsym.cache-path"""'], {'type': 'String', 'default': '"""/tmp/sentry-dsym-cache"""'}), "('dsym.cache-path', type=String, default='/tmp/sentry-dsym-cache')\n", (1773, 1839), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1848, 1908), 'sentry.options.register', 'register', (['"""mail.backend"""'], {'default': '"""smtp"""', 'flags': 'FLAG_NOSTORE'}), "('mail.backend', default='smtp', flags=FLAG_NOSTORE)\n", (1856, 1908), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1909, 1999), 'sentry.options.register', 'register', (['"""mail.host"""'], {'default': '"""localhost"""', 'flags': '(FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)'}), "('mail.host', default='localhost', flags=FLAG_REQUIRED |\n FLAG_PRIORITIZE_DISK)\n", (1917, 1999), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((1996, 2073), 'sentry.options.register', 'register', (['"""mail.port"""'], {'default': '(25)', 'flags': '(FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)'}), "('mail.port', default=25, flags=FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)\n", (2004, 2073), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2074, 2166), 'sentry.options.register', 'register', (['"""mail.username"""'], {'flags': '(FLAG_REQUIRED | FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('mail.username', flags=FLAG_REQUIRED | FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (2082, 2166), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2163, 2255), 'sentry.options.register', 'register', (['"""mail.password"""'], {'flags': '(FLAG_REQUIRED | FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('mail.password', flags=FLAG_REQUIRED | FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (2171, 2255), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2252, 2339), 'sentry.options.register', 'register', (['"""mail.use-tls"""'], {'default': '(False)', 'flags': '(FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)'}), "('mail.use-tls', default=False, flags=FLAG_REQUIRED |\n FLAG_PRIORITIZE_DISK)\n", (2260, 2339), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2336, 2421), 'sentry.options.register', 'register', (['"""mail.subject-prefix"""'], {'default': '"""[Sentry] """', 'flags': 'FLAG_PRIORITIZE_DISK'}), "('mail.subject-prefix', default='[Sentry] ', flags=FLAG_PRIORITIZE_DISK\n )\n", (2344, 2421), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2417, 2512), 'sentry.options.register', 
'register', (['"""mail.from"""'], {'default': '"""root@localhost"""', 'flags': '(FLAG_REQUIRED | FLAG_PRIORITIZE_DISK)'}), "('mail.from', default='root@localhost', flags=FLAG_REQUIRED |\n FLAG_PRIORITIZE_DISK)\n", (2425, 2512), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2509, 2599), 'sentry.options.register', 'register', (['"""mail.list-namespace"""'], {'type': 'String', 'default': '"""localhost"""', 'flags': 'FLAG_NOSTORE'}), "('mail.list-namespace', type=String, default='localhost', flags=\n FLAG_NOSTORE)\n", (2517, 2599), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2595, 2669), 'sentry.options.register', 'register', (['"""mail.enable-replies"""'], {'default': '(False)', 'flags': 'FLAG_PRIORITIZE_DISK'}), "('mail.enable-replies', default=False, flags=FLAG_PRIORITIZE_DISK)\n", (2603, 2669), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2670, 2764), 'sentry.options.register', 'register', (['"""mail.reply-hostname"""'], {'default': '""""""', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('mail.reply-hostname', default='', flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (2678, 2764), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2761, 2856), 'sentry.options.register', 'register', (['"""mail.mailgun-api-key"""'], {'default': '""""""', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('mail.mailgun-api-key', default='', flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (2769, 2856), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2860, 2953), 'sentry.options.register', 'register', (['"""sms.twilio-account"""'], {'default': '""""""', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('sms.twilio-account', default='', flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (2868, 2953), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((2950, 3041), 'sentry.options.register', 'register', (['"""sms.twilio-token"""'], {'default': '""""""', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('sms.twilio-token', default='', flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (2958, 3041), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3038, 3130), 'sentry.options.register', 'register', (['"""sms.twilio-number"""'], {'default': '""""""', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('sms.twilio-number', default='', flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (3046, 3130), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3134, 3219), 'sentry.options.register', 'register', (['"""u2f.app-id"""'], {'default': '""""""', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('u2f.app-id', default='', flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (3142, 3219), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3216, 3316), 'sentry.options.register', 
'register', (['"""u2f.facets"""'], {'default': '()', 'type': 'Sequence', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('u2f.facets', default=(), type=Sequence, flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (3224, 3316), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3323, 3415), 'sentry.options.register', 'register', (['"""auth.ip-rate-limit"""'], {'default': '(0)', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('auth.ip-rate-limit', default=0, flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (3331, 3415), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3412, 3506), 'sentry.options.register', 'register', (['"""auth.user-rate-limit"""'], {'default': '(0)', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('auth.user-rate-limit', default=0, flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (3420, 3506), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3504, 3603), 'sentry.options.register', 'register', (['"""api.rate-limit.org-create"""'], {'default': '(5)', 'flags': '(FLAG_ALLOW_EMPTY | FLAG_PRIORITIZE_DISK)'}), "('api.rate-limit.org-create', default=5, flags=FLAG_ALLOW_EMPTY |\n FLAG_PRIORITIZE_DISK)\n", (3512, 3603), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3613, 3684), 'sentry.options.register', 'register', (['"""filestore.backend"""'], {'default': '"""filesystem"""', 'flags': 'FLAG_NOSTORE'}), "('filestore.backend', default='filesystem', flags=FLAG_NOSTORE)\n", (3621, 3684), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n'), ((3685, 3781), 'sentry.options.register', 'register', (['"""filestore.options"""'], {'default': "{'location': '/tmp/sentry-files'}", 'flags': 'FLAG_NOSTORE'}), "('filestore.options', default={'location': '/tmp/sentry-files'},\n flags=FLAG_NOSTORE)\n", (3693, 3781), False, 'from sentry.options import FLAG_IMMUTABLE, FLAG_NOSTORE, FLAG_PRIORITIZE_DISK, FLAG_REQUIRED, FLAG_ALLOW_EMPTY, register\n')] |
"""Download `Unicodedata` files."""
from __future__ import unicode_literals
import os
import zipfile
import codecs
from urllib.request import urlopen
__version__ = '2.2.0'
HOME = os.path.dirname(os.path.abspath(__file__))
def zip_unicode(output, version):
"""Zip the Unicode files."""
zipper = zipfile.ZipFile(os.path.join(output, 'unicodedata', '%s.zip' % version), 'w', zipfile.ZIP_DEFLATED)
target = os.path.join(output, 'unicodedata', version)
print('Zipping %s.zip...' % version)
for root, dirs, files in os.walk(target):
for file in files:
if file.endswith('.txt'):
zipper.write(os.path.join(root, file), arcname=file)
def unzip_unicode(output, version):
"""Unzip the Unicode files."""
unzipper = zipfile.ZipFile(os.path.join(output, 'unicodedata', '%s.zip' % version))
target = os.path.join(output, 'unicodedata', version)
print('Unzipping %s.zip...' % version)
os.makedirs(target)
for f in unzipper.namelist():
# Do I need backslash on windows? Or is it forward as well?
unzipper.extract(f, target)
def download_unicodedata(version, output=HOME, no_zip=False):
"""Download Unicode data scripts and blocks."""
ver = tuple([int(x) for x in version.split('.')])
files = [
'UnicodeData.txt',
'Scripts.txt',
'Blocks.txt',
'PropList.txt',
'DerivedCoreProperties.txt',
'DerivedNormalizationProps.txt',
'CompositionExclusions.txt',
'PropertyValueAliases.txt',
'PropertyAliases.txt',
'EastAsianWidth.txt',
'LineBreak.txt',
'HangulSyllableType.txt',
'DerivedAge.txt',
'auxiliary/WordBreakProperty.txt',
'auxiliary/SentenceBreakProperty.txt',
'auxiliary/GraphemeBreakProperty.txt',
'extracted/DerivedDecompositionType.txt',
'extracted/DerivedNumericType.txt',
'extracted/DerivedNumericValues.txt',
'extracted/DerivedJoiningType.txt',
'extracted/DerivedJoiningGroup.txt',
'extracted/DerivedCombiningClass.txt',
'emoji/emoji-data.txt'
]
files.append('ScriptExtensions.txt')
files.append('IndicPositionalCategory.txt')
files.append('IndicSyllabicCategory.txt')
files.append('BidiBrackets.txt')
if ver >= (11, 0, 0):
files.append('VerticalOrientation.txt')
http_url = 'http://www.unicode.org/Public/%s/ucd/' % version
ftp_url = 'ftp://ftp.unicode.org/Public/%s/ucd/' % version
destination = os.path.join(output, 'unicodedata', version)
if not os.path.exists(destination):
os.makedirs(destination)
zip_data = not no_zip
for f in files:
file_location = os.path.join(destination, os.path.basename(f))
retrieved = False
if not os.path.exists(file_location):
for url in (ftp_url, http_url):
furl = url + f
try:
print('Downloading: %s --> %s' % (furl, file_location))
response = urlopen(furl, timeout=30)
data = response.read()
except Exception:
print('Failed: %s' % url)
continue
with codecs.open(file_location, 'w', encoding='utf-8') as uf:
uf.write(data.decode('utf-8'))
retrieved = True
break
if not retrieved:
print('Failed to acquire all needed Unicode files!')
break
else:
retrieved = True
print('Skipping: found %s' % file_location)
if not retrieved:
zip_data = False
break
if zip_data and not os.path.exists(os.path.join(output, 'unicodedata', '%s.zip' % version)):
zip_unicode(output, version)
def get_unicodedata(version, output=HOME, no_zip=False):
"""Ensure we have Unicode data to generate Unicode tables."""
target = os.path.join(output, 'unicodedata', version)
zip_target = os.path.join(output, 'unicodedata', '%s.zip' % version)
if not os.path.exists(target) and os.path.exists(zip_target):
unzip_unicode(output, version)
# Download missing files if any. Zip if required.
download_unicodedata(version, output, no_zip)
if __name__ == '__main__':
import argparse
import unicodedata
parser = argparse.ArgumentParser(prog='unidatadownload', description='Generate a unicode property table.')
parser.add_argument('--version', action='version', version="%(prog)s " + __version__)
parser.add_argument('--output', default=HOME, help='Output file.')
parser.add_argument('--unicode-version', default=None, help='Force a specific Unicode version.')
args = parser.parse_args()
if args.unicode_version is None:
version = unicodedata.unidata_version
else:
version = args.unicode_version
get_unicodedata(version, output=args.output)
| [
"os.path.exists",
"argparse.ArgumentParser",
"os.makedirs",
"os.path.join",
"os.path.basename",
"os.path.abspath",
"codecs.open",
"urllib.request.urlopen",
"os.walk"
] | [((197, 222), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (212, 222), False, 'import os\n'), ((420, 464), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', 'version'], {}), "(output, 'unicodedata', version)\n", (432, 464), False, 'import os\n'), ((537, 552), 'os.walk', 'os.walk', (['target'], {}), '(target)\n', (544, 552), False, 'import os\n'), ((863, 907), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', 'version'], {}), "(output, 'unicodedata', version)\n", (875, 907), False, 'import os\n'), ((957, 976), 'os.makedirs', 'os.makedirs', (['target'], {}), '(target)\n', (968, 976), False, 'import os\n'), ((2541, 2585), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', 'version'], {}), "(output, 'unicodedata', version)\n", (2553, 2585), False, 'import os\n'), ((3983, 4027), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', 'version'], {}), "(output, 'unicodedata', version)\n", (3995, 4027), False, 'import os\n'), ((4045, 4100), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', "('%s.zip' % version)"], {}), "(output, 'unicodedata', '%s.zip' % version)\n", (4057, 4100), False, 'import os\n'), ((4398, 4500), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""unidatadownload"""', 'description': '"""Generate a unicode property table."""'}), "(prog='unidatadownload', description=\n 'Generate a unicode property table.')\n", (4421, 4500), False, 'import argparse\n'), ((323, 378), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', "('%s.zip' % version)"], {}), "(output, 'unicodedata', '%s.zip' % version)\n", (335, 378), False, 'import os\n'), ((793, 848), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', "('%s.zip' % version)"], {}), "(output, 'unicodedata', '%s.zip' % version)\n", (805, 848), False, 'import os\n'), ((2597, 2624), 'os.path.exists', 'os.path.exists', (['destination'], {}), '(destination)\n', (2611, 2624), False, 'import os\n'), ((2634, 2658), 'os.makedirs', 'os.makedirs', (['destination'], {}), '(destination)\n', (2645, 2658), False, 'import os\n'), ((4140, 4166), 'os.path.exists', 'os.path.exists', (['zip_target'], {}), '(zip_target)\n', (4154, 4166), False, 'import os\n'), ((2757, 2776), 'os.path.basename', 'os.path.basename', (['f'], {}), '(f)\n', (2773, 2776), False, 'import os\n'), ((2819, 2848), 'os.path.exists', 'os.path.exists', (['file_location'], {}), '(file_location)\n', (2833, 2848), False, 'import os\n'), ((4113, 4135), 'os.path.exists', 'os.path.exists', (['target'], {}), '(target)\n', (4127, 4135), False, 'import os\n'), ((3749, 3804), 'os.path.join', 'os.path.join', (['output', '"""unicodedata"""', "('%s.zip' % version)"], {}), "(output, 'unicodedata', '%s.zip' % version)\n", (3761, 3804), False, 'import os\n'), ((648, 672), 'os.path.join', 'os.path.join', (['root', 'file'], {}), '(root, file)\n', (660, 672), False, 'import os\n'), ((3053, 3078), 'urllib.request.urlopen', 'urlopen', (['furl'], {'timeout': '(30)'}), '(furl, timeout=30)\n', (3060, 3078), False, 'from urllib.request import urlopen\n'), ((3252, 3301), 'codecs.open', 'codecs.open', (['file_location', '"""w"""'], {'encoding': '"""utf-8"""'}), "(file_location, 'w', encoding='utf-8')\n", (3263, 3301), False, 'import codecs\n')] |
# See LICENSE for licensing information.
#
# Copyright (c) 2021 Regents of the University of California and The Board
# of Regents for the Oklahoma Agricultural and Mechanical College
# (acting for and on behalf of Oklahoma State University)
# All rights reserved.
#
import debug
import datetime
from policy import associativity
from globals import OPTS, print_time
class cache:
"""
This is not a design module, but contains a cache design instance.
"""
def __init__(self, cache_config, name):
cache_config.set_local_config(self)
self.name = name
# Import the design module of the cache
if OPTS.associativity == associativity.DIRECT:
from direct_cache import direct_cache as cache
elif OPTS.associativity == associativity.N_WAY:
from n_way_cache import n_way_cache as cache
elif OPTS.associativity == associativity.FULLY:
# TODO: from full_cache import full_cache as cache
debug.error("Fully associative cache is not supported at the moment.", -1)
else:
debug.error("Invalid associativity.", -1)
self.c = cache(cache_config, name)
def config_write(self, paths):
""" Save the config files. """
self.c.config_write(paths)
def verilog_write(self, path):
""" Save the Verilog file. """
self.c.verilog_write(path)
def save(self):
""" Save all the output files. """
debug.print_raw("Saving output files...")
# Write the config files
start_time = datetime.datetime.now()
cpaths = {
"data": OPTS.output_path + OPTS.data_array_name + "_config.py",
"tag": OPTS.output_path + OPTS.tag_array_name + "_config.py",
"use": OPTS.output_path + OPTS.use_array_name + "_config.py"
}
if not OPTS.replacement_policy.has_sram_array(): del cpaths["use"]
for k, cpath in cpaths.items():
debug.print_raw("Config: Writing to {}".format(cpath))
self.config_write(cpaths)
print_time("Config", datetime.datetime.now(), start_time)
# Write the Verilog file
start_time = datetime.datetime.now()
vpath = OPTS.output_path + self.c.name + ".v"
debug.print_raw("Verilog: Writing to {}".format(vpath))
self.verilog_write(vpath)
print_time("Verilog", datetime.datetime.now(), start_time) | [
"n_way_cache.n_way_cache",
"debug.print_raw",
"debug.error",
"datetime.datetime.now",
"globals.OPTS.replacement_policy.has_sram_array"
] | [((1151, 1176), 'n_way_cache.n_way_cache', 'cache', (['cache_config', 'name'], {}), '(cache_config, name)\n', (1156, 1176), True, 'from n_way_cache import n_way_cache as cache\n'), ((1475, 1516), 'debug.print_raw', 'debug.print_raw', (['"""Saving output files..."""'], {}), "('Saving output files...')\n", (1490, 1516), False, 'import debug\n'), ((1572, 1595), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (1593, 1595), False, 'import datetime\n'), ((2185, 2208), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2206, 2208), False, 'import datetime\n'), ((1863, 1903), 'globals.OPTS.replacement_policy.has_sram_array', 'OPTS.replacement_policy.has_sram_array', ([], {}), '()\n', (1901, 1903), False, 'from globals import OPTS, print_time\n'), ((2093, 2116), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2114, 2116), False, 'import datetime\n'), ((2391, 2414), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2412, 2414), False, 'import datetime\n'), ((990, 1064), 'debug.error', 'debug.error', (['"""Fully associative cache is not supported at the moment."""', '(-1)'], {}), "('Fully associative cache is not supported at the moment.', -1)\n", (1001, 1064), False, 'import debug\n'), ((1091, 1132), 'debug.error', 'debug.error', (['"""Invalid associativity."""', '(-1)'], {}), "('Invalid associativity.', -1)\n", (1102, 1132), False, 'import debug\n')] |
#!/usr/bin/env python
#
# Copyright 2017-2018 Amazon.com, Inc. and its affiliates. All Rights Reserved.
#
# Licensed under the MIT License. See the LICENSE accompanying this file
# for the specific language governing permissions and limitations under
# the License.
#
#
# Copy this script to /sbin/mount.efs and make sure it is executable.
#
# You will be able to mount an EFS file system by its short name, by adding it
# to /etc/fstab. The syntax of an fstab entry is:
#
# [Device] [Mount Point] [File System Type] [Options] [Dump] [Pass]
#
# Add an entry like this:
#
# fs-deadbeef /mount_point efs _netdev 0 0
#
# Using the 'efs' type will cause '/sbin/mount.efs' to be called by 'mount -a'
# for this file system. The '_netdev' option tells the init system that the
# 'efs' type is a networked file system type. This has been tested with systemd
# (Amazon Linux 2, CentOS 7, RHEL 7, Debian 9, and Ubuntu 16.04), and upstart
# (Amazon Linux 2017.09).
#
# Once there is an entry in fstab, the file system can be mounted with:
#
# sudo mount /mount_point
#
# The script will add recommended mount options, if not provided in fstab.
import base64
import errno
import hashlib
import hmac
import json
import logging
import os
import pwd
import random
import re
import socket
import subprocess
import sys
import threading
import time
from contextlib import contextmanager
from datetime import datetime, timedelta
from logging.handlers import RotatingFileHandler
try:
import ConfigParser
from ConfigParser import NoOptionError, NoSectionError
except ImportError:
from configparser import ConfigParser, NoOptionError, NoSectionError
try:
from urllib.parse import quote_plus
except ImportError:
from urllib import quote_plus
try:
from urllib2 import URLError, HTTPError, build_opener, urlopen, Request, HTTPHandler
from urllib import urlencode
except ImportError:
from urllib.request import urlopen, Request
from urllib.error import URLError, HTTPError
from urllib.parse import urlencode
try:
import botocore.session
from botocore.exceptions import ClientError, NoCredentialsError, EndpointConnectionError
BOTOCORE_PRESENT = True
except ImportError:
BOTOCORE_PRESENT = False
VERSION = '1.28.2'
SERVICE = 'elasticfilesystem'
CONFIG_FILE = '/etc/amazon/efs/efs-utils.conf'
CONFIG_SECTION = 'mount'
CLIENT_INFO_SECTION = 'client-info'
CLIENT_SOURCE_STR_LEN_LIMIT = 100
CLOUDWATCH_LOG_SECTION = 'cloudwatch-log'
DEFAULT_CLOUDWATCH_LOG_GROUP = '/aws/efs/utils'
DEFAULT_RETENTION_DAYS = 14
# Cloudwatchlog agent dict includes cloudwatchlog botocore client, cloudwatchlog group name, cloudwatchlog stream name
CLOUDWATCHLOG_AGENT = None
LOG_DIR = '/var/log/amazon/efs'
LOG_FILE = 'mount.log'
STATE_FILE_DIR = '/var/run/efs'
PRIVATE_KEY_FILE = '/etc/amazon/efs/privateKey.pem'
DATE_ONLY_FORMAT = '%Y%m%d'
SIGV4_DATETIME_FORMAT = '%Y%m%dT%H%M%SZ'
CERT_DATETIME_FORMAT = '%y%m%d%H%M%SZ'
AWS_CREDENTIALS_FILE = os.path.expanduser(os.path.join('~' + pwd.getpwuid(os.getuid()).pw_name, '.aws', 'credentials'))
AWS_CONFIG_FILE = os.path.expanduser(os.path.join('~' + pwd.getpwuid(os.getuid()).pw_name, '.aws', 'config'))
CA_CONFIG_BODY = """dir = %s
RANDFILE = $dir/database/.rand
[ ca ]
default_ca = local_ca
[ local_ca ]
database = $dir/database/index.txt
serial = $dir/database/serial
private_key = %s
cert = $dir/certificate.pem
new_certs_dir = $dir/certs
default_md = sha256
preserve = no
policy = efsPolicy
x509_extensions = v3_ca
[ efsPolicy ]
CN = supplied
[ req ]
prompt = no
distinguished_name = req_distinguished_name
[ req_distinguished_name ]
CN = %s
%s
%s
%s
"""
# SigV4 Auth
ALGORITHM = 'AWS4-HMAC-SHA256'
AWS4_REQUEST = 'aws4_request'
HTTP_REQUEST_METHOD = 'GET'
CANONICAL_URI = '/'
CANONICAL_HEADERS_DICT = {
'host': '%s'
}
CANONICAL_HEADERS = '\n'.join(['%s:%s' % (k, v) for k, v in sorted(CANONICAL_HEADERS_DICT.items())])
SIGNED_HEADERS = ';'.join(CANONICAL_HEADERS_DICT.keys())
REQUEST_PAYLOAD = ''
FS_ID_RE = re.compile('^(?P<fs_id>fs-[0-9a-f]+)$')
EFS_FQDN_RE = re.compile(r'^(?P<fs_id>fs-[0-9a-f]+)\.efs\.(?P<region>[a-z0-9-]+)\.(?P<dns_name_suffix>[a-z0-9.]+)$')
AP_ID_RE = re.compile('^fsap-[0-9a-f]{17}$')
CREDENTIALS_KEYS = ['AccessKeyId', 'SecretAccessKey', 'Token']
ECS_URI_ENV = 'AWS_CONTAINER_CREDENTIALS_RELATIVE_URI'
ECS_TASK_METADATA_API = 'http://169.254.170.2'
WEB_IDENTITY_ROLE_ARN_ENV = 'AWS_ROLE_ARN'
WEB_IDENTITY_TOKEN_FILE_ENV = 'AWS_WEB_IDENTITY_TOKEN_FILE'
STS_ENDPOINT_URL = 'https://sts.amazonaws.com/'
INSTANCE_METADATA_TOKEN_URL = 'http://169.254.169.254/latest/api/token'
INSTANCE_METADATA_SERVICE_URL = 'http://169.254.169.254/latest/dynamic/instance-identity/document/'
INSTANCE_IAM_URL = 'http://169.254.169.254/latest/meta-data/iam/security-credentials/'
SECURITY_CREDS_ECS_URI_HELP_URL = 'https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html'
SECURITY_CREDS_WEBIDENTITY_HELP_URL = 'https://docs.aws.amazon.com/eks/latest/userguide/iam-roles-for-service-accounts.html'
SECURITY_CREDS_IAM_ROLE_HELP_URL = 'https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html'
DEFAULT_STUNNEL_VERIFY_LEVEL = 2
DEFAULT_STUNNEL_CAFILE = '/etc/amazon/efs/efs-utils.crt'
NOT_BEFORE_MINS = 15
NOT_AFTER_HOURS = 3
EFS_ONLY_OPTIONS = [
'accesspoint',
'awscredsuri',
'awsprofile',
'cafile',
'iam',
'netns',
'noocsp',
'ocsp',
'tls',
'tlsport',
'verify'
]
UNSUPPORTED_OPTIONS = [
'capath'
]
STUNNEL_GLOBAL_CONFIG = {
'fips': 'no',
'foreground': 'yes',
'socket': [
'l:SO_REUSEADDR=yes',
'a:SO_BINDTODEVICE=lo',
],
}
STUNNEL_EFS_CONFIG = {
'client': 'yes',
'accept': '127.0.0.1:%s',
'connect': '%s:2049',
'sslVersion': 'TLSv1.2',
'renegotiation': 'no',
'TIMEOUTbusy': '20',
'TIMEOUTclose': '0',
'TIMEOUTidle': '70',
'delay': 'yes',
}
WATCHDOG_SERVICE = 'amazon-efs-mount-watchdog'
SYSTEM_RELEASE_PATH = '/etc/system-release'
OS_RELEASE_PATH = '/etc/os-release'
RHEL8_RELEASE_NAME = 'Red Hat Enterprise Linux release 8'
CENTOS8_RELEASE_NAME = 'CentOS Linux release 8'
FEDORA_RELEASE_NAME = 'Fedora release'
SUSE_RELEASE_NAME = 'openSUSE Leap'
SKIP_NO_LIBWRAP_RELEASES = [RHEL8_RELEASE_NAME, CENTOS8_RELEASE_NAME, FEDORA_RELEASE_NAME, SUSE_RELEASE_NAME]
def fatal_error(user_message, log_message=None, exit_code=1):
if log_message is None:
log_message = user_message
sys.stderr.write('%s\n' % user_message)
logging.error(log_message)
publish_cloudwatch_log(CLOUDWATCHLOG_AGENT, 'Mount failed, %s' % log_message)
sys.exit(exit_code)
def get_target_region(config):
def _fatal_error(message):
fatal_error('Error retrieving region. Please set the "region" parameter in the efs-utils configuration file.', message)
metadata_exception = 'Unknown error'
try:
return config.get(CONFIG_SECTION, 'region')
except NoOptionError:
pass
try:
return get_region_from_instance_metadata()
except Exception as e:
metadata_exception = e
logging.warning('Region not found in config file and metadata service call failed, falling back '
'to legacy "dns_name_format" check')
try:
region = get_region_from_legacy_dns_format(config)
sys.stdout.write('Warning: region obtained from "dns_name_format" field. Please set the "region" '
'parameter in the efs-utils configuration file.')
return region
except Exception:
logging.warning('Legacy check for region in "dns_name_format" failed')
_fatal_error(metadata_exception)
def get_region_from_instance_metadata():
instance_identity = get_instance_identity_info_from_instance_metadata('region')
if not instance_identity:
raise Exception("Cannot retrieve region from instance_metadata")
return instance_identity
def get_instance_identity_info_from_instance_metadata(property):
ec2_metadata_unsuccessful_resp = 'Unsuccessful retrieval of EC2 metadata at %s.' % INSTANCE_METADATA_SERVICE_URL
ec2_metadata_url_error_msg = 'Unable to reach %s to retrieve EC2 instance metadata.' % INSTANCE_METADATA_SERVICE_URL
instance_identity = url_request_helper(INSTANCE_METADATA_SERVICE_URL, ec2_metadata_unsuccessful_resp,
ec2_metadata_url_error_msg, retry_with_new_header_token=True)
if instance_identity:
try:
return instance_identity[property]
except KeyError as e:
logging.warning('%s not present in %s: %s' % (property, instance_identity, e))
except TypeError as e:
logging.warning('response %s is not a json object: %s' % (instance_identity, e))
return None
def get_region_from_legacy_dns_format(config):
"""
For backwards compatibility check dns_name_format to obtain the target region. This functionality
should only be used if region is not present in the config file and metadata calls fail.
"""
dns_name_format = config.get(CONFIG_SECTION, 'dns_name_format')
if '{region}' not in dns_name_format:
split_dns_name_format = dns_name_format.split('.')
if '{dns_name_suffix}' in dns_name_format:
return split_dns_name_format[-2]
elif 'amazonaws.com' in dns_name_format:
return split_dns_name_format[-3]
raise Exception('Region not found in dns_name_format')
def get_aws_ec2_metadata_token():
try:
opener = build_opener(HTTPHandler)
request = Request(INSTANCE_METADATA_TOKEN_URL)
request.add_header('X-aws-ec2-metadata-token-ttl-seconds', 21600)
request.get_method = lambda: 'PUT'
res = opener.open(request)
return res.read()
except NameError:
headers = {'X-aws-ec2-metadata-token-ttl-seconds': 21600}
req = Request(INSTANCE_METADATA_TOKEN_URL, headers=headers, method='PUT')
res = urlopen(req)
return res.read()
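# Note: the fallback above handles the Python 3 import style - when the urllib2-style names
# (build_opener/HTTPHandler) are not bound, the token PUT is issued through urllib.request's
# Request, whose 'method' keyword is only available on Python 3.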
def get_aws_security_credentials(use_iam, awsprofile=None, aws_creds_uri=None):
"""
    Look up AWS security credentials (access key ID and secret access key). Adapted credentials provider chain from:
https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html and
https://docs.aws.amazon.com/sdk-for-java/v1/developer-guide/credentials.html
"""
if not use_iam:
return None, None
    # attempt to look up AWS security credentials through the credentials URI the ECS agent generated
if aws_creds_uri:
return get_aws_security_credentials_from_ecs(aws_creds_uri, True)
    # attempt to look up AWS security credentials in the AWS credentials file (~/.aws/credentials)
    # and config file (~/.aws/config) with the given awsprofile
if awsprofile:
return get_aws_security_credentials_from_awsprofile(awsprofile, True)
    # attempt to look up AWS security credentials through the AWS_CONTAINER_CREDENTIALS_RELATIVE_URI environment variable
if ECS_URI_ENV in os.environ:
credentials, credentials_source = get_aws_security_credentials_from_ecs(os.environ[ECS_URI_ENV], False)
if credentials and credentials_source:
return credentials, credentials_source
    # attempt to look up AWS security credentials through AssumeRoleWithWebIdentity
# (e.g. for IAM Role for Service Accounts (IRSA) approach on EKS)
if WEB_IDENTITY_ROLE_ARN_ENV in os.environ and WEB_IDENTITY_TOKEN_FILE_ENV in os.environ:
credentials, credentials_source = get_aws_security_credentials_from_webidentity(
os.environ[WEB_IDENTITY_ROLE_ARN_ENV],
os.environ[WEB_IDENTITY_TOKEN_FILE_ENV],
False
)
if credentials and credentials_source:
return credentials, credentials_source
    # attempt to look up AWS security credentials with the IAM role name attached to the instance
    # through the IAM role name security credentials lookup uri
iam_role_name = get_iam_role_name()
if iam_role_name:
credentials, credentials_source = get_aws_security_credentials_from_instance_metadata(iam_role_name)
if credentials and credentials_source:
return credentials, credentials_source
error_msg = 'AWS Access Key ID and Secret Access Key are not found in AWS credentials file (%s), config file (%s), ' \
'from ECS credentials relative uri, or from the instance security credentials service' % \
(AWS_CREDENTIALS_FILE, AWS_CONFIG_FILE)
fatal_error(error_msg, error_msg)
def get_aws_security_credentials_from_awsprofile(awsprofile, is_fatal=False):
for file_path in [AWS_CREDENTIALS_FILE, AWS_CONFIG_FILE]:
if os.path.exists(file_path):
credentials = credentials_file_helper(file_path, awsprofile)
if credentials['AccessKeyId']:
return credentials, os.path.basename(file_path) + ':' + awsprofile
# Fail if credentials cannot be fetched from the given awsprofile
if is_fatal:
log_message = 'AWS security credentials not found in %s or %s under named profile [%s]' % \
(AWS_CREDENTIALS_FILE, AWS_CONFIG_FILE, awsprofile)
fatal_error(log_message)
else:
return None, None
def get_aws_security_credentials_from_ecs(aws_creds_uri, is_fatal=False):
ecs_uri = ECS_TASK_METADATA_API + aws_creds_uri
ecs_unsuccessful_resp = 'Unsuccessful retrieval of AWS security credentials at %s.' % ecs_uri
ecs_url_error_msg = 'Unable to reach %s to retrieve AWS security credentials. See %s for more info.' \
% (ecs_uri, SECURITY_CREDS_ECS_URI_HELP_URL)
ecs_security_dict = url_request_helper(ecs_uri, ecs_unsuccessful_resp, ecs_url_error_msg)
if ecs_security_dict and all(k in ecs_security_dict for k in CREDENTIALS_KEYS):
return ecs_security_dict, 'ecs:' + aws_creds_uri
# Fail if credentials cannot be fetched from the given aws_creds_uri
if is_fatal:
fatal_error(ecs_unsuccessful_resp, ecs_unsuccessful_resp)
else:
return None, None
def get_aws_security_credentials_from_webidentity(role_arn, token_file, is_fatal=False):
try:
with open(token_file, 'r') as f:
token = f.read()
except Exception as e:
if is_fatal:
unsuccessful_resp = 'Error reading token file %s: %s' % (token_file, e)
fatal_error(unsuccessful_resp, unsuccessful_resp)
else:
return None, None
webidentity_url = STS_ENDPOINT_URL + '?' + urlencode({
'Version': '2011-06-15',
'Action': 'AssumeRoleWithWebIdentity',
'RoleArn': role_arn,
'RoleSessionName': 'efs-mount-helper',
'WebIdentityToken': token
})
unsuccessful_resp = 'Unsuccessful retrieval of AWS security credentials at %s.' % STS_ENDPOINT_URL
url_error_msg = 'Unable to reach %s to retrieve AWS security credentials. See %s for more info.' % \
(STS_ENDPOINT_URL, SECURITY_CREDS_WEBIDENTITY_HELP_URL)
resp = url_request_helper(webidentity_url, unsuccessful_resp, url_error_msg, headers={'Accept': 'application/json'})
if resp:
creds = resp \
.get('AssumeRoleWithWebIdentityResponse', {}) \
.get('AssumeRoleWithWebIdentityResult', {}) \
.get('Credentials', {})
if all(k in creds for k in ['AccessKeyId', 'SecretAccessKey', 'SessionToken']):
return {
'AccessKeyId': creds['AccessKeyId'],
'SecretAccessKey': creds['SecretAccessKey'],
'Token': creds['SessionToken']
}, 'webidentity:' + ','.join([role_arn, token_file])
# Fail if credentials cannot be fetched from the given aws_creds_uri
if is_fatal:
fatal_error(unsuccessful_resp, unsuccessful_resp)
else:
return None, None
def get_aws_security_credentials_from_instance_metadata(iam_role_name):
security_creds_lookup_url = INSTANCE_IAM_URL + iam_role_name
unsuccessful_resp = 'Unsuccessful retrieval of AWS security credentials at %s.' % security_creds_lookup_url
url_error_msg = 'Unable to reach %s to retrieve AWS security credentials. See %s for more info.' % \
(security_creds_lookup_url, SECURITY_CREDS_IAM_ROLE_HELP_URL)
iam_security_dict = url_request_helper(security_creds_lookup_url, unsuccessful_resp,
url_error_msg, retry_with_new_header_token=True)
if iam_security_dict and all(k in iam_security_dict for k in CREDENTIALS_KEYS):
return iam_security_dict, 'metadata:'
else:
return None, None
def get_iam_role_name():
iam_role_unsuccessful_resp = 'Unsuccessful retrieval of IAM role name at %s.' % INSTANCE_IAM_URL
iam_role_url_error_msg = 'Unable to reach %s to retrieve IAM role name. See %s for more info.' % \
(INSTANCE_IAM_URL, SECURITY_CREDS_IAM_ROLE_HELP_URL)
iam_role_name = url_request_helper(INSTANCE_IAM_URL, iam_role_unsuccessful_resp,
iam_role_url_error_msg, retry_with_new_header_token=True)
return iam_role_name
def credentials_file_helper(file_path, awsprofile):
aws_credentials_configs = read_config(file_path)
credentials = {'AccessKeyId': None, 'SecretAccessKey': None, 'Token': None}
try:
access_key = aws_credentials_configs.get(awsprofile, 'aws_access_key_id')
secret_key = aws_credentials_configs.get(awsprofile, 'aws_secret_access_key')
session_token = aws_credentials_configs.get(awsprofile, 'aws_session_token')
credentials['AccessKeyId'] = access_key
credentials['SecretAccessKey'] = secret_key
credentials['Token'] = session_token
except NoOptionError as e:
if 'aws_access_key_id' in str(e) or 'aws_secret_access_key' in str(e):
logging.debug('aws_access_key_id or aws_secret_access_key not found in %s under named profile [%s]', file_path,
awsprofile)
if 'aws_session_token' in str(e):
logging.debug('aws_session_token not found in %s', file_path)
credentials['AccessKeyId'] = aws_credentials_configs.get(awsprofile, 'aws_access_key_id')
credentials['SecretAccessKey'] = aws_credentials_configs.get(awsprofile, 'aws_secret_access_key')
except NoSectionError:
logging.debug('No [%s] section found in config file %s', awsprofile, file_path)
return credentials
def get_aws_profile(options, use_iam):
awsprofile = options.get('awsprofile')
if not awsprofile and use_iam:
for file_path in [AWS_CREDENTIALS_FILE, AWS_CONFIG_FILE]:
aws_credentials_configs = read_config(file_path)
# check if aws access key id is found under [default] section in current file and return 'default' if so
try:
access_key = aws_credentials_configs.get('default', 'aws_access_key_id')
if access_key is not None:
return 'default'
except (NoSectionError, NoOptionError):
continue
return awsprofile
def url_request_helper(url, unsuccessful_resp, url_error_msg, headers={}, retry_with_new_header_token=False):
try:
req = Request(url)
for k, v in headers.items():
req.add_header(k, v)
request_resp = urlopen(req, timeout=1)
return get_resp_obj(request_resp, url, unsuccessful_resp)
except HTTPError as e:
        # For instances enabled with IMDSv2, an Unauthorized 401 error will be thrown;
        # to retrieve metadata, the request header must embed the metadata token
if e.code == 401 and retry_with_new_header_token:
token = get_aws_ec2_metadata_token()
req.add_header('X-aws-ec2-metadata-token', token)
request_resp = urlopen(req, timeout=1)
return get_resp_obj(request_resp, url, unsuccessful_resp)
err_msg = 'Unable to reach the url at %s: status=%d, reason is %s' % (url, e.code, e.reason)
except URLError as e:
err_msg = 'Unable to reach the url at %s, reason is %s' % (url, e.reason)
if err_msg:
logging.debug('%s %s', url_error_msg, err_msg)
return None
def get_resp_obj(request_resp, url, unsuccessful_resp):
if request_resp.getcode() != 200:
logging.debug(unsuccessful_resp + ' %s: ResponseCode=%d', url, request_resp.getcode())
return None
resp_body = request_resp.read()
resp_body_type = type(resp_body)
try:
if resp_body_type is str:
resp_dict = json.loads(resp_body)
else:
resp_dict = json.loads(resp_body.decode(request_resp.headers.get_content_charset() or 'us-ascii'))
return resp_dict
except ValueError as e:
logging.info('ValueError parsing "%s" into json: %s. Returning response body.' % (str(resp_body), e))
return resp_body if resp_body_type is str else resp_body.decode('utf-8')
def parse_options(options):
opts = {}
for o in options.split(','):
if '=' in o:
k, v = o.split('=')
opts[k] = v
else:
opts[o] = None
return opts
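# Illustrative example (hypothetical values): parse_options('tls,iam,tlsport=20049') returns
# {'tls': None, 'iam': None, 'tlsport': '20049'}.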
def get_tls_port_range(config):
lower_bound = config.getint(CONFIG_SECTION, 'port_range_lower_bound')
upper_bound = config.getint(CONFIG_SECTION, 'port_range_upper_bound')
if lower_bound >= upper_bound:
fatal_error('Configuration option "port_range_upper_bound" defined as %d '
'must be strictly greater than "port_range_lower_bound" defined as %d.'
% (upper_bound, lower_bound))
return lower_bound, upper_bound
def choose_tls_port(config, options):
if 'tlsport' in options:
ports_to_try = [int(options['tlsport'])]
else:
lower_bound, upper_bound = get_tls_port_range(config)
tls_ports = list(range(lower_bound, upper_bound))
# Choose a random midpoint, and then try ports in-order from there
mid = random.randrange(len(tls_ports))
ports_to_try = tls_ports[mid:] + tls_ports[:mid]
assert len(tls_ports) == len(ports_to_try)
sock = socket.socket()
for tls_port in ports_to_try:
try:
sock.bind(('localhost', tls_port))
sock.close()
return tls_port
except socket.error:
continue
sock.close()
if 'tlsport' in options:
fatal_error('Specified port [%s] is unavailable. Try selecting a different port.' % options['tlsport'])
else:
fatal_error('Failed to locate an available port in the range [%d, %d], try specifying a different port range in %s'
% (lower_bound, upper_bound, CONFIG_FILE))
def is_ocsp_enabled(config, options):
if 'ocsp' in options:
return True
elif 'noocsp' in options:
return False
else:
return config.getboolean(CONFIG_SECTION, 'stunnel_check_cert_validity')
def get_mount_specific_filename(fs_id, mountpoint, tls_port):
return '%s.%s.%d' % (fs_id, os.path.abspath(mountpoint).replace(os.sep, '.').lstrip('.'), tls_port)
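# Illustrative example (hypothetical values): get_mount_specific_filename('fs-deadbeef', '/mnt/efs', 20049)
# returns 'fs-deadbeef.mnt.efs.20049' - path separators become '.' and the leading '.' is stripped.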
def serialize_stunnel_config(config, header=None):
lines = []
if header:
lines.append('[%s]' % header)
for k, v in config.items():
if type(v) is list:
for item in v:
lines.append('%s = %s' % (k, item))
else:
lines.append('%s = %s' % (k, v))
return lines
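# Illustrative example: serialize_stunnel_config({'accept': '127.0.0.1:20049'}, header='efs')
# returns ['[efs]', 'accept = 127.0.0.1:20049'].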
def add_stunnel_ca_options(efs_config, config, options):
if 'cafile' in options:
stunnel_cafile = options['cafile']
else:
try:
stunnel_cafile = config.get(CONFIG_SECTION, 'stunnel_cafile')
except NoOptionError:
logging.debug('No CA file configured, using default CA file %s', DEFAULT_STUNNEL_CAFILE)
stunnel_cafile = DEFAULT_STUNNEL_CAFILE
if not os.path.exists(stunnel_cafile):
fatal_error('Failed to find certificate authority file for verification',
'Failed to find CAfile "%s"' % stunnel_cafile)
efs_config['CAfile'] = stunnel_cafile
def is_stunnel_option_supported(stunnel_output, stunnel_option_name):
supported = False
for line in stunnel_output:
if line.startswith(stunnel_option_name):
supported = True
break
if not supported:
logging.warning('stunnel does not support "%s"', stunnel_option_name)
return supported
def get_version_specific_stunnel_options():
stunnel_command = [_stunnel_bin(), '-help']
proc = subprocess.Popen(stunnel_command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
proc.wait()
_, err = proc.communicate()
stunnel_output = err.splitlines()
check_host_supported = is_stunnel_option_supported(stunnel_output, b'checkHost')
ocsp_aia_supported = is_stunnel_option_supported(stunnel_output, b'OCSPaia')
return check_host_supported, ocsp_aia_supported
def _stunnel_bin():
return find_command_path('stunnel',
'Please install it following the instructions at '
'https://docs.aws.amazon.com/efs/latest/ug/using-amazon-efs-utils.html#upgrading-stunnel')
def find_command_path(command, install_method):
try:
env_path = '/sbin:/usr/sbin:/usr/local/sbin:/root/bin:/usr/local/bin:/usr/bin:/bin'
os.putenv('PATH', env_path)
path = subprocess.check_output(['which', command])
except subprocess.CalledProcessError as e:
fatal_error('Failed to locate %s in %s - %s' % (command, env_path, install_method), e)
return path.strip().decode()
def get_system_release_version():
try:
with open(SYSTEM_RELEASE_PATH) as f:
return f.read().strip()
except IOError:
logging.debug('Unable to read %s', SYSTEM_RELEASE_PATH)
try:
with open(OS_RELEASE_PATH) as f:
for line in f:
if 'PRETTY_NAME' in line:
return line.split('=')[1].strip()
except IOError:
logging.debug('Unable to read %s', OS_RELEASE_PATH)
return 'unknown'
def write_stunnel_config_file(config, state_file_dir, fs_id, mountpoint, tls_port, dns_name, verify_level, ocsp_enabled,
options, log_dir=LOG_DIR, cert_details=None):
"""
Serializes stunnel configuration to a file. Unfortunately this does not conform to Python's config file format, so we have to
hand-serialize it.
"""
mount_filename = get_mount_specific_filename(fs_id, mountpoint, tls_port)
global_config = dict(STUNNEL_GLOBAL_CONFIG)
if config.getboolean(CONFIG_SECTION, 'stunnel_debug_enabled'):
global_config['debug'] = 'debug'
if config.has_option(CONFIG_SECTION, 'stunnel_logs_file'):
global_config['output'] = config.get(CONFIG_SECTION, 'stunnel_logs_file').replace('{fs_id}', fs_id)
else:
global_config['output'] = os.path.join(log_dir, '%s.stunnel.log' % mount_filename)
efs_config = dict(STUNNEL_EFS_CONFIG)
efs_config['accept'] = efs_config['accept'] % tls_port
efs_config['connect'] = efs_config['connect'] % dns_name
efs_config['verify'] = verify_level
if verify_level > 0:
add_stunnel_ca_options(efs_config, config, options)
if cert_details:
efs_config['cert'] = cert_details['certificate']
efs_config['key'] = cert_details['privateKey']
check_host_supported, ocsp_aia_supported = get_version_specific_stunnel_options()
tls_controls_message = 'WARNING: Your client lacks sufficient controls to properly enforce TLS. Please upgrade stunnel, ' \
'or disable "%%s" in %s.\nSee %s for more detail.' % (CONFIG_FILE,
'https://docs.aws.amazon.com/console/efs/troubleshooting-tls')
if config.getboolean(CONFIG_SECTION, 'stunnel_check_cert_hostname'):
if check_host_supported:
efs_config['checkHost'] = dns_name
else:
fatal_error(tls_controls_message % 'stunnel_check_cert_hostname')
# Only use the config setting if the override is not set
if ocsp_enabled:
if ocsp_aia_supported:
efs_config['OCSPaia'] = 'yes'
else:
fatal_error(tls_controls_message % 'stunnel_check_cert_validity')
system_release_version = get_system_release_version()
if not any(release in system_release_version for release in SKIP_NO_LIBWRAP_RELEASES):
efs_config['libwrap'] = 'no'
stunnel_config = '\n'.join(serialize_stunnel_config(global_config) + serialize_stunnel_config(efs_config, 'efs'))
logging.debug('Writing stunnel configuration:\n%s', stunnel_config)
stunnel_config_file = os.path.join(state_file_dir, 'stunnel-config.%s' % mount_filename)
with open(stunnel_config_file, 'w') as f:
f.write(stunnel_config)
return stunnel_config_file
def write_tls_tunnel_state_file(fs_id, mountpoint, tls_port, tunnel_pid, command, files, state_file_dir, cert_details=None):
"""
Return the name of the temporary file containing TLS tunnel state, prefixed with a '~'. This file needs to be renamed to a
non-temporary version following a successful mount.
"""
state_file = '~' + get_mount_specific_filename(fs_id, mountpoint, tls_port)
state = {
'pid': tunnel_pid,
'cmd': command,
'files': files,
}
if cert_details:
state.update(cert_details)
with open(os.path.join(state_file_dir, state_file), 'w') as f:
json.dump(state, f)
return state_file
def test_tunnel_process(tunnel_proc, fs_id):
tunnel_proc.poll()
if tunnel_proc.returncode is not None:
out, err = tunnel_proc.communicate()
fatal_error('Failed to initialize TLS tunnel for %s' % fs_id,
'Failed to start TLS tunnel (errno=%d). stdout="%s" stderr="%s"'
% (tunnel_proc.returncode, out.strip(), err.strip()))
def poll_tunnel_process(tunnel_proc, fs_id, mount_completed):
"""
    Poll the tunnel process health every 0.5s during the mount attempt to fail fast if the tunnel dies. Since this is not
    called from the main thread, exit uncleanly with os._exit if the tunnel fails.
"""
while not mount_completed.is_set():
try:
test_tunnel_process(tunnel_proc, fs_id)
except SystemExit as e:
os._exit(e.code)
mount_completed.wait(.5)
def get_init_system(comm_file='/proc/1/comm'):
init_system = 'unknown'
try:
with open(comm_file) as f:
init_system = f.read().strip()
except IOError:
logging.warning('Unable to read %s', comm_file)
logging.debug('Identified init system: %s', init_system)
return init_system
def check_network_target(fs_id):
with open(os.devnull, 'w') as devnull:
rc = subprocess.call(['systemctl', 'status', 'network.target'], stdout=devnull, stderr=devnull, close_fds=True)
if rc != 0:
fatal_error('Failed to mount %s because the network was not yet available, add "_netdev" to your mount options' % fs_id,
exit_code=0)
def check_network_status(fs_id, init_system):
if init_system != 'systemd':
logging.debug('Not testing network on non-systemd init systems')
return
check_network_target(fs_id)
def start_watchdog(init_system):
if init_system == 'init':
proc = subprocess.Popen(
['/sbin/status', WATCHDOG_SERVICE], stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
        status, _ = proc.communicate()
        # communicate() returns bytes under Python 3; decode before the substring checks below
        if isinstance(status, bytes):
            status = status.decode('utf-8')
        if 'stop' in status:
with open(os.devnull, 'w') as devnull:
subprocess.Popen(['/sbin/start', WATCHDOG_SERVICE], stdout=devnull, stderr=devnull, close_fds=True)
elif 'start' in status:
logging.debug('%s is already running', WATCHDOG_SERVICE)
elif init_system == 'systemd':
rc = subprocess.call(['systemctl', 'is-active', '--quiet', WATCHDOG_SERVICE], close_fds=True)
if rc != 0:
with open(os.devnull, 'w') as devnull:
subprocess.Popen(['systemctl', 'start', WATCHDOG_SERVICE], stdout=devnull, stderr=devnull, close_fds=True)
else:
logging.debug('%s is already running', WATCHDOG_SERVICE)
else:
error_message = 'Could not start %s, unrecognized init system "%s"' % (WATCHDOG_SERVICE, init_system)
sys.stderr.write('%s\n' % error_message)
logging.warning(error_message)
def create_required_directory(config, directory):
mode = 0o750
try:
mode_str = config.get(CONFIG_SECTION, 'state_file_dir_mode')
try:
mode = int(mode_str, 8)
except ValueError:
logging.warning('Bad state_file_dir_mode "%s" in config file "%s"', mode_str, CONFIG_FILE)
except NoOptionError:
pass
try:
os.makedirs(directory, mode)
except OSError as e:
if errno.EEXIST != e.errno or not os.path.isdir(directory):
raise
@contextmanager
def bootstrap_tls(config, init_system, dns_name, fs_id, mountpoint, options, state_file_dir=STATE_FILE_DIR):
tls_port = choose_tls_port(config, options)
# override the tlsport option so that we can later override the port the NFS client uses to connect to stunnel.
# if the user has specified tlsport=X at the command line this will just re-set tlsport to X.
options['tlsport'] = tls_port
use_iam = 'iam' in options
ap_id = options.get('accesspoint')
cert_details = {}
security_credentials = None
client_info = get_client_info(config)
if use_iam:
aws_creds_uri = options.get('awscredsuri')
if aws_creds_uri:
kwargs = {'aws_creds_uri': aws_creds_uri}
else:
kwargs = {'awsprofile': get_aws_profile(options, use_iam)}
security_credentials, credentials_source = get_aws_security_credentials(use_iam, **kwargs)
if credentials_source:
cert_details['awsCredentialsMethod'] = credentials_source
if ap_id:
cert_details['accessPoint'] = ap_id
# additional symbol appended to avoid naming collisions
cert_details['mountStateDir'] = get_mount_specific_filename(fs_id, mountpoint, tls_port) + '+'
# common name for certificate signing request is max 64 characters
cert_details['commonName'] = socket.gethostname()[0:64]
cert_details['region'] = get_target_region(config)
cert_details['certificateCreationTime'] = create_certificate(config, cert_details['mountStateDir'],
cert_details['commonName'], cert_details['region'], fs_id,
security_credentials, ap_id, client_info,
base_path=state_file_dir)
cert_details['certificate'] = os.path.join(state_file_dir, cert_details['mountStateDir'], 'certificate.pem')
cert_details['privateKey'] = get_private_key_path()
cert_details['fsId'] = fs_id
start_watchdog(init_system)
if not os.path.exists(state_file_dir):
create_required_directory(config, state_file_dir)
verify_level = int(options.get('verify', DEFAULT_STUNNEL_VERIFY_LEVEL))
ocsp_enabled = is_ocsp_enabled(config, options)
stunnel_config_file = write_stunnel_config_file(config, state_file_dir, fs_id, mountpoint, tls_port, dns_name, verify_level,
ocsp_enabled, options, cert_details=cert_details)
tunnel_args = [_stunnel_bin(), stunnel_config_file]
if 'netns' in options:
tunnel_args = ['nsenter', '--net=' + options['netns']] + tunnel_args
# launch the tunnel in a process group so if it has any child processes, they can be killed easily by the mount watchdog
logging.info('Starting TLS tunnel: "%s"', ' '.join(tunnel_args))
tunnel_proc = subprocess.Popen(
tunnel_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, preexec_fn=os.setsid, close_fds=True)
logging.info('Started TLS tunnel, pid: %d', tunnel_proc.pid)
temp_tls_state_file = write_tls_tunnel_state_file(fs_id, mountpoint, tls_port, tunnel_proc.pid, tunnel_args,
[stunnel_config_file], state_file_dir, cert_details=cert_details)
try:
yield tunnel_proc
finally:
os.rename(os.path.join(state_file_dir, temp_tls_state_file), os.path.join(state_file_dir, temp_tls_state_file[1:]))
def get_nfs_mount_options(options):
# If you change these options, update the man page as well at man/mount.efs.8
if 'nfsvers' not in options and 'vers' not in options:
options['nfsvers'] = '4.1'
if 'rsize' not in options:
options['rsize'] = '1048576'
if 'wsize' not in options:
options['wsize'] = '1048576'
if 'soft' not in options and 'hard' not in options:
options['hard'] = None
if 'timeo' not in options:
options['timeo'] = '600'
if 'retrans' not in options:
options['retrans'] = '2'
if 'noresvport' not in options:
options['noresvport'] = None
if 'tls' in options:
options['port'] = options['tlsport']
def to_nfs_option(k, v):
if v is None:
return k
return '%s=%s' % (str(k), str(v))
nfs_options = [to_nfs_option(k, v) for k, v in options.items() if k not in EFS_ONLY_OPTIONS]
return ','.join(nfs_options)
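# Illustrative example (assuming 'tls' and 'tlsport' are among the EFS_ONLY_OPTIONS defined earlier):
# for options {'tls': None, 'tlsport': 20049} the defaults above yield something like
# 'nfsvers=4.1,rsize=1048576,wsize=1048576,hard,timeo=600,retrans=2,noresvport,port=20049'.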
def mount_nfs(dns_name, path, mountpoint, options):
if 'tls' in options:
mount_path = '127.0.0.1:%s' % path
else:
mount_path = '%s:%s' % (dns_name, path)
command = ['/sbin/mount.nfs4', mount_path, mountpoint, '-o', get_nfs_mount_options(options)]
if 'netns' in options:
command = ['nsenter', '--net=' + options['netns']] + command
logging.info('Executing: "%s"', ' '.join(command))
proc = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
out, err = proc.communicate()
if proc.returncode == 0:
message = 'Successfully mounted %s at %s' % (dns_name, mountpoint)
logging.info(message)
publish_cloudwatch_log(CLOUDWATCHLOG_AGENT, message)
else:
message = 'Failed to mount %s at %s: returncode=%d, stderr="%s"' % (dns_name, mountpoint, proc.returncode, err.strip())
fatal_error(err.strip(), message, proc.returncode)
def usage(out, exit_code=1):
out.write('Usage: mount.efs [--version] [-h|--help] <fsname> <mountpoint> [-o <options>]\n')
sys.exit(exit_code)
def parse_arguments_early_exit(args=None):
"""Parse arguments, checking for early exit conditions only"""
if args is None:
args = sys.argv
if '-h' in args[1:] or '--help' in args[1:]:
usage(out=sys.stdout, exit_code=0)
if '--version' in args[1:]:
sys.stdout.write('%s Version: %s\n' % (args[0], VERSION))
sys.exit(0)
def parse_arguments(config, args=None):
"""Parse arguments, return (fsid, path, mountpoint, options)"""
if args is None:
args = sys.argv
fsname = None
mountpoint = None
options = {}
if len(args) > 1:
fsname = args[1]
if len(args) > 2:
mountpoint = args[2]
if len(args) > 4 and '-o' in args[:-1]:
options_index = args.index('-o') + 1
options = parse_options(args[options_index])
if not fsname or not mountpoint:
usage(out=sys.stderr)
fs_id, path = match_device(config, fsname)
return fs_id, path, mountpoint, options
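# Illustrative example (hypothetical device): the argv ['mount.efs', 'fs-deadbeef:/data', '/mnt/efs', '-o', 'tls,iam']
# parses to fs_id='fs-deadbeef', path='/data', mountpoint='/mnt/efs', options={'tls': None, 'iam': None}.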
def get_client_info(config):
client_info = {}
# source key/value pair in config file
if config.has_option(CLIENT_INFO_SECTION, 'source'):
client_source = config.get(CLIENT_INFO_SECTION, 'source')
if 0 < len(client_source) <= CLIENT_SOURCE_STR_LEN_LIMIT:
client_info['source'] = client_source
return client_info
def create_certificate(config, mount_name, common_name, region, fs_id, security_credentials, ap_id, client_info,
base_path=STATE_FILE_DIR):
current_time = get_utc_now()
tls_paths = tls_paths_dictionary(mount_name, base_path)
certificate_config = os.path.join(tls_paths['mount_dir'], 'config.conf')
certificate_signing_request = os.path.join(tls_paths['mount_dir'], 'request.csr')
certificate = os.path.join(tls_paths['mount_dir'], 'certificate.pem')
ca_dirs_check(config, tls_paths['database_dir'], tls_paths['certs_dir'])
ca_supporting_files_check(tls_paths['index'], tls_paths['index_attr'], tls_paths['serial'], tls_paths['rand'])
private_key = check_and_create_private_key(base_path)
if security_credentials:
public_key = os.path.join(tls_paths['mount_dir'], 'publicKey.pem')
create_public_key(private_key, public_key)
create_ca_conf(certificate_config, common_name, tls_paths['mount_dir'], private_key, current_time, region, fs_id,
security_credentials, ap_id, client_info)
create_certificate_signing_request(certificate_config, private_key, certificate_signing_request)
not_before = get_certificate_timestamp(current_time, minutes=-NOT_BEFORE_MINS)
not_after = get_certificate_timestamp(current_time, hours=NOT_AFTER_HOURS)
cmd = 'openssl ca -startdate %s -enddate %s -selfsign -batch -notext -config %s -in %s -out %s' % \
(not_before, not_after, certificate_config, certificate_signing_request, certificate)
subprocess_call(cmd, 'Failed to create self-signed client-side certificate')
return current_time.strftime(CERT_DATETIME_FORMAT)
def get_private_key_path():
"""Wrapped for mocking purposes in unit tests"""
return PRIVATE_KEY_FILE
def check_and_create_private_key(base_path=STATE_FILE_DIR):
# Creating RSA private keys is slow, so we will create one private key and allow mounts to share it.
# This means, however, that we have to include a locking mechanism to ensure that the private key is
# atomically created, as mounts occurring in parallel may try to create the key simultaneously.
key = get_private_key_path()
@contextmanager
def open_lock_file():
lock_file = os.path.join(base_path, 'efs-utils-lock')
f = os.open(lock_file, os.O_CREAT | os.O_DSYNC | os.O_EXCL | os.O_RDWR)
try:
lock_file_contents = 'PID: %s' % os.getpid()
os.write(f, lock_file_contents.encode('utf-8'))
yield f
finally:
os.close(f)
os.remove(lock_file)
def do_with_lock(function):
while True:
try:
with open_lock_file():
return function()
except OSError as e:
if e.errno == errno.EEXIST:
logging.info('Failed to take out private key creation lock, sleeping 50 ms')
time.sleep(0.05)
else:
raise
def generate_key():
if os.path.isfile(key):
return
cmd = 'openssl genpkey -algorithm RSA -out %s -pkeyopt rsa_keygen_bits:3072' % key
subprocess_call(cmd, 'Failed to create private key')
read_only_mode = 0o400
os.chmod(key, read_only_mode)
do_with_lock(generate_key)
return key
def create_certificate_signing_request(config_path, private_key, csr_path):
cmd = 'openssl req -new -config %s -key %s -out %s' % (config_path, private_key, csr_path)
subprocess_call(cmd, 'Failed to create certificate signing request (csr)')
def create_ca_conf(config_path, common_name, directory, private_key, date,
region, fs_id, security_credentials, ap_id, client_info):
"""Populate ca/req configuration file with fresh configurations at every mount since SigV4 signature can change"""
public_key_path = os.path.join(directory, 'publicKey.pem')
ca_extension_body = ca_extension_builder(ap_id, security_credentials, fs_id, client_info)
efs_client_auth_body = efs_client_auth_builder(public_key_path, security_credentials['AccessKeyId'],
security_credentials['SecretAccessKey'], date, region, fs_id,
security_credentials['Token']) if security_credentials else ''
efs_client_info_body = efs_client_info_builder(client_info) if client_info else ''
full_config_body = CA_CONFIG_BODY % (directory, private_key, common_name, ca_extension_body,
efs_client_auth_body, efs_client_info_body)
with open(config_path, 'w') as f:
f.write(full_config_body)
return full_config_body
def ca_extension_builder(ap_id, security_credentials, fs_id, client_info):
ca_extension_str = '[ v3_ca ]\nsubjectKeyIdentifier = hash'
if ap_id:
ca_extension_str += '\n1.3.6.1.4.1.4843.7.1 = ASN1:UTF8String:' + ap_id
if security_credentials:
ca_extension_str += '\n1.3.6.1.4.1.4843.7.2 = ASN1:SEQUENCE:efs_client_auth'
ca_extension_str += '\n1.3.6.1.4.1.4843.7.3 = ASN1:UTF8String:' + fs_id
if client_info:
ca_extension_str += '\n1.3.6.1.4.1.4843.7.4 = ASN1:SEQUENCE:efs_client_info'
return ca_extension_str
def efs_client_auth_builder(public_key_path, access_key_id, secret_access_key, date, region, fs_id, session_token=None):
public_key_hash = get_public_key_sha1(public_key_path)
canonical_request = create_canonical_request(public_key_hash, date, access_key_id, region, fs_id, session_token)
string_to_sign = create_string_to_sign(canonical_request, date, region)
signature = calculate_signature(string_to_sign, date, secret_access_key, region)
efs_client_auth_str = '[ efs_client_auth ]'
efs_client_auth_str += '\naccessKeyId = UTF8String:' + access_key_id
efs_client_auth_str += '\nsignature = OCTETSTRING:' + signature
efs_client_auth_str += '\nsigv4DateTime = UTCTIME:' + date.strftime(CERT_DATETIME_FORMAT)
if session_token:
efs_client_auth_str += '\nsessionToken = EXPLICIT:0,UTF8String:' + session_token
return efs_client_auth_str
def efs_client_info_builder(client_info):
efs_client_info_str = '[ efs_client_info ]'
for key, value in client_info.items():
efs_client_info_str += '\n%s = UTF8String:%s' % (key, value)
return efs_client_info_str
def create_public_key(private_key, public_key):
cmd = 'openssl rsa -in %s -outform PEM -pubout -out %s' % (private_key, public_key)
subprocess_call(cmd, 'Failed to create public key')
def subprocess_call(cmd, error_message):
"""Helper method to run shell openssl command and to handle response error messages"""
retry_times = 3
for retry in range(retry_times):
process = subprocess.Popen(cmd.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
(output, err) = process.communicate()
rc = process.poll()
if rc != 0:
logging.error('Command %s failed, rc=%s, stdout="%s", stderr="%s"' % (cmd, rc, output, err), exc_info=True)
try:
process.kill()
except OSError:
# Silently fail if the subprocess has exited already
pass
else:
return output, err
error_message = '%s, error is: %s' % (error_message, err)
fatal_error(error_message, error_message)
def ca_dirs_check(config, database_dir, certs_dir):
"""Check if mount's database and certs directories exist and if not, create directories (also create all intermediate
directories if they don't exist)."""
if not os.path.exists(database_dir):
create_required_directory(config, database_dir)
if not os.path.exists(certs_dir):
create_required_directory(config, certs_dir)
def ca_supporting_files_check(index_path, index_attr_path, serial_path, rand_path):
"""Recreate all supporting openssl ca and req files if they're not present in their respective directories"""
if not os.path.isfile(index_path):
open(index_path, 'w').close()
if not os.path.isfile(index_attr_path):
with open(index_attr_path, 'w+') as f:
f.write('unique_subject = no')
if not os.path.isfile(serial_path):
with open(serial_path, 'w+') as f:
f.write('00')
if not os.path.isfile(rand_path):
open(rand_path, 'w').close()
def get_certificate_timestamp(current_time, **kwargs):
updated_time = current_time + timedelta(**kwargs)
return updated_time.strftime(CERT_DATETIME_FORMAT)
def get_utc_now():
"""
Wrapped for patching purposes in unit tests
"""
return datetime.utcnow()
def assert_root():
if os.geteuid() != 0:
sys.stderr.write('only root can run mount.efs\n')
sys.exit(1)
def read_config(config_file=CONFIG_FILE):
try:
p = ConfigParser.SafeConfigParser()
except AttributeError:
p = ConfigParser()
p.read(config_file)
return p
def bootstrap_logging(config, log_dir=LOG_DIR):
raw_level = config.get(CONFIG_SECTION, 'logging_level')
levels = {
'debug': logging.DEBUG,
'info': logging.INFO,
'warning': logging.WARNING,
'error': logging.ERROR,
'critical': logging.CRITICAL
}
level = levels.get(raw_level.lower())
level_error = False
if not level:
# delay logging error about malformed log level until after logging is configured
level_error = True
level = logging.INFO
max_bytes = config.getint(CONFIG_SECTION, 'logging_max_bytes')
file_count = config.getint(CONFIG_SECTION, 'logging_file_count')
handler = RotatingFileHandler(os.path.join(log_dir, LOG_FILE), maxBytes=max_bytes, backupCount=file_count)
handler.setFormatter(logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(message)s'))
logger = logging.getLogger()
logger.setLevel(level)
logger.addHandler(handler)
if level_error:
logging.error('Malformed logging level "%s", setting logging level to %s', raw_level, level)
def get_dns_name(config, fs_id):
def _validate_replacement_field_count(format_str, expected_ct):
if format_str.count('{') != expected_ct or format_str.count('}') != expected_ct:
raise ValueError('DNS name format has an incorrect number of replacement fields')
dns_name_format = config.get(CONFIG_SECTION, 'dns_name_format')
if '{fs_id}' not in dns_name_format:
raise ValueError('DNS name format must include {fs_id}')
format_args = {'fs_id': fs_id}
expected_replacement_field_ct = 1
if '{region}' in dns_name_format:
expected_replacement_field_ct += 1
format_args['region'] = get_target_region(config)
if '{dns_name_suffix}' in dns_name_format:
expected_replacement_field_ct += 1
config_section = CONFIG_SECTION
region = format_args.get('region')
if region:
region_specific_config_section = '%s.%s' % (CONFIG_SECTION, region)
if config.has_section(region_specific_config_section):
config_section = region_specific_config_section
format_args['dns_name_suffix'] = config.get(config_section, 'dns_name_suffix')
logging.debug("Using dns_name_suffix %s in config section [%s]", format_args.get('dns_name_suffix'), config_section)
_validate_replacement_field_count(dns_name_format, expected_replacement_field_ct)
dns_name = dns_name_format.format(**format_args)
try:
socket.gethostbyname(dns_name)
except socket.gaierror:
fatal_error('Failed to resolve "%s" - check that your file system ID is correct.\nSee %s for more detail.'
% (dns_name, 'https://docs.aws.amazon.com/console/efs/mount-dns-name'),
'Failed to resolve "%s"' % dns_name)
return dns_name
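# Illustrative example (hypothetical values): with dns_name_format '{fs_id}.efs.{region}.{dns_name_suffix}',
# fs_id 'fs-deadbeef', region 'us-east-1' and dns_name_suffix 'amazonaws.com', the resolved name is
# 'fs-deadbeef.efs.us-east-1.amazonaws.com'.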
def tls_paths_dictionary(mount_name, base_path=STATE_FILE_DIR):
tls_dict = {
'mount_dir': os.path.join(base_path, mount_name),
# every mount will have its own ca mode assets due to lack of multi-threading support in openssl
'database_dir': os.path.join(base_path, mount_name, 'database'),
'certs_dir': os.path.join(base_path, mount_name, 'certs'),
'index': os.path.join(base_path, mount_name, 'database/index.txt'),
'index_attr': os.path.join(base_path, mount_name, 'database/index.txt.attr'),
'serial': os.path.join(base_path, mount_name, 'database/serial'),
'rand': os.path.join(base_path, mount_name, 'database/.rand')
}
return tls_dict
def get_public_key_sha1(public_key):
# truncating public key to remove the header and footer '-----(BEGIN|END) PUBLIC KEY-----'
with open(public_key, 'r') as f:
lines = f.readlines()
lines = lines[1:-1]
key = ''.join(lines)
key = bytearray(base64.b64decode(key))
# Parse the public key to pull out the actual key material by looking for the key BIT STRING
# Example:
# 0:d=0 hl=4 l= 418 cons: SEQUENCE
# 4:d=1 hl=2 l= 13 cons: SEQUENCE
# 6:d=2 hl=2 l= 9 prim: OBJECT :rsaEncryption
# 17:d=2 hl=2 l= 0 prim: NULL
# 19:d=1 hl=4 l= 399 prim: BIT STRING
cmd = 'openssl asn1parse -inform PEM -in %s' % public_key
output, err = subprocess_call(cmd, 'Unable to ASN1 parse public key file, %s, correctly' % public_key)
key_line = ''
for line in output.splitlines():
if 'BIT STRING' in line.decode('utf-8'):
key_line = line.decode('utf-8')
if not key_line:
err_msg = 'Public key file, %s, is incorrectly formatted' % public_key
fatal_error(err_msg, err_msg)
key_line = key_line.replace(' ', '')
# DER encoding TLV (Tag, Length, Value)
# - the first octet (byte) is the tag (type)
# - the next octets are the length - "definite form"
# - the first octet always has the high order bit (8) set to 1
# - the remaining 127 bits are used to encode the number of octets that follow
# - the following octets encode, as big-endian, the length (which may be 0) as a number of octets
# - the remaining octets are the "value" aka content
#
# For a BIT STRING, the first octet of the value is used to signify the number of unused bits that exist in the last
# content byte. Note that this is explicitly excluded from the SubjectKeyIdentifier hash, per
# https://tools.ietf.org/html/rfc5280#section-4.2.1.2
#
# Example:
# 0382018f00...<subjectPublicKey>
# - 03 - BIT STRING tag
# - 82 - 2 length octets to follow (ignore high order bit)
# - 018f - length of 399
# - 00 - no unused bits in the last content byte
offset = int(key_line.split(':')[0])
key = key[offset:]
num_length_octets = key[1] & 0b01111111
# Exclude the tag (1), length (1 + num_length_octets), and number of unused bits (1)
offset = 1 + 1 + num_length_octets + 1
key = key[offset:]
sha1 = hashlib.sha1()
sha1.update(key)
return sha1.hexdigest()
def create_canonical_request(public_key_hash, date, access_key, region, fs_id, session_token=None):
"""
Create a Canonical Request - https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html
"""
formatted_datetime = date.strftime(SIGV4_DATETIME_FORMAT)
credential = quote_plus(access_key + '/' + get_credential_scope(date, region))
request = HTTP_REQUEST_METHOD + '\n'
request += CANONICAL_URI + '\n'
request += create_canonical_query_string(public_key_hash, credential, formatted_datetime, session_token) + '\n'
request += CANONICAL_HEADERS % fs_id + '\n'
request += SIGNED_HEADERS + '\n'
sha256 = hashlib.sha256()
sha256.update(REQUEST_PAYLOAD.encode())
request += sha256.hexdigest()
return request
def create_canonical_query_string(public_key_hash, credential, formatted_datetime, session_token=None):
canonical_query_params = {
'Action': 'Connect',
# Public key hash is included in canonical request to tie the signature to a specific key pair to avoid replay attacks
'PublicKeyHash': quote_plus(public_key_hash),
'X-Amz-Algorithm': ALGORITHM,
'X-Amz-Credential': credential,
'X-Amz-Date': quote_plus(formatted_datetime),
'X-Amz-Expires': 86400,
'X-Amz-SignedHeaders': SIGNED_HEADERS,
}
if session_token:
canonical_query_params['X-Amz-Security-Token'] = quote_plus(session_token)
# Cannot use urllib.urlencode because it replaces the %s's
return '&'.join(['%s=%s' % (k, v) for k, v in sorted(canonical_query_params.items())])
def create_string_to_sign(canonical_request, date, region):
"""
Create a String to Sign - https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html
"""
string_to_sign = ALGORITHM + '\n'
string_to_sign += date.strftime(SIGV4_DATETIME_FORMAT) + '\n'
string_to_sign += get_credential_scope(date, region) + '\n'
sha256 = hashlib.sha256()
sha256.update(canonical_request.encode())
string_to_sign += sha256.hexdigest()
return string_to_sign
def calculate_signature(string_to_sign, date, secret_access_key, region):
"""
Calculate the Signature - https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html
"""
def _sign(key, msg):
return hmac.new(key, msg.encode('utf-8'), hashlib.sha256)
key_date = _sign(('AWS4' + secret_access_key).encode('utf-8'), date.strftime(DATE_ONLY_FORMAT)).digest()
add_region = _sign(key_date, region).digest()
add_service = _sign(add_region, SERVICE).digest()
signing_key = _sign(add_service, 'aws4_request').digest()
return _sign(signing_key, string_to_sign).hexdigest()
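# The derivation above is the standard SigV4 signing-key chain:
# HMAC('AWS4' + secret, date) -> HMAC(., region) -> HMAC(., SERVICE) -> HMAC(., 'aws4_request'),
# and the resulting key signs the string to sign.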
def get_credential_scope(date, region):
return '/'.join([date.strftime(DATE_ONLY_FORMAT), region, SERVICE, AWS4_REQUEST])
def match_device(config, device):
"""Return the EFS id and the remote path to mount"""
try:
remote, path = device.split(':', 1)
except ValueError:
remote = device
path = '/'
if FS_ID_RE.match(remote):
return remote, path
try:
primary, secondaries, _ = socket.gethostbyname_ex(remote)
hostnames = list(filter(lambda e: e is not None, [primary] + secondaries))
except socket.gaierror:
create_default_cloudwatchlog_agent_if_not_exist(config)
fatal_error(
'Failed to resolve "%s" - check that the specified DNS name is a CNAME record resolving to a valid EFS DNS '
'name' % remote,
'Failed to resolve "%s"' % remote
)
if not hostnames:
create_default_cloudwatchlog_agent_if_not_exist(config)
fatal_error(
'The specified domain name "%s" did not resolve to an EFS mount target' % remote
)
for hostname in hostnames:
efs_fqdn_match = EFS_FQDN_RE.match(hostname)
if efs_fqdn_match:
fs_id = efs_fqdn_match.group('fs_id')
expected_dns_name = get_dns_name(config, fs_id)
# check that the DNS name of the mount target matches exactly the DNS name the CNAME resolves to
if hostname == expected_dns_name:
return fs_id, path
else:
create_default_cloudwatchlog_agent_if_not_exist(config)
fatal_error('The specified CNAME "%s" did not resolve to a valid DNS name for an EFS mount target. '
'Please refer to the EFS documentation for mounting with DNS names for examples: %s'
% (remote, 'https://docs.aws.amazon.com/efs/latest/ug/mounting-fs-mount-cmd-dns-name.html'))
def is_nfs_mount(mountpoint):
cmd = ['stat', '-f', '-L', '-c', '%T', mountpoint]
p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, close_fds=True)
output, _ = p.communicate()
return output and 'nfs' in str(output)
def mount_tls(config, init_system, dns_name, path, fs_id, mountpoint, options):
if os.path.ismount(mountpoint) and is_nfs_mount(mountpoint):
sys.stdout.write("%s is already mounted, please run 'mount' command to verify\n" % mountpoint)
logging.warning("%s is already mounted, mount aborted" % mountpoint)
return
with bootstrap_tls(config, init_system, dns_name, fs_id, mountpoint, options) as tunnel_proc:
mount_completed = threading.Event()
t = threading.Thread(target=poll_tunnel_process, args=(tunnel_proc, fs_id, mount_completed))
t.daemon = True
t.start()
mount_nfs(dns_name, path, mountpoint, options)
mount_completed.set()
t.join()
def check_unsupported_options(options):
for unsupported_option in UNSUPPORTED_OPTIONS:
if unsupported_option in options:
warn_message = 'The "%s" option is not supported and has been ignored, as amazon-efs-utils relies on a built-in ' \
'trust store.' % unsupported_option
sys.stderr.write('WARN: %s\n' % warn_message)
logging.warning(warn_message)
del options[unsupported_option]
def check_options_validity(options):
if 'tls' in options:
if 'port' in options:
fatal_error('The "port" and "tls" options are mutually exclusive')
if 'tlsport' in options:
try:
int(options['tlsport'])
except ValueError:
fatal_error('tlsport option [%s] is not an integer' % options['tlsport'])
if 'ocsp' in options and 'noocsp' in options:
fatal_error('The "ocsp" and "noocsp" options are mutually exclusive')
if 'accesspoint' in options:
if 'tls' not in options:
fatal_error('The "tls" option is required when mounting via "accesspoint"')
if not AP_ID_RE.match(options['accesspoint']):
fatal_error('Access Point ID %s is malformed' % options['accesspoint'])
if 'iam' in options and 'tls' not in options:
fatal_error('The "tls" option is required when mounting via "iam"')
if 'awsprofile' in options and 'iam' not in options:
fatal_error('The "iam" option is required when mounting with named profile option, "awsprofile"')
if 'awscredsuri' in options and 'iam' not in options:
fatal_error('The "iam" option is required when mounting with "awscredsuri"')
if 'awscredsuri' in options and 'awsprofile' in options:
fatal_error('The "awscredsuri" and "awsprofile" options are mutually exclusive')
def bootstrap_cloudwatch_logging(config, fs_id=None):
if not check_if_cloudwatch_log_enabled(config):
return None
cloudwatchlog_client = get_botocore_client(config, 'logs')
if not cloudwatchlog_client:
return None
cloudwatchlog_config = get_cloudwatchlog_config(config, fs_id)
log_group_name = cloudwatchlog_config.get('log_group_name')
log_stream_name = cloudwatchlog_config.get('log_stream_name')
retention_days = cloudwatchlog_config.get('retention_days')
group_creation_completed = create_cloudwatch_log_group(cloudwatchlog_client, log_group_name)
if not group_creation_completed:
return None
put_retention_policy_completed = put_cloudwatch_log_retention_policy(cloudwatchlog_client, log_group_name, retention_days)
if not put_retention_policy_completed:
return None
stream_creation_completed = create_cloudwatch_log_stream(cloudwatchlog_client, log_group_name, log_stream_name)
if not stream_creation_completed:
return None
return {
'client': cloudwatchlog_client,
'log_group_name': log_group_name,
'log_stream_name': log_stream_name
}
def create_default_cloudwatchlog_agent_if_not_exist(config):
if not check_if_cloudwatch_log_enabled(config):
return None
global CLOUDWATCHLOG_AGENT
if not CLOUDWATCHLOG_AGENT:
CLOUDWATCHLOG_AGENT = bootstrap_cloudwatch_logging(config)
def get_botocore_client(config, service):
if not BOTOCORE_PRESENT:
logging.error('Failed to import botocore, please install botocore first.')
return None
session = botocore.session.get_session()
region = get_target_region(config)
iam_role_name = get_iam_role_name()
if iam_role_name:
credentials, _ = get_aws_security_credentials_from_instance_metadata(iam_role_name)
if credentials:
return session.create_client(service, aws_access_key_id=credentials['AccessKeyId'],
aws_secret_access_key=credentials['SecretAccessKey'],
aws_session_token=credentials['Token'], region_name=region)
return session.create_client(service, region_name=region)
def get_cloudwatchlog_config(config, fs_id=None):
log_group_name = DEFAULT_CLOUDWATCH_LOG_GROUP
if config.has_option(CLOUDWATCH_LOG_SECTION, 'log_group_name'):
log_group_name = config.get(CLOUDWATCH_LOG_SECTION, 'log_group_name')
retention_days = DEFAULT_RETENTION_DAYS
if config.has_option(CLOUDWATCH_LOG_SECTION, 'retention_in_days'):
retention_days = config.get(CLOUDWATCH_LOG_SECTION, 'retention_in_days')
log_stream_name = get_cloudwatch_log_stream_name(fs_id)
return {
'log_group_name': log_group_name,
'retention_days': int(retention_days),
'log_stream_name': log_stream_name
}
def get_cloudwatch_log_stream_name(fs_id=None):
instance_id = get_instance_identity_info_from_instance_metadata('instanceId')
if instance_id and fs_id:
log_stream_name = '%s - %s - mount.log' % (fs_id, instance_id)
elif instance_id:
log_stream_name = '%s - mount.log' % (instance_id)
elif fs_id:
log_stream_name = '%s - mount.log' % (fs_id)
else:
log_stream_name = 'default - mount.log'
return log_stream_name
def check_if_cloudwatch_log_enabled(config):
if config.has_option(CLOUDWATCH_LOG_SECTION, 'enabled'):
return config.getboolean(CLOUDWATCH_LOG_SECTION, 'enabled')
return False
def cloudwatch_create_log_group_helper(cloudwatchlog_client, log_group_name):
cloudwatchlog_client.create_log_group(
logGroupName=log_group_name
)
logging.info('Created cloudwatch log group %s' % log_group_name)
def create_cloudwatch_log_group(cloudwatchlog_client, log_group_name):
try:
cloudwatch_create_log_group_helper(cloudwatchlog_client, log_group_name)
except ClientError as e:
exception = e.response['Error']['Code']
if exception == 'ResourceAlreadyExistsException':
logging.debug('Log group %s already exist, %s' % (log_group_name, e.response))
return True
elif exception == 'LimitExceededException':
logging.error('Reached the maximum number of log groups that can be created, %s' % e.response)
return False
elif exception == 'OperationAbortedException':
logging.debug('Multiple requests to update the same log group %s were in conflict, %s' % (log_group_name, e.response))
return False
elif exception == 'InvalidParameterException':
logging.error('Log group name %s is specified incorrectly, %s' % (log_group_name, e.response))
return False
else:
handle_general_botocore_exceptions(e)
return False
except NoCredentialsError as e:
logging.warning('Credentials are not properly configured, %s' % e)
return False
except EndpointConnectionError as e:
logging.warning('Could not connect to the endpoint, %s' % e)
return False
except Exception as e:
logging.warning('Unknown error, %s.' % e)
return False
return True
def cloudwatch_put_retention_policy_helper(cloudwatchlog_client, log_group_name, retention_days):
cloudwatchlog_client.put_retention_policy(
logGroupName=log_group_name,
retentionInDays=retention_days
)
logging.debug('Set cloudwatch log group retention days to %s' % retention_days)
def put_cloudwatch_log_retention_policy(cloudwatchlog_client, log_group_name, retention_days):
try:
cloudwatch_put_retention_policy_helper(cloudwatchlog_client, log_group_name, retention_days)
except ClientError as e:
exception = e.response['Error']['Code']
if exception == 'ResourceNotFoundException':
logging.error('Log group %s does not exist, %s' % (log_group_name, e.response))
return False
elif exception == 'OperationAbortedException':
logging.debug('Multiple requests to update the same log group %s were in conflict, %s' % (log_group_name, e.response))
return False
elif exception == 'InvalidParameterException':
logging.error('Either parameter log group name %s or retention in days %s is specified incorrectly, %s'
% (log_group_name, retention_days, e.response))
return False
else:
handle_general_botocore_exceptions(e)
return False
except NoCredentialsError as e:
logging.warning('Credentials are not properly configured, %s' % e)
return False
except EndpointConnectionError as e:
logging.warning('Could not connect to the endpoint, %s' % e)
return False
except Exception as e:
logging.warning('Unknown error, %s.' % e)
return False
return True
def cloudwatch_create_log_stream_helper(cloudwatchlog_client, log_group_name, log_stream_name):
cloudwatchlog_client.create_log_stream(
logGroupName=log_group_name,
logStreamName=log_stream_name
)
logging.info('Created cloudwatch log stream %s in log group %s' % (log_stream_name, log_group_name))
def create_cloudwatch_log_stream(cloudwatchlog_client, log_group_name, log_stream_name):
try:
cloudwatch_create_log_stream_helper(cloudwatchlog_client, log_group_name, log_stream_name)
except ClientError as e:
exception = e.response['Error']['Code']
if exception == 'ResourceAlreadyExistsException':
logging.debug('Log stream %s already exist in log group %s, %s' % (log_stream_name, log_group_name, e.response))
return True
elif exception == 'InvalidParameterException':
logging.error('Either parameter log group name %s or log stream name %s is specified incorrectly, %s'
% (log_group_name, log_stream_name, e.response))
return False
elif exception == 'ResourceNotFoundException':
logging.error('Log group %s does not exist, %s' % (log_group_name, e.response))
return False
else:
handle_general_botocore_exceptions(e)
return False
except NoCredentialsError as e:
logging.warning('Credentials are not properly configured, %s' % e)
return False
except EndpointConnectionError as e:
logging.warning('Could not connect to the endpoint, %s' % e)
return False
except Exception as e:
logging.warning('Unknown error, %s.' % e)
return False
return True
def cloudwatch_put_log_events_helper(cloudwatchlog_agent, message, token=None):
kwargs = {
'logGroupName': cloudwatchlog_agent.get('log_group_name'),
'logStreamName': cloudwatchlog_agent.get('log_stream_name'),
'logEvents': [
{
'timestamp': int(round(time.time() * 1000)),
'message': message
}
]
}
if token:
kwargs['sequenceToken'] = token
cloudwatchlog_agent.get('client').put_log_events(**kwargs)
def publish_cloudwatch_log(cloudwatchlog_agent, message):
if not cloudwatchlog_agent or not cloudwatchlog_agent.get('client'):
return False
token = get_log_stream_next_token(cloudwatchlog_agent)
try:
cloudwatch_put_log_events_helper(cloudwatchlog_agent, message, token)
except ClientError as e:
exception = e.response['Error']['Code']
if exception == 'InvalidSequenceTokenException':
logging.debug('The sequence token is not valid, %s' % e.response)
return False
elif exception == 'InvalidParameterException':
logging.debug('One of the parameter to put log events is not valid, %s' % e.response)
return False
elif exception == 'DataAlreadyAcceptedException':
logging.debug('The event %s was already logged, %s' % (message, e.response))
return False
elif exception == 'UnrecognizedClientException':
logging.debug('The most likely cause is an invalid AWS access key ID or secret Key, %s' % e.response)
return False
elif exception == 'ResourceNotFoundException':
logging.error('Either log group %s or log stream %s does not exist, %s'
% (cloudwatchlog_agent.get('log_group_name'), cloudwatchlog_agent.get('log_stream_name'), e.response))
return False
else:
logging.debug('Unexpected error: %s' % e)
return False
except NoCredentialsError as e:
logging.warning('Credentials are not properly configured, %s' % e)
return False
except EndpointConnectionError as e:
logging.warning('Could not connect to the endpoint, %s' % e)
return False
except Exception as e:
logging.warning('Unknown error, %s.' % e)
return False
return True
def cloudwatch_describe_log_streams_helper(cloudwatchlog_agent):
return cloudwatchlog_agent.get('client').describe_log_streams(
logGroupName=cloudwatchlog_agent.get('log_group_name'),
logStreamNamePrefix=cloudwatchlog_agent.get('log_stream_name')
)
def get_log_stream_next_token(cloudwatchlog_agent):
try:
response = cloudwatch_describe_log_streams_helper(cloudwatchlog_agent)
except ClientError as e:
exception = e.response['Error']['Code']
if exception == 'InvalidParameterException':
logging.debug('Either parameter log group name %s or log stream name %s is specified incorrectly, %s'
% (cloudwatchlog_agent.get('log_group_name'), cloudwatchlog_agent.get('log_stream_name'), e.response))
elif exception == 'ResourceNotFoundException':
logging.debug('Either log group %s or log stream %s does not exist, %s'
% (cloudwatchlog_agent.get('log_group_name'), cloudwatchlog_agent.get('log_stream_name'), e.response))
else:
handle_general_botocore_exceptions(e)
return None
except NoCredentialsError as e:
logging.warning('Credentials are not properly configured, %s' % e)
return None
except EndpointConnectionError as e:
logging.warning('Could not connect to the endpoint, %s' % e)
return None
except Exception as e:
logging.warning('Unknown error, %s' % e)
return None
try:
log_stream = response['logStreams'][0]
return log_stream.get('uploadSequenceToken')
except (IndexError, TypeError, KeyError):
pass
return None
def handle_general_botocore_exceptions(error):
exception = error.response['Error']['Code']
if exception == 'ServiceUnavailableException':
logging.debug('The service cannot complete the request, %s' % error.response)
elif exception == 'AccessDeniedException':
logging.debug('User is not authorized to perform the action, %s' % error.response)
else:
logging.debug('Unexpected error: %s' % error)
def main():
parse_arguments_early_exit()
assert_root()
config = read_config()
bootstrap_logging(config)
fs_id, path, mountpoint, options = parse_arguments(config)
logging.info('version=%s options=%s', VERSION, options)
global CLOUDWATCHLOG_AGENT
CLOUDWATCHLOG_AGENT = bootstrap_cloudwatch_logging(config, fs_id)
check_unsupported_options(options)
check_options_validity(options)
init_system = get_init_system()
check_network_status(fs_id, init_system)
dns_name = get_dns_name(config, fs_id)
if 'tls' in options:
mount_tls(config, init_system, dns_name, path, fs_id, mountpoint, options)
else:
mount_nfs(dns_name, path, mountpoint, options)
if '__main__' == __name__:
main()
| [
"logging.getLogger",
"logging.debug",
"configparser.ConfigParser",
"re.compile",
"os.path.ismount",
"os.getuid",
"urllib.request.Request",
"os.open",
"time.sleep",
"sys.exit",
"urllib.parse.urlencode",
"datetime.timedelta",
"hashlib.sha1",
"logging.info",
"logging.error",
"os.remove",
"os.path.exists",
"socket.gethostbyname_ex",
"urllib.quote_plus",
"subprocess.Popen",
"os.putenv",
"configparser.ConfigParser.SafeConfigParser",
"os.chmod",
"os.path.isdir",
"subprocess.call",
"os.getpid",
"socket.gethostname",
"urllib.request.urlopen",
"subprocess.check_output",
"hashlib.sha256",
"json.loads",
"os.close",
"logging.warning",
"os.path.isfile",
"sys.stderr.write",
"time.time",
"socket.gethostbyname",
"socket.socket",
"datetime.datetime.utcnow",
"os.makedirs",
"logging.Formatter",
"os.path.join",
"os.geteuid",
"base64.b64decode",
"threading.Event",
"os._exit",
"os.path.basename",
"os.path.abspath",
"threading.Thread",
"urllib2.build_opener",
"json.dump",
"sys.stdout.write"
] | [((4023, 4062), 're.compile', 're.compile', (['"""^(?P<fs_id>fs-[0-9a-f]+)$"""'], {}), "('^(?P<fs_id>fs-[0-9a-f]+)$')\n", (4033, 4062), False, 'import re\n'), ((4077, 4191), 're.compile', 're.compile', (['"""^(?P<fs_id>fs-[0-9a-f]+)\\\\.efs\\\\.(?P<region>[a-z0-9-]+)\\\\.(?P<dns_name_suffix>[a-z0-9.]+)$"""'], {}), "(\n '^(?P<fs_id>fs-[0-9a-f]+)\\\\.efs\\\\.(?P<region>[a-z0-9-]+)\\\\.(?P<dns_name_suffix>[a-z0-9.]+)$'\n )\n", (4087, 4191), False, 'import re\n'), ((4191, 4224), 're.compile', 're.compile', (['"""^fsap-[0-9a-f]{17}$"""'], {}), "('^fsap-[0-9a-f]{17}$')\n", (4201, 4224), False, 'import re\n'), ((6479, 6518), 'sys.stderr.write', 'sys.stderr.write', (["('%s\\n' % user_message)"], {}), "('%s\\n' % user_message)\n", (6495, 6518), False, 'import sys\n'), ((6523, 6549), 'logging.error', 'logging.error', (['log_message'], {}), '(log_message)\n', (6536, 6549), False, 'import logging\n'), ((6636, 6655), 'sys.exit', 'sys.exit', (['exit_code'], {}), '(exit_code)\n', (6644, 6655), False, 'import sys\n'), ((22246, 22261), 'socket.socket', 'socket.socket', ([], {}), '()\n', (22259, 22261), False, 'import socket\n'), ((24646, 24748), 'subprocess.Popen', 'subprocess.Popen', (['stunnel_command'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'close_fds': '(True)'}), '(stunnel_command, stdout=subprocess.PIPE, stderr=subprocess\n .PIPE, close_fds=True)\n', (24662, 24748), False, 'import subprocess\n'), ((28755, 28825), 'logging.debug', 'logging.debug', (['"""Writing stunnel configuration:\n%s"""', 'stunnel_config'], {}), '("""Writing stunnel configuration:\n%s""", stunnel_config)\n', (28768, 28825), False, 'import logging\n'), ((28850, 28916), 'os.path.join', 'os.path.join', (['state_file_dir', "('stunnel-config.%s' % mount_filename)"], {}), "(state_file_dir, 'stunnel-config.%s' % mount_filename)\n", (28862, 28916), False, 'import os\n'), ((30822, 30878), 'logging.debug', 'logging.debug', (['"""Identified init system: %s"""', 'init_system'], {}), "('Identified init system: %s', init_system)\n", (30835, 30878), False, 'import logging\n'), ((35068, 35146), 'os.path.join', 'os.path.join', (['state_file_dir', "cert_details['mountStateDir']", '"""certificate.pem"""'], {}), "(state_file_dir, cert_details['mountStateDir'], 'certificate.pem')\n", (35080, 35146), False, 'import os\n'), ((36105, 36225), 'subprocess.Popen', 'subprocess.Popen', (['tunnel_args'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'preexec_fn': 'os.setsid', 'close_fds': '(True)'}), '(tunnel_args, stdout=subprocess.PIPE, stderr=subprocess.\n PIPE, preexec_fn=os.setsid, close_fds=True)\n', (36121, 36225), False, 'import subprocess\n'), ((36234, 36294), 'logging.info', 'logging.info', (['"""Started TLS tunnel, pid: %d"""', 'tunnel_proc.pid'], {}), "('Started TLS tunnel, pid: %d', tunnel_proc.pid)\n", (36246, 36294), False, 'import logging\n'), ((38104, 38197), 'subprocess.Popen', 'subprocess.Popen', (['command'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'close_fds': '(True)'}), '(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n close_fds=True)\n', (38120, 38197), False, 'import subprocess\n'), ((38753, 38772), 'sys.exit', 'sys.exit', (['exit_code'], {}), '(exit_code)\n', (38761, 38772), False, 'import sys\n'), ((40400, 40451), 'os.path.join', 'os.path.join', (["tls_paths['mount_dir']", '"""config.conf"""'], {}), "(tls_paths['mount_dir'], 'config.conf')\n", (40412, 40451), False, 'import os\n'), ((40486, 40537), 'os.path.join', 'os.path.join', (["tls_paths['mount_dir']", 
'"""request.csr"""'], {}), "(tls_paths['mount_dir'], 'request.csr')\n", (40498, 40537), False, 'import os\n'), ((40556, 40611), 'os.path.join', 'os.path.join', (["tls_paths['mount_dir']", '"""certificate.pem"""'], {}), "(tls_paths['mount_dir'], 'certificate.pem')\n", (40568, 40611), False, 'import os\n'), ((44028, 44068), 'os.path.join', 'os.path.join', (['directory', '"""publicKey.pem"""'], {}), "(directory, 'publicKey.pem')\n", (44040, 44068), False, 'import os\n'), ((48841, 48858), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (48856, 48858), False, 'from datetime import datetime, timedelta\n'), ((50057, 50076), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (50074, 50076), False, 'import logging\n'), ((55195, 55209), 'hashlib.sha1', 'hashlib.sha1', ([], {}), '()\n', (55207, 55209), False, 'import hashlib\n'), ((55931, 55947), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (55945, 55947), False, 'import hashlib\n'), ((57240, 57256), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (57254, 57256), False, 'import hashlib\n'), ((59996, 60085), 'subprocess.Popen', 'subprocess.Popen', (['cmd'], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'close_fds': '(True)'}), '(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE,\n close_fds=True)\n', (60012, 60085), False, 'import subprocess\n'), ((66484, 66548), 'logging.info', 'logging.info', (["('Created cloudwatch log group %s' % log_group_name)"], {}), "('Created cloudwatch log group %s' % log_group_name)\n", (66496, 66548), False, 'import logging\n'), ((68244, 68323), 'logging.debug', 'logging.debug', (["('Set cloudwatch log group retention days to %s' % retention_days)"], {}), "('Set cloudwatch log group retention days to %s' % retention_days)\n", (68257, 68323), False, 'import logging\n'), ((69953, 70058), 'logging.info', 'logging.info', (["('Created cloudwatch log stream %s in log group %s' % (log_stream_name,\n log_group_name))"], {}), "('Created cloudwatch log stream %s in log group %s' % (\n log_stream_name, log_group_name))\n", (69965, 70058), False, 'import logging\n'), ((76123, 76178), 'logging.info', 'logging.info', (['"""version=%s options=%s"""', 'VERSION', 'options'], {}), "('version=%s options=%s', VERSION, options)\n", (76135, 76178), False, 'import logging\n'), ((7353, 7508), 'sys.stdout.write', 'sys.stdout.write', (['"""Warning: region obtained from "dns_name_format" field. Please set the "region" parameter in the efs-utils configuration file."""'], {}), '(\n \'Warning: region obtained from "dns_name_format" field. 
Please set the "region" parameter in the efs-utils configuration file.\'\n )\n', (7369, 7508), False, 'import sys\n'), ((9553, 9578), 'urllib2.build_opener', 'build_opener', (['HTTPHandler'], {}), '(HTTPHandler)\n', (9565, 9578), False, 'from urllib2 import URLError, HTTPError, build_opener, urlopen, Request, HTTPHandler\n'), ((9597, 9633), 'urllib.request.Request', 'Request', (['INSTANCE_METADATA_TOKEN_URL'], {}), '(INSTANCE_METADATA_TOKEN_URL)\n', (9604, 9633), False, 'from urllib.request import urlopen, Request\n'), ((12742, 12767), 'os.path.exists', 'os.path.exists', (['file_path'], {}), '(file_path)\n', (12756, 12767), False, 'import os\n'), ((14584, 14754), 'urllib.parse.urlencode', 'urlencode', (["{'Version': '2011-06-15', 'Action': 'AssumeRoleWithWebIdentity', 'RoleArn':\n role_arn, 'RoleSessionName': 'efs-mount-helper', 'WebIdentityToken': token}"], {}), "({'Version': '2011-06-15', 'Action': 'AssumeRoleWithWebIdentity',\n 'RoleArn': role_arn, 'RoleSessionName': 'efs-mount-helper',\n 'WebIdentityToken': token})\n", (14593, 14754), False, 'from urllib.parse import urlencode\n'), ((19347, 19359), 'urllib.request.Request', 'Request', (['url'], {}), '(url)\n', (19354, 19359), False, 'from urllib.request import urlopen, Request\n'), ((19453, 19476), 'urllib.request.urlopen', 'urlopen', (['req'], {'timeout': '(1)'}), '(req, timeout=1)\n', (19460, 19476), False, 'from urllib.request import urlopen, Request\n'), ((20255, 20301), 'logging.debug', 'logging.debug', (['"""%s %s"""', 'url_error_msg', 'err_msg'], {}), "('%s %s', url_error_msg, err_msg)\n", (20268, 20301), False, 'import logging\n'), ((23972, 24002), 'os.path.exists', 'os.path.exists', (['stunnel_cafile'], {}), '(stunnel_cafile)\n', (23986, 24002), False, 'import os\n'), ((24449, 24518), 'logging.warning', 'logging.warning', (['"""stunnel does not support "%s\\""""', 'stunnel_option_name'], {}), '(\'stunnel does not support "%s"\', stunnel_option_name)\n', (24464, 24518), False, 'import logging\n'), ((25472, 25499), 'os.putenv', 'os.putenv', (['"""PATH"""', 'env_path'], {}), "('PATH', env_path)\n", (25481, 25499), False, 'import os\n'), ((25515, 25558), 'subprocess.check_output', 'subprocess.check_output', (["['which', command]"], {}), "(['which', command])\n", (25538, 25558), False, 'import subprocess\n'), ((29663, 29682), 'json.dump', 'json.dump', (['state', 'f'], {}), '(state, f)\n', (29672, 29682), False, 'import json\n'), ((30993, 31103), 'subprocess.call', 'subprocess.call', (["['systemctl', 'status', 'network.target']"], {'stdout': 'devnull', 'stderr': 'devnull', 'close_fds': '(True)'}), "(['systemctl', 'status', 'network.target'], stdout=devnull,\n stderr=devnull, close_fds=True)\n", (31008, 31103), False, 'import subprocess\n'), ((31368, 31432), 'logging.debug', 'logging.debug', (['"""Not testing network on non-systemd init systems"""'], {}), "('Not testing network on non-systemd init systems')\n", (31381, 31432), False, 'import logging\n'), ((31561, 31681), 'subprocess.Popen', 'subprocess.Popen', (["['/sbin/status', WATCHDOG_SERVICE]"], {'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.PIPE', 'close_fds': '(True)'}), "(['/sbin/status', WATCHDOG_SERVICE], stdout=subprocess.PIPE,\n stderr=subprocess.PIPE, close_fds=True)\n", (31577, 31681), False, 'import subprocess\n'), ((33038, 33066), 'os.makedirs', 'os.makedirs', (['directory', 'mode'], {}), '(directory, mode)\n', (33049, 33066), False, 'import os\n'), ((34526, 34546), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (34544, 34546), False, 'import 
socket\n'), ((35281, 35311), 'os.path.exists', 'os.path.exists', (['state_file_dir'], {}), '(state_file_dir)\n', (35295, 35311), False, 'import os\n'), ((38341, 38362), 'logging.info', 'logging.info', (['message'], {}), '(message)\n', (38353, 38362), False, 'import logging\n'), ((39064, 39121), 'sys.stdout.write', 'sys.stdout.write', (["('%s Version: %s\\n' % (args[0], VERSION))"], {}), "('%s Version: %s\\n' % (args[0], VERSION))\n", (39080, 39121), False, 'import sys\n'), ((39130, 39141), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (39138, 39141), False, 'import sys\n'), ((40915, 40968), 'os.path.join', 'os.path.join', (["tls_paths['mount_dir']", '"""publicKey.pem"""'], {}), "(tls_paths['mount_dir'], 'publicKey.pem')\n", (40927, 40968), False, 'import os\n'), ((42384, 42425), 'os.path.join', 'os.path.join', (['base_path', '"""efs-utils-lock"""'], {}), "(base_path, 'efs-utils-lock')\n", (42396, 42425), False, 'import os\n'), ((42438, 42505), 'os.open', 'os.open', (['lock_file', '(os.O_CREAT | os.O_DSYNC | os.O_EXCL | os.O_RDWR)'], {}), '(lock_file, os.O_CREAT | os.O_DSYNC | os.O_EXCL | os.O_RDWR)\n', (42445, 42505), False, 'import os\n'), ((43172, 43191), 'os.path.isfile', 'os.path.isfile', (['key'], {}), '(key)\n', (43186, 43191), False, 'import os\n'), ((43404, 43433), 'os.chmod', 'os.chmod', (['key', 'read_only_mode'], {}), '(key, read_only_mode)\n', (43412, 43433), False, 'import os\n'), ((47807, 47835), 'os.path.exists', 'os.path.exists', (['database_dir'], {}), '(database_dir)\n', (47821, 47835), False, 'import os\n'), ((47904, 47929), 'os.path.exists', 'os.path.exists', (['certs_dir'], {}), '(certs_dir)\n', (47918, 47929), False, 'import os\n'), ((48195, 48221), 'os.path.isfile', 'os.path.isfile', (['index_path'], {}), '(index_path)\n', (48209, 48221), False, 'import os\n'), ((48272, 48303), 'os.path.isfile', 'os.path.isfile', (['index_attr_path'], {}), '(index_attr_path)\n', (48286, 48303), False, 'import os\n'), ((48406, 48433), 'os.path.isfile', 'os.path.isfile', (['serial_path'], {}), '(serial_path)\n', (48420, 48433), False, 'import os\n'), ((48515, 48540), 'os.path.isfile', 'os.path.isfile', (['rand_path'], {}), '(rand_path)\n', (48529, 48540), False, 'import os\n'), ((48670, 48689), 'datetime.timedelta', 'timedelta', ([], {}), '(**kwargs)\n', (48679, 48689), False, 'from datetime import datetime, timedelta\n'), ((48887, 48899), 'os.geteuid', 'os.geteuid', ([], {}), '()\n', (48897, 48899), False, 'import os\n'), ((48914, 48963), 'sys.stderr.write', 'sys.stderr.write', (['"""only root can run mount.efs\n"""'], {}), "('only root can run mount.efs\\n')\n", (48930, 48963), False, 'import sys\n'), ((48972, 48983), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (48980, 48983), False, 'import sys\n'), ((49049, 49080), 'configparser.ConfigParser.SafeConfigParser', 'ConfigParser.SafeConfigParser', ([], {}), '()\n', (49078, 49080), False, 'from configparser import ConfigParser, NoOptionError, NoSectionError\n'), ((49873, 49904), 'os.path.join', 'os.path.join', (['log_dir', 'LOG_FILE'], {}), '(log_dir, LOG_FILE)\n', (49885, 49904), False, 'import os\n'), ((49975, 50041), 'logging.Formatter', 'logging.Formatter', ([], {'fmt': '"""%(asctime)s - %(levelname)s - %(message)s"""'}), "(fmt='%(asctime)s - %(levelname)s - %(message)s')\n", (49992, 50041), False, 'import logging\n'), ((50164, 50260), 'logging.error', 'logging.error', (['"""Malformed logging level "%s", setting logging level to %s"""', 'raw_level', 'level'], {}), '(\'Malformed logging level "%s", setting logging level to 
%s\',\n raw_level, level)\n', (50177, 50260), False, 'import logging\n'), ((51712, 51742), 'socket.gethostbyname', 'socket.gethostbyname', (['dns_name'], {}), '(dns_name)\n', (51732, 51742), False, 'import socket\n'), ((52160, 52195), 'os.path.join', 'os.path.join', (['base_path', 'mount_name'], {}), '(base_path, mount_name)\n', (52172, 52195), False, 'import os\n'), ((52326, 52373), 'os.path.join', 'os.path.join', (['base_path', 'mount_name', '"""database"""'], {}), "(base_path, mount_name, 'database')\n", (52338, 52373), False, 'import os\n'), ((52396, 52440), 'os.path.join', 'os.path.join', (['base_path', 'mount_name', '"""certs"""'], {}), "(base_path, mount_name, 'certs')\n", (52408, 52440), False, 'import os\n'), ((52459, 52516), 'os.path.join', 'os.path.join', (['base_path', 'mount_name', '"""database/index.txt"""'], {}), "(base_path, mount_name, 'database/index.txt')\n", (52471, 52516), False, 'import os\n'), ((52540, 52602), 'os.path.join', 'os.path.join', (['base_path', 'mount_name', '"""database/index.txt.attr"""'], {}), "(base_path, mount_name, 'database/index.txt.attr')\n", (52552, 52602), False, 'import os\n'), ((52622, 52676), 'os.path.join', 'os.path.join', (['base_path', 'mount_name', '"""database/serial"""'], {}), "(base_path, mount_name, 'database/serial')\n", (52634, 52676), False, 'import os\n'), ((52694, 52747), 'os.path.join', 'os.path.join', (['base_path', 'mount_name', '"""database/.rand"""'], {}), "(base_path, mount_name, 'database/.rand')\n", (52706, 52747), False, 'import os\n'), ((53050, 53071), 'base64.b64decode', 'base64.b64decode', (['key'], {}), '(key)\n', (53066, 53071), False, 'import base64\n'), ((56364, 56391), 'urllib.quote_plus', 'quote_plus', (['public_key_hash'], {}), '(public_key_hash)\n', (56374, 56391), False, 'from urllib import quote_plus\n'), ((56493, 56523), 'urllib.quote_plus', 'quote_plus', (['formatted_datetime'], {}), '(formatted_datetime)\n', (56503, 56523), False, 'from urllib import quote_plus\n'), ((56690, 56715), 'urllib.quote_plus', 'quote_plus', (['session_token'], {}), '(session_token)\n', (56700, 56715), False, 'from urllib import quote_plus\n'), ((58441, 58472), 'socket.gethostbyname_ex', 'socket.gethostbyname_ex', (['remote'], {}), '(remote)\n', (58464, 58472), False, 'import socket\n'), ((60246, 60273), 'os.path.ismount', 'os.path.ismount', (['mountpoint'], {}), '(mountpoint)\n', (60261, 60273), False, 'import os\n'), ((60312, 60415), 'sys.stdout.write', 'sys.stdout.write', (['("%s is already mounted, please run \'mount\' command to verify\\n" % mountpoint)'], {}), '(\n "%s is already mounted, please run \'mount\' command to verify\\n" %\n mountpoint)\n', (60328, 60415), False, 'import sys\n'), ((60415, 60483), 'logging.warning', 'logging.warning', (["('%s is already mounted, mount aborted' % mountpoint)"], {}), "('%s is already mounted, mount aborted' % mountpoint)\n", (60430, 60483), False, 'import logging\n'), ((60624, 60641), 'threading.Event', 'threading.Event', ([], {}), '()\n', (60639, 60641), False, 'import threading\n'), ((60654, 60746), 'threading.Thread', 'threading.Thread', ([], {'target': 'poll_tunnel_process', 'args': '(tunnel_proc, fs_id, mount_completed)'}), '(target=poll_tunnel_process, args=(tunnel_proc, fs_id,\n mount_completed))\n', (60670, 60746), False, 'import threading\n'), ((64282, 64356), 'logging.error', 'logging.error', (['"""Failed to import botocore, please install botocore first."""'], {}), "('Failed to import botocore, please install botocore first.')\n", (64295, 64356), False, 'import 
logging\n'), ((75650, 75727), 'logging.debug', 'logging.debug', (["('The service cannot complete the request, %s' % error.response)"], {}), "('The service cannot complete the request, %s' % error.response)\n", (75663, 75727), False, 'import logging\n'), ((7117, 7258), 'logging.warning', 'logging.warning', (['"""Region not found in config file and metadata service call failed, falling back to legacy "dns_name_format" check"""'], {}), '(\n \'Region not found in config file and metadata service call failed, falling back to legacy "dns_name_format" check\'\n )\n', (7132, 7258), False, 'import logging\n'), ((7579, 7649), 'logging.warning', 'logging.warning', (['"""Legacy check for region in "dns_name_format" failed"""'], {}), '(\'Legacy check for region in "dns_name_format" failed\')\n', (7594, 7649), False, 'import logging\n'), ((9914, 9981), 'urllib.request.Request', 'Request', (['INSTANCE_METADATA_TOKEN_URL'], {'headers': 'headers', 'method': '"""PUT"""'}), "(INSTANCE_METADATA_TOKEN_URL, headers=headers, method='PUT')\n", (9921, 9981), False, 'from urllib.request import urlopen, Request\n'), ((9996, 10008), 'urllib.request.urlopen', 'urlopen', (['req'], {}), '(req)\n', (10003, 10008), False, 'from urllib.request import urlopen, Request\n'), ((18459, 18538), 'logging.debug', 'logging.debug', (['"""No [%s] section found in config file %s"""', 'awsprofile', 'file_path'], {}), "('No [%s] section found in config file %s', awsprofile, file_path)\n", (18472, 18538), False, 'import logging\n'), ((20670, 20691), 'json.loads', 'json.loads', (['resp_body'], {}), '(resp_body)\n', (20680, 20691), False, 'import json\n'), ((25888, 25943), 'logging.debug', 'logging.debug', (['"""Unable to read %s"""', 'SYSTEM_RELEASE_PATH'], {}), "('Unable to read %s', SYSTEM_RELEASE_PATH)\n", (25901, 25943), False, 'import logging\n'), ((26146, 26197), 'logging.debug', 'logging.debug', (['"""Unable to read %s"""', 'OS_RELEASE_PATH'], {}), "('Unable to read %s', OS_RELEASE_PATH)\n", (26159, 26197), False, 'import logging\n'), ((27056, 27112), 'os.path.join', 'os.path.join', (['log_dir', "('%s.stunnel.log' % mount_filename)"], {}), "(log_dir, '%s.stunnel.log' % mount_filename)\n", (27068, 27112), False, 'import os\n'), ((29602, 29642), 'os.path.join', 'os.path.join', (['state_file_dir', 'state_file'], {}), '(state_file_dir, state_file)\n', (29614, 29642), False, 'import os\n'), ((30769, 30816), 'logging.warning', 'logging.warning', (['"""Unable to read %s"""', 'comm_file'], {}), "('Unable to read %s', comm_file)\n", (30784, 30816), False, 'import logging\n'), ((32080, 32172), 'subprocess.call', 'subprocess.call', (["['systemctl', 'is-active', '--quiet', WATCHDOG_SERVICE]"], {'close_fds': '(True)'}), "(['systemctl', 'is-active', '--quiet', WATCHDOG_SERVICE],\n close_fds=True)\n", (32095, 32172), False, 'import subprocess\n'), ((32575, 32615), 'sys.stderr.write', 'sys.stderr.write', (["('%s\\n' % error_message)"], {}), "('%s\\n' % error_message)\n", (32591, 32615), False, 'import sys\n'), ((32624, 32654), 'logging.warning', 'logging.warning', (['error_message'], {}), '(error_message)\n', (32639, 32654), False, 'import logging\n'), ((36596, 36645), 'os.path.join', 'os.path.join', (['state_file_dir', 'temp_tls_state_file'], {}), '(state_file_dir, temp_tls_state_file)\n', (36608, 36645), False, 'import os\n'), ((36647, 36700), 'os.path.join', 'os.path.join', (['state_file_dir', 'temp_tls_state_file[1:]'], {}), '(state_file_dir, temp_tls_state_file[1:])\n', (36659, 36700), False, 'import os\n'), ((42685, 42696), 'os.close', 
'os.close', (['f'], {}), '(f)\n', (42693, 42696), False, 'import os\n'), ((42709, 42729), 'os.remove', 'os.remove', (['lock_file'], {}), '(lock_file)\n', (42718, 42729), False, 'import os\n'), ((47152, 47263), 'logging.error', 'logging.error', (['(\'Command %s failed, rc=%s, stdout="%s", stderr="%s"\' % (cmd, rc, output, err))'], {'exc_info': '(True)'}), '(\'Command %s failed, rc=%s, stdout="%s", stderr="%s"\' % (cmd,\n rc, output, err), exc_info=True)\n', (47165, 47263), False, 'import logging\n'), ((49120, 49134), 'configparser.ConfigParser', 'ConfigParser', ([], {}), '()\n', (49132, 49134), False, 'from configparser import ConfigParser, NoOptionError, NoSectionError\n'), ((61225, 61270), 'sys.stderr.write', 'sys.stderr.write', (["('WARN: %s\\n' % warn_message)"], {}), "('WARN: %s\\n' % warn_message)\n", (61241, 61270), False, 'import sys\n'), ((61283, 61312), 'logging.warning', 'logging.warning', (['warn_message'], {}), '(warn_message)\n', (61298, 61312), False, 'import logging\n'), ((67678, 67744), 'logging.warning', 'logging.warning', (["('Credentials are not properly configured, %s' % e)"], {}), "('Credentials are not properly configured, %s' % e)\n", (67693, 67744), False, 'import logging\n'), ((67815, 67875), 'logging.warning', 'logging.warning', (["('Could not connect to the endpoint, %s' % e)"], {}), "('Could not connect to the endpoint, %s' % e)\n", (67830, 67875), False, 'import logging\n'), ((67932, 67973), 'logging.warning', 'logging.warning', (["('Unknown error, %s.' % e)"], {}), "('Unknown error, %s.' % e)\n", (67947, 67973), False, 'import logging\n'), ((69393, 69459), 'logging.warning', 'logging.warning', (["('Credentials are not properly configured, %s' % e)"], {}), "('Credentials are not properly configured, %s' % e)\n", (69408, 69459), False, 'import logging\n'), ((69530, 69590), 'logging.warning', 'logging.warning', (["('Could not connect to the endpoint, %s' % e)"], {}), "('Could not connect to the endpoint, %s' % e)\n", (69545, 69590), False, 'import logging\n'), ((69647, 69688), 'logging.warning', 'logging.warning', (["('Unknown error, %s.' % e)"], {}), "('Unknown error, %s.' % e)\n", (69662, 69688), False, 'import logging\n'), ((71112, 71178), 'logging.warning', 'logging.warning', (["('Credentials are not properly configured, %s' % e)"], {}), "('Credentials are not properly configured, %s' % e)\n", (71127, 71178), False, 'import logging\n'), ((71249, 71309), 'logging.warning', 'logging.warning', (["('Could not connect to the endpoint, %s' % e)"], {}), "('Could not connect to the endpoint, %s' % e)\n", (71264, 71309), False, 'import logging\n'), ((71366, 71407), 'logging.warning', 'logging.warning', (["('Unknown error, %s.' % e)"], {}), "('Unknown error, %s.' % e)\n", (71381, 71407), False, 'import logging\n'), ((73474, 73540), 'logging.warning', 'logging.warning', (["('Credentials are not properly configured, %s' % e)"], {}), "('Credentials are not properly configured, %s' % e)\n", (73489, 73540), False, 'import logging\n'), ((73611, 73671), 'logging.warning', 'logging.warning', (["('Could not connect to the endpoint, %s' % e)"], {}), "('Could not connect to the endpoint, %s' % e)\n", (73626, 73671), False, 'import logging\n'), ((73728, 73769), 'logging.warning', 'logging.warning', (["('Unknown error, %s.' % e)"], {}), "('Unknown error, %s.' 
% e)\n", (73743, 73769), False, 'import logging\n'), ((74994, 75060), 'logging.warning', 'logging.warning', (["('Credentials are not properly configured, %s' % e)"], {}), "('Credentials are not properly configured, %s' % e)\n", (75009, 75060), False, 'import logging\n'), ((75130, 75190), 'logging.warning', 'logging.warning', (["('Could not connect to the endpoint, %s' % e)"], {}), "('Could not connect to the endpoint, %s' % e)\n", (75145, 75190), False, 'import logging\n'), ((75246, 75286), 'logging.warning', 'logging.warning', (["('Unknown error, %s' % e)"], {}), "('Unknown error, %s' % e)\n", (75261, 75286), False, 'import logging\n'), ((75783, 75870), 'logging.debug', 'logging.debug', (["('User is not authorized to perform the action, %s' % error.response)"], {}), "('User is not authorized to perform the action, %s' % error.\n response)\n", (75796, 75870), False, 'import logging\n'), ((75884, 75929), 'logging.debug', 'logging.debug', (["('Unexpected error: %s' % error)"], {}), "('Unexpected error: %s' % error)\n", (75897, 75929), False, 'import logging\n'), ((8593, 8671), 'logging.warning', 'logging.warning', (["('%s not present in %s: %s' % (property, instance_identity, e))"], {}), "('%s not present in %s: %s' % (property, instance_identity, e))\n", (8608, 8671), False, 'import logging\n'), ((8715, 8800), 'logging.warning', 'logging.warning', (["('response %s is not a json object: %s' % (instance_identity, e))"], {}), "('response %s is not a json object: %s' % (instance_identity, e)\n )\n", (8730, 8800), False, 'import logging\n'), ((17946, 18079), 'logging.debug', 'logging.debug', (['"""aws_access_key_id or aws_secret_access_key not found in %s under named profile [%s]"""', 'file_path', 'awsprofile'], {}), "(\n 'aws_access_key_id or aws_secret_access_key not found in %s under named profile [%s]'\n , file_path, awsprofile)\n", (17959, 18079), False, 'import logging\n'), ((18150, 18211), 'logging.debug', 'logging.debug', (['"""aws_session_token not found in %s"""', 'file_path'], {}), "('aws_session_token not found in %s', file_path)\n", (18163, 18211), False, 'import logging\n'), ((19927, 19950), 'urllib.request.urlopen', 'urlopen', (['req'], {'timeout': '(1)'}), '(req, timeout=1)\n', (19934, 19950), False, 'from urllib.request import urlopen, Request\n'), ((23819, 23911), 'logging.debug', 'logging.debug', (['"""No CA file configured, using default CA file %s"""', 'DEFAULT_STUNNEL_CAFILE'], {}), "('No CA file configured, using default CA file %s',\n DEFAULT_STUNNEL_CAFILE)\n", (23832, 23911), False, 'import logging\n'), ((30527, 30543), 'os._exit', 'os._exit', (['e.code'], {}), '(e.code)\n', (30535, 30543), False, 'import os\n'), ((31830, 31934), 'subprocess.Popen', 'subprocess.Popen', (["['/sbin/start', WATCHDOG_SERVICE]"], {'stdout': 'devnull', 'stderr': 'devnull', 'close_fds': '(True)'}), "(['/sbin/start', WATCHDOG_SERVICE], stdout=devnull, stderr=\n devnull, close_fds=True)\n", (31846, 31934), False, 'import subprocess\n'), ((31974, 32030), 'logging.debug', 'logging.debug', (['"""%s is already running"""', 'WATCHDOG_SERVICE'], {}), "('%s is already running', WATCHDOG_SERVICE)\n", (31987, 32030), False, 'import logging\n'), ((32389, 32445), 'logging.debug', 'logging.debug', (['"""%s is already running"""', 'WATCHDOG_SERVICE'], {}), "('%s is already running', WATCHDOG_SERVICE)\n", (32402, 32445), False, 'import logging\n'), ((32890, 32984), 'logging.warning', 'logging.warning', (['"""Bad state_file_dir_mode "%s" in config file "%s\\""""', 'mode_str', 'CONFIG_FILE'], {}), '(\'Bad 
state_file_dir_mode "%s" in config file "%s"\',\n mode_str, CONFIG_FILE)\n', (32905, 32984), False, 'import logging\n'), ((42564, 42575), 'os.getpid', 'os.getpid', ([], {}), '()\n', (42573, 42575), False, 'import os\n'), ((66860, 66938), 'logging.debug', 'logging.debug', (["('Log group %s already exist, %s' % (log_group_name, e.response))"], {}), "('Log group %s already exist, %s' % (log_group_name, e.response))\n", (66873, 66938), False, 'import logging\n'), ((68674, 68753), 'logging.error', 'logging.error', (["('Log group %s does not exist, %s' % (log_group_name, e.response))"], {}), "('Log group %s does not exist, %s' % (log_group_name, e.response))\n", (68687, 68753), False, 'import logging\n'), ((70401, 70518), 'logging.debug', 'logging.debug', (["('Log stream %s already exist in log group %s, %s' % (log_stream_name,\n log_group_name, e.response))"], {}), "('Log stream %s already exist in log group %s, %s' % (\n log_stream_name, log_group_name, e.response))\n", (70414, 70518), False, 'import logging\n'), ((72407, 72472), 'logging.debug', 'logging.debug', (["('The sequence token is not valid, %s' % e.response)"], {}), "('The sequence token is not valid, %s' % e.response)\n", (72420, 72472), False, 'import logging\n'), ((3041, 3052), 'os.getuid', 'os.getuid', ([], {}), '()\n', (3050, 3052), False, 'import os\n'), ((3156, 3167), 'os.getuid', 'os.getuid', ([], {}), '()\n', (3165, 3167), False, 'import os\n'), ((32256, 32366), 'subprocess.Popen', 'subprocess.Popen', (["['systemctl', 'start', WATCHDOG_SERVICE]"], {'stdout': 'devnull', 'stderr': 'devnull', 'close_fds': '(True)'}), "(['systemctl', 'start', WATCHDOG_SERVICE], stdout=devnull,\n stderr=devnull, close_fds=True)\n", (32272, 32366), False, 'import subprocess\n'), ((33134, 33158), 'os.path.isdir', 'os.path.isdir', (['directory'], {}), '(directory)\n', (33147, 33158), False, 'import os\n'), ((67027, 67131), 'logging.error', 'logging.error', (["('Reached the maximum number of log groups that can be created, %s' % e.\n response)"], {}), "(\n 'Reached the maximum number of log groups that can be created, %s' % e.\n response)\n", (67040, 67131), False, 'import logging\n'), ((68846, 68973), 'logging.debug', 'logging.debug', (["('Multiple requests to update the same log group %s were in conflict, %s' %\n (log_group_name, e.response))"], {}), "(\n 'Multiple requests to update the same log group %s were in conflict, %s' %\n (log_group_name, e.response))\n", (68859, 68973), False, 'import logging\n'), ((70605, 70765), 'logging.error', 'logging.error', (["('Either parameter log group name %s or log stream name %s is specified incorrectly, %s'\n % (log_group_name, log_stream_name, e.response))"], {}), "(\n 'Either parameter log group name %s or log stream name %s is specified incorrectly, %s'\n % (log_group_name, log_stream_name, e.response))\n", (70618, 70765), False, 'import logging\n'), ((72565, 72655), 'logging.debug', 'logging.debug', (["('One of the parameter to put log events is not valid, %s' % e.response)"], {}), "('One of the parameter to put log events is not valid, %s' % e\n .response)\n", (72578, 72655), False, 'import logging\n'), ((42974, 43050), 'logging.info', 'logging.info', (['"""Failed to take out private key creation lock, sleeping 50 ms"""'], {}), "('Failed to take out private key creation lock, sleeping 50 ms')\n", (42986, 43050), False, 'import logging\n'), ((43071, 43087), 'time.sleep', 'time.sleep', (['(0.05)'], {}), '(0.05)\n', (43081, 43087), False, 'import time\n'), ((67214, 67341), 'logging.debug', 'logging.debug', 
(["('Multiple requests to update the same log group %s were in conflict, %s' %\n (log_group_name, e.response))"], {}), "(\n 'Multiple requests to update the same log group %s were in conflict, %s' %\n (log_group_name, e.response))\n", (67227, 67341), False, 'import logging\n'), ((69057, 69218), 'logging.error', 'logging.error', (["('Either parameter log group name %s or retention in days %s is specified incorrectly, %s'\n % (log_group_name, retention_days, e.response))"], {}), "(\n 'Either parameter log group name %s or retention in days %s is specified incorrectly, %s'\n % (log_group_name, retention_days, e.response))\n", (69070, 69218), False, 'import logging\n'), ((70874, 70953), 'logging.error', 'logging.error', (["('Log group %s does not exist, %s' % (log_group_name, e.response))"], {}), "('Log group %s does not exist, %s' % (log_group_name, e.response))\n", (70887, 70953), False, 'import logging\n'), ((72746, 72822), 'logging.debug', 'logging.debug', (["('The event %s was already logged, %s' % (message, e.response))"], {}), "('The event %s was already logged, %s' % (message, e.response))\n", (72759, 72822), False, 'import logging\n'), ((12921, 12948), 'os.path.basename', 'os.path.basename', (['file_path'], {}), '(file_path)\n', (12937, 12948), False, 'import os\n'), ((23139, 23166), 'os.path.abspath', 'os.path.abspath', (['mountpoint'], {}), '(mountpoint)\n', (23154, 23166), False, 'import os\n'), ((67425, 67524), 'logging.error', 'logging.error', (["('Log group name %s is specified incorrectly, %s' % (log_group_name, e.\n response))"], {}), "('Log group name %s is specified incorrectly, %s' % (\n log_group_name, e.response))\n", (67438, 67524), False, 'import logging\n'), ((71754, 71765), 'time.time', 'time.time', ([], {}), '()\n', (71763, 71765), False, 'import time\n'), ((72917, 73027), 'logging.debug', 'logging.debug', (["('The most likely cause is an invalid AWS access key ID or secret Key, %s' %\n e.response)"], {}), "(\n 'The most likely cause is an invalid AWS access key ID or secret Key, %s' %\n e.response)\n", (72930, 73027), False, 'import logging\n'), ((73363, 73404), 'logging.debug', 'logging.debug', (["('Unexpected error: %s' % e)"], {}), "('Unexpected error: %s' % e)\n", (73376, 73404), False, 'import logging\n')] |
"""
This script is where the preprocessed data is used to train the SVM model that
performs the classification. I am using Stratified K-Fold Cross-Validation so
that class imbalance or an unlucky train/test split does not bias the model's
reported accuracy.
REFERENCE: https://medium.com/@bedigunjit/simple-guide-to-text-classification-nlp-using-svm-and-naive-bayes-with-python-421db3a72d34
"""
import numpy as np
import pandas as pd
from sklearn import model_selection, svm
from sklearn.metrics import accuracy_score
from sklearn.preprocessing import LabelEncoder
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.model_selection import StratifiedKFold
# Open the preprocessed CSV
df = pd.read_csv("preprocessed.csv", index_col=0)
print(df.head())
print("SPLITTING TRAIN-TEST")
x = df["Text"]
y = df["PublicationTitle"]
train_x, test_x, train_y, test_y = model_selection.train_test_split(
df["Text"], df["PublicationTitle"], test_size=0.3)
# Label encode the target variable to transform categorical data of string
# type into numerical values the model can understand
encoder = LabelEncoder()
# train_y = encoder.fit_transform(train_y)
# test_y = encoder.fit_transform(test_y)
# Word vectorization:
# turning the collection of text documents into numerical feature vectors.
# We are using Term Frequency - Inverse Document Frequency (TF-IDF) weighting.
tfidf_vect = TfidfVectorizer(max_features=5000)
tfidf_vect.fit(df["Text"])
# train_x_tfidf = tfidf_vect.transform(train_x)
# test_x_tfidf = tfidf_vect.transform(test_x)
x_tfidf = tfidf_vect.transform(df["Text"])
y = encoder.fit_transform(y)
# print(tfidf_vect.vocabulary_)
# Fit the training dataset to the classifier
print("TRAINING THE MODEL")
SVM = svm.SVC(C=1.0, kernel='linear', degree=3, gamma='auto')
skf = StratifiedKFold(n_splits=10, shuffle=True, random_state=1)
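# 10-fold stratified CV: each fold keeps approximately the same class distribution
# as the full dataset, which is what guards against the imbalance mentioned above.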
accuracies = []
fold = 1
for train_idx, test_idx in skf.split(x, y):
print("Working on fold", fold)
x_train_fold, x_test_fold = x_tfidf[train_idx], x_tfidf[test_idx]
y_train_fold, y_test_fold = y[train_idx], y[test_idx]
SVM.fit(x_train_fold, y_train_fold)
acc = SVM.score(x_test_fold, y_test_fold)
print("Acc", fold, ":", acc)
accuracies.append(acc)
fold += 1
print("ACCURACIES:", accuracies)
print("Max Accuracy:", np.max(accuracies))
print("Min Accuracy:", np.min(accuracies))
print("Mean of Accuracies:", np.mean(accuracies))
print("STD of Accuracies:", np.std(accuracies))
# print("RUNNING TEST PREDICTIONS")
# predictions = SVM.predict(test_x_tfidf)
# # Calculate accuracy score
# accuracy = accuracy_score(test_y, predictions)
# print("Accuracy:", str(accuracy * 100) + "%")
| [
"numpy.mean",
"sklearn.preprocessing.LabelEncoder",
"pandas.read_csv",
"sklearn.model_selection.train_test_split",
"numpy.std",
"numpy.max",
"sklearn.model_selection.StratifiedKFold",
"sklearn.feature_extraction.text.TfidfVectorizer",
"numpy.min",
"sklearn.svm.SVC"
] | [((686, 730), 'pandas.read_csv', 'pd.read_csv', (['"""preprocessed.csv"""'], {'index_col': '(0)'}), "('preprocessed.csv', index_col=0)\n", (697, 730), True, 'import pandas as pd\n'), ((857, 944), 'sklearn.model_selection.train_test_split', 'model_selection.train_test_split', (["df['Text']", "df['PublicationTitle']"], {'test_size': '(0.3)'}), "(df['Text'], df['PublicationTitle'],\n test_size=0.3)\n", (889, 944), False, 'from sklearn import model_selection, svm\n'), ((1087, 1101), 'sklearn.preprocessing.LabelEncoder', 'LabelEncoder', ([], {}), '()\n', (1099, 1101), False, 'from sklearn.preprocessing import LabelEncoder\n'), ((1342, 1376), 'sklearn.feature_extraction.text.TfidfVectorizer', 'TfidfVectorizer', ([], {'max_features': '(5000)'}), '(max_features=5000)\n', (1357, 1376), False, 'from sklearn.feature_extraction.text import TfidfVectorizer\n'), ((1686, 1741), 'sklearn.svm.SVC', 'svm.SVC', ([], {'C': '(1.0)', 'kernel': '"""linear"""', 'degree': '(3)', 'gamma': '"""auto"""'}), "(C=1.0, kernel='linear', degree=3, gamma='auto')\n", (1693, 1741), False, 'from sklearn import model_selection, svm\n'), ((1749, 1807), 'sklearn.model_selection.StratifiedKFold', 'StratifiedKFold', ([], {'n_splits': '(10)', 'shuffle': '(True)', 'random_state': '(1)'}), '(n_splits=10, shuffle=True, random_state=1)\n', (1764, 1807), False, 'from sklearn.model_selection import StratifiedKFold\n'), ((2260, 2278), 'numpy.max', 'np.max', (['accuracies'], {}), '(accuracies)\n', (2266, 2278), True, 'import numpy as np\n'), ((2303, 2321), 'numpy.min', 'np.min', (['accuracies'], {}), '(accuracies)\n', (2309, 2321), True, 'import numpy as np\n'), ((2352, 2371), 'numpy.mean', 'np.mean', (['accuracies'], {}), '(accuracies)\n', (2359, 2371), True, 'import numpy as np\n'), ((2401, 2419), 'numpy.std', 'np.std', (['accuracies'], {}), '(accuracies)\n', (2407, 2419), True, 'import numpy as np\n')] |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from lxml import etree
from openerp.osv import fields, osv
class account_payment_populate_statement(osv.osv_memory):
_name = "account.payment.populate.statement"
_description = "Account Payment Populate Statement"
_columns = {
'lines': fields.many2many('payment.line', 'payment_line_rel_', 'payment_id', 'line_id', 'Payment Lines')
}
def fields_view_get(self, cr, uid, view_id=None, view_type='form', context=None, toolbar=False, submenu=False):
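        # Restrict the selectable payment lines to valid, unreconciled move lines that are
        # not yet linked to a statement line (or whose order has no payment mode) by
        # injecting a domain on the 'lines' field of the form view.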
line_obj = self.pool.get('payment.line')
res = super(account_payment_populate_statement, self).fields_view_get(cr, uid, view_id=view_id, view_type=view_type, context=context, toolbar=toolbar, submenu=False)
line_ids = line_obj.search(cr, uid, [
('move_line_id.reconcile_id', '=', False),
('bank_statement_line_id', '=', False),
('move_line_id.state','=','valid')])
line_ids.extend(line_obj.search(cr, uid, [
('move_line_id.reconcile_id', '=', False),
('order_id.mode', '=', False),
('move_line_id.state','=','valid')]))
domain = '[("id", "in", '+ str(line_ids)+')]'
doc = etree.XML(res['arch'])
nodes = doc.xpath("//field[@name='lines']")
for node in nodes:
node.set('domain', domain)
res['arch'] = etree.tostring(doc)
return res
def populate_statement(self, cr, uid, ids, context=None):
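        # For every selected payment line: create a payment voucher matched against the
        # payment's move line, add a bank statement line for the (negated) amount, and
        # link the payment line to the newly created statement line.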
line_obj = self.pool.get('payment.line')
statement_obj = self.pool.get('account.bank.statement')
statement_line_obj = self.pool.get('account.bank.statement.line')
currency_obj = self.pool.get('res.currency')
voucher_obj = self.pool.get('account.voucher')
voucher_line_obj = self.pool.get('account.voucher.line')
move_line_obj = self.pool.get('account.move.line')
if context is None:
context = {}
data = self.read(cr, uid, ids, context=context)[0]
line_ids = data['lines']
if not line_ids:
return {'type': 'ir.actions.act_window_close'}
statement = statement_obj.browse(cr, uid, context['active_id'], context=context)
for line in line_obj.browse(cr, uid, line_ids, context=context):
ctx = context.copy()
            ctx['date'] = line.ml_maturity_date  # was value_date earlier, but that field no longer exists
amount = currency_obj.compute(cr, uid, line.currency.id,
statement.currency.id, line.amount_currency, context=ctx)
if not line.move_line_id.id:
continue
context = dict(context, move_line_ids=[line.move_line_id.id])
result = voucher_obj.onchange_partner_id(cr, uid, [], partner_id=line.partner_id.id, journal_id=statement.journal_id.id, amount=abs(amount), currency_id= statement.currency.id, ttype='payment', date=line.ml_maturity_date, context=context)
if line.move_line_id:
voucher_res = {
'type': 'payment',
'name': line.name,
'partner_id': line.partner_id.id,
'journal_id': statement.journal_id.id,
'account_id': result['value'].get('account_id', statement.journal_id.default_credit_account_id.id),
'company_id': statement.company_id.id,
'currency_id': statement.currency.id,
'date': line.date or time.strftime('%Y-%m-%d'),
'amount': abs(amount),
'period_id': statement.period_id.id,
}
voucher_id = voucher_obj.create(cr, uid, voucher_res, context=context)
voucher_line_dict = {}
for line_dict in result['value']['line_cr_ids'] + result['value']['line_dr_ids']:
move_line = move_line_obj.browse(cr, uid, line_dict['move_line_id'], context)
if line.move_line_id.move_id.id == move_line.move_id.id:
voucher_line_dict = line_dict
if voucher_line_dict:
voucher_line_dict.update({'voucher_id': voucher_id})
voucher_line_obj.create(cr, uid, voucher_line_dict, context=context)
st_line_id = statement_line_obj.create(cr, uid, {
'name': line.order_id.reference or '?',
'amount': - amount,
'partner_id': line.partner_id.id,
'statement_id': statement.id,
'ref': line.communication,
}, context=context)
line_obj.write(cr, uid, [line.id], {'bank_statement_line_id': st_line_id})
return {'type': 'ir.actions.act_window_close'}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"lxml.etree.XML",
"openerp.osv.fields.many2many",
"lxml.etree.tostring",
"time.strftime"
] | [((1249, 1348), 'openerp.osv.fields.many2many', 'fields.many2many', (['"""payment.line"""', '"""payment_line_rel_"""', '"""payment_id"""', '"""line_id"""', '"""Payment Lines"""'], {}), "('payment.line', 'payment_line_rel_', 'payment_id',\n 'line_id', 'Payment Lines')\n", (1265, 1348), False, 'from openerp.osv import fields, osv\n'), ((2161, 2183), 'lxml.etree.XML', 'etree.XML', (["res['arch']"], {}), "(res['arch'])\n", (2170, 2183), False, 'from lxml import etree\n'), ((2324, 2343), 'lxml.etree.tostring', 'etree.tostring', (['doc'], {}), '(doc)\n', (2338, 2343), False, 'from lxml import etree\n'), ((4486, 4511), 'time.strftime', 'time.strftime', (['"""%Y-%m-%d"""'], {}), "('%Y-%m-%d')\n", (4499, 4511), False, 'import time\n')] |
from cklib.args import get_arg_parser, ArgumentParser
from cloudkeeper_plugin_cleanup_aws_loadbalancers import CleanupAWSLoadbalancersPlugin
def test_args():
arg_parser = get_arg_parser()
CleanupAWSLoadbalancersPlugin.add_args(arg_parser)
arg_parser.parse_args()
assert ArgumentParser.args.cleanup_aws_loadbalancers is False
assert ArgumentParser.args.cleanup_aws_loadbalancers_age == "7 days"
| [
"cklib.args.get_arg_parser",
"cloudkeeper_plugin_cleanup_aws_loadbalancers.CleanupAWSLoadbalancersPlugin.add_args"
] | [((177, 193), 'cklib.args.get_arg_parser', 'get_arg_parser', ([], {}), '()\n', (191, 193), False, 'from cklib.args import get_arg_parser, ArgumentParser\n'), ((198, 248), 'cloudkeeper_plugin_cleanup_aws_loadbalancers.CleanupAWSLoadbalancersPlugin.add_args', 'CleanupAWSLoadbalancersPlugin.add_args', (['arg_parser'], {}), '(arg_parser)\n', (236, 248), False, 'from cloudkeeper_plugin_cleanup_aws_loadbalancers import CleanupAWSLoadbalancersPlugin\n')] |
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
__author__ = "<NAME>, <NAME>, and <NAME>"
__copyright__ = "Copyright 2013-2015 UKP TU Darmstadt"
__credits__ = ["<NAME>", "<NAME>", "<NAME>"]
__license__ = "ASL"
class Redirector(webapp.RequestHandler):
def get(self):
self.redirect("/argunit/home")
def post(self):
self.redirect("/argunit/home")
application = webapp.WSGIApplication(
[('/.*', Redirector)],
debug=True)
def main():
run_wsgi_app(application)
if __name__ == "__main__":
main()
| [
"google.appengine.ext.webapp.WSGIApplication",
"google.appengine.ext.webapp.util.run_wsgi_app"
] | [((437, 494), 'google.appengine.ext.webapp.WSGIApplication', 'webapp.WSGIApplication', (["[('/.*', Redirector)]"], {'debug': '(True)'}), "([('/.*', Redirector)], debug=True)\n", (459, 494), False, 'from google.appengine.ext import webapp\n'), ((522, 547), 'google.appengine.ext.webapp.util.run_wsgi_app', 'run_wsgi_app', (['application'], {}), '(application)\n', (534, 547), False, 'from google.appengine.ext.webapp.util import run_wsgi_app\n')] |
from __future__ import annotations
import json
import logging
from contextlib import contextmanager, ExitStack
from typing import List, Dict
import pandas as pd
from lithops.storage import Storage
from lithops.storage.utils import CloudObject, StorageNoSuchKeyError
from sm.engine.annotation_lithops.build_moldb import (
build_moldb,
InputMolDb,
DbFDRData,
)
from sm.engine.annotation_lithops.calculate_centroids import (
calculate_centroids,
validate_centroids,
)
from sm.engine.annotation_lithops.executor import Executor
from sm.engine.annotation_lithops.io import (
CObj,
save_cobj,
iter_cobjects_with_prefetch,
deserialize,
)
from sm.engine.annotation_lithops.utils import jsonhash
from sm.engine.utils.db_mutex import DBMutex
from sm.engine.ds_config import DSConfig
from sm.engine.annotation.isocalc_wrapper import IsocalcWrapper
logger = logging.getLogger('annotation-pipeline')
class CentroidsCacheEntry:
def __init__(
self, executor: Executor, sm_storage: Dict, ds_config: DSConfig, moldbs: List[InputMolDb]
):
ds_hash_params = ds_config.copy()
self.ds_config = {
**ds_hash_params, # type: ignore # https://github.com/python/mypy/issues/4122
# Include the `targeted` value of databases so that a new cache entry is made if
# someone manually changes that field
'databases': [(moldb['id'], moldb['targeted']) for moldb in moldbs],
}
# Remove database_ids as it may be in a different order to moldbs
del self.ds_config['database_ids']
self.ds_hash = jsonhash(self.ds_config)
self.executor = executor
self.storage = executor.storage
self.bucket, raw_prefix = sm_storage['centroids']
self.prefix = f"{raw_prefix}/{self.ds_hash}"
self.config_key = f'{self.prefix}/ds_config.json'
self.meta_key = f'{self.prefix}/meta'
@contextmanager
def lock(self):
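        # Serialise cache population on a DB-level mutex keyed by the dataset hash, so
        # concurrent jobs sharing the same ds_config do not build the entry twice.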
with DBMutex().lock(self.ds_hash, timeout=3600):
yield
def load(self):
try:
db_data_cobjs, peaks_cobjs = deserialize(
self.storage.get_object(self.bucket, self.meta_key)
)
return db_data_cobjs, peaks_cobjs
except StorageNoSuchKeyError:
return None
def save(self, db_data_cobjs: List[CObj[DbFDRData]], peaks_cobjs: List[CObj[pd.DataFrame]]):
def batch_copy(src_cobjs: List[CloudObject], dest_prefix: str, *, storage: Storage):
# If Lithops' storage supported Copy Object operations, this could be easily optimized.
# Not sure if it's worth the effort yet
result_cobjs = []
for i, data in enumerate(iter_cobjects_with_prefetch(storage, src_cobjs)):
dest_key = f'{dest_prefix}/{i:06}'
result_cobjs.append(storage.put_cloudobject(data, dest_bucket, dest_key))
return result_cobjs
dest_bucket = self.bucket
# Copy cobjs to the cache dir
new_db_data_cobjs, new_peaks_cobjs = self.executor.map(
batch_copy,
[(db_data_cobjs, f'{self.prefix}/db_data'), (peaks_cobjs, f'{self.prefix}/peaks')],
runtime_memory=1024,
)
# Save config in case it's needed for debugging
self.storage.put_cloudobject(
json.dumps(self.ds_config, indent=4), self.bucket, self.config_key
)
# Save list of cobjects. This list would be easy to reconstruct by listing keys, but
# saving a separate object as the last step of the process is helpful to confirm that
# the cache item is complete, and didn't partially fail to copy.
save_cobj(self.storage, (new_db_data_cobjs, new_peaks_cobjs), self.bucket, self.meta_key)
return new_db_data_cobjs, new_peaks_cobjs
def clear(self):
keys = self.storage.list_keys(self.bucket, self.prefix)
if keys:
logger.info(f'Clearing centroids cache {self.prefix}')
self.storage.delete_objects(self.bucket, keys)
def get_moldb_centroids(
executor: Executor,
sm_storage: Dict,
ds_config: DSConfig,
moldbs: List[InputMolDb],
debug_validate=False,
use_cache=True,
use_db_mutex=True,
):
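    # Return (db_data_cobjs, peaks_cobjs), either loaded from the per-dataset cache or
    # rebuilt by generating the molecular DBs, computing centroids and, optionally,
    # validating them before the result is cached.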
moldb_cache = CentroidsCacheEntry(executor, sm_storage, ds_config, moldbs)
with ExitStack() as stack:
if use_db_mutex:
stack.enter_context(moldb_cache.lock())
if use_cache:
cached_val = moldb_cache.load()
else:
cached_val = None
moldb_cache.clear()
if cached_val:
db_data_cobjs, peaks_cobjs = cached_val
logger.info(
f'Loaded {len(db_data_cobjs)} DBs, {len(peaks_cobjs)} peak segms from cache'
)
else:
formula_cobjs, db_data_cobjs = build_moldb(executor, ds_config, moldbs)
isocalc_wrapper = IsocalcWrapper(ds_config)
peaks_cobjs = calculate_centroids(executor, formula_cobjs, isocalc_wrapper)
if debug_validate:
validate_centroids(executor, peaks_cobjs)
moldb_cache.save(db_data_cobjs, peaks_cobjs)
logger.info(f'Saved {len(db_data_cobjs)} DBs, {len(peaks_cobjs)} peak segms to cache')
return db_data_cobjs, peaks_cobjs
| [
"logging.getLogger",
"sm.engine.annotation.isocalc_wrapper.IsocalcWrapper",
"sm.engine.annotation_lithops.io.iter_cobjects_with_prefetch",
"sm.engine.annotation_lithops.calculate_centroids.calculate_centroids",
"sm.engine.annotation_lithops.io.save_cobj",
"json.dumps",
"sm.engine.annotation_lithops.build_moldb.build_moldb",
"contextlib.ExitStack",
"sm.engine.annotation_lithops.calculate_centroids.validate_centroids",
"sm.engine.utils.db_mutex.DBMutex",
"sm.engine.annotation_lithops.utils.jsonhash"
] | [((886, 926), 'logging.getLogger', 'logging.getLogger', (['"""annotation-pipeline"""'], {}), "('annotation-pipeline')\n", (903, 926), False, 'import logging\n'), ((1614, 1638), 'sm.engine.annotation_lithops.utils.jsonhash', 'jsonhash', (['self.ds_config'], {}), '(self.ds_config)\n', (1622, 1638), False, 'from sm.engine.annotation_lithops.utils import jsonhash\n'), ((3707, 3800), 'sm.engine.annotation_lithops.io.save_cobj', 'save_cobj', (['self.storage', '(new_db_data_cobjs, new_peaks_cobjs)', 'self.bucket', 'self.meta_key'], {}), '(self.storage, (new_db_data_cobjs, new_peaks_cobjs), self.bucket,\n self.meta_key)\n', (3716, 3800), False, 'from sm.engine.annotation_lithops.io import CObj, save_cobj, iter_cobjects_with_prefetch, deserialize\n'), ((4366, 4377), 'contextlib.ExitStack', 'ExitStack', ([], {}), '()\n', (4375, 4377), False, 'from contextlib import contextmanager, ExitStack\n'), ((3362, 3398), 'json.dumps', 'json.dumps', (['self.ds_config'], {'indent': '(4)'}), '(self.ds_config, indent=4)\n', (3372, 3398), False, 'import json\n'), ((4873, 4913), 'sm.engine.annotation_lithops.build_moldb.build_moldb', 'build_moldb', (['executor', 'ds_config', 'moldbs'], {}), '(executor, ds_config, moldbs)\n', (4884, 4913), False, 'from sm.engine.annotation_lithops.build_moldb import build_moldb, InputMolDb, DbFDRData\n'), ((4944, 4969), 'sm.engine.annotation.isocalc_wrapper.IsocalcWrapper', 'IsocalcWrapper', (['ds_config'], {}), '(ds_config)\n', (4958, 4969), False, 'from sm.engine.annotation.isocalc_wrapper import IsocalcWrapper\n'), ((4996, 5057), 'sm.engine.annotation_lithops.calculate_centroids.calculate_centroids', 'calculate_centroids', (['executor', 'formula_cobjs', 'isocalc_wrapper'], {}), '(executor, formula_cobjs, isocalc_wrapper)\n', (5015, 5057), False, 'from sm.engine.annotation_lithops.calculate_centroids import calculate_centroids, validate_centroids\n'), ((2732, 2779), 'sm.engine.annotation_lithops.io.iter_cobjects_with_prefetch', 'iter_cobjects_with_prefetch', (['storage', 'src_cobjs'], {}), '(storage, src_cobjs)\n', (2759, 2779), False, 'from sm.engine.annotation_lithops.io import CObj, save_cobj, iter_cobjects_with_prefetch, deserialize\n'), ((5105, 5146), 'sm.engine.annotation_lithops.calculate_centroids.validate_centroids', 'validate_centroids', (['executor', 'peaks_cobjs'], {}), '(executor, peaks_cobjs)\n', (5123, 5146), False, 'from sm.engine.annotation_lithops.calculate_centroids import calculate_centroids, validate_centroids\n'), ((1982, 1991), 'sm.engine.utils.db_mutex.DBMutex', 'DBMutex', ([], {}), '()\n', (1989, 1991), False, 'from sm.engine.utils.db_mutex import DBMutex\n')] |
from PyQt5.QtWidgets import QMenu
from gui.main_window.node_editor.items.connector_item import ConnectorItem
class ConnectorTopItem(ConnectorItem):
""" Class to provide top connector functionality """
def __init__(self, index, nodeItem, nodeEditor, parent=None):
super(ConnectorTopItem, self).__init__(index, nodeItem, nodeEditor, parent)
def isTopConnector(self):
""" Returns whether the connector is a top connector (implementation for parent class) """
return True
def isInPlace(self):
""" Returns whether the connector is connected to a in-place working layer
A top connector is in place if any connected bottom connector is in place.
(implementation for parent class) """
for connection in self._connections:
if connection.getIsInPlace():
return True
return False
def getConnectedNodes(self):
""" Returns a list of node items, connected to this connector (implementation for parent class) """
nodes = list()
# for each connection get the node connected to the bottom of the connection
for connection in self._connections:
connectionsBottomConnector = connection.getBottomConnector()
if connectionsBottomConnector is not None:
nodes.append(connectionsBottomConnector.getNodeItem())
return nodes
def addConnection(self, connection):
""" Adds a connection to the connector and sets the start of the connection to this connectors position
(implementation for parent class) """
self._connections.append(connection)
connection.setStart(self.scenePos())
def updateConnectionPositions(self):
""" Updates the connected connections, sets the start of all connected connections to this connectors position
(implementation for parent class) """
for connection in self._connections:
connection.setStart(self.scenePos())
def contextMenuEvent(self, event):
""" Context menu for the top connector """
contextMenu = QMenu()
renameTop = contextMenu.addAction("Change name")
disconnectTop = contextMenu.addAction("Disconnect")
if self.getConnectionCount() == 0:
disconnectTop.setEnabled(False)
removeTop = contextMenu.addAction("Remove")
action = contextMenu.exec_(event.screenPos())
if action is not None:
if action == removeTop:
self._nodeEditor.tryToRemoveTopBlob(self._nodeItem.getLayerID(), self._index)
elif action == renameTop:
self._nodeEditor.tryToRenameTopBlob(self)
elif action == disconnectTop:
self._nodeEditor.disconnectTopBlob(self._nodeItem.getLayerID(), self._index)
| [
"PyQt5.QtWidgets.QMenu"
] | [((2120, 2127), 'PyQt5.QtWidgets.QMenu', 'QMenu', ([], {}), '()\n', (2125, 2127), False, 'from PyQt5.QtWidgets import QMenu\n')] |
from django.contrib import admin
from users.models import Friendship
admin.site.register(Friendship)
# Register your models here.
| [
"django.contrib.admin.site.register"
] | [((71, 102), 'django.contrib.admin.site.register', 'admin.site.register', (['Friendship'], {}), '(Friendship)\n', (90, 102), False, 'from django.contrib import admin\n')] |
import docx
doc = docx.Document('demo.docx')
print('paragraphs number: %s' % len(doc.paragraphs))
print('1st paragraph: %s' % doc.paragraphs[0].text)
print('2nd paragraph: %s' % doc.paragraphs[1].text)
print('paragraphs runs: %s' % len(doc.paragraphs[1].runs))
print('1st paragraph run: %s' % doc.paragraphs[1].runs[0].text)
print('2nd paragraph run: %s' % doc.paragraphs[1].runs[1].text)
print('3rd paragraph run: %s' % doc.paragraphs[1].runs[2].text)
print('4th paragraph run: %s' % doc.paragraphs[1].runs[3].text)
| [
"docx.Document"
] | [((21, 47), 'docx.Document', 'docx.Document', (['"""demo.docx"""'], {}), "('demo.docx')\n", (34, 47), False, 'import docx\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import re
import glob
import random
import struct
def get_old_seed():
with open('include/syscalls.h') as f:
code = f.read()
match = re.search(r'#define SW2_SEED (0x[a-fA-F0-9]{8})', code)
assert match is not None, 'SW2_SEED not found!'
return match.group(1)
def replace_seed(old_seed, new_seed):
with open('include/syscalls.h') as f:
code = f.read()
code = code.replace(
f'#define SW2_SEED {old_seed}',
f'#define SW2_SEED 0x{new_seed:08X}',
1
)
with open('include/syscalls.h', 'w') as f:
f.write(code)
def get_function_hash(seed, function_name, is_syscall=True):
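    # Underscores are stripped and Nt* syscall names are hashed as their Zw* aliases;
    # the running hash XORs in each overlapping 2-byte chunk of the name plus an
    # 8-bit right rotation (ror8) of the hash itself.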
function_hash = seed
function_name = function_name.replace('_', '')
if is_syscall and function_name[:2] == 'Nt':
function_name = 'Zw' + function_name[2:]
name = function_name + '\0'
ror8 = lambda v: ((v >> 8) & (2 ** 32 - 1)) | ((v << 24) & (2 ** 32 - 1))
for segment in [s for s in [name[i:i + 2] for i in range(len(name))] if len(s) == 2]:
partial_name_short = struct.unpack('<H', segment.encode())[0]
function_hash ^= partial_name_short + ror8(function_hash)
return function_hash
def replace_syscall_hashes(seed):
with open('source/syscalls.c') as f:
code = f.read()
regex = re.compile(r'__declspec\(naked\) NTSTATUS (Nt[^(]+)')
syscall_names = re.findall(regex, code)
syscall_names = set(syscall_names)
syscall_definitions = code.split('#elif defined(__GNUC__)')[3]
for syscall_name in syscall_names:
regex = re.compile('NTSTATUS ' + syscall_name + '\\(.*?"mov ecx, (0x[A-Fa-f0-9]{8})', re.DOTALL)
match = re.search(regex, syscall_definitions)
assert match is not None, f'hash of syscall {syscall_name} not found!'
old_hash = match.group(1)
new_hash = get_function_hash(seed, syscall_name)
print(f'{syscall_name} -> {old_hash} - 0x{new_hash:08X}')
code = code.replace(
old_hash,
f'0x{new_hash:08X}'
)
with open('source/syscalls.c', 'w') as f:
f.write(code)
with open('source/syscalls-asm.asm') as f:
code = f.read()
for syscall_name in syscall_names:
regex = re.compile(syscall_name + ' PROC.*?mov ecx, 0([A-Fa-f0-9]{8})h', re.DOTALL)
match = re.search(regex, code)
assert match is not None, f'hash of syscall {syscall_name} not found!'
old_hash = match.group(1)
new_hash = get_function_hash(seed, syscall_name)
code = code.replace(
f'0{old_hash}h',
f'0{new_hash:08X}h',
1
)
with open('source/syscalls-asm.asm', 'w') as f:
f.write(code)
def replace_dinvoke_hashes(seed):
for header_file in glob.glob("include/**/*.h", recursive=True):
with open(header_file) as f:
code = f.read()
regex = re.compile(r'#define (\w+)_SW2_HASH (0x[a-fA-F0-9]{8})')
matches = re.findall(regex, code)
for function_name, old_hash in matches:
new_hash = get_function_hash(seed, function_name, is_syscall=False)
code = code.replace(
f'#define {function_name}_SW2_HASH {old_hash}',
f'#define {function_name}_SW2_HASH 0x{new_hash:08X}',
1
)
if matches:
with open(header_file, 'w') as f:
f.write(code)
def main():
new_seed = random.randint(2 ** 28, 2 ** 32 - 1)
#new_seed = 0x1337c0de
old_seed = get_old_seed()
replace_seed(old_seed, new_seed)
replace_syscall_hashes(new_seed)
replace_dinvoke_hashes(new_seed)
if os.name == 'nt':
print('done! recompile with:\nnmake -f Makefile.msvc')
else:
print('done! recompile with:\nmake -f Makefile.mingw')
if __name__ == '__main__':
main()
| [
"re.compile",
"re.findall",
"random.randint",
"glob.glob",
"re.search"
] | [((208, 262), 're.search', 're.search', (['"""#define SW2_SEED (0x[a-fA-F0-9]{8})"""', 'code'], {}), "('#define SW2_SEED (0x[a-fA-F0-9]{8})', code)\n", (217, 262), False, 'import re\n'), ((1357, 1411), 're.compile', 're.compile', (['"""__declspec\\\\(naked\\\\) NTSTATUS (Nt[^(]+)"""'], {}), "('__declspec\\\\(naked\\\\) NTSTATUS (Nt[^(]+)')\n", (1367, 1411), False, 'import re\n'), ((1431, 1454), 're.findall', 're.findall', (['regex', 'code'], {}), '(regex, code)\n', (1441, 1454), False, 'import re\n'), ((2820, 2863), 'glob.glob', 'glob.glob', (['"""include/**/*.h"""'], {'recursive': '(True)'}), "('include/**/*.h', recursive=True)\n", (2829, 2863), False, 'import glob\n'), ((3497, 3533), 'random.randint', 'random.randint', (['(2 ** 28)', '(2 ** 32 - 1)'], {}), '(2 ** 28, 2 ** 32 - 1)\n', (3511, 3533), False, 'import random\n'), ((1617, 1709), 're.compile', 're.compile', (['(\'NTSTATUS \' + syscall_name + \'\\\\(.*?"mov ecx, (0x[A-Fa-f0-9]{8})\')', 're.DOTALL'], {}), '(\'NTSTATUS \' + syscall_name +\n \'\\\\(.*?"mov ecx, (0x[A-Fa-f0-9]{8})\', re.DOTALL)\n', (1627, 1709), False, 'import re\n'), ((1722, 1759), 're.search', 're.search', (['regex', 'syscall_definitions'], {}), '(regex, syscall_definitions)\n', (1731, 1759), False, 'import re\n'), ((2286, 2361), 're.compile', 're.compile', (["(syscall_name + ' PROC.*?mov ecx, 0([A-Fa-f0-9]{8})h')", 're.DOTALL'], {}), "(syscall_name + ' PROC.*?mov ecx, 0([A-Fa-f0-9]{8})h', re.DOTALL)\n", (2296, 2361), False, 'import re\n'), ((2378, 2400), 're.search', 're.search', (['regex', 'code'], {}), '(regex, code)\n', (2387, 2400), False, 'import re\n'), ((2946, 3002), 're.compile', 're.compile', (['"""#define (\\\\w+)_SW2_HASH (0x[a-fA-F0-9]{8})"""'], {}), "('#define (\\\\w+)_SW2_HASH (0x[a-fA-F0-9]{8})')\n", (2956, 3002), False, 'import re\n'), ((3021, 3044), 're.findall', 're.findall', (['regex', 'code'], {}), '(regex, code)\n', (3031, 3044), False, 'import re\n')] |
import os
from pathlib import Path
__all__ = ['list_files_recur', 'scan_and_create_dir_tree', 'get_all_data_files', 'get_subsubdirs']
def list_files_recur(path):
"""
Cheater function that wraps path.rglob().
:param Path path: path to list recursively
:return list: list of Path objects
"""
files = []
for file in path.rglob('*'):
files.append(file)
return files
def scan_and_create_dir_tree(path, file=True):
"""
Creates all the necessary directories for the file at the end of path to be created.
When specified with a filepath to a file or folder, it creates directories until the path is valid.
:param Path path: must end with a filename, else the final directory won't be created
:param bool file: Boolean, does the given path end with a file? If not, path.parts[-1] will be created
:return None:
"""
parts = path.parts
path_to_check = Path(parts[0])
for i in range(1, len(parts)):
if not path_to_check.exists():
path_to_check.mkdir()
path_to_check = path_to_check / parts[i]
if file:
pass
else:
if not path_to_check.exists():
path_to_check.mkdir()
def get_all_data_files(path, filetype):
"""
Recursively search the given directory for .xxx files.
:param Path path: Path to search
:param str filetype: str, ".type" of file to search for
:return list: list of file-like Path objects
"""
files = list_files_recur(path)
files[:] = [file for file in files if filetype in file.name]
return files
def get_subsubdirs(path):
"""
Get the second-level subdirectories of the given path.
If given path 'a/b', a sample return would be ['a/b/c/d', 'a/b/c/d2', 'a/b/c/etc']
:param str path:
:return list: list containing Path instances for all paths found two levels below the supplied path
"""
leveltwo_subdirs = []
immediate_subdirs = [os.scandir(subdir) for subdir in os.scandir(path) if Path(subdir).is_dir()]
for scan in immediate_subdirs:
for subdir in scan:
leveltwo_subdirs.append(Path(subdir)) if Path(subdir).is_dir() else None
return leveltwo_subdirs
| [
"os.scandir",
"pathlib.Path"
] | [((926, 940), 'pathlib.Path', 'Path', (['parts[0]'], {}), '(parts[0])\n', (930, 940), False, 'from pathlib import Path\n'), ((1959, 1977), 'os.scandir', 'os.scandir', (['subdir'], {}), '(subdir)\n', (1969, 1977), False, 'import os\n'), ((1992, 2008), 'os.scandir', 'os.scandir', (['path'], {}), '(path)\n', (2002, 2008), False, 'import os\n'), ((2012, 2024), 'pathlib.Path', 'Path', (['subdir'], {}), '(subdir)\n', (2016, 2024), False, 'from pathlib import Path\n'), ((2135, 2147), 'pathlib.Path', 'Path', (['subdir'], {}), '(subdir)\n', (2139, 2147), False, 'from pathlib import Path\n'), ((2152, 2164), 'pathlib.Path', 'Path', (['subdir'], {}), '(subdir)\n', (2156, 2164), False, 'from pathlib import Path\n')] |
# Copyright (C) 2018-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
import numpy as np
from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension_value, shape_array, set_input_shapes
from openvino.tools.mo.ops.op import Op
class ExperimentalDetectronDetectionOutput(Op):
op = 'ExperimentalDetectronDetectionOutput'
enabled = True
def __init__(self, graph, attrs):
mandatory_props = dict(
type=self.op,
op=self.op,
version='opset6',
infer=self.infer,
reverse_infer=self.reverse_infer,
type_infer=self.type_infer,
in_ports_count=4,
out_ports_count=3,
)
super().__init__(graph, mandatory_props, attrs)
def backend_attrs(self):
return [
('class_agnostic_box_regression', lambda node: str(bool(node['class_agnostic_box_regression'])).lower()),
'max_detections_per_image',
'nms_threshold',
'num_classes',
'post_nms_count',
'score_threshold',
'max_delta_log_wh',
('deltas_weights', lambda node: ','.join(map(str, node['deltas_weights'])))]
@staticmethod
def infer(node):
rois_num = node.max_detections_per_image
# boxes
node.out_port(0).data.set_shape([rois_num, 4])
# classes, scores, batch indices
# We use range(1, 1 + max(node.out_ports().keys())) instead of range(1, 3), because there are incorrectly
# generated models where ExperimentalDetectronDetectionOutput has 4 outputs.
for port_ind in range(1, 1 + max(node.out_ports().keys())):
if not node.out_port(port_ind).disconnected():
node.out_port(port_ind).data.set_shape([rois_num])
@staticmethod
def type_infer(node):
in_data_type = node.in_port(0).get_data_type()
node.out_port(0).set_data_type(in_data_type)
node.out_port(1).set_data_type(np.int32) # the second output contains class indices
node.out_port(2).set_data_type(in_data_type)
if node.is_out_port_connected(3):
node.out_port(3).set_data_type(np.int32) # the fourth output contains batch indices
@staticmethod
def reverse_infer(node):
set_input_shapes(node,
shape_array([dynamic_dimension_value, 4]),
shape_array([dynamic_dimension_value, node['num_classes'] * 4]),
shape_array([dynamic_dimension_value, node['num_classes']]),
shape_array([1, 3]))
| [
"openvino.tools.mo.front.common.partial_infer.utils.shape_array"
] | [((2352, 2393), 'openvino.tools.mo.front.common.partial_infer.utils.shape_array', 'shape_array', (['[dynamic_dimension_value, 4]'], {}), '([dynamic_dimension_value, 4])\n', (2363, 2393), False, 'from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension_value, shape_array, set_input_shapes\n'), ((2420, 2483), 'openvino.tools.mo.front.common.partial_infer.utils.shape_array', 'shape_array', (["[dynamic_dimension_value, node['num_classes'] * 4]"], {}), "([dynamic_dimension_value, node['num_classes'] * 4])\n", (2431, 2483), False, 'from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension_value, shape_array, set_input_shapes\n'), ((2510, 2569), 'openvino.tools.mo.front.common.partial_infer.utils.shape_array', 'shape_array', (["[dynamic_dimension_value, node['num_classes']]"], {}), "([dynamic_dimension_value, node['num_classes']])\n", (2521, 2569), False, 'from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension_value, shape_array, set_input_shapes\n'), ((2596, 2615), 'openvino.tools.mo.front.common.partial_infer.utils.shape_array', 'shape_array', (['[1, 3]'], {}), '([1, 3])\n', (2607, 2615), False, 'from openvino.tools.mo.front.common.partial_infer.utils import dynamic_dimension_value, shape_array, set_input_shapes\n')] |
#!/usr/bin/env python
import numpy as np, os, sys
from get_sepsis_score import load_sepsis_model, get_sepsis_score
def load_challenge_data(file):
with open(file, 'r') as f:
header = f.readline().strip()
column_names = header.split('|')
data = np.loadtxt(f, delimiter='|')
# Ignore SepsisLabel column if present.
if column_names[-1] == 'SepsisLabel':
column_names = column_names[:-1]
data = data[:, :-1]
return data
def save_challenge_predictions(file, scores, labels):
with open(file, 'w') as f:
f.write('PredictedProbability|PredictedLabel\n')
for (s, l) in zip(scores, labels):
f.write('%g|%d\n' % (s, l))
if __name__ == '__main__':
# Parse arguments.
if len(sys.argv) != 3:
raise Exception('Include the input and output directories as arguments, e.g., python driver.py input output.')
input_directory = sys.argv[1]
output_directory = sys.argv[2]
# Find files.
files = []
for f in os.listdir(input_directory):
if os.path.isfile(os.path.join(input_directory, f)) and not f.lower().startswith('.') and f.lower().endswith('psv'):
files.append(f)
if not os.path.isdir(output_directory):
os.mkdir(output_directory)
# Load model.
model = load_sepsis_model()
print(model)
# Iterate over files.
for f in files:
# Load data.
input_file = os.path.join(input_directory, f)
data = load_challenge_data(input_file)
# print(type(data))
# Make predictions.
num_rows = len(data)
scores = np.zeros(num_rows)
labels = np.zeros(num_rows)
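        # score the record incrementally: at time step t only the first t+1 rows are visible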
for t in range(num_rows):
current_data = data[:t+1]
current_score, current_label = get_sepsis_score(current_data, model)
scores[t] = current_score
labels[t] = current_label
# Save results.
output_file = os.path.join(output_directory, f)
save_challenge_predictions(output_file, scores, labels)
| [
"get_sepsis_score.load_sepsis_model",
"os.listdir",
"os.path.join",
"numpy.zeros",
"os.path.isdir",
"os.mkdir",
"get_sepsis_score.get_sepsis_score",
"numpy.loadtxt"
] | [((1015, 1042), 'os.listdir', 'os.listdir', (['input_directory'], {}), '(input_directory)\n', (1025, 1042), False, 'import numpy as np, os, sys\n'), ((1308, 1327), 'get_sepsis_score.load_sepsis_model', 'load_sepsis_model', ([], {}), '()\n', (1325, 1327), False, 'from get_sepsis_score import load_sepsis_model, get_sepsis_score\n'), ((273, 301), 'numpy.loadtxt', 'np.loadtxt', (['f'], {'delimiter': '"""|"""'}), "(f, delimiter='|')\n", (283, 301), True, 'import numpy as np, os, sys\n'), ((1209, 1240), 'os.path.isdir', 'os.path.isdir', (['output_directory'], {}), '(output_directory)\n', (1222, 1240), False, 'import numpy as np, os, sys\n'), ((1250, 1276), 'os.mkdir', 'os.mkdir', (['output_directory'], {}), '(output_directory)\n', (1258, 1276), False, 'import numpy as np, os, sys\n'), ((1434, 1466), 'os.path.join', 'os.path.join', (['input_directory', 'f'], {}), '(input_directory, f)\n', (1446, 1466), False, 'import numpy as np, os, sys\n'), ((1617, 1635), 'numpy.zeros', 'np.zeros', (['num_rows'], {}), '(num_rows)\n', (1625, 1635), True, 'import numpy as np, os, sys\n'), ((1653, 1671), 'numpy.zeros', 'np.zeros', (['num_rows'], {}), '(num_rows)\n', (1661, 1671), True, 'import numpy as np, os, sys\n'), ((1948, 1981), 'os.path.join', 'os.path.join', (['output_directory', 'f'], {}), '(output_directory, f)\n', (1960, 1981), False, 'import numpy as np, os, sys\n'), ((1787, 1824), 'get_sepsis_score.get_sepsis_score', 'get_sepsis_score', (['current_data', 'model'], {}), '(current_data, model)\n', (1803, 1824), False, 'from get_sepsis_score import load_sepsis_model, get_sepsis_score\n'), ((1070, 1102), 'os.path.join', 'os.path.join', (['input_directory', 'f'], {}), '(input_directory, f)\n', (1082, 1102), False, 'import numpy as np, os, sys\n')] |
import os
import df2img
import disnake
import pandas as pd
from PIL import Image
import discordbot.config_discordbot as cfg
from discordbot.config_discordbot import logger
from discordbot.helpers import autocrop_image
from gamestonk_terminal.economy import wsj_model
async def currencies_command(ctx):
"""Currencies overview [Wall St. Journal]"""
try:
# Debug user input
if cfg.DEBUG:
logger.debug("econ-currencies")
# Retrieve data
df = wsj_model.global_currencies()
df = pd.DataFrame.from_dict(df)
# Check for argument
if df.empty:
raise Exception("No available data found")
df["Last"] = pd.to_numeric(df["Last"].astype(float))
df["Chng"] = pd.to_numeric(df["Chng"].astype(float))
df["%Chng"] = pd.to_numeric(df["%Chng"].astype(float))
formats = {"Last": "{:.2f}", "Chng": "{:.2f}", "%Chng": "{:.2f}%"}
for col, value in formats.items():
df[col] = df[col].map(lambda x: value.format(x)) # pylint: disable=W0640
df = df.fillna("")
df.set_index(" ", inplace=True)
# Debug user output
if cfg.DEBUG:
logger.debug(df.to_string())
df = df[
[
"Last",
"Chng",
"%Chng",
]
]
dindex = len(df.index)
fig = df2img.plot_dataframe(
df,
fig_size=(800, (40 + (40 * dindex))),
col_width=[8, 3, 3],
tbl_cells=dict(
align="left",
height=35,
),
template="plotly_dark",
font=dict(
family="Consolas",
size=20,
),
paper_bgcolor="rgba(0, 0, 0, 0)",
)
imagefile = "econ-currencies.png"
df2img.save_dataframe(fig=fig, filename=imagefile)
image = Image.open(imagefile)
image = autocrop_image(image, 0)
image.save(imagefile, "PNG", quality=100)
image = disnake.File(imagefile)
title = "Economy: [WSJ] Currencies"
embed = disnake.Embed(title=title, colour=cfg.COLOR)
embed.set_image(url=f"attachment://{imagefile}")
embed.set_author(
name=cfg.AUTHOR_NAME,
icon_url=cfg.AUTHOR_ICON_URL,
)
os.remove(imagefile)
await ctx.send(embed=embed, file=image)
except Exception as e:
embed = disnake.Embed(
title="ERROR Economy: [WSJ] Currencies",
colour=cfg.COLOR,
description=e,
)
embed.set_author(
name=cfg.AUTHOR_NAME,
icon_url=cfg.AUTHOR_ICON_URL,
)
await ctx.send(embed=embed, delete_after=30.0)
| [
"disnake.Embed",
"PIL.Image.open",
"discordbot.helpers.autocrop_image",
"gamestonk_terminal.economy.wsj_model.global_currencies",
"df2img.save_dataframe",
"discordbot.config_discordbot.logger.debug",
"pandas.DataFrame.from_dict",
"disnake.File",
"os.remove"
] | [((496, 525), 'gamestonk_terminal.economy.wsj_model.global_currencies', 'wsj_model.global_currencies', ([], {}), '()\n', (523, 525), False, 'from gamestonk_terminal.economy import wsj_model\n'), ((539, 565), 'pandas.DataFrame.from_dict', 'pd.DataFrame.from_dict', (['df'], {}), '(df)\n', (561, 565), True, 'import pandas as pd\n'), ((1860, 1910), 'df2img.save_dataframe', 'df2img.save_dataframe', ([], {'fig': 'fig', 'filename': 'imagefile'}), '(fig=fig, filename=imagefile)\n', (1881, 1910), False, 'import df2img\n'), ((1927, 1948), 'PIL.Image.open', 'Image.open', (['imagefile'], {}), '(imagefile)\n', (1937, 1948), False, 'from PIL import Image\n'), ((1965, 1989), 'discordbot.helpers.autocrop_image', 'autocrop_image', (['image', '(0)'], {}), '(image, 0)\n', (1979, 1989), False, 'from discordbot.helpers import autocrop_image\n'), ((2057, 2080), 'disnake.File', 'disnake.File', (['imagefile'], {}), '(imagefile)\n', (2069, 2080), False, 'import disnake\n'), ((2142, 2186), 'disnake.Embed', 'disnake.Embed', ([], {'title': 'title', 'colour': 'cfg.COLOR'}), '(title=title, colour=cfg.COLOR)\n', (2155, 2186), False, 'import disnake\n'), ((2364, 2384), 'os.remove', 'os.remove', (['imagefile'], {}), '(imagefile)\n', (2373, 2384), False, 'import os\n'), ((426, 457), 'discordbot.config_discordbot.logger.debug', 'logger.debug', (['"""econ-currencies"""'], {}), "('econ-currencies')\n", (438, 457), False, 'from discordbot.config_discordbot import logger\n'), ((2478, 2569), 'disnake.Embed', 'disnake.Embed', ([], {'title': '"""ERROR Economy: [WSJ] Currencies"""', 'colour': 'cfg.COLOR', 'description': 'e'}), "(title='ERROR Economy: [WSJ] Currencies', colour=cfg.COLOR,\n description=e)\n", (2491, 2569), False, 'import disnake\n')] |
# @Title: 最长字符串链 (Longest String Chain)
# @Author: KivenC
# @Date: 2019-05-26 20:35:25
# @Runtime: 144 ms
# @Memory: 13.3 MB
from typing import List
class Solution:
# # way 1
# def longestStrChain(self, words: List[str]) -> int:
    #     # dynamic programming
    #     # dp[i] = max(dp[i], dp[j] + 1) (0 <= j < i and words[j] is a predecessor of words[i])
# length = len(words)
# if length < 2:
# return length
# dp = [1 for _ in range(length)]
    #     words.sort(key=len) # sort the words by increasing length
# for i in range(1, length):
    #         if i >= 1 and words[i] == words[i - 1]: # skip duplicates
# continue
# for j in range(i - 1, -1, -1):
    #             if len(words[i]) - len(words[j]) > 1: # prune
# break
# if len(words[i]) == len(words[j]):
# continue
# if self.isPre(words[j], words[i]):
# dp[i] = max(dp[i], dp[j] + 1)
# return max(dp)
# def isPre(self, word1: str, word2: str) -> bool:
    #     # check whether word1 is a predecessor of word2
    #     # two pointers
# # i, j, length1, length2 = 0, 0, len(word1), len(word2)
# # while i < length1 and j < length2:
# # if word1[i] == word2[j]:
# # i += 1
# # j += 1
# # if length2 - length1 == 1 and i == length1:
# # return True
# # return False
    #     # remove one character from word2 at each position and compare the result with word1
# if len(word1) + 1 != len(word2):
# return False
# for i in range(len(word2)):
# if word2[: i] + word2[i + 1:] == word1:
# return True
# return False
# way 2
def longestStrChain(self, words: List[str]) -> int:
import collections
length = len(words)
if length < 2:
return length
        pool = collections.defaultdict(list) # group the words by their length
dp = {}
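        # dp[word] = length of the longest chain that ends with word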
for word in words:
pool[len(word)].append(word)
for key in sorted(pool.keys()):
if key - 1 not in pool:
continue
for word in pool[key]:
for j in range(key):
tmp = word[: j] + word[j + 1:]
if tmp in pool[key - 1]:
dp[word] = max(dp.get(word, 1), dp.get(tmp, 1) + 1)
return max(dp.values()) if dp else 1
| [
"collections.defaultdict"
] | [((1844, 1873), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (1867, 1873), False, 'import collections\n')] |
import connexion
import six
from openapi_server import query_manager
from openapi_server.utils.vars import DATATRANSFORMATION_TYPE_NAME, DATATRANSFORMATION_TYPE_URI
from openapi_server.models.data_transformation import DataTransformation # noqa: E501
from openapi_server import util
def custom_datasetspecifications_id_datatransformations_get(id, custom_query_name=None, username=None): # noqa: E501
"""Gets a list of data transformations related a dataset
Gets a list of data transformations related a dataset # noqa: E501
:param id: The ID of the dataspecification
:type id: str
:param custom_query_name: Name of the custom query
:type custom_query_name: str
:param username: Username to query
:type username: str
:rtype: List[DataTransformation]
"""
return query_manager.get_resource(id=id,
custom_query_name=custom_query_name,
username=username,
rdf_type_uri=DATATRANSFORMATION_TYPE_URI,
rdf_type_name=DATATRANSFORMATION_TYPE_NAME,
kls=DataTransformation)
def datatransformations_get(username=None, label=None, page=None, per_page=None): # noqa: E501
"""List all instances of DataTransformation
Gets a list of all instances of DataTransformation (more information in https://w3id.org/okn/o/sd#DataTransformation) # noqa: E501
:param username: Name of the user graph to query
:type username: str
:param label: Filter by label
:type label: str
:param page: Page number
:type page: int
:param per_page: Items per page
:type per_page: int
:rtype: List[DataTransformation]
"""
return query_manager.get_resource(
username=username,
label=label,
page=page,
per_page=per_page,
rdf_type_uri=DATATRANSFORMATION_TYPE_URI,
rdf_type_name=DATATRANSFORMATION_TYPE_NAME,
kls=DataTransformation)
def datatransformations_id_delete(id, user=None): # noqa: E501
"""Delete an existing DataTransformation
Delete an existing DataTransformation (more information in https://w3id.org/okn/o/sd#DataTransformation) # noqa: E501
:param id: The ID of the DataTransformation to be retrieved
:type id: str
:param user: Username
:type user: str
:rtype: None
"""
return query_manager.delete_resource(id=id,
user=user,
rdf_type_uri=DATATRANSFORMATION_TYPE_URI,
rdf_type_name=DATATRANSFORMATION_TYPE_NAME,
kls=DataTransformation)
def datatransformations_id_get(id, username=None): # noqa: E501
"""Get a single DataTransformation by its id
Gets the details of a given DataTransformation (more information in https://w3id.org/okn/o/sd#DataTransformation) # noqa: E501
:param id: The ID of the DataTransformation to be retrieved
:type id: str
:param username: Name of the user graph to query
:type username: str
:rtype: DataTransformation
"""
return query_manager.get_resource(id=id,
username=username,
rdf_type_uri=DATATRANSFORMATION_TYPE_URI,
rdf_type_name=DATATRANSFORMATION_TYPE_NAME,
kls=DataTransformation)
def datatransformations_id_put(id, user=None, data_transformation=None): # noqa: E501
"""Update an existing DataTransformation
Updates an existing DataTransformation (more information in https://w3id.org/okn/o/sd#DataTransformation) # noqa: E501
:param id: The ID of the DataTransformation to be retrieved
:type id: str
:param user: Username
:type user: str
    :param data_transformation: An old DataTransformation to be updated
:type data_transformation: dict | bytes
:rtype: DataTransformation
"""
if connexion.request.is_json:
data_transformation = DataTransformation.from_dict(connexion.request.get_json()) # noqa: E501
return query_manager.put_resource(id=id,
user=user,
body=data_transformation,
rdf_type_uri=DATATRANSFORMATION_TYPE_URI,
rdf_type_name=DATATRANSFORMATION_TYPE_NAME,
kls=DataTransformation)
def datatransformations_post(user=None, data_transformation=None): # noqa: E501
"""Create one DataTransformation
Create a new instance of DataTransformation (more information in https://w3id.org/okn/o/sd#DataTransformation) # noqa: E501
:param user: Username
:type user: str
    :param data_transformation: Information about the DataTransformation to be created
:type data_transformation: dict | bytes
:rtype: DataTransformation
"""
if connexion.request.is_json:
data_transformation = DataTransformation.from_dict(connexion.request.get_json()) # noqa: E501
return query_manager.post_resource(
user=user,
body=data_transformation,
rdf_type_uri=DATATRANSFORMATION_TYPE_URI,
rdf_type_name=DATATRANSFORMATION_TYPE_NAME,
kls=DataTransformation)
| [
"openapi_server.query_manager.delete_resource",
"openapi_server.query_manager.post_resource",
"openapi_server.query_manager.get_resource",
"connexion.request.get_json",
"openapi_server.query_manager.put_resource"
] | [((813, 1020), 'openapi_server.query_manager.get_resource', 'query_manager.get_resource', ([], {'id': 'id', 'custom_query_name': 'custom_query_name', 'username': 'username', 'rdf_type_uri': 'DATATRANSFORMATION_TYPE_URI', 'rdf_type_name': 'DATATRANSFORMATION_TYPE_NAME', 'kls': 'DataTransformation'}), '(id=id, custom_query_name=custom_query_name,\n username=username, rdf_type_uri=DATATRANSFORMATION_TYPE_URI,\n rdf_type_name=DATATRANSFORMATION_TYPE_NAME, kls=DataTransformation)\n', (839, 1020), False, 'from openapi_server import query_manager\n'), ((1636, 1842), 'openapi_server.query_manager.get_resource', 'query_manager.get_resource', ([], {'username': 'username', 'label': 'label', 'page': 'page', 'per_page': 'per_page', 'rdf_type_uri': 'DATATRANSFORMATION_TYPE_URI', 'rdf_type_name': 'DATATRANSFORMATION_TYPE_NAME', 'kls': 'DataTransformation'}), '(username=username, label=label, page=page,\n per_page=per_page, rdf_type_uri=DATATRANSFORMATION_TYPE_URI,\n rdf_type_name=DATATRANSFORMATION_TYPE_NAME, kls=DataTransformation)\n', (1662, 1842), False, 'from openapi_server import query_manager\n'), ((2294, 2460), 'openapi_server.query_manager.delete_resource', 'query_manager.delete_resource', ([], {'id': 'id', 'user': 'user', 'rdf_type_uri': 'DATATRANSFORMATION_TYPE_URI', 'rdf_type_name': 'DATATRANSFORMATION_TYPE_NAME', 'kls': 'DataTransformation'}), '(id=id, user=user, rdf_type_uri=\n DATATRANSFORMATION_TYPE_URI, rdf_type_name=DATATRANSFORMATION_TYPE_NAME,\n kls=DataTransformation)\n', (2323, 2460), False, 'from openapi_server import query_manager\n'), ((2945, 3116), 'openapi_server.query_manager.get_resource', 'query_manager.get_resource', ([], {'id': 'id', 'username': 'username', 'rdf_type_uri': 'DATATRANSFORMATION_TYPE_URI', 'rdf_type_name': 'DATATRANSFORMATION_TYPE_NAME', 'kls': 'DataTransformation'}), '(id=id, username=username, rdf_type_uri=\n DATATRANSFORMATION_TYPE_URI, rdf_type_name=DATATRANSFORMATION_TYPE_NAME,\n kls=DataTransformation)\n', (2971, 3116), False, 'from openapi_server import query_manager\n'), ((3832, 4021), 'openapi_server.query_manager.put_resource', 'query_manager.put_resource', ([], {'id': 'id', 'user': 'user', 'body': 'data_transformation', 'rdf_type_uri': 'DATATRANSFORMATION_TYPE_URI', 'rdf_type_name': 'DATATRANSFORMATION_TYPE_NAME', 'kls': 'DataTransformation'}), '(id=id, user=user, body=data_transformation,\n rdf_type_uri=DATATRANSFORMATION_TYPE_URI, rdf_type_name=\n DATATRANSFORMATION_TYPE_NAME, kls=DataTransformation)\n', (3858, 4021), False, 'from openapi_server import query_manager\n'), ((4669, 4852), 'openapi_server.query_manager.post_resource', 'query_manager.post_resource', ([], {'user': 'user', 'body': 'data_transformation', 'rdf_type_uri': 'DATATRANSFORMATION_TYPE_URI', 'rdf_type_name': 'DATATRANSFORMATION_TYPE_NAME', 'kls': 'DataTransformation'}), '(user=user, body=data_transformation,\n rdf_type_uri=DATATRANSFORMATION_TYPE_URI, rdf_type_name=\n DATATRANSFORMATION_TYPE_NAME, kls=DataTransformation)\n', (4696, 4852), False, 'from openapi_server import query_manager\n'), ((3776, 3804), 'connexion.request.get_json', 'connexion.request.get_json', ([], {}), '()\n', (3802, 3804), False, 'import connexion\n'), ((4613, 4641), 'connexion.request.get_json', 'connexion.request.get_json', ([], {}), '()\n', (4639, 4641), False, 'import connexion\n')] |
import numpy as np
import scipy
import warnings
try:
import matplotlib.pyplot as pl
import matplotlib
except ImportError:
warnings.warn("matplotlib could not be loaded!")
pass
from . import labels
from . import colors
def truncate_text(text, max_len):
if len(text) > max_len:
return text[:int(max_len/2)-2] + "..." + text[-int(max_len/2)+1:]
else:
return text
def monitoring_plot(ind, shap_values, features, feature_names=None):
""" Create a SHAP monitoring plot.
(Note this function is preliminary and subject to change!!)
A SHAP monitoring plot is meant to display the behavior of a model
over time. Often the shap_values given to this plot explain the loss
of a model, so changes in a feature's impact on the model's loss over
time can help in monitoring the model's performance.
Parameters
----------
ind : int
Index of the feature to plot.
shap_values : numpy.array
Matrix of SHAP values (# samples x # features)
features : numpy.array or pandas.DataFrame
Matrix of feature values (# samples x # features)
feature_names : list
Names of the features (length # features)
"""
if str(type(features)).endswith("'pandas.core.frame.DataFrame'>"):
if feature_names is None:
feature_names = features.columns
features = features.values
pl.figure(figsize=(10,3))
ys = shap_values[:,ind]
xs = np.arange(len(ys))#np.linspace(0, 12*2, len(ys))
pvals = []
inc = 50
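    # every `inc` samples, run a t-test on the SHAP values before vs. after the candidate
    # split point to look for a shift in the feature's impact over time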
for i in range(inc, len(ys)-inc, inc):
#stat, pval = scipy.stats.mannwhitneyu(v[:i], v[i:], alternative="two-sided")
stat, pval = scipy.stats.ttest_ind(ys[:i], ys[i:])
pvals.append(pval)
min_pval = np.min(pvals)
min_pval_ind = np.argmin(pvals)*inc + inc
if min_pval < 0.05 / shap_values.shape[1]:
pl.axvline(min_pval_ind, linestyle="dashed", color="#666666", alpha=0.2)
pl.scatter(xs, ys, s=10, c=features[:,ind], cmap=colors.red_blue)
pl.xlabel("Sample index")
pl.ylabel(truncate_text(feature_names[ind], 30) + "\nSHAP value", size=13)
pl.gca().xaxis.set_ticks_position('bottom')
pl.gca().yaxis.set_ticks_position('left')
pl.gca().spines['right'].set_visible(False)
pl.gca().spines['top'].set_visible(False)
cb = pl.colorbar()
cb.outline.set_visible(False)
bbox = cb.ax.get_window_extent().transformed(pl.gcf().dpi_scale_trans.inverted())
cb.ax.set_aspect((bbox.height - 0.7) * 20)
cb.set_label(truncate_text(feature_names[ind], 30), size=13)
pl.show() | [
"matplotlib.pyplot.gca",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.colorbar",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.figure",
"scipy.stats.ttest_ind",
"matplotlib.pyplot.scatter",
"numpy.min",
"warnings.warn",
"numpy.argmin",
"matplotlib.pyplot.axvline",
"matplotlib.pyplot.show"
] | [((1417, 1443), 'matplotlib.pyplot.figure', 'pl.figure', ([], {'figsize': '(10, 3)'}), '(figsize=(10, 3))\n', (1426, 1443), True, 'import matplotlib.pyplot as pl\n'), ((1792, 1805), 'numpy.min', 'np.min', (['pvals'], {}), '(pvals)\n', (1798, 1805), True, 'import numpy as np\n'), ((1998, 2064), 'matplotlib.pyplot.scatter', 'pl.scatter', (['xs', 'ys'], {'s': '(10)', 'c': 'features[:, ind]', 'cmap': 'colors.red_blue'}), '(xs, ys, s=10, c=features[:, ind], cmap=colors.red_blue)\n', (2008, 2064), True, 'import matplotlib.pyplot as pl\n'), ((2073, 2098), 'matplotlib.pyplot.xlabel', 'pl.xlabel', (['"""Sample index"""'], {}), "('Sample index')\n", (2082, 2098), True, 'import matplotlib.pyplot as pl\n'), ((2375, 2388), 'matplotlib.pyplot.colorbar', 'pl.colorbar', ([], {}), '()\n', (2386, 2388), True, 'import matplotlib.pyplot as pl\n'), ((2625, 2634), 'matplotlib.pyplot.show', 'pl.show', ([], {}), '()\n', (2632, 2634), True, 'import matplotlib.pyplot as pl\n'), ((134, 182), 'warnings.warn', 'warnings.warn', (['"""matplotlib could not be loaded!"""'], {}), "('matplotlib could not be loaded!')\n", (147, 182), False, 'import warnings\n'), ((1712, 1749), 'scipy.stats.ttest_ind', 'scipy.stats.ttest_ind', (['ys[:i]', 'ys[i:]'], {}), '(ys[:i], ys[i:])\n', (1733, 1749), False, 'import scipy\n'), ((1912, 1984), 'matplotlib.pyplot.axvline', 'pl.axvline', (['min_pval_ind'], {'linestyle': '"""dashed"""', 'color': '"""#666666"""', 'alpha': '(0.2)'}), "(min_pval_ind, linestyle='dashed', color='#666666', alpha=0.2)\n", (1922, 1984), True, 'import matplotlib.pyplot as pl\n'), ((1825, 1841), 'numpy.argmin', 'np.argmin', (['pvals'], {}), '(pvals)\n', (1834, 1841), True, 'import numpy as np\n'), ((2182, 2190), 'matplotlib.pyplot.gca', 'pl.gca', ([], {}), '()\n', (2188, 2190), True, 'import matplotlib.pyplot as pl\n'), ((2230, 2238), 'matplotlib.pyplot.gca', 'pl.gca', ([], {}), '()\n', (2236, 2238), True, 'import matplotlib.pyplot as pl\n'), ((2276, 2284), 'matplotlib.pyplot.gca', 'pl.gca', ([], {}), '()\n', (2282, 2284), True, 'import matplotlib.pyplot as pl\n'), ((2324, 2332), 'matplotlib.pyplot.gca', 'pl.gca', ([], {}), '()\n', (2330, 2332), True, 'import matplotlib.pyplot as pl\n'), ((2472, 2480), 'matplotlib.pyplot.gcf', 'pl.gcf', ([], {}), '()\n', (2478, 2480), True, 'import matplotlib.pyplot as pl\n')] |
import re
import os
import cmd
import sys
import common
from getpass import getpass
from kp import KeePassError, get_password
from configmanager import ConfigManager, ConfigManagerError
common.init()
class ParseArgsException(Exception):
def __init__(self, msg):
self.msg = msg
class ModuleCore(cmd.Cmd):
def __init__(self, module = ''):
cmd.Cmd.__init__(self)
self.master = None
if module == '#':
self.prompt_sign = '#>'
elif module != '':
self.prompt_sign = '[' + module + ']>'
else:
self.prompt_sign = '->'
#defaults
self.ruler = '-'
#Completions
self.directories = []
self.file_server_database = []
self.file_server = []
self.do_cd('.')
configs = ConfigManager().get_config_list()
for conf in configs:
self.file_server_database.append(conf)
self.file_server.append(conf)
for srv in ConfigManager('config/' + conf + '.yaml').get_all():
self.file_server_database.append(conf + '.' + srv)
self.file_server.append(conf + '.' + srv)
for db in ConfigManager('config/' + conf + '.yaml').get(srv)['databases']:
self.file_server_database.append(conf + '.' + srv + '.' + db)
def precmd(self, line):
if not sys.stdin.isatty():
print(line)
return line
def postcmd(self, stop, line):
if not sys.stdin.isatty():
print("")
return stop
def parse_args(self, string="", n=0, m=0):
list = re.findall('"+.*"+|[a-zA-Z0-9!@#$%^&*()_+-,./<>?]+', string)
arg_counter = len(list);
if (arg_counter >= n and arg_counter <= m) or (arg_counter == n and m == 0) or n == 0:
r_list = []
for l in list:
r_list.append(l.replace('"', ''))
return (r_list, len(list))
else:
raise ParseArgsException("Incorrect number of arguments")
	# runs the given function (callback) on all databases
	def exec_on_config(self, callback, args, values, view = ''): # link - file.server.base
		if values == '': # run on every configuration file
			files = ConfigManager().get_config_list() # get the list of configuration files
			# show what the callback is going to be executed on
print("Exec on:")
for file in files:
print('+-',file)
ans = input("Are you sure? [NO/yes/info]: ")
if ans == "yes": #wykonaj callback
for file in files:
if view == 'tree': print('+-', file)
try:
servers = ConfigManager("config/" + file + ".yaml").get_all()
for srv in servers:
if view == 'tree': print("| +-", srv)
databases = servers[srv]["databases"]
for db in databases:
if view == 'tree': print("| | +-", db)
if view == 'list': print('[', file, '->', srv, '->', db, ']')
callback(file, srv, db, *args)
except ConfigManagerError as e:
print(e)
elif ans == "info": #podaj tylko informację na czym callback zostałby wykonany
for file in files:
print('+-', file)
servers = ConfigManager("config/" + file + ".yaml").get_all()
for srv in servers:
print('| +-', srv)
databases = servers[srv]["databases"]
for db in databases:
print('| | +-', db)
			else: # if we decide not to execute anything
				print("aborted")
		else: # a specific target was given
			val = values.split('.') # split into file_name.server.database
			params = len(val)
			if params == 1: # only a file name was given - run on every server and database defined in it
file = val[0]
try:
servers = ConfigManager("config/" + file + ".yaml").get_all()
for srv in servers:
if view == 'tree': print("+-", srv)
databases = servers[srv]["databases"]
for db in databases:
if view == 'tree': print("| +-", db)
if view == 'list': print('[', srv, '->', db, ']')
callback(file, srv, db, *args)
except ConfigManagerError as e:
print(e)
except KeyError as e:
print(e, "is not exist")
			elif params == 2: # a file name and a server were given - run on every database on that server
file = val[0]
try:
servers = ConfigManager("config/" + file + ".yaml").get_all()
srv = val[1]
databases = servers[srv]["databases"]
for db in databases:
if view == 'tree': print("+-", db)
if view == 'list': print('[', db, ']')
callback(file, srv, db, *args)
except ConfigManagerError as e:
print(e)
except KeyError as e:
print(e, "is not exist")
			elif params == 3: # file, server and database were given - run exactly on that database
try:
callback(val[0], val[1], val[2], *args)
except ConfigManagerError as e:
print(e)
except KeyError as e:
print(e, "is not exist")
	# returns a shortened path to the current directory - helper function
def get_shortpath(self):
path = common.get_cdir()
separator = ''
if '\\' in path:
separator = '\\'
else:
separator = '/'
start = path.find(separator)
end = path.rfind(separator, 0, len(path)-1)
if start < end:
return (path[0:start+1] + '...' + path[end:])
else:
return (path)
	# cmd autocompletion for the cd command
def complete_cd(self, text, line, begidx, endidx):
if not text:
completions = self.directories[:]
else:
completions = [f for f in self.directories if f.startswith(text)]
return completions
	# cd command - allows moving between directories
def do_cd(self, args):
"Move to directory"
if args == '':
print(common.get_cdir())
else:
try:
common.chdir(args)
self.prompt = self.get_shortpath() + ' ' + self.prompt_sign
self.directories = []
for name in os.listdir(common.get_cdir()):
if os.path.isdir(os.path.join(common.get_cdir(), name)):
self.directories.append(name)
except FileNotFoundError as e:
print(e)
	# lists all files in the current location
def do_ls(self, args):
"List directory"
for name in os.listdir(common.get_cdir()):
print(name)
	# prints the full path of the current directory
def do_pwd(self, args):
"Print path"
print(common.get_cdir())
	# lets the user decide whether warnings should be displayed
def do_warn(self, args):
"""warn <on/off>"""
try:
(values, values_num) = self.parse_args(args, 0, 1)
if values_num == 1:
if values[0] == 'on':
print('Warnings on')
self.warn = True
elif values[0] == 'off':
print('Warnings off')
self.warn = False
else:
print('Incorrect argument.')
else:
if self.warn == True:
print('Status: on')
else:
print('Status: off')
except ParseArgsException as e:
print(e)
	# sets the master password for KeePass
def do_setMaster(self,args):
"Set master password"
		if sys.stdin.isatty(): # when running as an interactive shell
p = getpass('Enter Master Password: ')
else:
p = sys.stdin.readline().rstrip()
self.master = p
def do_exit(self, *args):
return True
def do_EOF(self, line):
return True
def emptyline(self):
return False
	# We must catch everything possible (missing file, wrong master password etc.) and raise a single exception
def get_password(self, alias):
keepass_path = common.keepass_path
if self.master == None:
raise KeePassError("Master Password Not Set")
try:
return get_password(keepass_path, self.master, alias)
except KeePassError as e:
raise e
def connect_command_builder(self,connection, perm):
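		# Builds the "address_user_password_sshport_remoteport_perm" string; the KeePass
		# alias is tried first, then the plain "passwd" field from the connection config.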
try:
command = connection["adress"] + "_" + connection["user"]+ "_" + \
self.get_password(connection["keepass"]) + "_" + str(connection["sshport"]) + "_" + str(connection["remoteport"]) + "_" + perm
except (KeyError, KeePassError) as e1:
try:
command = connection["adress"] + "_" + connection["user"]+ "_" + \
connection["passwd"] + "_" + str(connection["sshport"]) + "_" + str(connection["remoteport"]) + "_" + perm
return command
except KeyError as e2:
if isinstance(e1,KeePassError):
raise KeePassError("Unable to use Keepass(" + e1.value + ") or Password")
else:
raise KeePassError("Invalid connection in yaml file")
raise KeePassError(e1)
return command | [
"kp.get_password",
"common.chdir",
"kp.KeePassError",
"getpass.getpass",
"configmanager.ConfigManager",
"sys.stdin.readline",
"common.init",
"sys.stdin.isatty",
"re.findall",
"common.get_cdir",
"cmd.Cmd.__init__"
] | [((189, 202), 'common.init', 'common.init', ([], {}), '()\n', (200, 202), False, 'import common\n'), ((349, 371), 'cmd.Cmd.__init__', 'cmd.Cmd.__init__', (['self'], {}), '(self)\n', (365, 371), False, 'import cmd\n'), ((1374, 1434), 're.findall', 're.findall', (['""""+.*"+|[a-zA-Z0-9!@#$%^&*()_+-,./<>?]+"""', 'string'], {}), '(\'"+.*"+|[a-zA-Z0-9!@#$%^&*()_+-,./<>?]+\', string)\n', (1384, 1434), False, 'import re\n'), ((4751, 4768), 'common.get_cdir', 'common.get_cdir', ([], {}), '()\n', (4766, 4768), False, 'import common\n'), ((6625, 6643), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (6641, 6643), False, 'import sys\n'), ((1181, 1199), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (1197, 1199), False, 'import sys\n'), ((1273, 1291), 'sys.stdin.isatty', 'sys.stdin.isatty', ([], {}), '()\n', (1289, 1291), False, 'import sys\n'), ((5854, 5871), 'common.get_cdir', 'common.get_cdir', ([], {}), '()\n', (5869, 5871), False, 'import common\n'), ((5982, 5999), 'common.get_cdir', 'common.get_cdir', ([], {}), '()\n', (5997, 5999), False, 'import common\n'), ((6672, 6706), 'getpass.getpass', 'getpass', (['"""Enter Master Password: """'], {}), "('Enter Master Password: ')\n", (6679, 6706), False, 'from getpass import getpass\n'), ((7087, 7126), 'kp.KeePassError', 'KeePassError', (['"""Master Password Not Set"""'], {}), "('Master Password Not Set')\n", (7099, 7126), False, 'from kp import KeePassError, get_password\n'), ((7144, 7190), 'kp.get_password', 'get_password', (['keepass_path', 'self.master', 'alias'], {}), '(keepass_path, self.master, alias)\n', (7156, 7190), False, 'from kp import KeePassError, get_password\n'), ((700, 715), 'configmanager.ConfigManager', 'ConfigManager', ([], {}), '()\n', (713, 715), False, 'from configmanager import ConfigManager, ConfigManagerError\n'), ((5401, 5418), 'common.get_cdir', 'common.get_cdir', ([], {}), '()\n', (5416, 5418), False, 'import common\n'), ((5440, 5458), 'common.chdir', 'common.chdir', (['args'], {}), '(args)\n', (5452, 5458), False, 'import common\n'), ((7965, 7981), 'kp.KeePassError', 'KeePassError', (['e1'], {}), '(e1)\n', (7977, 7981), False, 'from kp import KeePassError, get_password\n'), ((846, 887), 'configmanager.ConfigManager', 'ConfigManager', (["('config/' + conf + '.yaml')"], {}), "('config/' + conf + '.yaml')\n", (859, 887), False, 'from configmanager import ConfigManager, ConfigManagerError\n'), ((1929, 1944), 'configmanager.ConfigManager', 'ConfigManager', ([], {}), '()\n', (1942, 1944), False, 'from configmanager import ConfigManager, ConfigManagerError\n'), ((5577, 5594), 'common.get_cdir', 'common.get_cdir', ([], {}), '()\n', (5592, 5594), False, 'import common\n'), ((6722, 6742), 'sys.stdin.readline', 'sys.stdin.readline', ([], {}), '()\n', (6740, 6742), False, 'import sys\n'), ((1014, 1055), 'configmanager.ConfigManager', 'ConfigManager', (["('config/' + conf + '.yaml')"], {}), "('config/' + conf + '.yaml')\n", (1027, 1055), False, 'from configmanager import ConfigManager, ConfigManagerError\n'), ((3433, 3474), 'configmanager.ConfigManager', 'ConfigManager', (["('config/' + file + '.yaml')"], {}), "('config/' + file + '.yaml')\n", (3446, 3474), False, 'from configmanager import ConfigManager, ConfigManagerError\n'), ((5632, 5649), 'common.get_cdir', 'common.get_cdir', ([], {}), '()\n', (5647, 5649), False, 'import common\n'), ((7819, 7886), 'kp.KeePassError', 'KeePassError', (["('Unable to use Keepass(' + e1.value + ') or Password')"], {}), "('Unable to use Keepass(' + e1.value + ') or 
Password')\n", (7831, 7886), False, 'from kp import KeePassError, get_password\n'), ((7908, 7955), 'kp.KeePassError', 'KeePassError', (['"""Invalid connection in yaml file"""'], {}), "('Invalid connection in yaml file')\n", (7920, 7955), False, 'from kp import KeePassError, get_password\n'), ((2288, 2329), 'configmanager.ConfigManager', 'ConfigManager', (["('config/' + file + '.yaml')"], {}), "('config/' + file + '.yaml')\n", (2301, 2329), False, 'from configmanager import ConfigManager, ConfigManagerError\n'), ((2839, 2880), 'configmanager.ConfigManager', 'ConfigManager', (["('config/' + file + '.yaml')"], {}), "('config/' + file + '.yaml')\n", (2852, 2880), False, 'from configmanager import ConfigManager, ConfigManagerError\n'), ((4014, 4055), 'configmanager.ConfigManager', 'ConfigManager', (["('config/' + file + '.yaml')"], {}), "('config/' + file + '.yaml')\n", (4027, 4055), False, 'from configmanager import ConfigManager, ConfigManagerError\n')] |
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 13 14:57:32 2020
@author: Nicolai
"""
import sys
import os
importpath = os.path.dirname(os.path.realpath(__file__)) + "/../"
sys.path.append(importpath)
from FemPdeBase import FemPdeBase
import numpy as np
# import from ngsolve
import ngsolve as ngs
from netgen.geom2d import unit_square
import time
import psutil
import gc
class FemPde1(FemPdeBase):
"""
**Implementation of PDE1 of the testbed:**
.. math::
- \Delta u(\mathbf{x}) = -2^{40}y^{10}(1-y)^{10}[90x^8(1-x)^{10}
- 200x^9(1-x)^9 + 90x^{10}(1-x)^8]
-2^{40}x^{10}(1-x)^{10}[90y^8(1-y)^{10}
- 200y^9(1-y)^9 + 90y^{10}(1-y)^8]
\Omega: \mathbf{x} \in [0,1]
u(\mathbf{x})|_{\partial \Omega} = 0
**with the solution:**
.. math::
u(\mathbf{x}) = 2^{40}x^{10}(1-x)^{10}y^{10}(1-y)^{10}
Attributes
----------
    max_ndof: int
the maximum number of degrees of freedom that can be created in the
adaptive mesh refinement, standard value is 50000
Methods
-------
solve()
solves the pde by calling ngsolve, provides: static condensation,
adaptive mesh refinement, parallelisation (where possible), sets the
internal variables for evaluating the exact solution and calculating
the distance between exact and approx solution
also sets execution time and memory consumption
Examples
--------
>>> import numpy as np
>>> fempde2 = FemPde2(True)
>>> pos = np.array([0.5, 0.5])
>>> fempde2.exact(pos)
>>> x -> numpy.ndarray with shape (2,)
_mesh -> ngs.comp.Mesh
_ngs_ex -> ngs.fem.CoefficientFunction
-> try to call solve() first
>>> fempde2.solve()
>>> fempde2.exact(pos)
1.0
>>> fempde2.approx(pos)
0.999998924259486
>>> fempde2.normL2()
5.853102150391562e-07
>>> fempde2.exec_time
3.830256175994873
>>> fempde2.mem_consumption
76705792
"""
def __init__(self, show_gui, max_ndof=50000):
super().__init__(show_gui)
# init protected
self._pde_string = "-laplacian(u(x)) = -(2^40*y^10*(1-y)^10*(90*x^8*(1-x)^10 - 200*x^9*(1-x)^9 + 90*x^10*(1-x)^8)) -(2^40*x^10*(1-x)^10*(90*y^8*(1-y)^10 - 200*y^9*(1-y)^9 + 90*y^10*(1-y)^8))"
self._ngs_ex = (2**(4*10))*(ngs.x**10)*((1-ngs.x)**10)*(ngs.y**10)*((1-ngs.y)**10)
# init public
self.max_ndof = max_ndof
def solve(self):
# disable garbage collector
# --------------------------------------------------------------------#
gc.disable()
while(gc.isenabled()):
time.sleep(0.1)
# --------------------------------------------------------------------#
# measure how much memory is used until here
process = psutil.Process()
memstart = process.memory_info().vms
# starts timer
tstart = time.time()
if self.show_gui:
import netgen.gui
# create mesh with initial size 0.1
self._mesh = ngs.Mesh(unit_square.GenerateMesh(maxh=0.1))
#create finite element space
self._fes = ngs.H1(self._mesh, order=2, dirichlet=".*", autoupdate=True)
# test and trail function
u = self._fes.TrialFunction()
v = self._fes.TestFunction()
# create bilinear form and enable static condensation
self._a = ngs.BilinearForm(self._fes, condense=True)
self._a += ngs.grad(u)*ngs.grad(v)*ngs.dx
# creat linear functional and apply RHS
self._f = ngs.LinearForm(self._fes)
self._f += ( \
-(2**40*ngs.y**10*(1-ngs.y)**10*(90*ngs.x**8*(1-ngs.x)**10 - 200*ngs.x**9*(1-ngs.x)**9 + 90*ngs.x**10*(1-ngs.x)**8)) \
-(2**40*ngs.x**10*(1-ngs.x)**10*(90*ngs.y**8*(1-ngs.y)**10 - 200*ngs.y**9*(1-ngs.y)**9 + 90*ngs.y**10*(1-ngs.y)**8)) )*v*ngs.dx
# preconditioner: multigrid - what prerequisits must the problem have?
self._c = ngs.Preconditioner(self._a,"multigrid")
# create grid function that holds the solution and set the boundary to 0
self._gfu = ngs.GridFunction(self._fes, autoupdate=True) # solution
self._g = 0.0
self._gfu.Set(self._g, definedon=self._mesh.Boundaries(".*"))
# draw grid function in gui
if self.show_gui:
ngs.Draw(self._gfu)
# create Hcurl space for flux calculation and estimate error
self._space_flux = ngs.HDiv(self._mesh, order=2, autoupdate=True)
self._gf_flux = ngs.GridFunction(self._space_flux, "flux", autoupdate=True)
# TaskManager starts threads that (standard thread nr is numer of cores)
with ngs.TaskManager():
# this is the adaptive loop
while self._fes.ndof < self.max_ndof:
self._solveStep()
self._estimateError()
self._mesh.Refine()
# since the adaptive loop stopped with a mesh refinement, the gfu must be
# calculated one last time
self._solveStep()
if self.show_gui:
ngs.Draw(self._gfu)
# set measured exectution time
self._exec_time = time.time() - tstart
# set measured used memory
memstop = process.memory_info().vms - memstart
self._mem_consumption = memstop
# enable garbage collector
# --------------------------------------------------------------------#
gc.enable()
gc.collect()
# --------------------------------------------------------------------#
if __name__ == "__main__":
fempde1 = FemPde1(True)
print(fempde1.pde_string)
try:
fempde1.exact(np.array([0.5,0.5]))
except:
print("Î error message above")
try:
fempde1.approx(np.array([0.5,0.5]))
except:
print("Î error message above")
fempde1.solve()
print("-------------------------------------")
print("exact(0.5, 0.5) = {}".format(fempde1.exact(np.array([0.5,0.5]))))
print("approx(0.5, 0.5) = {}".format(fempde1.approx(np.array([0.5,0.5]))))
print("L2 norm to the real solution {}".format(fempde1.normL2()))
print("solving took {} sec".format(fempde1.exec_time))
print("solving uses {} Mb".format(fempde1.mem_consumption/1000000))
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from matplotlib import cm
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
x = y = np.arange(0, 1.01, 0.01)
X, Y = np.meshgrid(x, y)
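    # evaluate the exact solution on a grid for the surface plot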
zs0 = np.array([fempde1.exact(\
np.array([x,y])) for x,y in zip(np.ravel(X), np.ravel(Y))])
Z = zs0.reshape(X.shape)
ax.plot_surface(X, Y, Z, cmap=cm.gnuplot)
fig.tight_layout()
ax.set_xlabel("X0")
ax.set_ylabel("X1")
ax.set_zlabel("f(X0, X1)")
plt.show()
fig.savefig("sol_pde_1.pdf", bbox_inches='tight')
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
x = y = np.arange(0, 1.01, 0.01)
X, Y = np.meshgrid(x, y)
zs0 = np.array([fempde1.approx(\
np.array([x,y])) for x,y in zip(np.ravel(X), np.ravel(Y))])
Z = zs0.reshape(X.shape)
ax.plot_surface(X, Y, Z, cmap=cm.gnuplot)
ax.set_xlabel("X0")
ax.set_ylabel("X1")
ax.set_zlabel("f(X0,X1)")
plt.show()
| [
"psutil.Process",
"netgen.geom2d.unit_square.GenerateMesh",
"time.sleep",
"numpy.array",
"ngsolve.Preconditioner",
"sys.path.append",
"numpy.arange",
"gc.enable",
"gc.disable",
"gc.isenabled",
"ngsolve.GridFunction",
"numpy.meshgrid",
"ngsolve.LinearForm",
"ngsolve.HDiv",
"gc.collect",
"time.time",
"matplotlib.pyplot.show",
"ngsolve.BilinearForm",
"ngsolve.Draw",
"ngsolve.grad",
"os.path.realpath",
"matplotlib.pyplot.figure",
"ngsolve.H1",
"numpy.ravel",
"ngsolve.TaskManager"
] | [((174, 201), 'sys.path.append', 'sys.path.append', (['importpath'], {}), '(importpath)\n', (189, 201), False, 'import sys\n'), ((6776, 6788), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (6786, 6788), True, 'import matplotlib.pyplot as plt\n'), ((6848, 6872), 'numpy.arange', 'np.arange', (['(0)', '(1.01)', '(0.01)'], {}), '(0, 1.01, 0.01)\n', (6857, 6872), True, 'import numpy as np\n'), ((6884, 6901), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (6895, 6901), True, 'import numpy as np\n'), ((7203, 7213), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7211, 7213), True, 'import matplotlib.pyplot as plt\n'), ((7288, 7300), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (7298, 7300), True, 'import matplotlib.pyplot as plt\n'), ((7360, 7384), 'numpy.arange', 'np.arange', (['(0)', '(1.01)', '(0.01)'], {}), '(0, 1.01, 0.01)\n', (7369, 7384), True, 'import numpy as np\n'), ((7396, 7413), 'numpy.meshgrid', 'np.meshgrid', (['x', 'y'], {}), '(x, y)\n', (7407, 7413), True, 'import numpy as np\n'), ((7687, 7697), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (7695, 7697), True, 'import matplotlib.pyplot as plt\n'), ((137, 163), 'os.path.realpath', 'os.path.realpath', (['__file__'], {}), '(__file__)\n', (153, 163), False, 'import os\n'), ((2784, 2796), 'gc.disable', 'gc.disable', ([], {}), '()\n', (2794, 2796), False, 'import gc\n'), ((2811, 2825), 'gc.isenabled', 'gc.isenabled', ([], {}), '()\n', (2823, 2825), False, 'import gc\n'), ((3016, 3032), 'psutil.Process', 'psutil.Process', ([], {}), '()\n', (3030, 3032), False, 'import psutil\n'), ((3127, 3138), 'time.time', 'time.time', ([], {}), '()\n', (3136, 3138), False, 'import time\n'), ((3380, 3440), 'ngsolve.H1', 'ngs.H1', (['self._mesh'], {'order': '(2)', 'dirichlet': '""".*"""', 'autoupdate': '(True)'}), "(self._mesh, order=2, dirichlet='.*', autoupdate=True)\n", (3386, 3440), True, 'import ngsolve as ngs\n'), ((3648, 3690), 'ngsolve.BilinearForm', 'ngs.BilinearForm', (['self._fes'], {'condense': '(True)'}), '(self._fes, condense=True)\n', (3664, 3690), True, 'import ngsolve as ngs\n'), ((3812, 3837), 'ngsolve.LinearForm', 'ngs.LinearForm', (['self._fes'], {}), '(self._fes)\n', (3826, 3837), True, 'import ngsolve as ngs\n'), ((4231, 4271), 'ngsolve.Preconditioner', 'ngs.Preconditioner', (['self._a', '"""multigrid"""'], {}), "(self._a, 'multigrid')\n", (4249, 4271), True, 'import ngsolve as ngs\n'), ((4381, 4425), 'ngsolve.GridFunction', 'ngs.GridFunction', (['self._fes'], {'autoupdate': '(True)'}), '(self._fes, autoupdate=True)\n', (4397, 4425), True, 'import ngsolve as ngs\n'), ((4739, 4785), 'ngsolve.HDiv', 'ngs.HDiv', (['self._mesh'], {'order': '(2)', 'autoupdate': '(True)'}), '(self._mesh, order=2, autoupdate=True)\n', (4747, 4785), True, 'import ngsolve as ngs\n'), ((4810, 4869), 'ngsolve.GridFunction', 'ngs.GridFunction', (['self._space_flux', '"""flux"""'], {'autoupdate': '(True)'}), "(self._space_flux, 'flux', autoupdate=True)\n", (4826, 4869), True, 'import ngsolve as ngs\n'), ((5772, 5783), 'gc.enable', 'gc.enable', ([], {}), '()\n', (5781, 5783), False, 'import gc\n'), ((5792, 5804), 'gc.collect', 'gc.collect', ([], {}), '()\n', (5802, 5804), False, 'import gc\n'), ((2840, 2855), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (2850, 2855), False, 'import time\n'), ((3278, 3312), 'netgen.geom2d.unit_square.GenerateMesh', 'unit_square.GenerateMesh', ([], {'maxh': '(0.1)'}), '(maxh=0.1)\n', (3302, 3312), False, 'from netgen.geom2d import unit_square\n'), 
((4614, 4633), 'ngsolve.Draw', 'ngs.Draw', (['self._gfu'], {}), '(self._gfu)\n', (4622, 4633), True, 'import ngsolve as ngs\n'), ((4973, 4990), 'ngsolve.TaskManager', 'ngs.TaskManager', ([], {}), '()\n', (4988, 4990), True, 'import ngsolve as ngs\n'), ((5381, 5400), 'ngsolve.Draw', 'ngs.Draw', (['self._gfu'], {}), '(self._gfu)\n', (5389, 5400), True, 'import ngsolve as ngs\n'), ((5479, 5490), 'time.time', 'time.time', ([], {}), '()\n', (5488, 5490), False, 'import time\n'), ((6022, 6042), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (6030, 6042), True, 'import numpy as np\n'), ((6131, 6151), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (6139, 6151), True, 'import numpy as np\n'), ((3710, 3721), 'ngsolve.grad', 'ngs.grad', (['u'], {}), '(u)\n', (3718, 3721), True, 'import ngsolve as ngs\n'), ((3722, 3733), 'ngsolve.grad', 'ngs.grad', (['v'], {}), '(v)\n', (3730, 3733), True, 'import ngsolve as ngs\n'), ((6343, 6363), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (6351, 6363), True, 'import numpy as np\n'), ((6422, 6442), 'numpy.array', 'np.array', (['[0.5, 0.5]'], {}), '([0.5, 0.5])\n', (6430, 6442), True, 'import numpy as np\n'), ((6947, 6963), 'numpy.array', 'np.array', (['[x, y]'], {}), '([x, y])\n', (6955, 6963), True, 'import numpy as np\n'), ((7460, 7476), 'numpy.array', 'np.array', (['[x, y]'], {}), '([x, y])\n', (7468, 7476), True, 'import numpy as np\n'), ((6979, 6990), 'numpy.ravel', 'np.ravel', (['X'], {}), '(X)\n', (6987, 6990), True, 'import numpy as np\n'), ((6992, 7003), 'numpy.ravel', 'np.ravel', (['Y'], {}), '(Y)\n', (7000, 7003), True, 'import numpy as np\n'), ((7492, 7503), 'numpy.ravel', 'np.ravel', (['X'], {}), '(X)\n', (7500, 7503), True, 'import numpy as np\n'), ((7505, 7516), 'numpy.ravel', 'np.ravel', (['Y'], {}), '(Y)\n', (7513, 7516), True, 'import numpy as np\n')] |
import pytest
from pydantic import ValidationError
from overhave.transport import OverhaveS3ManagerSettings
class TestS3ManagerSettings:
""" Unit tests for :class:`OverhaveS3ManagerSettings`. """
@pytest.mark.parametrize("test_s3_enabled", [False])
def test_disabled(self, test_s3_enabled: bool) -> None:
settings = OverhaveS3ManagerSettings(enabled=test_s3_enabled)
assert not settings.enabled
assert not settings.url
assert not settings.access_key
assert not settings.secret_key
@pytest.mark.parametrize("test_s3_enabled", [True])
def test_empty_enabled(self, test_s3_enabled: bool) -> None:
with pytest.raises(ValidationError):
OverhaveS3ManagerSettings(enabled=test_s3_enabled)
@pytest.mark.parametrize("test_s3_autocreate_buckets", [False, True], indirect=True)
@pytest.mark.parametrize("test_s3_enabled", [True], indirect=True)
def test_correct_enabled(
self,
test_s3_enabled: bool,
test_s3_autocreate_buckets: bool,
test_s3_manager_settings: OverhaveS3ManagerSettings,
) -> None:
assert test_s3_manager_settings.enabled == test_s3_enabled
assert test_s3_manager_settings.url
assert test_s3_manager_settings.access_key
assert test_s3_manager_settings.secret_key
assert test_s3_manager_settings.verify
assert test_s3_manager_settings.autocreate_buckets == test_s3_autocreate_buckets
| [
"pytest.mark.parametrize",
"overhave.transport.OverhaveS3ManagerSettings",
"pytest.raises"
] | [((209, 260), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_s3_enabled"""', '[False]'], {}), "('test_s3_enabled', [False])\n", (232, 260), False, 'import pytest\n'), ((543, 593), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_s3_enabled"""', '[True]'], {}), "('test_s3_enabled', [True])\n", (566, 593), False, 'import pytest\n'), ((773, 860), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_s3_autocreate_buckets"""', '[False, True]'], {'indirect': '(True)'}), "('test_s3_autocreate_buckets', [False, True],\n indirect=True)\n", (796, 860), False, 'import pytest\n'), ((862, 927), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""test_s3_enabled"""', '[True]'], {'indirect': '(True)'}), "('test_s3_enabled', [True], indirect=True)\n", (885, 927), False, 'import pytest\n'), ((340, 390), 'overhave.transport.OverhaveS3ManagerSettings', 'OverhaveS3ManagerSettings', ([], {'enabled': 'test_s3_enabled'}), '(enabled=test_s3_enabled)\n', (365, 390), False, 'from overhave.transport import OverhaveS3ManagerSettings\n'), ((672, 702), 'pytest.raises', 'pytest.raises', (['ValidationError'], {}), '(ValidationError)\n', (685, 702), False, 'import pytest\n'), ((716, 766), 'overhave.transport.OverhaveS3ManagerSettings', 'OverhaveS3ManagerSettings', ([], {'enabled': 'test_s3_enabled'}), '(enabled=test_s3_enabled)\n', (741, 766), False, 'from overhave.transport import OverhaveS3ManagerSettings\n')] |
import FWCore.ParameterSet.Config as cms
process = cms.Process("LIKELIHOODPDFDBREADER")
# process.load("MuonAnalysis.MomentumScaleCalibration.local_CSA08_Y_cff")
process.source = cms.Source("EmptySource",
numberEventsInRun = cms.untracked.uint32(1),
firstRun = cms.untracked.uint32(1)
)
process.load("Configuration.StandardSequences.MagneticField_cff")
process.load("Geometry.CMSCommonData.cmsIdealGeometryXML_cfi")
process.load("Geometry.CommonTopologies.globalTrackingGeometry_cfi")
process.load("RecoMuon.DetLayers.muonDetLayerGeometry_cfi")
process.load("Geometry.MuonNumbering.muonNumberingInitialization_cfi")
process.load("RecoMuon.TrackingTools.MuonServiceProxy_cff")
# process.source = cms.Source("PoolSource",
# fileNames = cms.untracked.vstring()
# )
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
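# Conditions source: read the MuScleFit likelihood PDF payload from the local SQLite file dummy2.db.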
process.poolDBESSource = cms.ESSource("PoolDBESSource",
BlobStreamerName = cms.untracked.string('TBufferBlobStreamingService'),
DBParameters = cms.PSet(
messageLevel = cms.untracked.int32(2),
authenticationPath = cms.untracked.string('/afs/cern.ch/cms/DB/conddb')
),
timetype = cms.untracked.string('runnumber'),
connect = cms.string('sqlite_file:dummy2.db'),
toGet = cms.VPSet(cms.PSet(
record = cms.string('MuScleFitLikelihoodPdfRcd'),
tag = cms.string('MuScleFitLikelihoodPdf_2_1_12')
))
)
process.LikelihoodPdfDBReaderModule = cms.EDAnalyzer(
"LikelihoodPdfDBReader"
)
process.p1 = cms.Path(process.LikelihoodPdfDBReaderModule)
| [
"FWCore.ParameterSet.Config.string",
"FWCore.ParameterSet.Config.untracked.string",
"FWCore.ParameterSet.Config.untracked.int32",
"FWCore.ParameterSet.Config.untracked.uint32",
"FWCore.ParameterSet.Config.Process",
"FWCore.ParameterSet.Config.Path",
"FWCore.ParameterSet.Config.EDAnalyzer"
] | [((52, 88), 'FWCore.ParameterSet.Config.Process', 'cms.Process', (['"""LIKELIHOODPDFDBREADER"""'], {}), "('LIKELIHOODPDFDBREADER')\n", (63, 88), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1452, 1491), 'FWCore.ParameterSet.Config.EDAnalyzer', 'cms.EDAnalyzer', (['"""LikelihoodPdfDBReader"""'], {}), "('LikelihoodPdfDBReader')\n", (1466, 1491), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1512, 1557), 'FWCore.ParameterSet.Config.Path', 'cms.Path', (['process.LikelihoodPdfDBReaderModule'], {}), '(process.LikelihoodPdfDBReaderModule)\n', (1520, 1557), True, 'import FWCore.ParameterSet.Config as cms\n'), ((231, 254), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(1)'], {}), '(1)\n', (251, 254), True, 'import FWCore.ParameterSet.Config as cms\n'), ((271, 294), 'FWCore.ParameterSet.Config.untracked.uint32', 'cms.untracked.uint32', (['(1)'], {}), '(1)\n', (291, 294), True, 'import FWCore.ParameterSet.Config as cms\n'), ((836, 858), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(1)'], {}), '(1)\n', (855, 858), True, 'import FWCore.ParameterSet.Config as cms\n'), ((940, 991), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""TBufferBlobStreamingService"""'], {}), "('TBufferBlobStreamingService')\n", (960, 991), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1170, 1203), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""runnumber"""'], {}), "('runnumber')\n", (1190, 1203), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1219, 1254), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""sqlite_file:dummy2.db"""'], {}), "('sqlite_file:dummy2.db')\n", (1229, 1254), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1044, 1066), 'FWCore.ParameterSet.Config.untracked.int32', 'cms.untracked.int32', (['(2)'], {}), '(2)\n', (1063, 1066), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1097, 1147), 'FWCore.ParameterSet.Config.untracked.string', 'cms.untracked.string', (['"""/afs/cern.ch/cms/DB/conddb"""'], {}), "('/afs/cern.ch/cms/DB/conddb')\n", (1117, 1147), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1305, 1344), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""MuScleFitLikelihoodPdfRcd"""'], {}), "('MuScleFitLikelihoodPdfRcd')\n", (1315, 1344), True, 'import FWCore.ParameterSet.Config as cms\n'), ((1360, 1403), 'FWCore.ParameterSet.Config.string', 'cms.string', (['"""MuScleFitLikelihoodPdf_2_1_12"""'], {}), "('MuScleFitLikelihoodPdf_2_1_12')\n", (1370, 1403), True, 'import FWCore.ParameterSet.Config as cms\n')] |
#!/usr/bin/python
from mod_pywebsocket import msgutil
import time
def web_socket_do_extra_handshake(request):
pass # Always accept.
def web_socket_transfer_data(request):
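    # Wait three seconds, then push a single text message to the client.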
time.sleep(3)
msgutil.send_message(request, "line")
| [
"mod_pywebsocket.msgutil.send_message",
"time.sleep"
] | [((177, 190), 'time.sleep', 'time.sleep', (['(3)'], {}), '(3)\n', (187, 190), False, 'import time\n'), ((192, 229), 'mod_pywebsocket.msgutil.send_message', 'msgutil.send_message', (['request', '"""line"""'], {}), "(request, 'line')\n", (212, 229), False, 'from mod_pywebsocket import msgutil\n')] |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from typing import TYPE_CHECKING, List, Optional, Sequence
from airflow.exceptions import AirflowException
from airflow.models import BaseOperator
from airflow.providers.microsoft.psrp.hooks.psrp import PSRPHook
if TYPE_CHECKING:
from airflow.utils.context import Context
class PSRPOperator(BaseOperator):
"""PowerShell Remoting Protocol operator.
:param psrp_conn_id: connection id
:type psrp_conn_id: str
:param command: command to execute on remote host. (templated)
:type command: str
:param powershell: powershell to execute on remote host. (templated)
:type powershell: str
"""
template_fields: Sequence[str] = (
"command",
"powershell",
)
template_fields_renderers = {"command": "powershell", "powershell": "powershell"}
ui_color = "#901dd2"
def __init__(
self,
*,
psrp_conn_id: str,
command: Optional[str] = None,
powershell: Optional[str] = None,
**kwargs,
) -> None:
super().__init__(**kwargs)
if not (command or powershell):
raise ValueError("Must provide either 'command' or 'powershell'")
self.conn_id = psrp_conn_id
self.command = command
self.powershell = powershell
def execute(self, context: "Context") -> List[str]:
with PSRPHook(self.conn_id) as hook:
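            # Plain commands are wrapped in a cmd.exe here-string; PowerShell scripts are passed through as-is.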
ps = hook.invoke_powershell(
f"cmd.exe /c @'\n{self.command}\n'@" if self.command else self.powershell
)
if ps.had_errors:
raise AirflowException("Process failed")
return ps.output
| [
"airflow.providers.microsoft.psrp.hooks.psrp.PSRPHook",
"airflow.exceptions.AirflowException"
] | [((2123, 2145), 'airflow.providers.microsoft.psrp.hooks.psrp.PSRPHook', 'PSRPHook', (['self.conn_id'], {}), '(self.conn_id)\n', (2131, 2145), False, 'from airflow.providers.microsoft.psrp.hooks.psrp import PSRPHook\n'), ((2344, 2378), 'airflow.exceptions.AirflowException', 'AirflowException', (['"""Process failed"""'], {}), "('Process failed')\n", (2360, 2378), False, 'from airflow.exceptions import AirflowException\n')] |
"""
This module contains the rule-based inference (rulebased_deduction) engine.
"""
import itertools
from collections import defaultdict
from itertools import chain
from excut.explanations_mining.descriptions import dump_explanations_to_file
from excut.explanations_mining.descriptions_new import Description2, Atom, load_from_file
from excut.explanations_mining.explaining_engines_extended import PathBasedClustersExplainerExtended
from excut.explanations_mining.simple_miner.description_miner_extended import DescriptionMinerExtended, ExplanationStructure
from excut.kg.kg_query_interface_extended import EndPointKGQueryInterfaceExtended, KGQueryInterfaceExtended
from excut.kg.kg_indexing import Indexer
from excut.kg.utils.data_formating import n3_repr
from excut.utils.logging import logger
from excut.kg.utils.Constants import DEFUALT_AUX_RELATION
from excut.clustering import target_entities as tes
class Prediction:
"""
An object to represent the prediction of the rules
:ivar triple: the predicted triple
:ivar all_sources: all rules that predicted the same triple
"""
# def __init__(self, triple: tuple, source_description=Description(), all_sources=None):
def __init__(self, triple=None, sources=None):
self.triple = triple
        # self.source_description = source_description
self.all_sources = sources if sources else list() # sources if sources else {source_description}
def get_subject(self):
return self.triple[0]
def get_object(self):
return self.triple[2]
def get_quality(self, measure='x_coverage', method=max):
# return self.source_description.get_quality(measure)
return method([source.get_quality(measure) for source in self.all_sources])
def get_main_description(self, measure='x_coverage', method=max):
return method(self.all_sources, key=lambda d: d.get_quality(measure))
def __str__(self):
return str(self.triple) + '<<' + str(self.get_main_description())
def __repr__(self):
return "%s\t(\t%s,%s)" % (self.__class__.__name__, repr(self.triple), repr(self.all_sources))
def __eq__(self, other):
return other.triple == self.triple
def __hash__(self):
return hash(self.triple)
class DeductionEngine():
"""
Abstract rulebased_deduction/inference engine.
"""
def __init__(self, **kwargs):
pass
def infer(self, descriptions, recursive=False, topk=-1):
pass
class SparqlBasedDeductionEngineExtended(DeductionEngine):
"""
    Deduction engine that converts the rules to SPARQL and fires them over the KG.
    The deduction step takes care of consolidating similar predictions.
"""
def __init__(self, kg_query_interface: KGQueryInterfaceExtended, relation=DEFUALT_AUX_RELATION, quality='x_coverage', quality_aggregation=max):
"""
:param kg_query_interface: interface for the KG.
:param relation: the relation used in the predicted triple (optional)
        :param quality: objective quality measure for ranking the predictions (optional); by default
                the exclusive coverage of the rules is used
        :param quality_aggregation: the method used for aggregating the score if multiple rules infer the same fact
                (optional); by default max is used.
"""
super(SparqlBasedDeductionEngineExtended, self).__init__()
self.relation = relation
self.query_executer = kg_query_interface
self.quality = quality
self.quality_aggregation = quality_aggregation
self.labels_indexer=Indexer(store=kg_query_interface.type,
endpoint=kg_query_interface.endpoint,
graph= kg_query_interface.labels_graph,
identifier=kg_query_interface.labels_identifier)
def infer(self, descriptions_list, target_entities=None, min_quality=0, topk=-1, output_filepath=None,
clear_target_entities=True):
"""
        Infer new facts for a given set of descriptions.
        :param descriptions_list: list of explanation/description rules
        :param target_entities: entities and their labels for which predictions are generated
        :param min_quality: minimum aggregated quality for the predictions
        :param topk: k *distinct* highest-quality predictions per entity
        :param output_filepath: predictions output file
        :param clear_target_entities: clear the indexed target entities once inference is done
        :return: dictionary of predicted entity-cluster assignments
"""
if isinstance(descriptions_list,dict):
descriptions_list=list(itertools.chain.from_iterable(descriptions_list.values()))
if target_entities:
self.labels_indexer.index_triples(target_entities)
self.relation=target_entities.get_relation()
predictions = list(map(self._infer_single, descriptions_list))
per_entity_predictions = self.consolidate(predictions)
per_entity_predictions = self._merge_and_sort_cut(per_entity_predictions, min_quality, topk=topk)
if output_filepath:
dump_predictions_map(per_entity_predictions, output_filepath, triple_format=True, topk=topk, with_weight=True,
with_description=False, quality=self.quality)
if target_entities and clear_target_entities:
self.labels_indexer.drop()
return per_entity_predictions
def consolidate(self, predictions):
"""
Combine predictions from different rules
:param predictions: list of generated predictions
:return: combined single prediction with several sources for equivalent predictions
:rtype: dict
"""
# per_var_predictions = defaultdict(lambda: defaultdict(list))
# for p in chain.from_iterable(predictions):
# per_var_predictions[p.get_subject()][p.get_object()].append(p)
per_entity_predictions = defaultdict(lambda: defaultdict(Prediction))
for p in list(chain.from_iterable(predictions)):
cons_pred = per_entity_predictions[p.get_subject()][p.get_object()]
cons_pred.triple = p.triple
cons_pred.all_sources += p.all_sources
return per_entity_predictions
def _merge_and_sort_cut(self, per_entity_prediction, threshold=0, topk=-1):
"""
        Merge the inferred facts in case of functional predicates.
:param per_entity_prediction:
:return:
"""
def quality_method(p):
return p.get_quality(self.quality, self.quality_aggregation)
per_entity_prediction_filtered = defaultdict(list)
for sub, per_obj_predictions in per_entity_prediction.items():
            # print([(k, p.triple[2], quality_method(p)) for k, p in per_obj_predictions.items()])
merged_predictions = list(
filter(lambda p: quality_method(p) > threshold, list(per_obj_predictions.values())))
merged_predictions.sort(key=quality_method, reverse=True)
include = topk if topk > 0 else len(merged_predictions)
per_entity_prediction_filtered[sub] = merged_predictions[:include]
return per_entity_prediction_filtered
def _infer_single(self, description: Description2):
"""
Infer new facts for the given Description
:param description:
:return:
"""
bindings = self.query_executer.get_arguments_bindings(description,
restriction_pattern=Description2(body=[Atom('?x',
self.relation,
'?z')]))
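        # Bindings are restricted to entities that already carry the auxiliary label relation (the indexed targets).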
head = description.head
# only supports p(?x,CONSTANT)
predictions = [Prediction((b, head.predicate, head.object), [description]) for b in bindings]
return predictions
def dump_predictions_map(per_var_predictions, out_filepath, triple_format=True, topk=-1, with_weight=True,
with_description=False, quality='x_coverage'):
"""
    Write the predictions to two files: the first is human-readable and the other, with a .parsable extension, can be
    parsed in Python.
:param per_var_predictions:
:param out_filepath:
:param triple_format:
:param topk:
:param with_weight:
:param with_description:
:return:
"""
out_file_parsable = out_filepath + '.parsable'
out_filepath_with_type = out_filepath + ('.%s' % quality if len(quality) > 0 else '')
with open(out_filepath_with_type, 'w') as out_file:
for var, predictions in per_var_predictions.items():
if topk > 0:
predictions = predictions[:topk]
for p in predictions:
if triple_format:
# I only output normalized_coverage
out_str = n3_repr(p.triple) + ('\t%f' % p.get_quality(quality) if with_weight else '') + (
'\t%s' % p.source_description if with_description else '')
else:
out_str = str(p)
out_file.write(out_str)
out_file.write('\n')
with open(out_file_parsable + ('.%s' % quality if len(quality) > 0 else ''), 'w') as out_file:
out_file.write('\n'.join(
map(str, chain.from_iterable(map(lambda l: l[:topk] if topk > 0 else l, per_var_predictions.values())))))
return out_filepath_with_type
if __name__ == '__main__':
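    # Example end-to-end run: index the target entities, mine tree-structured explanations, then infer cluster assignments.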
target_entities=tes.load_from_file('/scratch/GW/pool0/gadelrab/ExDEC/data/yago/yago_art_3_4k.tsv')
vos_executer = EndPointKGQueryInterfaceExtended('http://halimede:8890/sparql',
['http://yago-expr.org', 'http://yago-expr.org.types'],
labels_identifier='http://yago-expr.org.labels')
explainer=PathBasedClustersExplainerExtended(vos_executer, language_bias={'max_length': 4, 'structure': ExplanationStructure.TREE})
explans=explainer.explain(target_entities,
output_file='/scratch/GW/pool0/gadelrab/ExDEC/tmp/explanations_tree.txt')
ded = SparqlBasedDeductionEngineExtended(vos_executer)
per_var_predictions = ded.infer(explans, target_entities,
output_filepath='/scratch/GW/pool0/gadelrab/ExDEC/tmp/predictions_tree.tsv')
logger.info("Total variables with predictions subjects: %i", len(per_var_predictions))
| [
"excut.clustering.target_entities.load_from_file",
"excut.explanations_mining.descriptions_new.Atom",
"excut.kg.utils.data_formating.n3_repr",
"itertools.chain.from_iterable",
"collections.defaultdict",
"excut.kg.kg_indexing.Indexer",
"excut.explanations_mining.explaining_engines_extended.PathBasedClustersExplainerExtended",
"excut.kg.kg_query_interface_extended.EndPointKGQueryInterfaceExtended"
] | [((9790, 9877), 'excut.clustering.target_entities.load_from_file', 'tes.load_from_file', (['"""/scratch/GW/pool0/gadelrab/ExDEC/data/yago/yago_art_3_4k.tsv"""'], {}), "(\n '/scratch/GW/pool0/gadelrab/ExDEC/data/yago/yago_art_3_4k.tsv')\n", (9808, 9877), True, 'from excut.clustering import target_entities as tes\n'), ((9892, 10069), 'excut.kg.kg_query_interface_extended.EndPointKGQueryInterfaceExtended', 'EndPointKGQueryInterfaceExtended', (['"""http://halimede:8890/sparql"""', "['http://yago-expr.org', 'http://yago-expr.org.types']"], {'labels_identifier': '"""http://yago-expr.org.labels"""'}), "('http://halimede:8890/sparql', [\n 'http://yago-expr.org', 'http://yago-expr.org.types'],\n labels_identifier='http://yago-expr.org.labels')\n", (9924, 10069), False, 'from excut.kg.kg_query_interface_extended import EndPointKGQueryInterfaceExtended, KGQueryInterfaceExtended\n'), ((10180, 10306), 'excut.explanations_mining.explaining_engines_extended.PathBasedClustersExplainerExtended', 'PathBasedClustersExplainerExtended', (['vos_executer'], {'language_bias': "{'max_length': 4, 'structure': ExplanationStructure.TREE}"}), "(vos_executer, language_bias={\n 'max_length': 4, 'structure': ExplanationStructure.TREE})\n", (10214, 10306), False, 'from excut.explanations_mining.explaining_engines_extended import PathBasedClustersExplainerExtended\n'), ((3618, 3791), 'excut.kg.kg_indexing.Indexer', 'Indexer', ([], {'store': 'kg_query_interface.type', 'endpoint': 'kg_query_interface.endpoint', 'graph': 'kg_query_interface.labels_graph', 'identifier': 'kg_query_interface.labels_identifier'}), '(store=kg_query_interface.type, endpoint=kg_query_interface.endpoint,\n graph=kg_query_interface.labels_graph, identifier=kg_query_interface.\n labels_identifier)\n', (3625, 3791), False, 'from excut.kg.kg_indexing import Indexer\n'), ((6765, 6782), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (6776, 6782), False, 'from collections import defaultdict\n'), ((6142, 6174), 'itertools.chain.from_iterable', 'chain.from_iterable', (['predictions'], {}), '(predictions)\n', (6161, 6174), False, 'from itertools import chain\n'), ((6095, 6118), 'collections.defaultdict', 'defaultdict', (['Prediction'], {}), '(Prediction)\n', (6106, 6118), False, 'from collections import defaultdict\n'), ((7711, 7742), 'excut.explanations_mining.descriptions_new.Atom', 'Atom', (['"""?x"""', 'self.relation', '"""?z"""'], {}), "('?x', self.relation, '?z')\n", (7715, 7742), False, 'from excut.explanations_mining.descriptions_new import Description2, Atom, load_from_file\n'), ((9152, 9169), 'excut.kg.utils.data_formating.n3_repr', 'n3_repr', (['p.triple'], {}), '(p.triple)\n', (9159, 9169), False, 'from excut.kg.utils.data_formating import n3_repr\n')] |
import logging
import random
from datetime import timedelta
from typing import TYPE_CHECKING
from duration import to_iso8601
from pyramid.httpexceptions import HTTPBadRequest, HTTPCreated, HTTPNotFound, HTTPOk
from weaver import sort
from weaver.config import WEAVER_CONFIGURATION_ADES, WEAVER_CONFIGURATION_EMS, get_weaver_configuration
from weaver.database import get_db
from weaver.datatype import Bill, Quote
from weaver.exceptions import ProcessNotFound, QuoteNotFound, log_unhandled_exceptions
from weaver.formats import OUTPUT_FORMAT_JSON
from weaver.processes.types import PROCESS_APPLICATION, PROCESS_WORKFLOW
from weaver.processes.wps_package import get_package_workflow_steps, get_process_location
from weaver.store.base import StoreBills, StoreQuotes
from weaver.utils import get_settings, get_weaver_url
from weaver.wps_restapi import swagger_definitions as sd
from weaver.wps_restapi.processes.processes import submit_local_job
if TYPE_CHECKING:
from weaver.datatype import Process
from weaver.typedefs import JSON
LOGGER = logging.getLogger(__name__)
def process_quote_estimator(process): # noqa: E811
# type: (Process) -> JSON
"""
Simulate quote parameters for the process execution.
:param process: instance of :class:`weaver.datatype.Process` for which to evaluate the quote.
:return: dict of {price, currency, estimatedTime} values for the process quote.
"""
# TODO: replace by some fancy ml technique or something?
price = random.uniform(0, 10) # nosec
currency = "CAD"
estimated_time = to_iso8601(timedelta(minutes=random.uniform(5, 60))) # nosec
return {"price": price, "currency": currency, "estimatedTime": estimated_time}
@sd.process_quotes_service.post(tags=[sd.TAG_BILL_QUOTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.PostProcessQuoteRequestEndpoint(), response_schemas=sd.post_quotes_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def request_quote(request):
"""
Request a quotation for a process.
"""
settings = get_settings(request)
weaver_config = get_weaver_configuration(settings)
if weaver_config not in [WEAVER_CONFIGURATION_ADES, WEAVER_CONFIGURATION_EMS]:
raise HTTPBadRequest("Unsupported request for configuration '{}'.".format(weaver_config))
process_id = request.matchdict.get("process_id")
process_store = get_db(request).get_store("processes")
try:
process = process_store.fetch_by_id(process_id)
except ProcessNotFound:
raise HTTPNotFound("Could not find process with specified 'process_id'.")
store = get_db(request).get_store(StoreQuotes)
process_url = get_process_location(process_id, data_source=get_weaver_url(settings))
process_type = process.type
process_params = dict()
for param in ["inputs", "outputs", "mode", "response"]:
if param in request.json:
process_params[param] = request.json.pop(param)
process_quote_info = process_quote_estimator(process)
process_quote_info.update({
"process": process_id,
"processParameters": process_params,
"location": process_url,
"user": str(request.authenticated_userid)
})
# loop workflow sub-process steps to get individual quotes
if process_type == PROCESS_WORKFLOW and weaver_config == WEAVER_CONFIGURATION_EMS:
workflow_quotes = list()
for step in get_package_workflow_steps(process_url):
# retrieve quote from provider ADES
# TODO: data source mapping
process_step_url = get_process_location(step["reference"])
process_quote_url = "{}/quotations".format(process_step_url)
subreq = request.copy()
subreq.path_info = process_quote_url
resp_json = request.invoke_subrequest(subreq).json()
quote_json = resp_json["quote"]
quote = store.save_quote(Quote(**quote_json))
workflow_quotes.append(quote.id)
process_quote_info.update({"steps": workflow_quotes})
quote = store.save_quote(Quote(**process_quote_info))
return HTTPCreated(json={"quote": quote.json()})
# single application quotes (ADES or EMS)
elif process_type == PROCESS_APPLICATION:
quote = store.save_quote(Quote(**process_quote_info))
quote_json = quote.json()
quote_json.pop("steps", None)
return HTTPCreated(json={"quote": quote_json})
# error if not handled up to this point
raise HTTPBadRequest("Unsupported quoting process type '{0}' on '{1}'.".format(process_type, weaver_config))
@sd.process_quotes_service.get(tags=[sd.TAG_BILL_QUOTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.ProcessQuotesEndpoint(), response_schemas=sd.get_quote_list_responses)
@sd.quotes_service.get(tags=[sd.TAG_BILL_QUOTE], renderer=OUTPUT_FORMAT_JSON,
schema=sd.QuotesEndpoint(), response_schemas=sd.get_quote_list_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def get_quote_list(request):
"""
    Get the list of quote IDs.
"""
page = int(request.params.get("page", "0"))
limit = int(request.params.get("limit", "10"))
filters = {
"process_id": request.params.get("process", None) or request.matchdict.get("process_id", None),
"page": page,
"limit": limit,
"sort": request.params.get("sort", sort.SORT_CREATED),
}
store = get_db(request).get_store(StoreQuotes)
items, count = store.find_quotes(**filters)
return HTTPOk(json={
"count": count,
"page": page,
"limit": limit,
"quotes": [quote.id for quote in items]
})
@sd.process_quote_service.get(tags=[sd.TAG_BILL_QUOTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.ProcessQuoteEndpoint(), response_schemas=sd.get_quote_responses)
@sd.quote_service.get(tags=[sd.TAG_BILL_QUOTE], renderer=OUTPUT_FORMAT_JSON,
schema=sd.QuoteEndpoint(), response_schemas=sd.get_quote_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def get_quote_info(request):
"""
Get quote information.
"""
quote_id = request.matchdict.get("quote_id")
store = get_db(request).get_store(StoreQuotes)
try:
quote = store.fetch_by_id(quote_id)
except QuoteNotFound:
raise HTTPNotFound("Could not find quote with specified 'quote_id'.")
return HTTPOk(json={"quote": quote.json()})
@sd.process_quote_service.post(tags=[sd.TAG_BILL_QUOTE, sd.TAG_EXECUTE, sd.TAG_PROCESSES], renderer=OUTPUT_FORMAT_JSON,
schema=sd.PostProcessQuote(), response_schemas=sd.post_quote_responses)
@sd.quote_service.post(tags=[sd.TAG_BILL_QUOTE, sd.TAG_EXECUTE], renderer=OUTPUT_FORMAT_JSON,
schema=sd.PostQuote(), response_schemas=sd.post_quote_responses)
@log_unhandled_exceptions(logger=LOGGER, message=sd.InternalServerErrorResponseSchema.description)
def execute_quote(request):
"""
Execute a quoted process.
"""
quote_info = get_quote_info(request).json["quote"]
quote_bill_info = {
"quote": quote_info.get("id"),
"price": quote_info.get("price"),
"currency": quote_info.get("currency")
}
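    # Submit the job for execution, then record a bill linking the job to the accepted quote.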
job_resp = submit_local_job(request)
job_json = job_resp.json
job_id = job_json.get("jobID")
user_id = str(request.authenticated_userid)
store = get_db(request).get_store(StoreBills)
bill = store.save_bill(Bill(user=user_id, job=job_id, **quote_bill_info))
job_json.update({"bill": bill.id})
return HTTPCreated(json=job_json)
| [
"logging.getLogger",
"weaver.datatype.Quote",
"pyramid.httpexceptions.HTTPNotFound",
"pyramid.httpexceptions.HTTPOk",
"weaver.processes.wps_package.get_package_workflow_steps",
"weaver.config.get_weaver_configuration",
"weaver.wps_restapi.swagger_definitions.PostQuote",
"weaver.wps_restapi.swagger_definitions.ProcessQuoteEndpoint",
"weaver.wps_restapi.swagger_definitions.QuotesEndpoint",
"weaver.wps_restapi.swagger_definitions.PostProcessQuoteRequestEndpoint",
"random.uniform",
"weaver.wps_restapi.swagger_definitions.QuoteEndpoint",
"weaver.wps_restapi.swagger_definitions.PostProcessQuote",
"weaver.utils.get_settings",
"weaver.exceptions.log_unhandled_exceptions",
"pyramid.httpexceptions.HTTPCreated",
"weaver.wps_restapi.swagger_definitions.ProcessQuotesEndpoint",
"weaver.datatype.Bill",
"weaver.wps_restapi.processes.processes.submit_local_job",
"weaver.processes.wps_package.get_process_location",
"weaver.database.get_db",
"weaver.utils.get_weaver_url"
] | [((1050, 1077), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1067, 1077), False, 'import logging\n'), ((1938, 2040), 'weaver.exceptions.log_unhandled_exceptions', 'log_unhandled_exceptions', ([], {'logger': 'LOGGER', 'message': 'sd.InternalServerErrorResponseSchema.description'}), '(logger=LOGGER, message=sd.\n InternalServerErrorResponseSchema.description)\n', (1962, 2040), False, 'from weaver.exceptions import ProcessNotFound, QuoteNotFound, log_unhandled_exceptions\n'), ((5083, 5185), 'weaver.exceptions.log_unhandled_exceptions', 'log_unhandled_exceptions', ([], {'logger': 'LOGGER', 'message': 'sd.InternalServerErrorResponseSchema.description'}), '(logger=LOGGER, message=sd.\n InternalServerErrorResponseSchema.description)\n', (5107, 5185), False, 'from weaver.exceptions import ProcessNotFound, QuoteNotFound, log_unhandled_exceptions\n'), ((6216, 6318), 'weaver.exceptions.log_unhandled_exceptions', 'log_unhandled_exceptions', ([], {'logger': 'LOGGER', 'message': 'sd.InternalServerErrorResponseSchema.description'}), '(logger=LOGGER, message=sd.\n InternalServerErrorResponseSchema.description)\n', (6240, 6318), False, 'from weaver.exceptions import ProcessNotFound, QuoteNotFound, log_unhandled_exceptions\n'), ((7099, 7201), 'weaver.exceptions.log_unhandled_exceptions', 'log_unhandled_exceptions', ([], {'logger': 'LOGGER', 'message': 'sd.InternalServerErrorResponseSchema.description'}), '(logger=LOGGER, message=sd.\n InternalServerErrorResponseSchema.description)\n', (7123, 7201), False, 'from weaver.exceptions import ProcessNotFound, QuoteNotFound, log_unhandled_exceptions\n'), ((1492, 1513), 'random.uniform', 'random.uniform', (['(0)', '(10)'], {}), '(0, 10)\n', (1506, 1513), False, 'import random\n'), ((2134, 2155), 'weaver.utils.get_settings', 'get_settings', (['request'], {}), '(request)\n', (2146, 2155), False, 'from weaver.utils import get_settings, get_weaver_url\n'), ((2176, 2210), 'weaver.config.get_weaver_configuration', 'get_weaver_configuration', (['settings'], {}), '(settings)\n', (2200, 2210), False, 'from weaver.config import WEAVER_CONFIGURATION_ADES, WEAVER_CONFIGURATION_EMS, get_weaver_configuration\n'), ((5699, 5804), 'pyramid.httpexceptions.HTTPOk', 'HTTPOk', ([], {'json': "{'count': count, 'page': page, 'limit': limit, 'quotes': [quote.id for\n quote in items]}"}), "(json={'count': count, 'page': page, 'limit': limit, 'quotes': [quote\n .id for quote in items]})\n", (5705, 5804), False, 'from pyramid.httpexceptions import HTTPBadRequest, HTTPCreated, HTTPNotFound, HTTPOk\n'), ((7499, 7524), 'weaver.wps_restapi.processes.processes.submit_local_job', 'submit_local_job', (['request'], {}), '(request)\n', (7515, 7524), False, 'from weaver.wps_restapi.processes.processes import submit_local_job\n'), ((7815, 7841), 'pyramid.httpexceptions.HTTPCreated', 'HTTPCreated', ([], {'json': 'job_json'}), '(json=job_json)\n', (7826, 7841), False, 'from pyramid.httpexceptions import HTTPBadRequest, HTTPCreated, HTTPNotFound, HTTPOk\n'), ((3497, 3536), 'weaver.processes.wps_package.get_package_workflow_steps', 'get_package_workflow_steps', (['process_url'], {}), '(process_url)\n', (3523, 3536), False, 'from weaver.processes.wps_package import get_package_workflow_steps, get_process_location\n'), ((1856, 1892), 'weaver.wps_restapi.swagger_definitions.PostProcessQuoteRequestEndpoint', 'sd.PostProcessQuoteRequestEndpoint', ([], {}), '()\n', (1890, 1892), True, 'from weaver.wps_restapi import swagger_definitions as sd\n'), ((4833, 4859), 
'weaver.wps_restapi.swagger_definitions.ProcessQuotesEndpoint', 'sd.ProcessQuotesEndpoint', ([], {}), '()\n', (4857, 4859), True, 'from weaver.wps_restapi import swagger_definitions as sd\n'), ((5015, 5034), 'weaver.wps_restapi.swagger_definitions.QuotesEndpoint', 'sd.QuotesEndpoint', ([], {}), '()\n', (5032, 5034), True, 'from weaver.wps_restapi import swagger_definitions as sd\n'), ((5980, 6005), 'weaver.wps_restapi.swagger_definitions.ProcessQuoteEndpoint', 'sd.ProcessQuoteEndpoint', ([], {}), '()\n', (6003, 6005), True, 'from weaver.wps_restapi import swagger_definitions as sd\n'), ((6154, 6172), 'weaver.wps_restapi.swagger_definitions.QuoteEndpoint', 'sd.QuoteEndpoint', ([], {}), '()\n', (6170, 6172), True, 'from weaver.wps_restapi import swagger_definitions as sd\n'), ((7714, 7763), 'weaver.datatype.Bill', 'Bill', ([], {'user': 'user_id', 'job': 'job_id'}), '(user=user_id, job=job_id, **quote_bill_info)\n', (7718, 7763), False, 'from weaver.datatype import Bill, Quote\n'), ((6851, 6872), 'weaver.wps_restapi.swagger_definitions.PostProcessQuote', 'sd.PostProcessQuote', ([], {}), '()\n', (6870, 6872), True, 'from weaver.wps_restapi import swagger_definitions as sd\n'), ((7040, 7054), 'weaver.wps_restapi.swagger_definitions.PostQuote', 'sd.PostQuote', ([], {}), '()\n', (7052, 7054), True, 'from weaver.wps_restapi import swagger_definitions as sd\n'), ((2467, 2482), 'weaver.database.get_db', 'get_db', (['request'], {}), '(request)\n', (2473, 2482), False, 'from weaver.database import get_db\n'), ((2613, 2680), 'pyramid.httpexceptions.HTTPNotFound', 'HTTPNotFound', (['"""Could not find process with specified \'process_id\'."""'], {}), '("Could not find process with specified \'process_id\'.")\n', (2625, 2680), False, 'from pyramid.httpexceptions import HTTPBadRequest, HTTPCreated, HTTPNotFound, HTTPOk\n'), ((2694, 2709), 'weaver.database.get_db', 'get_db', (['request'], {}), '(request)\n', (2700, 2709), False, 'from weaver.database import get_db\n'), ((2796, 2820), 'weaver.utils.get_weaver_url', 'get_weaver_url', (['settings'], {}), '(settings)\n', (2810, 2820), False, 'from weaver.utils import get_settings, get_weaver_url\n'), ((3657, 3696), 'weaver.processes.wps_package.get_process_location', 'get_process_location', (["step['reference']"], {}), "(step['reference'])\n", (3677, 3696), False, 'from weaver.processes.wps_package import get_package_workflow_steps, get_process_location\n'), ((4163, 4190), 'weaver.datatype.Quote', 'Quote', ([], {}), '(**process_quote_info)\n', (4168, 4190), False, 'from weaver.datatype import Bill, Quote\n'), ((4491, 4530), 'pyramid.httpexceptions.HTTPCreated', 'HTTPCreated', ([], {'json': "{'quote': quote_json}"}), "(json={'quote': quote_json})\n", (4502, 4530), False, 'from pyramid.httpexceptions import HTTPBadRequest, HTTPCreated, HTTPNotFound, HTTPOk\n'), ((5601, 5616), 'weaver.database.get_db', 'get_db', (['request'], {}), '(request)\n', (5607, 5616), False, 'from weaver.database import get_db\n'), ((6447, 6462), 'weaver.database.get_db', 'get_db', (['request'], {}), '(request)\n', (6453, 6462), False, 'from weaver.database import get_db\n'), ((6579, 6642), 'pyramid.httpexceptions.HTTPNotFound', 'HTTPNotFound', (['"""Could not find quote with specified \'quote_id\'."""'], {}), '("Could not find quote with specified \'quote_id\'.")\n', (6591, 6642), False, 'from pyramid.httpexceptions import HTTPBadRequest, HTTPCreated, HTTPNotFound, HTTPOk\n'), ((7649, 7664), 'weaver.database.get_db', 'get_db', (['request'], {}), '(request)\n', (7655, 7664), False, 'from 
weaver.database import get_db\n'), ((1594, 1615), 'random.uniform', 'random.uniform', (['(5)', '(60)'], {}), '(5, 60)\n', (1608, 1615), False, 'import random\n'), ((4001, 4020), 'weaver.datatype.Quote', 'Quote', ([], {}), '(**quote_json)\n', (4006, 4020), False, 'from weaver.datatype import Bill, Quote\n'), ((4375, 4402), 'weaver.datatype.Quote', 'Quote', ([], {}), '(**process_quote_info)\n', (4380, 4402), False, 'from weaver.datatype import Bill, Quote\n')] |
from django.db import models
class JobOffer(models.Model):
company_name = models.CharField(max_length=50)
company_email = models.EmailField()
job_title = models.CharField(max_length=60)
job_description = models.TextField()
salary = models.PositiveIntegerField()
city = models.CharField(max_length=35)
state = models.CharField(max_length=35)
created_at = models.DateField(auto_now_add=True)
available = models.BooleanField(default=True)
def __str__(self):
return self.company_name
| [
"django.db.models.EmailField",
"django.db.models.DateField",
"django.db.models.TextField",
"django.db.models.BooleanField",
"django.db.models.PositiveIntegerField",
"django.db.models.CharField"
] | [((80, 111), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (96, 111), False, 'from django.db import models\n'), ((132, 151), 'django.db.models.EmailField', 'models.EmailField', ([], {}), '()\n', (149, 151), False, 'from django.db import models\n'), ((168, 199), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(60)'}), '(max_length=60)\n', (184, 199), False, 'from django.db import models\n'), ((222, 240), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (238, 240), False, 'from django.db import models\n'), ((254, 283), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (281, 283), False, 'from django.db import models\n'), ((295, 326), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(35)'}), '(max_length=35)\n', (311, 326), False, 'from django.db import models\n'), ((339, 370), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(35)'}), '(max_length=35)\n', (355, 370), False, 'from django.db import models\n'), ((388, 423), 'django.db.models.DateField', 'models.DateField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (404, 423), False, 'from django.db import models\n'), ((440, 473), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(True)'}), '(default=True)\n', (459, 473), False, 'from django.db import models\n')] |
from django.shortcuts import render,redirect
from django.contrib.auth.models import User
from django.contrib import messages
from .forms import PictureUploadForm,CommentForm
from .models import Image,Profile,Likes,Comments
from django.contrib.auth.decorators import login_required
from django.contrib .auth import authenticate,login,logout
from django.contrib.auth.forms import UserCreationForm
from datetime import datetime
def index(request):
images=Image.objects.all()
context={'images':images}
return render(request,'memeapp/index.html',context)
def registerPage(request):
form=UserCreationForm()
if request.method == "POST":
form_results=UserCreationForm(request.POST)
if form_results.is_valid():
user =form_results.save(commit=False)
user.username=user.username.lower()
user.save()
login(request,user)
return redirect('index')
else:
            messages.error(request, 'Error occurred during registration')
context = {'reg_form':form}
return render(request, 'memeapp/auth.html',context)
def loginPage(request):
page='login'
if request.user.is_authenticated:
return redirect('index')
if request.method == "POST":
username=request.POST.get('username').lower()
password=request.POST.get('password')
try:
user=User.objects.get(username=username)
except:
messages.error(request, 'User does not exist')
user=authenticate(request,username=username,password=password)
if user is not None:
login(request,user)
return redirect('index')
else:
messages.error(request, 'Username OR Password does not exist')
context={'page':page}
return render(request, 'memeapp/auth.html', context)
def logoutUser(request):
logout(request)
return redirect('index')
@login_required(login_url='login')
def uploadPicture(request):
form = PictureUploadForm()
if request.method == "POST":
form_results = PictureUploadForm(request.POST,request.FILES)
if form_results.is_valid():
form_results.save()
return redirect('index')
context = {"form": form}
return render(request, 'memeapp/upload_picture.html', context)
@login_required(login_url='login')
def my_images(request):
current_user = request.user
images = Profile.objects.filter(user_id=current_user.id).first()
profiles = Image.objects.filter(user_id=current_user.id)
return render(request, 'memeapp/profile.html', {"profile": images,"images":profiles})
@login_required(login_url='login')
def each_image(request, id):
image = Image.objects.get(id=id)
return render(request, 'memeapp/image_details.html', {'image': image})
@login_required(login_url='login')
def like_picture(request, id):
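    # Toggle the like for this image: delete an existing like or create a new one, keeping likes_number in sync.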
likes = Likes.objects.filter(image_id=id).first()
if Likes.objects.filter(image_id=id, user_id=request.user.id).exists():
likes.delete()
image = Image.objects.get(id=id)
if image.likes_number == 0:
image.likes_number = 0
image.save()
else:
image.likes_number -= 1
image.save()
return redirect('/')
else:
likes = Likes(image_id=id, user_id=request.user.id)
likes.save()
image = Image.objects.get(id=id)
image.likes_number = image.likes_number + 1
image.save()
return redirect('/')
@login_required(login_url='login')
def comment(request,pk):
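    # Persist a new comment on the image and increment its comments_number counter.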
profile = Image.objects.get(pk=pk)
form_results = CommentForm(request.POST,instance=profile)
if request.method == "POST":
if form_results.is_valid():
user = request.user
comment= form_results.cleaned_data['comment']
comment_content = Comments(user=user, image=profile, comment=comment, created_on=datetime.now())
comment_content.save()
profile.comments_number = profile.comments_number + 1
profile.save()
return redirect('index')
else:
print('form is invalid')
else:
form_results = CommentForm
context = {'form':form_results,'image':profile}
return render(request,'memeapp/comments.html',context)
def search(request):
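    # Look up images matching the submitted search term, or show an error message when no term was given.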
title = "Search"
if 'search_query' in request.GET and request.GET["search_query"]:
search_term = request.GET.get("search_query").lower()
searched_results = Image.search_image(search_term)
message = f"{search_term}"
context = {'message': message, 'results': searched_results, 'title': title}
return render(request, 'memeapp/search.html', context)
else:
messages.error(request, "You haven't searched for any term")
message = "You haven't searched for any term"
return render(request, 'memeapp/search.html', {"message": message})
| [
"django.shortcuts.render",
"django.contrib.auth.authenticate",
"django.contrib.messages.error",
"django.contrib.auth.login",
"datetime.datetime.now",
"django.shortcuts.redirect",
"django.contrib.auth.forms.UserCreationForm",
"django.contrib.auth.decorators.login_required",
"django.contrib.auth.models.User.objects.get",
"django.contrib.auth.logout"
] | [((1915, 1948), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (1929, 1948), False, 'from django.contrib.auth.decorators import login_required\n'), ((2314, 2347), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (2328, 2347), False, 'from django.contrib.auth.decorators import login_required\n'), ((2626, 2659), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (2640, 2659), False, 'from django.contrib.auth.decorators import login_required\n'), ((2804, 2837), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (2818, 2837), False, 'from django.contrib.auth.decorators import login_required\n'), ((3499, 3532), 'django.contrib.auth.decorators.login_required', 'login_required', ([], {'login_url': '"""login"""'}), "(login_url='login')\n", (3513, 3532), False, 'from django.contrib.auth.decorators import login_required\n'), ((519, 565), 'django.shortcuts.render', 'render', (['request', '"""memeapp/index.html"""', 'context'], {}), "(request, 'memeapp/index.html', context)\n", (525, 565), False, 'from django.shortcuts import render, redirect\n'), ((602, 620), 'django.contrib.auth.forms.UserCreationForm', 'UserCreationForm', ([], {}), '()\n', (618, 620), False, 'from django.contrib.auth.forms import UserCreationForm\n'), ((1063, 1108), 'django.shortcuts.render', 'render', (['request', '"""memeapp/auth.html"""', 'context'], {}), "(request, 'memeapp/auth.html', context)\n", (1069, 1108), False, 'from django.shortcuts import render, redirect\n'), ((1792, 1837), 'django.shortcuts.render', 'render', (['request', '"""memeapp/auth.html"""', 'context'], {}), "(request, 'memeapp/auth.html', context)\n", (1798, 1837), False, 'from django.shortcuts import render, redirect\n'), ((1868, 1883), 'django.contrib.auth.logout', 'logout', (['request'], {}), '(request)\n', (1874, 1883), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((1895, 1912), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (1903, 1912), False, 'from django.shortcuts import render, redirect\n'), ((2255, 2310), 'django.shortcuts.render', 'render', (['request', '"""memeapp/upload_picture.html"""', 'context'], {}), "(request, 'memeapp/upload_picture.html', context)\n", (2261, 2310), False, 'from django.shortcuts import render, redirect\n'), ((2545, 2630), 'django.shortcuts.render', 'render', (['request', '"""memeapp/profile.html"""', "{'profile': images, 'images': profiles}"], {}), "(request, 'memeapp/profile.html', {'profile': images, 'images': profiles}\n )\n", (2551, 2630), False, 'from django.shortcuts import render, redirect\n'), ((2737, 2800), 'django.shortcuts.render', 'render', (['request', '"""memeapp/image_details.html"""', "{'image': image}"], {}), "(request, 'memeapp/image_details.html', {'image': image})\n", (2743, 2800), False, 'from django.shortcuts import render, redirect\n'), ((4251, 4300), 'django.shortcuts.render', 'render', (['request', '"""memeapp/comments.html"""', 'context'], {}), "(request, 'memeapp/comments.html', context)\n", (4257, 4300), False, 'from django.shortcuts import render, redirect\n'), ((675, 705), 'django.contrib.auth.forms.UserCreationForm', 'UserCreationForm', (['request.POST'], {}), '(request.POST)\n', (691, 705), False, 'from 
django.contrib.auth.forms import UserCreationForm\n'), ((1205, 1222), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (1213, 1222), False, 'from django.shortcuts import render, redirect\n'), ((1510, 1569), 'django.contrib.auth.authenticate', 'authenticate', (['request'], {'username': 'username', 'password': 'password'}), '(request, username=username, password=password)\n', (1522, 1569), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((3249, 3262), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3257, 3262), False, 'from django.shortcuts import render, redirect\n'), ((3483, 3496), 'django.shortcuts.redirect', 'redirect', (['"""/"""'], {}), "('/')\n", (3491, 3496), False, 'from django.shortcuts import render, redirect\n'), ((4668, 4715), 'django.shortcuts.render', 'render', (['request', '"""memeapp/search.html"""', 'context'], {}), "(request, 'memeapp/search.html', context)\n", (4674, 4715), False, 'from django.shortcuts import render, redirect\n'), ((4734, 4794), 'django.contrib.messages.error', 'messages.error', (['request', '"""You haven\'t searched for any term"""'], {}), '(request, "You haven\'t searched for any term")\n', (4748, 4794), False, 'from django.contrib import messages\n'), ((4864, 4924), 'django.shortcuts.render', 'render', (['request', '"""memeapp/search.html"""', "{'message': message}"], {}), "(request, 'memeapp/search.html', {'message': message})\n", (4870, 4924), False, 'from django.shortcuts import render, redirect\n'), ((876, 896), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (881, 896), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((915, 932), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (923, 932), False, 'from django.shortcuts import render, redirect\n'), ((959, 1019), 'django.contrib.messages.error', 'messages.error', (['request', '"""Error occured during registration"""'], {}), "(request, 'Error occured during registration')\n", (973, 1019), False, 'from django.contrib import messages\n'), ((1386, 1421), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'username'}), '(username=username)\n', (1402, 1421), False, 'from django.contrib.auth.models import User\n'), ((1609, 1629), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (1614, 1629), False, 'from django.contrib.auth import authenticate, login, logout\n'), ((1648, 1665), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (1656, 1665), False, 'from django.shortcuts import render, redirect\n'), ((1692, 1754), 'django.contrib.messages.error', 'messages.error', (['request', '"""Username OR Password does not exist"""'], {}), "(request, 'Username OR Password does not exist')\n", (1706, 1754), False, 'from django.contrib import messages\n'), ((2197, 2214), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (2205, 2214), False, 'from django.shortcuts import render, redirect\n'), ((4074, 4091), 'django.shortcuts.redirect', 'redirect', (['"""index"""'], {}), "('index')\n", (4082, 4091), False, 'from django.shortcuts import render, redirect\n'), ((1450, 1496), 'django.contrib.messages.error', 'messages.error', (['request', '"""User does not exist"""'], {}), "(request, 'User does not exist')\n", (1464, 1496), False, 'from django.contrib import messages\n'), ((3911, 3925), 'datetime.datetime.now', 'datetime.now', ([], 
{}), '()\n', (3923, 3925), False, 'from datetime import datetime\n')] |
"""Create diapivot annotation."""
import logging
import pickle
import xml.etree.ElementTree as etree
import sparv.util as util
from sparv import Annotation, Model, ModelOutput, Output, annotator, modelbuilder
log = logging.getLogger(__name__)
PART_DELIM1 = "^1"
# @annotator("Diapivot annotation", language=["swe-1800"])
def diapivot_annotate(out: Output = Output("<token>:hist.diapivot", description="SALDO IDs corresponding to lemgrams"),
lemgram: Annotation = Annotation("<token>:saldo.lemgram"),
model: Model = Model("hist/diapivot.pickle")):
"""Annotate each lemgram with its corresponding saldo_id according to model.
Args:
out (str, optional): Resulting annotation file.
Defaults to Output("<token>:hist.diapivot", description="SALDO IDs corresponding to lemgrams").
lemgram (str, optional): Existing lemgram annotation. Defaults to Annotation("<token>:saldo.lemgram").
model (str, optional): Crosslink model. Defaults to Model("hist/diapivot.pickle").
"""
lexicon = PivotLexicon(model)
lemgram_annotation = list(lemgram.read())
out_annotation = []
for lemgrams in lemgram_annotation:
saldo_ids = []
for lemgram in lemgrams.split(util.DELIM):
s_i = lexicon.get_exactMatch(lemgram)
if s_i:
saldo_ids += [s_i]
out_annotation.append(util.AFFIX + util.DELIM.join(set(saldo_ids)) + util.AFFIX if saldo_ids else util.AFFIX)
out.write(out_annotation)
# @modelbuilder("Diapivot model", language=["swe"])
def build_diapivot(out: ModelOutput = ModelOutput("hist/diapivot.pickle")):
"""Download diapivot XML dictionary and save as a pickle file."""
# Download diapivot.xml
xml_model = Model("hist/diapivot.xml")
xml_model.download("https://svn.spraakdata.gu.se/sb-arkiv/pub/lmf/diapivot/diapivot.xml")
# Create pickle file
xml_lexicon = read_xml(xml_model.path)
log.info("Saving cross lexicon in Pickle format")
picklex = {}
for lem in xml_lexicon:
lemgrams = []
for saldo, match in list(xml_lexicon[lem].items()):
lemgrams.append(PART_DELIM1.join([saldo, match]))
picklex[lem] = sorted(lemgrams)
out.write_pickle(picklex)
# Clean up
xml_model.remove()
################################################################################
# Auxiliaries
################################################################################
class PivotLexicon:
"""A lexicon for old swedish SALDO lookups.
It is initialized from a pickled file.
"""
def __init__(self, crossfile, verbose=True):
"""Read pickled lexicon."""
if verbose:
log.info("Reading cross lexicon: %s", crossfile)
with open(crossfile, "rb") as F:
self.lexicon = pickle.load(F)
if verbose:
log.info("OK, read %d words", len(self.lexicon))
def lookup(self, lem):
"""Lookup a word in the lexicon."""
if lem.lower() == lem:
annotation_tag_pairs = self.lexicon.get(lem, [])
else:
annotation_tag_pairs = self.lexicon.get(lem, []) + self.lexicon.get(lem.lower(), [])
return list(map(_split_val, annotation_tag_pairs))
def get_exactMatch(self, word):
"""Get only exact matches from lexicon."""
s = self.lookup(word)
if s and s[0] == "exactMatch":
return s[1]
def _split_val(key_val):
return key_val.rsplit(PART_DELIM1)[1]
def read_xml(xml):
"""Read the XML version of crosslinked lexicon."""
log.info("Reading XML lexicon")
lexicon = {}
context = etree.iterparse(xml, events=("start", "end")) # "start" needed to save reference to root element
context = iter(context)
_event, root = next(context)
for event, elem in context:
if event == "end":
if elem.tag == 'LexicalEntry':
lemma = elem.find("Lemma")
dalin, saldo = [], ''
for form in lemma.findall("FormRepresentation"):
cat = _findval(form, "category")
lem = _findval(form, "lemgram")
if cat == "modern":
saldo = lem
else:
match = _findval(form, "match")
dalin += [(lem, match)]
[lexicon.update({d: {'saldo': saldo, 'match': m}}) for (d, m) in dalin]
# Done parsing section. Clear tree to save memory
if elem.tag in ['LexicalEntry', 'frame', 'resFrame']:
root.clear()
testwords = ["tigerhjerta..nn.1",
"lågland..nn.1",
"gud..nn.1"]
util.test_lexicon(lexicon, testwords)
log.info("OK, read")
return lexicon
def _findval(elems, key):
for form in elems:
att = form.get("att", "")
if att == key:
return form.get("val")
return ""
| [
"logging.getLogger",
"sparv.util.test_lexicon",
"pickle.load",
"sparv.Model",
"sparv.Annotation",
"xml.etree.ElementTree.iterparse",
"sparv.ModelOutput",
"sparv.Output"
] | [((218, 245), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (235, 245), False, 'import logging\n'), ((363, 450), 'sparv.Output', 'Output', (['"""<token>:hist.diapivot"""'], {'description': '"""SALDO IDs corresponding to lemgrams"""'}), "('<token>:hist.diapivot', description=\n 'SALDO IDs corresponding to lemgrams')\n", (369, 450), False, 'from sparv import Annotation, Model, ModelOutput, Output, annotator, modelbuilder\n'), ((491, 526), 'sparv.Annotation', 'Annotation', (['"""<token>:saldo.lemgram"""'], {}), "('<token>:saldo.lemgram')\n", (501, 526), False, 'from sparv import Annotation, Model, ModelOutput, Output, annotator, modelbuilder\n'), ((565, 594), 'sparv.Model', 'Model', (['"""hist/diapivot.pickle"""'], {}), "('hist/diapivot.pickle')\n", (570, 594), False, 'from sparv import Annotation, Model, ModelOutput, Output, annotator, modelbuilder\n'), ((1629, 1664), 'sparv.ModelOutput', 'ModelOutput', (['"""hist/diapivot.pickle"""'], {}), "('hist/diapivot.pickle')\n", (1640, 1664), False, 'from sparv import Annotation, Model, ModelOutput, Output, annotator, modelbuilder\n'), ((1781, 1807), 'sparv.Model', 'Model', (['"""hist/diapivot.xml"""'], {}), "('hist/diapivot.xml')\n", (1786, 1807), False, 'from sparv import Annotation, Model, ModelOutput, Output, annotator, modelbuilder\n'), ((3683, 3728), 'xml.etree.ElementTree.iterparse', 'etree.iterparse', (['xml'], {'events': "('start', 'end')"}), "(xml, events=('start', 'end'))\n", (3698, 3728), True, 'import xml.etree.ElementTree as etree\n'), ((4757, 4794), 'sparv.util.test_lexicon', 'util.test_lexicon', (['lexicon', 'testwords'], {}), '(lexicon, testwords)\n', (4774, 4794), True, 'import sparv.util as util\n'), ((2859, 2873), 'pickle.load', 'pickle.load', (['F'], {}), '(F)\n', (2870, 2873), False, 'import pickle\n')] |
import os
def get_root_path():
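    # Resolve the directory four levels above this file's directory, then point at its "xbot" subdirectory.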
current_path = os.path.abspath(os.path.dirname(__file__))
root_path = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.dirname(current_path)))
)
return os.path.join(root_path, "xbot")
def get_config_path():
config_path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../config"))
return config_path
def get_data_path():
data_path = os.path.abspath(
os.path.join(os.path.dirname(__file__), "../../../data/")
)
return data_path
| [
"os.path.dirname",
"os.path.join"
] | [((217, 248), 'os.path.join', 'os.path.join', (['root_path', '"""xbot"""'], {}), "(root_path, 'xbot')\n", (229, 248), False, 'import os\n'), ((68, 93), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (83, 93), False, 'import os\n'), ((321, 346), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (336, 346), False, 'import os\n'), ((462, 487), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (477, 487), False, 'import os\n'), ((168, 197), 'os.path.dirname', 'os.path.dirname', (['current_path'], {}), '(current_path)\n', (183, 197), False, 'import os\n')] |
from django.utils.html import format_html
from wagtail.wagtailcore import hooks
@hooks.register('insert_editor_js')
def enable_source():
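    # Wagtail collects this hook's output into the page editor; the snippet registers the hallohtml plugin.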
return format_html(
"""
<script>
registerHalloPlugin('hallohtml');
</script>
"""
)
| [
"wagtail.wagtailcore.hooks.register",
"django.utils.html.format_html"
] | [((83, 117), 'wagtail.wagtailcore.hooks.register', 'hooks.register', (['"""insert_editor_js"""'], {}), "('insert_editor_js')\n", (97, 117), False, 'from wagtail.wagtailcore import hooks\n'), ((150, 269), 'django.utils.html.format_html', 'format_html', (['"""\n <script>\n registerHalloPlugin(\'hallohtml\');\n </script>\n """'], {}), '(\n """\n <script>\n registerHalloPlugin(\'hallohtml\');\n </script>\n """\n )\n', (161, 269), False, 'from django.utils.html import format_html\n')] |
from buildbot.process.remotecommand import RemoteCommand
from buildbot.interfaces import WorkerTooOldError
import stat
class FileExists(object):
"""I check a file existence on the worker. I return True if the file
with the given name exists, False if the file does not exist or that is
a directory.
Use me with doStepIf to make a build step conditional to existence of some
file. For example
doStepIf=FileExists('build/configure')
"""
def __init__(self, filename):
self.filename = filename
def __call__(self, step):
step.checkWorkerHasCommand('stat')
cmd = RemoteCommand('stat', {'file': self.filename})
d = step.runCommand(cmd)
d.addCallback(lambda res: self.commandComplete(cmd))
return d
def commandComplete(self, cmd):
if cmd.didFail():
return False
s = cmd.updates["stat"][-1]
filemode = s[stat.ST_MODE]
if stat.S_ISREG(filemode) or stat.S_ISLNK(filemode):
# True only if this is a file or a link and not any other file
# system object.
return True
else:
return False
class FileDoesNotExist(object):
"""I check a file existence on the worker. I return False if
the file with the given name exists or that is a directory, True if the
file does not exist.
Use me with doStepIf to make a build step conditional to nonexistence
of some file. For example
doStepIf=FileDoesNotExist('build/configure')
"""
def __init__(self, filename):
self.filename = filename
def __call__(self, step):
step.checkWorkerHasCommand('stat')
cmd = RemoteCommand('stat', {'file': self.filename})
d = step.runCommand(cmd)
d.addCallback(lambda res: self.commandComplete(cmd))
return d
def commandComplete(self, cmd):
# False if any filesystem object with the given name exists.
return cmd.didFail()
| [
"stat.S_ISLNK",
"stat.S_ISREG",
"buildbot.process.remotecommand.RemoteCommand"
] | [((623, 669), 'buildbot.process.remotecommand.RemoteCommand', 'RemoteCommand', (['"""stat"""', "{'file': self.filename}"], {}), "('stat', {'file': self.filename})\n", (636, 669), False, 'from buildbot.process.remotecommand import RemoteCommand\n'), ((1688, 1734), 'buildbot.process.remotecommand.RemoteCommand', 'RemoteCommand', (['"""stat"""', "{'file': self.filename}"], {}), "('stat', {'file': self.filename})\n", (1701, 1734), False, 'from buildbot.process.remotecommand import RemoteCommand\n'), ((952, 974), 'stat.S_ISREG', 'stat.S_ISREG', (['filemode'], {}), '(filemode)\n', (964, 974), False, 'import stat\n'), ((978, 1000), 'stat.S_ISLNK', 'stat.S_ISLNK', (['filemode'], {}), '(filemode)\n', (990, 1000), False, 'import stat\n')] |
"""Hyper-distributions."""
from libqif.core.secrets import Secrets
from libqif.core.channel import Channel
from numpy import array, arange, zeros
from numpy import delete as npdelete
class Hyper:
def __init__(self, channel):
"""Hyper-distribution. To create an instance of this class it is
class it is necessary to have an instance of :py:class:`.Channel`
class. Once created an instance of :py:class:`.Hyper`, the constructor
generates the joint, outer and inner distributions.
Attributes
----------
channel : core.Channel
Channel object.
joint : numpy.ndarray
Matrix of joint distribution.
outer : numpy.ndarray
Outer distribution.
inners : numpy.ndarray
Matrix of inner distributions.
num_posteriors : int
            Number of posterior distributions that result from reducing the
            hyper-distribution, i.e., removing columns that contain only
            zeros and merging columns where one is a linear combination
            of another.
Parameters
----------
channel : core.Channel
Channel object.
"""
self._check_types(channel)
self.channel = channel
self.joint = self._generate_joint_distribution()
self.outer, self.inners = self._generate_posteriors()
self._reduce_hyper()
self.num_posteriors = len(self.outer)
def update_prior(self, prior):
"""Update the prior distribution on set of secrets.
The number of secrets must match the current number of rows of the channel.
Parameters
----------
prior : list, numpy.ndarray
Prior distribution on the set of secrets. prior[i] is the
probability of secret named labels[i] beeing the real secret.
"""
self.channel.update_prior(prior)
self.joint = self._generate_joint_distribution()
self.outer, self.inners = self._generate_posteriors()
self._reduce_hyper()
self.num_posteriors = len(self.outer)
def _check_types(self, channel):
if type(channel) != type(Channel(Secrets(['x1','x2'], [1,0]), ['y1'], array([[1],[1]]))):
raise TypeError('The parameter \'channel\' must be a core.channel.Channel object')
def _generate_joint_distribution(self):
joint = []
channel_t = self.channel.matrix.T
for i in arange(self.channel.num_outputs):
joint.append(self.channel.secrets.prior * channel_t[i])
return array(joint).T
def _generate_posteriors(self):
joint_t = self.joint.T.copy()
outer = []
for i in arange(self.channel.num_outputs):
outer.append(joint_t[i].sum())
if outer[i] > 0:
joint_t[i] = joint_t[i]/outer[i]
return array(outer), joint_t.T
def _reduce_hyper(self):
"""Given the hyper-distribution generated by _generate_posteriors
remove columns with zeros and merge columns that are a linear
        combination of others. This algorithm has time complexity O(n*m^2),
        where n is the number of secrets and m is the number of outputs in
        the channel.
"""
epsilon = 10**(-6)
# Delete inners that have 0 probability of occuring
zero_prob = self.outer < epsilon
self.outer = npdelete(self.outer, zero_prob, 0)
self.inners = npdelete(self.inners, zero_prob, 1)
delete_inner = [False] * len(self.outer)
for i in arange(self.inners.shape[1]):
for j in arange(i+1, self.inners.shape[1]):
# Check if inner i is equal to inner j
if (abs(self.inners[:,i] - self.inners[:,j]) < epsilon).sum() == self.channel.secrets.num_secrets:
delete_inner[j] = True # Delete inner j
self.outer[i] += self.outer[j] # Merge inner j into inner i
self.outer = npdelete(self.outer, delete_inner, 0)
self.inners = npdelete(self.inners, delete_inner, 1)
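# --- Editor's note: minimal usage sketch, not part of the original library. ---
# It assumes Secrets and Channel accept the same argument pattern used in
# _check_types above; the prior and channel matrix are made-up illustrative values.
if __name__ == "__main__":
    secrets = Secrets(["x1", "x2"], [0.5, 0.5])
    channel = Channel(secrets, ["y1", "y2"], array([[1, 0], [0, 1]]))
    hyper = Hyper(channel)
    print(hyper.outer)           # outer distribution over the posteriors
    print(hyper.num_posteriors)  # number of posteriors after reduction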
| [
"numpy.delete",
"numpy.array",
"libqif.core.secrets.Secrets",
"numpy.arange"
] | [((2486, 2518), 'numpy.arange', 'arange', (['self.channel.num_outputs'], {}), '(self.channel.num_outputs)\n', (2492, 2518), False, 'from numpy import array, arange, zeros\n'), ((2742, 2774), 'numpy.arange', 'arange', (['self.channel.num_outputs'], {}), '(self.channel.num_outputs)\n', (2748, 2774), False, 'from numpy import array, arange, zeros\n'), ((3447, 3481), 'numpy.delete', 'npdelete', (['self.outer', 'zero_prob', '(0)'], {}), '(self.outer, zero_prob, 0)\n', (3455, 3481), True, 'from numpy import delete as npdelete\n'), ((3504, 3539), 'numpy.delete', 'npdelete', (['self.inners', 'zero_prob', '(1)'], {}), '(self.inners, zero_prob, 1)\n', (3512, 3539), True, 'from numpy import delete as npdelete\n'), ((3607, 3635), 'numpy.arange', 'arange', (['self.inners.shape[1]'], {}), '(self.inners.shape[1])\n', (3613, 3635), False, 'from numpy import array, arange, zeros\n'), ((4025, 4062), 'numpy.delete', 'npdelete', (['self.outer', 'delete_inner', '(0)'], {}), '(self.outer, delete_inner, 0)\n', (4033, 4062), True, 'from numpy import delete as npdelete\n'), ((4085, 4123), 'numpy.delete', 'npdelete', (['self.inners', 'delete_inner', '(1)'], {}), '(self.inners, delete_inner, 1)\n', (4093, 4123), True, 'from numpy import delete as npdelete\n'), ((2604, 2616), 'numpy.array', 'array', (['joint'], {}), '(joint)\n', (2609, 2616), False, 'from numpy import array, arange, zeros\n'), ((2921, 2933), 'numpy.array', 'array', (['outer'], {}), '(outer)\n', (2926, 2933), False, 'from numpy import array, arange, zeros\n'), ((3658, 3693), 'numpy.arange', 'arange', (['(i + 1)', 'self.inners.shape[1]'], {}), '(i + 1, self.inners.shape[1])\n', (3664, 3693), False, 'from numpy import array, arange, zeros\n'), ((2211, 2240), 'libqif.core.secrets.Secrets', 'Secrets', (["['x1', 'x2']", '[1, 0]'], {}), "(['x1', 'x2'], [1, 0])\n", (2218, 2240), False, 'from libqif.core.secrets import Secrets\n'), ((2248, 2265), 'numpy.array', 'array', (['[[1], [1]]'], {}), '([[1], [1]])\n', (2253, 2265), False, 'from numpy import array, arange, zeros\n')] |
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_virtual_wan_link
short_description: Configure redundant internet connections using SD-WAN (formerly virtual WAN link) in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the
      user to set and modify the system feature and virtual_wan_link category.
      Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.8"
author:
- <NAME> (@mamunozgonzalez)
- <NAME> (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
version_added: 2.9
system_virtual_wan_link:
description:
- Configure redundant internet connections using SD-WAN (formerly virtual WAN link).
default: null
type: dict
suboptions:
fail_alert_interfaces:
description:
- Physical interfaces that will be alerted.
type: list
suboptions:
name:
description:
- Physical interface name. Source system.interface.name.
required: true
type: str
fail_detect:
description:
- Enable/disable SD-WAN Internet connection status checking (failure detection).
type: str
choices:
- enable
- disable
health_check:
description:
- SD-WAN status checking or health checking. Identify a server on the Internet and determine how SD-WAN verifies that the FortiGate can
communicate with it.
type: list
suboptions:
addr_mode:
description:
- Address mode (IPv4 or IPv6).
type: str
choices:
- ipv4
- ipv6
failtime:
description:
- Number of failures before server is considered lost (1 - 3600).
type: int
http_agent:
description:
- String in the http-agent field in the HTTP header.
type: str
http_get:
description:
- URL used to communicate with the server if the protocol if the protocol is HTTP.
type: str
http_match:
description:
- Response string expected from the server if the protocol is HTTP.
type: str
interval:
description:
- Status check interval, or the time between attempting to connect to the server (1 - 3600 sec).
type: int
members:
description:
- Member sequence number list.
type: list
suboptions:
seq_num:
description:
- Member sequence number. Source system.virtual-wan-link.members.seq-num.
type: int
name:
description:
- Status check or health check name.
required: true
type: str
packet_size:
description:
- Packet size of a twamp test session,
type: int
password:
description:
- Twamp controller password in authentication mode
type: str
port:
description:
- Port number used to communicate with the server over the selected protocol.
type: int
protocol:
description:
- Protocol used to determine if the FortiGate can communicate with the server.
type: str
choices:
- ping
- tcp-echo
- udp-echo
- http
- twamp
- ping6
recoverytime:
description:
- Number of successful responses received before server is considered recovered (1 - 3600).
type: int
security_mode:
description:
- Twamp controller security mode.
type: str
choices:
- none
- authentication
server:
description:
- IP address or FQDN name of the server.
type: str
sla:
description:
- Service level agreement (SLA).
type: list
suboptions:
id:
description:
- SLA ID.
required: true
type: int
jitter_threshold:
description:
- Jitter for SLA to make decision in milliseconds. (0 - 10000000).
type: int
latency_threshold:
description:
- Latency for SLA to make decision in milliseconds. (0 - 10000000).
type: int
link_cost_factor:
description:
- Criteria on which to base link selection.
type: str
choices:
- latency
- jitter
- packet-loss
packetloss_threshold:
description:
- Packet loss for SLA to make decision in percentage. (0 - 100).
type: int
threshold_alert_jitter:
description:
- Alert threshold for jitter (ms).
type: int
threshold_alert_latency:
description:
- Alert threshold for latency (ms).
type: int
threshold_alert_packetloss:
description:
- Alert threshold for packet loss (percentage).
type: int
threshold_warning_jitter:
description:
- Warning threshold for jitter (ms).
type: int
threshold_warning_latency:
description:
- Warning threshold for latency (ms).
type: int
threshold_warning_packetloss:
description:
- Warning threshold for packet loss (percentage).
type: int
update_cascade_interface:
description:
- Enable/disable update cascade interface.
type: str
choices:
- enable
- disable
update_static_route:
description:
- Enable/disable updating the static route.
type: str
choices:
- enable
- disable
load_balance_mode:
description:
- Algorithm or mode to use for load balancing Internet traffic to SD-WAN members.
type: str
choices:
- source-ip-based
- weight-based
- usage-based
- source-dest-ip-based
- measured-volume-based
members:
description:
- Physical FortiGate interfaces added to the virtual-wan-link.
type: list
suboptions:
comment:
description:
- Comments.
type: str
gateway:
description:
- The default gateway for this interface. Usually the default gateway of the Internet service provider that this interface is
connected to.
type: str
gateway6:
description:
- IPv6 gateway.
type: str
ingress_spillover_threshold:
description:
- Ingress spillover threshold for this interface (0 - 16776000 kbit/s). When this traffic volume threshold is reached, new
sessions spill over to other interfaces in the SD-WAN.
type: int
interface:
description:
- Interface name. Source system.interface.name.
type: str
priority:
description:
- Priority of the interface (0 - 4294967295). Used for SD-WAN rules or priority rules.
type: int
seq_num:
description:
- Sequence number(1-255).
type: int
source:
description:
- Source IP address used in the health-check packet to the server.
type: str
source6:
description:
- Source IPv6 address used in the health-check packet to the server.
type: str
spillover_threshold:
description:
- Egress spillover threshold for this interface (0 - 16776000 kbit/s). When this traffic volume threshold is reached, new sessions
spill over to other interfaces in the SD-WAN.
type: int
status:
description:
- Enable/disable this interface in the SD-WAN.
type: str
choices:
- disable
- enable
volume_ratio:
description:
- Measured volume ratio (this value / sum of all values = percentage of link volume, 0 - 255).
type: int
weight:
description:
- Weight of this interface for weighted load balancing. (0 - 255) More traffic is directed to interfaces with higher weights.
type: int
service:
description:
- Create SD-WAN rules or priority rules (also called services) to control how sessions are distributed to physical interfaces in the
SD-WAN.
type: list
suboptions:
addr_mode:
description:
- Address mode (IPv4 or IPv6).
type: str
choices:
- ipv4
- ipv6
bandwidth_weight:
description:
- Coefficient of reciprocal of available bidirectional bandwidth in the formula of custom-profile-1.
type: int
default:
description:
- Enable/disable use of SD-WAN as default service.
type: str
choices:
- enable
- disable
dscp_forward:
description:
- Enable/disable forward traffic DSCP tag.
type: str
choices:
- enable
- disable
dscp_forward_tag:
description:
- Forward traffic DSCP tag.
type: str
dscp_reverse:
description:
- Enable/disable reverse traffic DSCP tag.
type: str
choices:
- enable
- disable
dscp_reverse_tag:
description:
- Reverse traffic DSCP tag.
type: str
dst:
description:
- Destination address name.
type: list
suboptions:
name:
description:
- Address or address group name. Source firewall.address.name firewall.addrgrp.name.
required: true
type: str
dst_negate:
description:
- Enable/disable negation of destination address match.
type: str
choices:
- enable
- disable
dst6:
description:
- Destination address6 name.
type: list
suboptions:
name:
description:
- Address6 or address6 group name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
type: str
end_port:
description:
- End destination port number.
type: int
gateway:
description:
- Enable/disable SD-WAN service gateway.
type: str
choices:
- enable
- disable
groups:
description:
- User groups.
type: list
suboptions:
name:
description:
- Group name. Source user.group.name.
required: true
type: str
health_check:
description:
- Health check. Source system.virtual-wan-link.health-check.name.
type: str
hold_down_time:
description:
- Waiting period in seconds when switching from the back-up member to the primary member (0 - 10000000).
type: int
id:
description:
- Priority rule ID (1 - 4000).
required: true
type: int
input_device:
description:
- Source interface name.
type: list
suboptions:
name:
description:
- Interface name. Source system.interface.name.
required: true
type: str
internet_service:
description:
- Enable/disable use of Internet service for application-based load balancing.
type: str
choices:
- enable
- disable
internet_service_ctrl:
description:
- Control-based Internet Service ID list.
type: list
suboptions:
id:
description:
- Control-based Internet Service ID.
required: true
type: int
internet_service_ctrl_group:
description:
- Control-based Internet Service group list.
type: list
suboptions:
name:
description:
- Control-based Internet Service group name. Source application.group.name.
required: true
type: str
internet_service_custom:
description:
- Custom Internet service name list.
type: list
suboptions:
name:
description:
- Custom Internet service name. Source firewall.internet-service-custom.name.
required: true
type: str
internet_service_custom_group:
description:
- Custom Internet Service group list.
type: list
suboptions:
name:
description:
- Custom Internet Service group name. Source firewall.internet-service-custom-group.name.
required: true
type: str
internet_service_group:
description:
- Internet Service group list.
type: list
suboptions:
name:
description:
- Internet Service group name. Source firewall.internet-service-group.name.
required: true
type: str
internet_service_id:
description:
- Internet service ID list.
type: list
suboptions:
id:
description:
- Internet service ID. Source firewall.internet-service.id.
required: true
type: int
jitter_weight:
description:
- Coefficient of jitter in the formula of custom-profile-1.
type: int
latency_weight:
description:
- Coefficient of latency in the formula of custom-profile-1.
type: int
link_cost_factor:
description:
- Link cost factor.
type: str
choices:
- latency
- jitter
- packet-loss
- inbandwidth
- outbandwidth
- bibandwidth
- custom-profile-1
link_cost_threshold:
description:
- Percentage threshold change of link cost values that will result in policy route regeneration (0 - 10000000).
type: int
member:
description:
- Member sequence number.
type: int
mode:
description:
- Control how the priority rule sets the priority of interfaces in the SD-WAN.
type: str
choices:
- auto
- manual
- priority
- sla
name:
description:
- Priority rule name.
type: str
packet_loss_weight:
description:
- Coefficient of packet-loss in the formula of custom-profile-1.
type: int
priority_members:
description:
- Member sequence number list.
type: list
suboptions:
seq_num:
description:
- Member sequence number. Source system.virtual-wan-link.members.seq-num.
type: int
protocol:
description:
- Protocol number.
type: int
quality_link:
description:
- Quality grade.
type: int
route_tag:
description:
- IPv4 route map route-tag.
type: int
sla:
description:
- Service level agreement (SLA).
type: list
suboptions:
health_check:
description:
- Virtual WAN Link health-check. Source system.virtual-wan-link.health-check.name.
type: str
id:
description:
- SLA ID.
type: int
src:
description:
- Source address name.
type: list
suboptions:
name:
description:
- Address or address group name. Source firewall.address.name firewall.addrgrp.name.
required: true
type: str
src_negate:
description:
- Enable/disable negation of source address match.
type: str
choices:
- enable
- disable
src6:
description:
- Source address6 name.
type: list
suboptions:
name:
description:
- Address6 or address6 group name. Source firewall.address6.name firewall.addrgrp6.name.
required: true
type: str
start_port:
description:
- Start destination port number.
type: int
status:
description:
- Enable/disable SD-WAN service.
type: str
choices:
- enable
- disable
tos:
description:
- Type of service bit pattern.
type: str
tos_mask:
description:
- Type of service evaluated bits.
type: str
users:
description:
- User name.
type: list
suboptions:
name:
description:
- User name. Source user.local.name.
required: true
type: str
status:
description:
- Enable/disable SD-WAN.
type: str
choices:
- disable
- enable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Configure redundant internet connections using SD-WAN (formerly virtual WAN link).
fortios_system_virtual_wan_link:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
system_virtual_wan_link:
fail_alert_interfaces:
-
name: "default_name_4 (source system.interface.name)"
fail_detect: "enable"
health_check:
-
addr_mode: "ipv4"
failtime: "8"
http_agent: "<your_own_value>"
http_get: "<your_own_value>"
http_match: "<your_own_value>"
interval: "12"
members:
-
seq_num: "14 (source system.virtual-wan-link.members.seq-num)"
name: "default_name_15"
packet_size: "16"
password: "<<PASSWORD>>"
port: "18"
protocol: "ping"
recoverytime: "20"
security_mode: "none"
server: "192.168.100.40"
sla:
-
id: "24"
jitter_threshold: "25"
latency_threshold: "26"
link_cost_factor: "latency"
packetloss_threshold: "28"
threshold_alert_jitter: "29"
threshold_alert_latency: "30"
threshold_alert_packetloss: "31"
threshold_warning_jitter: "32"
threshold_warning_latency: "33"
threshold_warning_packetloss: "34"
update_cascade_interface: "enable"
update_static_route: "enable"
load_balance_mode: "source-ip-based"
members:
-
comment: "Comments."
gateway: "<your_own_value>"
gateway6: "<your_own_value>"
ingress_spillover_threshold: "42"
interface: "<your_own_value> (source system.interface.name)"
priority: "44"
seq_num: "45"
source: "<your_own_value>"
source6: "<your_own_value>"
spillover_threshold: "48"
status: "disable"
volume_ratio: "50"
weight: "51"
service:
-
addr_mode: "ipv4"
bandwidth_weight: "54"
default: "enable"
dscp_forward: "enable"
dscp_forward_tag: "<your_own_value>"
dscp_reverse: "enable"
dscp_reverse_tag: "<your_own_value>"
dst:
-
name: "default_name_61 (source firewall.address.name firewall.addrgrp.name)"
dst_negate: "enable"
dst6:
-
name: "default_name_64 (source firewall.address6.name firewall.addrgrp6.name)"
end_port: "65"
gateway: "enable"
groups:
-
name: "default_name_68 (source user.group.name)"
health_check: "<your_own_value> (source system.virtual-wan-link.health-check.name)"
hold_down_time: "70"
id: "71"
input_device:
-
name: "default_name_73 (source system.interface.name)"
internet_service: "enable"
internet_service_ctrl:
-
id: "76"
internet_service_ctrl_group:
-
name: "default_name_78 (source application.group.name)"
internet_service_custom:
-
name: "default_name_80 (source firewall.internet-service-custom.name)"
internet_service_custom_group:
-
name: "default_name_82 (source firewall.internet-service-custom-group.name)"
internet_service_group:
-
name: "default_name_84 (source firewall.internet-service-group.name)"
internet_service_id:
-
id: "86 (source firewall.internet-service.id)"
jitter_weight: "87"
latency_weight: "88"
link_cost_factor: "latency"
link_cost_threshold: "90"
member: "91"
mode: "auto"
name: "default_name_93"
packet_loss_weight: "94"
priority_members:
-
seq_num: "96 (source system.virtual-wan-link.members.seq-num)"
protocol: "97"
quality_link: "98"
route_tag: "99"
sla:
-
health_check: "<your_own_value> (source system.virtual-wan-link.health-check.name)"
id: "102"
src:
-
name: "default_name_104 (source firewall.address.name firewall.addrgrp.name)"
src_negate: "enable"
src6:
-
name: "default_name_107 (source firewall.address6.name firewall.addrgrp6.name)"
start_port: "108"
status: "enable"
tos: "<your_own_value>"
tos_mask: "<your_own_value>"
users:
-
name: "default_name_113 (source user.local.name)"
status: "disable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_virtual_wan_link_data(json):
option_list = ['fail_alert_interfaces', 'fail_detect', 'health_check',
'load_balance_mode', 'members', 'service',
'status']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
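    # Recursively rewrite dict keys from Python-style underscores to the hyphenated names the FortiOS API expects.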
if isinstance(data, list):
for elem in data:
elem = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
def system_virtual_wan_link(data, fos):
vdom = data['vdom']
system_virtual_wan_link_data = data['system_virtual_wan_link']
filtered_data = underscore_to_hyphen(filter_system_virtual_wan_link_data(system_virtual_wan_link_data))
return fos.set('system',
'virtual-wan-link',
data=filtered_data,
vdom=vdom)
def is_successful_status(status):
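    # A DELETE answered with HTTP 404 also counts as success: the object is already gone.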
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system(data, fos):
if data['system_virtual_wan_link']:
resp = system_virtual_wan_link(data, fos)
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "default": "", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"system_virtual_wan_link": {
"required": False, "type": "dict", "default": None,
"options": {
"fail_alert_interfaces": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"fail_detect": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"health_check": {"required": False, "type": "list",
"options": {
"addr_mode": {"required": False, "type": "str",
"choices": ["ipv4", "ipv6"]},
"failtime": {"required": False, "type": "int"},
"http_agent": {"required": False, "type": "str"},
"http_get": {"required": False, "type": "str"},
"http_match": {"required": False, "type": "str"},
"interval": {"required": False, "type": "int"},
"members": {"required": False, "type": "list",
"options": {
"seq_num": {"required": False, "type": "int"}
}},
"name": {"required": True, "type": "str"},
"packet_size": {"required": False, "type": "int"},
"password": {"required": False, "type": "str"},
"port": {"required": False, "type": "int"},
"protocol": {"required": False, "type": "str",
"choices": ["ping", "tcp-echo", "udp-echo",
"http", "twamp", "ping6"]},
"recoverytime": {"required": False, "type": "int"},
"security_mode": {"required": False, "type": "str",
"choices": ["none", "authentication"]},
"server": {"required": False, "type": "str"},
"sla": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"},
"jitter_threshold": {"required": False, "type": "int"},
"latency_threshold": {"required": False, "type": "int"},
"link_cost_factor": {"required": False, "type": "str",
"choices": ["latency", "jitter", "packet-loss"]},
"packetloss_threshold": {"required": False, "type": "int"}
}},
"threshold_alert_jitter": {"required": False, "type": "int"},
"threshold_alert_latency": {"required": False, "type": "int"},
"threshold_alert_packetloss": {"required": False, "type": "int"},
"threshold_warning_jitter": {"required": False, "type": "int"},
"threshold_warning_latency": {"required": False, "type": "int"},
"threshold_warning_packetloss": {"required": False, "type": "int"},
"update_cascade_interface": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"update_static_route": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}},
"load_balance_mode": {"required": False, "type": "str",
"choices": ["source-ip-based", "weight-based", "usage-based",
"source-dest-ip-based", "measured-volume-based"]},
"members": {"required": False, "type": "list",
"options": {
"comment": {"required": False, "type": "str"},
"gateway": {"required": False, "type": "str"},
"gateway6": {"required": False, "type": "str"},
"ingress_spillover_threshold": {"required": False, "type": "int"},
"interface": {"required": False, "type": "str"},
"priority": {"required": False, "type": "int"},
"seq_num": {"required": False, "type": "int"},
"source": {"required": False, "type": "str"},
"source6": {"required": False, "type": "str"},
"spillover_threshold": {"required": False, "type": "int"},
"status": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"volume_ratio": {"required": False, "type": "int"},
"weight": {"required": False, "type": "int"}
}},
"service": {"required": False, "type": "list",
"options": {
"addr_mode": {"required": False, "type": "str",
"choices": ["ipv4", "ipv6"]},
"bandwidth_weight": {"required": False, "type": "int"},
"default": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dscp_forward": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dscp_forward_tag": {"required": False, "type": "str"},
"dscp_reverse": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dscp_reverse_tag": {"required": False, "type": "str"},
"dst": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"dst_negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"dst6": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"end_port": {"required": False, "type": "int"},
"gateway": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"groups": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"health_check": {"required": False, "type": "str"},
"hold_down_time": {"required": False, "type": "int"},
"id": {"required": True, "type": "int"},
"input_device": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"internet_service": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"internet_service_ctrl": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"internet_service_ctrl_group": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"internet_service_custom": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"internet_service_custom_group": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"internet_service_group": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"internet_service_id": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"}
}},
"jitter_weight": {"required": False, "type": "int"},
"latency_weight": {"required": False, "type": "int"},
"link_cost_factor": {"required": False, "type": "str",
"choices": ["latency", "jitter", "packet-loss",
"inbandwidth", "outbandwidth", "bibandwidth",
"custom-profile-1"]},
"link_cost_threshold": {"required": False, "type": "int"},
"member": {"required": False, "type": "int"},
"mode": {"required": False, "type": "str",
"choices": ["auto", "manual", "priority",
"sla"]},
"name": {"required": False, "type": "str"},
"packet_loss_weight": {"required": False, "type": "int"},
"priority_members": {"required": False, "type": "list",
"options": {
"seq_num": {"required": False, "type": "int"}
}},
"protocol": {"required": False, "type": "int"},
"quality_link": {"required": False, "type": "int"},
"route_tag": {"required": False, "type": "int"},
"sla": {"required": False, "type": "list",
"options": {
"health_check": {"required": False, "type": "str"},
"id": {"required": False, "type": "int"}
}},
"src": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"src_negate": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"src6": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"start_port": {"required": False, "type": "int"},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"tos": {"required": False, "type": "str"},
"tos_mask": {"required": False, "type": "str"},
"users": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}}
}},
"status": {"required": False, "type": "str",
"choices": ["disable", "enable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
# legacy_mode refers to using fortiosapi instead of HTTPAPI
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| [
"ansible.module_utils.basic.AnsibleModule",
"ansible.module_utils.network.fortios.fortios.FortiOSHandler",
"fortiosapi.FortiOSAPI",
"ansible.module_utils.connection.Connection"
] | [((52308, 52370), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'fields', 'supports_check_mode': '(False)'}), '(argument_spec=fields, supports_check_mode=False)\n', (52321, 52370), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((53210, 53222), 'fortiosapi.FortiOSAPI', 'FortiOSAPI', ([], {}), '()\n', (53220, 53222), False, 'from fortiosapi import FortiOSAPI\n'), ((52813, 52844), 'ansible.module_utils.connection.Connection', 'Connection', (['module._socket_path'], {}), '(module._socket_path)\n', (52823, 52844), False, 'from ansible.module_utils.connection import Connection\n'), ((52863, 52889), 'ansible.module_utils.network.fortios.fortios.FortiOSHandler', 'FortiOSHandler', (['connection'], {}), '(connection)\n', (52877, 52889), False, 'from ansible.module_utils.network.fortios.fortios import FortiOSHandler\n')] |
"""
# Step 1 - Create the App
# Step 2 - Create the Game
# Step 3 - Build the Game
# Step 4 - Run the App
"""
from kivy.app import App
from kivy.uix.widget import Widget
from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty
from kivy.vector import Vector
from kivy.clock import Clock
from random import randint
class PongPaddle(Widget):
score = NumericProperty(0)
def bounce_ball(self, ball):
if self.collide_widget(ball):
ball.velocity_x *= -1
print('hello world')
class PongBall(Widget):
velocity_x = NumericProperty(0)
velocity_y = NumericProperty(0)
velocity = ReferenceListProperty(velocity_x, velocity_y)
# Latest Position of the Ball = Current Velocity + Current Position
def move(self):
self.pos = Vector(*self.velocity) + self.pos
# Update - moving the ball by calling the move function and other stuff
# on touch_down() = When our fingers/mouse touches he screen
# on touch_up() - when we lift our finger off the screen after touching it
# on_touch_move() - when we drag our finger on the screen
class PongGame(Widget):
ball = ObjectProperty(None)
player1 = ObjectProperty(None)
player2 = ObjectProperty(None)
def serve_ball(self):
self.ball.velocity = Vector(4, 0).rotate(randint(0, 360))
def update(self, dt):
self.ball.move()
# Bounce off top and bottom Y
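        # (the 50 px margin presumably matches the ball size defined in the pong.kv layout; this is an assumption)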
if (self.ball.y < 0) or (self.ball.y > self.height - 50):
self.ball.velocity_y *= -1.1
# Bounce off left and increase th score
if self.ball.x < 0:
self.ball.velocity_x *= -1
self.player1.score += 1
# Bounce off right and increase the score
if self.ball.x > self.width - 50:
self.ball.velocity_x *= -1
self.player2.score += 1
self.player1.bounce_ball(self.ball)
self.player2.bounce_ball(self.ball)
def on_touch_move(self, touch):
        if touch.x < self.width * 1 / 4:
self.player1.center_y = touch.y
if touch.x > self.width * 3 / 4:
self.player2.center_y = touch.y
class PongApp(App):
def build(self):
game = PongGame()
game.serve_ball()
Clock.schedule_interval(game.update, 1.0 / 60.0)
return game
PongApp().run()
| [
"kivy.properties.NumericProperty",
"kivy.vector.Vector",
"kivy.clock.Clock.schedule_interval",
"kivy.properties.ReferenceListProperty",
"random.randint",
"kivy.properties.ObjectProperty"
] | [((397, 415), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0)'], {}), '(0)\n', (412, 415), False, 'from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty\n'), ((607, 625), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0)'], {}), '(0)\n', (622, 625), False, 'from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty\n'), ((644, 662), 'kivy.properties.NumericProperty', 'NumericProperty', (['(0)'], {}), '(0)\n', (659, 662), False, 'from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty\n'), ((679, 724), 'kivy.properties.ReferenceListProperty', 'ReferenceListProperty', (['velocity_x', 'velocity_y'], {}), '(velocity_x, velocity_y)\n', (700, 724), False, 'from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty\n'), ((1188, 1208), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (1202, 1208), False, 'from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty\n'), ((1224, 1244), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (1238, 1244), False, 'from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty\n'), ((1260, 1280), 'kivy.properties.ObjectProperty', 'ObjectProperty', (['None'], {}), '(None)\n', (1274, 1280), False, 'from kivy.properties import NumericProperty, ReferenceListProperty, ObjectProperty\n'), ((2329, 2377), 'kivy.clock.Clock.schedule_interval', 'Clock.schedule_interval', (['game.update', '(1.0 / 60.0)'], {}), '(game.update, 1.0 / 60.0)\n', (2352, 2377), False, 'from kivy.clock import Clock\n'), ((841, 863), 'kivy.vector.Vector', 'Vector', (['*self.velocity'], {}), '(*self.velocity)\n', (847, 863), False, 'from kivy.vector import Vector\n'), ((1360, 1375), 'random.randint', 'randint', (['(0)', '(360)'], {}), '(0, 360)\n', (1367, 1375), False, 'from random import randint\n'), ((1340, 1352), 'kivy.vector.Vector', 'Vector', (['(4)', '(0)'], {}), '(4, 0)\n', (1346, 1352), False, 'from kivy.vector import Vector\n')] |
"""Data Test Suite."""
from aiogithubapi.objects import repository
import pytest
import os
from homeassistant.core import HomeAssistant
from custom_components.hacs.hacsbase.data import HacsData
from custom_components.hacs.helpers.classes.repository import HacsRepository
from custom_components.hacs.hacsbase.configuration import Configuration
from custom_components.hacs.share import get_hacs
from tests.dummy_repository import dummy_repository_base
@pytest.mark.asyncio
async def test_hacs_data_async_write1(tmpdir):
data = HacsData()
hacs = get_hacs()
repository = dummy_repository_base()
repository.data.installed = True
repository.data.installed_version = "1"
hacs.repositories = [repository]
hacs.hass = HomeAssistant()
hacs.hass.config.config_dir = tmpdir
hacs.configuration = Configuration()
await data.async_write()
@pytest.mark.asyncio
async def test_hacs_data_async_write2(tmpdir):
data = HacsData()
hacs = get_hacs()
hacs.hass = HomeAssistant()
hacs.hass.config.config_dir = tmpdir
hacs.configuration = Configuration()
hacs.system.status.background_task = False
hacs.system.disabled = False
await data.async_write()
@pytest.mark.asyncio
async def test_hacs_data_restore(tmpdir):
data = HacsData()
hacs = get_hacs()
hacs.hass = HomeAssistant()
hacs.hass.config.config_dir = tmpdir
await data.restore()
| [
"custom_components.hacs.hacsbase.data.HacsData",
"homeassistant.core.HomeAssistant",
"tests.dummy_repository.dummy_repository_base",
"custom_components.hacs.hacsbase.configuration.Configuration",
"custom_components.hacs.share.get_hacs"
] | [((532, 542), 'custom_components.hacs.hacsbase.data.HacsData', 'HacsData', ([], {}), '()\n', (540, 542), False, 'from custom_components.hacs.hacsbase.data import HacsData\n'), ((554, 564), 'custom_components.hacs.share.get_hacs', 'get_hacs', ([], {}), '()\n', (562, 564), False, 'from custom_components.hacs.share import get_hacs\n'), ((582, 605), 'tests.dummy_repository.dummy_repository_base', 'dummy_repository_base', ([], {}), '()\n', (603, 605), False, 'from tests.dummy_repository import dummy_repository_base\n'), ((740, 755), 'homeassistant.core.HomeAssistant', 'HomeAssistant', ([], {}), '()\n', (753, 755), False, 'from homeassistant.core import HomeAssistant\n'), ((822, 837), 'custom_components.hacs.hacsbase.configuration.Configuration', 'Configuration', ([], {}), '()\n', (835, 837), False, 'from custom_components.hacs.hacsbase.configuration import Configuration\n'), ((948, 958), 'custom_components.hacs.hacsbase.data.HacsData', 'HacsData', ([], {}), '()\n', (956, 958), False, 'from custom_components.hacs.hacsbase.data import HacsData\n'), ((970, 980), 'custom_components.hacs.share.get_hacs', 'get_hacs', ([], {}), '()\n', (978, 980), False, 'from custom_components.hacs.share import get_hacs\n'), ((997, 1012), 'homeassistant.core.HomeAssistant', 'HomeAssistant', ([], {}), '()\n', (1010, 1012), False, 'from homeassistant.core import HomeAssistant\n'), ((1079, 1094), 'custom_components.hacs.hacsbase.configuration.Configuration', 'Configuration', ([], {}), '()\n', (1092, 1094), False, 'from custom_components.hacs.hacsbase.configuration import Configuration\n'), ((1280, 1290), 'custom_components.hacs.hacsbase.data.HacsData', 'HacsData', ([], {}), '()\n', (1288, 1290), False, 'from custom_components.hacs.hacsbase.data import HacsData\n'), ((1302, 1312), 'custom_components.hacs.share.get_hacs', 'get_hacs', ([], {}), '()\n', (1310, 1312), False, 'from custom_components.hacs.share import get_hacs\n'), ((1329, 1344), 'homeassistant.core.HomeAssistant', 'HomeAssistant', ([], {}), '()\n', (1342, 1344), False, 'from homeassistant.core import HomeAssistant\n')] |
import re
from curtsies.formatstring import fmtstr, FmtStr
from curtsies.termformatconstants import (
FG_COLORS,
BG_COLORS,
colors as CURTSIES_COLORS,
)
from functools import partial
from ..lazyre import LazyReCompile
COLORS = CURTSIES_COLORS + ("default",)
CNAMES = dict(zip("krgybmcwd", COLORS))
# hack for finding the "inverse"
INVERSE_COLORS = {
CURTSIES_COLORS[idx]: CURTSIES_COLORS[
(idx + (len(CURTSIES_COLORS) // 2)) % len(CURTSIES_COLORS)
]
for idx in range(len(CURTSIES_COLORS))
}
INVERSE_COLORS["default"] = INVERSE_COLORS[CURTSIES_COLORS[0]]
def func_for_letter(letter_color_code: str, default: str = "k"):
"""Returns FmtStr constructor for a bpython-style color code"""
if letter_color_code == "d":
letter_color_code = default
elif letter_color_code == "D":
letter_color_code = default.upper()
return partial(
fmtstr,
fg=CNAMES[letter_color_code.lower()],
bold=letter_color_code.isupper(),
)
def color_for_letter(letter_color_code: str, default: str = "k"):
if letter_color_code == "d":
letter_color_code = default
return CNAMES[letter_color_code.lower()]
def parse(s):
"""Returns a FmtStr object from a bpython-formatted colored string"""
rest = s
stuff = []
while True:
if not rest:
break
start, rest = peel_off_string(rest)
stuff.append(start)
return (
sum((fs_from_match(d) for d in stuff[1:]), fs_from_match(stuff[0]))
if len(stuff) > 0
else FmtStr()
)
def fs_from_match(d):
atts = {}
if d["fg"]:
# this isn't according to spec as I understand it
if d["fg"].isupper():
d["bold"] = True
# TODO figure out why boldness isn't based on presence of \x02
color = CNAMES[d["fg"].lower()]
if color != "default":
atts["fg"] = FG_COLORS[color]
if d["bg"]:
if d["bg"] == "I":
# hack for finding the "inverse"
color = INVERSE_COLORS[color]
else:
color = CNAMES[d["bg"].lower()]
if color != "default":
atts["bg"] = BG_COLORS[color]
if d["bold"]:
atts["bold"] = True
return fmtstr(d["string"], **atts)
peel_off_string_re = LazyReCompile(
r"""(?P<colormarker>\x01
(?P<fg>[krgybmcwdKRGYBMCWD]?)
(?P<bg>[krgybmcwdKRGYBMCWDI]?)?)
(?P<bold>\x02?)
\x03
(?P<string>[^\x04]*)
\x04
(?P<rest>.*)
""",
re.VERBOSE | re.DOTALL,
)
def peel_off_string(s):
m = peel_off_string_re.match(s)
assert m, repr(s)
d = m.groupdict()
rest = d["rest"]
del d["rest"]
return d, rest
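# --- Editor's note: small illustrative example, not part of the original module. ---
# The escape layout follows peel_off_string_re above: \x01 plus colour letters,
# an optional \x02 for bold, and the text wrapped between \x03 and \x04.
if __name__ == "__main__":
    sample = "\x01y\x02\x03hello\x04\x01r\x03 world\x04"
    print(repr(parse(sample)))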
| [
"curtsies.formatstring.FmtStr",
"curtsies.formatstring.fmtstr"
] | [((2247, 2274), 'curtsies.formatstring.fmtstr', 'fmtstr', (["d['string']"], {}), "(d['string'], **atts)\n", (2253, 2274), False, 'from curtsies.formatstring import fmtstr, FmtStr\n'), ((1558, 1566), 'curtsies.formatstring.FmtStr', 'FmtStr', ([], {}), '()\n', (1564, 1566), False, 'from curtsies.formatstring import fmtstr, FmtStr\n')] |
"""Library for executing user-defined dance."""
import logging
from typing import Any, Dict, Optional, Callable
import datetime
import ac
import ac.blocks
from ac import ACs, AC
JC = Dict[str, Any]
class DanceStartException(Exception):
pass
class Step:
"""Base class for all specific dance steps."""
def update(self, acn: AC) -> None:
pass
def on_start(self, acn: AC) -> None:
pass
def disp_str(self) -> str:
return ''
class JCNotFoundException(DanceStartException):
pass
class StepJC(Step):
"""
Process jc 'name'. If processed already, skip processing and continue.
"""
name_to_id: Dict[str, int] = {}
def __init__(self, name: str, type_: str = 'VC') -> None:
self.jc: Optional[JC] = None
self.type = type_
self.name = name
def update(self, acn: AC) -> None:
assert isinstance(acn, DanceAC)
if self.jc is None:
jcid = self.get_jc_id(self.name, acn)
self.jc = acn.pt_get(f'/jc/{jcid}?state=true')['jc']
if self.jc['state']['active']:
self.jc = None
acn.step_done()
return
result = acn.pt_put(f'/jc/{self.jc["id"]}/state', {})
if result['success']:
self.jc = None
acn.step_done()
def on_start(self, acn: AC) -> None:
self.get_jc_id(self.name, acn)
def get_jc_id(self, name: str, acn: AC) -> int:
if not StepJC.name_to_id:
jcs = acn.pt_get('/jc')['jc']
StepJC.name_to_id = {
jc['name']: jc['id']
for jc in jcs if jc['type'] == self.type
}
if name not in StepJC.name_to_id.keys():
raise JCNotFoundException(f'Jízdní cesta {self.name} neexistuje!')
return StepJC.name_to_id[name]
def disp_str(self) -> str:
return f'Stavění JC {self.name}'
class StepDelay(Step):
"""Delay any time."""
def __init__(self, delay: datetime.timedelta) -> None:
self.delay = delay
self.finish: Optional[datetime.datetime] = None
def update(self, acn: AC) -> None:
assert isinstance(acn, DanceAC)
if self.finish is None:
self.finish = datetime.datetime.now() + self.delay
if datetime.datetime.now() > self.finish:
self.finish = None
acn.step_done()
def disp_str(self) -> str:
return f'Čekání {self.delay}'
class BlockNotFoundException(DanceStartException):
pass
class StepWaitForBlock(Step):
"""Wait for specific state of any block. See examples below."""
name_to_id: Dict[str, int] = {}
def __init__(self, name: str, checker: Callable[[ac.Block], bool]) -> None:
self.name = name
self.checker = checker
self.block: Optional[ac.Block] = None
def update(self, acn: AC) -> None:
assert isinstance(acn, DanceAC)
if self.block is None:
blockid = self.get_block_id(self.name, acn)
self.block = acn.pt_get(f'/blocks/{blockid}?state=true')['block']
if self.checker(self.block):
self.block = None
acn.step_done()
else:
ac.blocks.register([self.block['id']])
def on_start(self, acn: AC) -> None:
self.get_block_id(self.name, acn)
def on_block_change(self, acn: AC, block: ac.Block) -> None:
assert isinstance(acn, DanceAC)
if self.block is None or block['id'] != self.block['id']:
return
if self.checker(block):
ac.blocks.unregister([self.block['id']])
self.block = None
acn.step_done()
def get_block_id(self, name: str, acn: AC) -> int:
if not StepWaitForBlock.name_to_id:
blocks = acn.pt_get('/blocks')['blocks']
StepWaitForBlock.name_to_id = {
block['name']: block['id'] for block in blocks
}
if name not in StepWaitForBlock.name_to_id.keys():
raise BlockNotFoundException(f"Blok {self.name} neexistuje!")
return StepWaitForBlock.name_to_id[name]
def disp_str(self) -> str:
return f'Čekání na stav bloku {self.name}'
def track_is_occupied(block: ac.Block) -> bool:
return bool(block['blockState']['state'] == 'occupied')
class DanceAC(AC):
"""This AC executes predefined steps."""
def __init__(self, id_: str, password: str,
steps: Dict[int, Step]) -> None:
AC.__init__(self, id_, password)
self.steps = steps
self.stepi = 0
def on_start(self) -> None:
logging.info('Start')
for stepi, step in self.steps.items():
try:
step.on_start(self)
except DanceStartException as e:
self.disp_error(f'Krok {stepi}: '+str(e))
self.done()
return
self.stepi = 1
self.send_step()
self.on_update()
def on_stop(self) -> None:
self.statestr = ''
self.statestr_send()
def on_update(self) -> None:
AC.on_update(self)
if not self.running():
return
if self.stepi in self.steps:
self.steps[self.stepi].update(self)
else:
logging.info('Done')
self.done()
def step_done(self) -> None:
logging.info(f'Step {self.stepi} done, '
f'going to step {self.stepi+1}...')
self.stepi += 1
self.send_step()
self.on_update()
def send_step(self) -> None:
if self.stepi in self.steps.keys():
if self.running():
description = self.steps[self.stepi].disp_str()
self.statestr = f'Aktuální krok: {self.stepi}: {description}'
self.statestr_send()
def on_block_change(self, block: ac.Block) -> None:
if (self.running() and
isinstance(self.steps[self.stepi], StepWaitForBlock)):
self.steps[self.stepi].on_block_change(self, block) # type: ignore
@ac.blocks.on_block_change()
def _on_block_change(block: ac.Block) -> None:
for acn in ACs.values():
if isinstance(acn, DanceAC):
acn.on_block_change(block)
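# Illustrative usage added for clarity (the route/block names below are made
# up; a real script would use names that exist in the controlled layout):
#
#   steps = {
#       1: StepJC('ExampleRoute'),                               # set a route by name
#       2: StepDelay(datetime.timedelta(seconds=10)),            # wait 10 s
#       3: StepWaitForBlock('ExampleTrack', track_is_occupied),  # wait for occupancy
#   }
#   dance = DanceAC('example-id', 'example-password', steps)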
| [
"ac.AC.__init__",
"datetime.datetime.now",
"ac.ACs.values",
"ac.blocks.on_block_change",
"ac.AC.on_update",
"ac.blocks.unregister",
"logging.info",
"ac.blocks.register"
] | [((6074, 6101), 'ac.blocks.on_block_change', 'ac.blocks.on_block_change', ([], {}), '()\n', (6099, 6101), False, 'import ac\n'), ((6164, 6176), 'ac.ACs.values', 'ACs.values', ([], {}), '()\n', (6174, 6176), False, 'from ac import ACs, AC\n'), ((4503, 4535), 'ac.AC.__init__', 'AC.__init__', (['self', 'id_', 'password'], {}), '(self, id_, password)\n', (4514, 4535), False, 'from ac import ACs, AC\n'), ((4627, 4648), 'logging.info', 'logging.info', (['"""Start"""'], {}), "('Start')\n", (4639, 4648), False, 'import logging\n'), ((5108, 5126), 'ac.AC.on_update', 'AC.on_update', (['self'], {}), '(self)\n', (5120, 5126), False, 'from ac import ACs, AC\n'), ((5376, 5450), 'logging.info', 'logging.info', (['f"""Step {self.stepi} done, going to step {self.stepi + 1}..."""'], {}), "(f'Step {self.stepi} done, going to step {self.stepi + 1}...')\n", (5388, 5450), False, 'import logging\n'), ((2289, 2312), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2310, 2312), False, 'import datetime\n'), ((3582, 3622), 'ac.blocks.unregister', 'ac.blocks.unregister', (["[self.block['id']]"], {}), "([self.block['id']])\n", (3602, 3622), False, 'import ac\n'), ((5289, 5309), 'logging.info', 'logging.info', (['"""Done"""'], {}), "('Done')\n", (5301, 5309), False, 'import logging\n'), ((2241, 2264), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (2262, 2264), False, 'import datetime\n'), ((3224, 3262), 'ac.blocks.register', 'ac.blocks.register', (["[self.block['id']]"], {}), "([self.block['id']])\n", (3242, 3262), False, 'import ac\n')] |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Break repeating-key XOR
#
# It is officially on, now.
#
# This challenge isn't conceptually hard, but it involves actual
# error-prone coding. The other challenges in this set are there to bring
# you up to speed. This one is there to qualify you. If you can do this
# one, you're probably just fine up to Set 6.
#
# There's a file here:
#
# http://cryptopals.com/static/challenge-data/6.txt
#
# It's been base64'd after being encrypted with repeating-key XOR.
#
# Decrypt it.
#
# Here's how:
#
# 1. Let KEYSIZE be the guessed length of the key; try values from 2 to
# (say) 40.
# 2. Write a function to compute the edit distance/Hamming distance between
# two strings. The Hamming distance is just the number of differing
# bits. The distance between:
#
# this is a test
#
# and
#
# wokka wokka!!!
#
# is 37. *Make sure your code agrees before you proceed.*
# 3. For each KEYSIZE, take the first KEYSIZE worth of bytes, and the
# second KEYSIZE worth of bytes, and find the edit distance between them.
# Normalize this result by dividing by KEYSIZE.
# 4. The KEYSIZE with the smallest normalized edit distance is probably the
# key. You could proceed perhaps with the smallest 2-3 KEYSIZE values.
# Or take 4 KEYSIZE blocks instead of 2 and average the distances.
# 5. Now that you probably know the KEYSIZE: break the ciphertext into
# blocks of KEYSIZE length.
# 6. Now transpose the blocks: make a block that is the first byte of every
# block, and a block that is the second byte of every block, and so on.
# 7. Solve each block as if it was single-character XOR. You already have
# code to do this.
# 8. For each block, the single-byte XOR key that produces the best looking
# histogram is the repeating-key XOR key byte for that block. Put them
# together and you have the key.
#
# This code is going to turn out to be surprisingly useful later on. Breaking
# repeating-key XOR ("Vigenère") statistically is obviously an academic
# exercise, a "Crypto 101" thing. But more people "know how" to break it than
# can actually break it, and a similar technique breaks something much more
# important.
#
# No, that's not a mistake.
#
# We get more tech support questions for this challenge than any of the
# other ones. We promise, there aren't any blatant errors in this text.
# In particular: the "wokka wokka!!!" edit distance really is 37.
#
import inspect
import os
import sys
from itertools import zip_longest
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(inspect.getfile(lambda: 0)))))
from util.loader import loader
from util.text import englishness, repeating_key_xor, single_byte_xor
# Lookup table for the number of 1 bits in a nibble. (Nybble, quartet, etc.)
NIBBLE_BITS = [0, 1, 1, 2, 1, 2, 2, 3, 1, 2, 2, 3, 2, 3, 3, 4]
def likely_key_sizes(bs, lower=2, upper=40, n=3):
"""Finds a repeating-key-XOR'd ciphertext's most likely key sizes."""
sizes = {}
for size in range(lower, upper + 1):
normalized_distance = 0
for i in range(0, len(bs) - size * 2, size * 2):
bs1, bs2 = bs[i : i + size], bs[i + size : i + size * 2]
normalized_distance += hamming_distance(bs1, bs2) / 2
sizes.update({size: normalized_distance})
return sorted(sizes, key=lambda k: sizes[k])[:n]
def hamming_distance(bs1, bs2):
"""Finds the Hamming distance between two bytestrings."""
distance = 0
for b1, b2 in zip_longest(bs1, bs2, fillvalue=0):
b = b1 ^ b2
distance += NIBBLE_BITS[b >> 4] + NIBBLE_BITS[b & 0xF]
return distance
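# Added sanity check for illustration: the challenge text above states that the
# distance between these two strings is exactly 37, so this assert must pass.
assert hamming_distance(b"this is a test", b"wokka wokka!!!") == 37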
def main():
ctext = loader("6.txt", "base64", split=False)
ptext, key, high_score = b"", b"", 0
for size in likely_key_sizes(ctext):
blocks = [ctext[i : i + size] for i in range(0, len(ctext), size)]
transposed = zip_longest(*blocks, fillvalue=0)
likely_key = b"".join(
single_byte_xor(tblock, key=True) for tblock in transposed
)
candidate = repeating_key_xor(ctext, likely_key)
score = englishness(candidate)
if score > high_score:
ptext, key, high_score = candidate, likely_key, score
print(f"Key: '{key.decode()}'")
print()
print(ptext.decode())
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
pass
# Output:
#
# Key: 'Terminator X: Bring the noise' (29 bytes)
#
# I'm back and I'm ringin' the bell
# A rockin' on the mike while the fly girls yell
# In ecstasy in the back of me
# Well that's my DJ Deshay cuttin' all them Z's
# Hittin' hard and the girlies goin' crazy
# Vanilla's on the mike, man I'm not lazy.
#
# <remainder of output omitted>
#
| [
"util.loader.loader",
"itertools.zip_longest",
"util.text.englishness",
"inspect.getfile",
"util.text.single_byte_xor",
"util.text.repeating_key_xor"
] | [((3574, 3608), 'itertools.zip_longest', 'zip_longest', (['bs1', 'bs2'], {'fillvalue': '(0)'}), '(bs1, bs2, fillvalue=0)\n', (3585, 3608), False, 'from itertools import zip_longest\n'), ((3740, 3778), 'util.loader.loader', 'loader', (['"""6.txt"""', '"""base64"""'], {'split': '(False)'}), "('6.txt', 'base64', split=False)\n", (3746, 3778), False, 'from util.loader import loader\n'), ((3959, 3992), 'itertools.zip_longest', 'zip_longest', (['*blocks'], {'fillvalue': '(0)'}), '(*blocks, fillvalue=0)\n', (3970, 3992), False, 'from itertools import zip_longest\n'), ((4127, 4163), 'util.text.repeating_key_xor', 'repeating_key_xor', (['ctext', 'likely_key'], {}), '(ctext, likely_key)\n', (4144, 4163), False, 'from util.text import englishness, repeating_key_xor, single_byte_xor\n'), ((4180, 4202), 'util.text.englishness', 'englishness', (['candidate'], {}), '(candidate)\n', (4191, 4202), False, 'from util.text import englishness, repeating_key_xor, single_byte_xor\n'), ((2654, 2681), 'inspect.getfile', 'inspect.getfile', (['(lambda : 0)'], {}), '(lambda : 0)\n', (2669, 2681), False, 'import inspect\n'), ((4037, 4070), 'util.text.single_byte_xor', 'single_byte_xor', (['tblock'], {'key': '(True)'}), '(tblock, key=True)\n', (4052, 4070), False, 'from util.text import englishness, repeating_key_xor, single_byte_xor\n')] |
import torch
import torch.nn as nn
import torch.nn.functional as f
from prettytable import PrettyTable
from c2nl.modules.char_embedding import CharEmbedding
from c2nl.modules.embeddings import Embeddings
from c2nl.modules.highway import Highway
from c2nl.encoders.transformer import TransformerEncoder
from c2nl.decoders.transformer import TransformerDecoder
from c2nl.inputters import constants
from c2nl.modules.global_attention import GlobalAttention
from c2nl.modules.copy_generator import CopyGenerator, CopyGeneratorCriterion
from c2nl.utils.misc import sequence_mask
class Embedder(nn.Module):
def __init__(self, args):
super(Embedder, self).__init__()
self.enc_input_size = 0
self.dec_input_size = 0
# at least one of word or char embedding options should be True
assert args.use_src_word or args.use_src_char
assert args.use_tgt_word or args.use_tgt_char
self.use_src_word = args.use_src_word
self.use_tgt_word = args.use_tgt_word
if self.use_src_word:
self.src_word_embeddings = Embeddings(args.emsize,
args.src_vocab_size,
constants.PAD)
self.enc_input_size += args.emsize
if self.use_tgt_word:
self.tgt_word_embeddings = Embeddings(args.emsize,
args.tgt_vocab_size,
constants.PAD)
self.dec_input_size += args.emsize
self.use_src_char = args.use_src_char
self.use_tgt_char = args.use_tgt_char
if self.use_src_char:
assert len(args.filter_size) == len(args.nfilters)
self.src_char_embeddings = CharEmbedding(args.n_characters,
args.char_emsize,
args.filter_size,
args.nfilters)
self.enc_input_size += sum(list(map(int, args.nfilters)))
self.src_highway_net = Highway(self.enc_input_size, num_layers=2)
if self.use_tgt_char:
assert len(args.filter_size) == len(args.nfilters)
self.tgt_char_embeddings = CharEmbedding(args.n_characters,
args.char_emsize,
args.filter_size,
args.nfilters)
self.dec_input_size += sum(list(map(int, args.nfilters)))
self.tgt_highway_net = Highway(self.dec_input_size, num_layers=2)
self.use_type = args.use_code_type
if self.use_type:
self.type_embeddings = nn.Embedding(len(constants.TOKEN_TYPE_MAP),
self.enc_input_size)
self.src_pos_emb = args.src_pos_emb
self.tgt_pos_emb = args.tgt_pos_emb
self.no_relative_pos = all(v == 0 for v in args.max_relative_pos)
if self.src_pos_emb and self.no_relative_pos:
self.src_pos_embeddings = nn.Embedding(args.max_src_len,
self.enc_input_size)
if self.tgt_pos_emb:
self.tgt_pos_embeddings = nn.Embedding(args.max_tgt_len + 2,
self.dec_input_size)
self.dropout = nn.Dropout(args.dropout_emb)
def forward(self,
sequence,
sequence_char,
sequence_type=None,
mode='encoder',
step=None):
if mode == 'encoder':
word_rep = None
if self.use_src_word:
word_rep = self.src_word_embeddings(sequence.unsqueeze(2)) # B x P x d
if self.use_src_char:
char_rep = self.src_char_embeddings(sequence_char) # B x P x f
if word_rep is None:
word_rep = char_rep
else:
word_rep = torch.cat((word_rep, char_rep), 2) # B x P x d+f
word_rep = self.src_highway_net(word_rep) # B x P x d+f
if self.use_type:
type_rep = self.type_embeddings(sequence_type)
word_rep = word_rep + type_rep
if self.src_pos_emb and self.no_relative_pos:
pos_enc = torch.arange(start=0,
end=word_rep.size(1)).type(torch.LongTensor)
pos_enc = pos_enc.expand(*word_rep.size()[:-1])
if word_rep.is_cuda:
pos_enc = pos_enc.cuda()
pos_rep = self.src_pos_embeddings(pos_enc)
word_rep = word_rep + pos_rep
elif mode == 'decoder':
word_rep = None
if self.use_tgt_word:
word_rep = self.tgt_word_embeddings(sequence.unsqueeze(2)) # B x P x d
if self.use_tgt_char:
char_rep = self.tgt_char_embeddings(sequence_char) # B x P x f
if word_rep is None:
word_rep = char_rep
else:
word_rep = torch.cat((word_rep, char_rep), 2) # B x P x d+f
word_rep = self.tgt_highway_net(word_rep) # B x P x d+f
if self.tgt_pos_emb:
if step is None:
pos_enc = torch.arange(start=0,
end=word_rep.size(1)).type(torch.LongTensor)
else:
pos_enc = torch.LongTensor([step]) # used in inference time
pos_enc = pos_enc.expand(*word_rep.size()[:-1])
if word_rep.is_cuda:
pos_enc = pos_enc.cuda()
pos_rep = self.tgt_pos_embeddings(pos_enc)
word_rep = word_rep + pos_rep
else:
raise ValueError('Unknown embedder mode!')
word_rep = self.dropout(word_rep)
return word_rep
class Encoder(nn.Module):
def __init__(self,
args,
input_size):
super(Encoder, self).__init__()
self.transformer = TransformerEncoder(num_layers=args.nlayers,
d_model=input_size,
heads=args.num_head,
d_k=args.d_k,
d_v=args.d_v,
d_ff=args.d_ff,
dropout=args.trans_drop,
max_relative_positions=args.max_relative_pos,
use_neg_dist=args.use_neg_dist)
self.use_all_enc_layers = args.use_all_enc_layers
if self.use_all_enc_layers:
self.layer_weights = nn.Linear(input_size, 1, bias=False)
def count_parameters(self):
return self.transformer.count_parameters()
def forward(self,
input,
input_len):
layer_outputs, _ = self.transformer(input, input_len) # B x seq_len x h
if self.use_all_enc_layers:
output = torch.stack(layer_outputs, dim=2) # B x seq_len x nlayers x h
layer_scores = self.layer_weights(output).squeeze(3)
layer_scores = f.softmax(layer_scores, dim=-1)
memory_bank = torch.matmul(output.transpose(2, 3),
layer_scores.unsqueeze(3)).squeeze(3)
else:
memory_bank = layer_outputs[-1]
return memory_bank, layer_outputs
class Decoder(nn.Module):
def __init__(self, args, input_size):
super(Decoder, self).__init__()
self.input_size = input_size
self.split_decoder = args.split_decoder and args.copy_attn
if self.split_decoder:
# Following (https://arxiv.org/pdf/1808.07913.pdf), we split decoder
self.transformer_c = TransformerDecoder(
num_layers=args.nlayers,
d_model=self.input_size,
heads=args.num_head,
d_k=args.d_k,
d_v=args.d_v,
d_ff=args.d_ff,
coverage_attn=args.coverage_attn,
dropout=args.trans_drop
)
self.transformer_d = TransformerDecoder(
num_layers=args.nlayers,
d_model=self.input_size,
heads=args.num_head,
d_k=args.d_k,
d_v=args.d_v,
d_ff=args.d_ff,
dropout=args.trans_drop
)
# To accomplish eq. 19 - 21 from `https://arxiv.org/pdf/1808.07913.pdf`
self.fusion_sigmoid = nn.Sequential(
nn.Linear(self.input_size * 2, self.input_size),
nn.Sigmoid()
)
self.fusion_gate = nn.Sequential(
nn.Linear(self.input_size * 2, self.input_size),
nn.ReLU()
)
else:
self.transformer = TransformerDecoder(
num_layers=args.nlayers,
d_model=self.input_size,
heads=args.num_head,
d_k=args.d_k,
d_v=args.d_v,
d_ff=args.d_ff,
coverage_attn=args.coverage_attn,
dropout=args.trans_drop
)
if args.reload_decoder_state:
state_dict = torch.load(
args.reload_decoder_state, map_location=lambda storage, loc: storage
)
            self.transformer.load_state_dict(state_dict)
def count_parameters(self):
if self.split_decoder:
return self.transformer_c.count_parameters() + self.transformer_d.count_parameters()
else:
return self.transformer.count_parameters()
def init_decoder(self,
src_lens,
max_src_len):
if self.split_decoder:
state_c = self.transformer_c.init_state(src_lens, max_src_len)
state_d = self.transformer_d.init_state(src_lens, max_src_len)
return state_c, state_d
else:
return self.transformer.init_state(src_lens, max_src_len)
def decode(self,
tgt_words,
tgt_emb,
memory_bank,
state,
step=None,
layer_wise_coverage=None):
if self.split_decoder:
copier_out, attns = self.transformer_c(tgt_words,
tgt_emb,
memory_bank,
state[0],
step=step,
layer_wise_coverage=layer_wise_coverage)
dec_out, _ = self.transformer_d(tgt_words,
tgt_emb,
memory_bank,
state[1],
step=step)
f_t = self.fusion_sigmoid(torch.cat([copier_out, dec_out], dim=-1))
gate_input = torch.cat([copier_out, torch.mul(f_t, dec_out)], dim=-1)
decoder_outputs = self.fusion_gate(gate_input)
else:
decoder_outputs, attns = self.transformer(tgt_words,
tgt_emb,
memory_bank,
state,
step=step,
layer_wise_coverage=layer_wise_coverage)
return decoder_outputs, attns
def forward(self,
memory_bank,
memory_len,
tgt_pad_mask,
tgt_emb):
max_mem_len = memory_bank[0].shape[1] \
if isinstance(memory_bank, list) else memory_bank.shape[1]
state = self.init_decoder(memory_len, max_mem_len)
return self.decode(tgt_pad_mask, tgt_emb, memory_bank, state)
class Transformer(nn.Module):
"""Module that writes an answer for the question given a passage."""
def __init__(self, args, tgt_dict):
""""Constructor of the class."""
super(Transformer, self).__init__()
self.name = 'Transformer'
if len(args.max_relative_pos) != args.nlayers:
assert len(args.max_relative_pos) == 1
args.max_relative_pos = args.max_relative_pos * args.nlayers
self.embedder = Embedder(args)
self.encoder = Encoder(args, self.embedder.enc_input_size)
self.decoder = Decoder(args, self.embedder.dec_input_size)
self.layer_wise_attn = args.layer_wise_attn
self.generator = nn.Linear(self.decoder.input_size, args.tgt_vocab_size)
if args.share_decoder_embeddings:
if self.embedder.use_tgt_word:
assert args.emsize == self.decoder.input_size
self.generator.weight = self.embedder.tgt_word_embeddings.word_lut.weight
self._copy = args.copy_attn
if self._copy:
self.copy_attn = GlobalAttention(dim=self.decoder.input_size,
attn_type=args.attn_type)
self.copy_generator = CopyGenerator(self.decoder.input_size,
tgt_dict,
self.generator)
self.criterion = CopyGeneratorCriterion(vocab_size=len(tgt_dict),
force_copy=args.force_copy)
else:
self.criterion = nn.CrossEntropyLoss(reduction='none')
def _run_forward_ml(self,
code_word_rep,
code_char_rep,
code_type_rep,
code_len,
summ_word_rep,
summ_char_rep,
summ_len,
tgt_seq,
src_map,
alignment,
**kwargs):
batch_size = code_len.size(0)
# embed and encode the source sequence
code_rep = self.embedder(code_word_rep,
code_char_rep,
code_type_rep,
mode='encoder')
memory_bank, layer_wise_outputs = self.encoder(code_rep, code_len) # B x seq_len x h
# embed and encode the target sequence
summ_emb = self.embedder(summ_word_rep,
summ_char_rep,
mode='decoder')
summ_pad_mask = ~sequence_mask(summ_len, max_len=summ_emb.size(1))
enc_outputs = layer_wise_outputs if self.layer_wise_attn else memory_bank
layer_wise_dec_out, attns = self.decoder(enc_outputs,
code_len,
summ_pad_mask,
summ_emb)
decoder_outputs = layer_wise_dec_out[-1]
loss = dict()
target = tgt_seq[:, 1:].contiguous()
if self._copy:
# copy_score: batch_size, tgt_len, src_len
_, copy_score, _ = self.copy_attn(decoder_outputs,
memory_bank,
memory_lengths=code_len,
softmax_weights=False)
# mask copy_attn weights here if needed
if kwargs['code_mask_rep'] is not None:
mask = kwargs['code_mask_rep'].byte().unsqueeze(1) # Make it broadcastable.
copy_score.data.masked_fill_(mask, -float('inf'))
attn_copy = f.softmax(copy_score, dim=-1)
scores = self.copy_generator(decoder_outputs, attn_copy, src_map)
scores = scores[:, :-1, :].contiguous()
ml_loss = self.criterion(scores,
alignment[:, 1:].contiguous(),
target)
else:
scores = self.generator(decoder_outputs) # `batch x tgt_len x vocab_size`
scores = scores[:, :-1, :].contiguous() # `batch x tgt_len - 1 x vocab_size`
ml_loss = self.criterion(scores.view(-1, scores.size(2)),
target.view(-1))
ml_loss = ml_loss.view(*scores.size()[:-1])
ml_loss = ml_loss.mul(target.ne(constants.PAD).float())
ml_loss = ml_loss.sum(1) * kwargs['example_weights']
loss['ml_loss'] = ml_loss.mean()
loss['loss_per_token'] = ml_loss.div((summ_len - 1).float()).mean()
return loss
def forward(self,
code_word_rep,
code_char_rep,
code_type_rep,
code_len,
summ_word_rep,
summ_char_rep,
summ_len,
tgt_seq,
src_map,
alignment,
**kwargs):
"""
Input:
- code_word_rep: ``(batch_size, max_doc_len)``
- code_char_rep: ``(batch_size, max_doc_len, max_word_len)``
- code_len: ``(batch_size)``
- summ_word_rep: ``(batch_size, max_que_len)``
- summ_char_rep: ``(batch_size, max_que_len, max_word_len)``
- summ_len: ``(batch_size)``
- tgt_seq: ``(batch_size, max_len)``
Output:
- ``(batch_size, P_LEN)``, ``(batch_size, P_LEN)``
"""
if self.training:
return self._run_forward_ml(code_word_rep,
code_char_rep,
code_type_rep,
code_len,
summ_word_rep,
summ_char_rep,
summ_len,
tgt_seq,
src_map,
alignment,
**kwargs)
else:
return self.decode(code_word_rep,
code_char_rep,
code_type_rep,
code_len,
src_map,
alignment,
**kwargs)
def __tens2sent(self,
t,
tgt_dict,
src_vocabs):
words = []
for idx, w in enumerate(t):
widx = w[0].item()
if widx < len(tgt_dict):
words.append(tgt_dict[widx])
else:
widx = widx - len(tgt_dict)
words.append(src_vocabs[idx][widx])
return words
def __generate_sequence(self,
params,
choice='greedy',
tgt_words=None):
batch_size = params['memory_bank'].size(0)
use_cuda = params['memory_bank'].is_cuda
if tgt_words is None:
tgt_words = torch.LongTensor([constants.BOS])
if use_cuda:
tgt_words = tgt_words.cuda()
tgt_words = tgt_words.expand(batch_size).unsqueeze(1) # B x 1
tgt_chars = None
if self.embedder.use_tgt_char:
tgt_chars = params['tgt_dict'].word_to_char_ids(constants.BOS_WORD)
tgt_chars = torch.Tensor(tgt_chars.tolist()).unsqueeze(0)
tgt_chars = tgt_chars.repeat(batch_size, 1)
tgt_chars = tgt_chars.to(tgt_words).unsqueeze(1)
dec_preds = []
copy_info = []
attentions = []
dec_log_probs = []
acc_dec_outs = []
max_mem_len = params['memory_bank'][0].shape[1] \
if isinstance(params['memory_bank'], list) else params['memory_bank'].shape[1]
dec_states = self.decoder.init_decoder(params['src_len'], max_mem_len)
attns = {"coverage": None}
enc_outputs = params['layer_wise_outputs'] if self.layer_wise_attn \
else params['memory_bank']
# +1 for <EOS> token
for idx in range(params['max_len'] + 1):
tgt = self.embedder(tgt_words,
tgt_chars,
mode='decoder',
step=idx)
tgt_pad_mask = tgt_words.data.eq(constants.PAD)
layer_wise_dec_out, attns = self.decoder.decode(tgt_pad_mask,
tgt,
enc_outputs,
dec_states,
step=idx,
layer_wise_coverage=attns['coverage'])
decoder_outputs = layer_wise_dec_out[-1]
acc_dec_outs.append(decoder_outputs.squeeze(1))
if self._copy:
_, copy_score, _ = self.copy_attn(decoder_outputs,
params['memory_bank'],
memory_lengths=params['src_len'],
softmax_weights=False)
# mask copy_attn weights here if needed
if params['src_mask'] is not None:
mask = params['src_mask'].byte().unsqueeze(1) # Make it broadcastable.
copy_score.data.masked_fill_(mask, -float('inf'))
attn_copy = f.softmax(copy_score, dim=-1)
prediction = self.copy_generator(decoder_outputs,
attn_copy,
params['src_map'])
prediction = prediction.squeeze(1)
for b in range(prediction.size(0)):
if params['blank'][b]:
blank_b = torch.LongTensor(params['blank'][b])
fill_b = torch.LongTensor(params['fill'][b])
if use_cuda:
blank_b = blank_b.cuda()
fill_b = fill_b.cuda()
prediction[b].index_add_(0, fill_b,
prediction[b].index_select(0, blank_b))
prediction[b].index_fill_(0, blank_b, 1e-10)
else:
prediction = self.generator(decoder_outputs.squeeze(1))
prediction = f.softmax(prediction, dim=1)
if choice == 'greedy':
tgt_prob, tgt = torch.max(prediction, dim=1, keepdim=True)
log_prob = torch.log(tgt_prob + 1e-20)
elif choice == 'sample':
tgt, log_prob = self.reinforce.sample(prediction.unsqueeze(1))
else:
assert False
dec_log_probs.append(log_prob.squeeze(1))
dec_preds.append(tgt.squeeze(1).clone())
if "std" in attns:
# std_attn: batch_size x num_heads x 1 x src_len
std_attn = torch.stack(attns["std"], dim=1)
attentions.append(std_attn.squeeze(2))
if self._copy:
mask = tgt.gt(len(params['tgt_dict']) - 1)
copy_info.append(mask.float().squeeze(1))
words = self.__tens2sent(tgt, params['tgt_dict'], params['source_vocab'])
tgt_chars = None
if self.embedder.use_tgt_char:
tgt_chars = [params['tgt_dict'].word_to_char_ids(w).tolist() for w in words]
tgt_chars = torch.Tensor(tgt_chars).to(tgt).unsqueeze(1)
words = [params['tgt_dict'][w] for w in words]
words = torch.Tensor(words).type_as(tgt)
tgt_words = words.unsqueeze(1)
return dec_preds, attentions, copy_info, dec_log_probs
def decode(self,
code_word_rep,
code_char_rep,
code_type_rep,
code_len,
src_map,
alignment,
**kwargs):
word_rep = self.embedder(code_word_rep,
code_char_rep,
code_type_rep,
mode='encoder')
memory_bank, layer_wise_outputs = self.encoder(word_rep, code_len) # B x seq_len x h
params = dict()
params['memory_bank'] = memory_bank
params['layer_wise_outputs'] = layer_wise_outputs
params['src_len'] = code_len
params['source_vocab'] = kwargs['source_vocab']
params['src_map'] = src_map
params['src_mask'] = kwargs['code_mask_rep']
params['fill'] = kwargs['fill']
params['blank'] = kwargs['blank']
params['src_dict'] = kwargs['src_dict']
params['tgt_dict'] = kwargs['tgt_dict']
params['max_len'] = kwargs['max_len']
params['src_words'] = code_word_rep
dec_preds, attentions, copy_info, _ = self.__generate_sequence(params, choice='greedy')
dec_preds = torch.stack(dec_preds, dim=1)
copy_info = torch.stack(copy_info, dim=1) if copy_info else None
# attentions: batch_size x tgt_len x num_heads x src_len
attentions = torch.stack(attentions, dim=1) if attentions else None
return {
'predictions': dec_preds,
'copy_info': copy_info,
'memory_bank': memory_bank,
'attentions': attentions
}
def count_parameters(self):
return sum(p.numel() for p in self.parameters() if p.requires_grad)
def count_encoder_parameters(self):
return self.encoder.count_parameters()
def count_decoder_parameters(self):
return self.decoder.count_parameters()
def layer_wise_parameters(self):
table = PrettyTable()
table.field_names = ["Layer Name", "Output Shape", "Param #"]
table.align["Layer Name"] = "l"
table.align["Output Shape"] = "r"
table.align["Param #"] = "r"
for name, parameters in self.named_parameters():
if parameters.requires_grad:
table.add_row([name, str(list(parameters.shape)), parameters.numel()])
return table
| [
"c2nl.modules.highway.Highway",
"torch.mul",
"torch.nn.ReLU",
"torch.nn.Dropout",
"torch.nn.CrossEntropyLoss",
"torch.LongTensor",
"c2nl.encoders.transformer.TransformerEncoder",
"torch.max",
"torch.nn.functional.softmax",
"torch.nn.Sigmoid",
"c2nl.modules.char_embedding.CharEmbedding",
"c2nl.modules.global_attention.GlobalAttention",
"torch.nn.Embedding",
"prettytable.PrettyTable",
"c2nl.modules.embeddings.Embeddings",
"c2nl.modules.copy_generator.CopyGenerator",
"torch.Tensor",
"torch.cat",
"torch.log",
"torch.load",
"torch.stack",
"torch.nn.Linear",
"c2nl.decoders.transformer.TransformerDecoder"
] | [((3482, 3510), 'torch.nn.Dropout', 'nn.Dropout', (['args.dropout_emb'], {}), '(args.dropout_emb)\n', (3492, 3510), True, 'import torch.nn as nn\n'), ((6238, 6484), 'c2nl.encoders.transformer.TransformerEncoder', 'TransformerEncoder', ([], {'num_layers': 'args.nlayers', 'd_model': 'input_size', 'heads': 'args.num_head', 'd_k': 'args.d_k', 'd_v': 'args.d_v', 'd_ff': 'args.d_ff', 'dropout': 'args.trans_drop', 'max_relative_positions': 'args.max_relative_pos', 'use_neg_dist': 'args.use_neg_dist'}), '(num_layers=args.nlayers, d_model=input_size, heads=args.\n num_head, d_k=args.d_k, d_v=args.d_v, d_ff=args.d_ff, dropout=args.\n trans_drop, max_relative_positions=args.max_relative_pos, use_neg_dist=\n args.use_neg_dist)\n', (6256, 6484), False, 'from c2nl.encoders.transformer import TransformerEncoder\n'), ((13035, 13090), 'torch.nn.Linear', 'nn.Linear', (['self.decoder.input_size', 'args.tgt_vocab_size'], {}), '(self.decoder.input_size, args.tgt_vocab_size)\n', (13044, 13090), True, 'import torch.nn as nn\n'), ((25627, 25656), 'torch.stack', 'torch.stack', (['dec_preds'], {'dim': '(1)'}), '(dec_preds, dim=1)\n', (25638, 25656), False, 'import torch\n'), ((26389, 26402), 'prettytable.PrettyTable', 'PrettyTable', ([], {}), '()\n', (26400, 26402), False, 'from prettytable import PrettyTable\n'), ((1083, 1142), 'c2nl.modules.embeddings.Embeddings', 'Embeddings', (['args.emsize', 'args.src_vocab_size', 'constants.PAD'], {}), '(args.emsize, args.src_vocab_size, constants.PAD)\n', (1093, 1142), False, 'from c2nl.modules.embeddings import Embeddings\n'), ((1359, 1418), 'c2nl.modules.embeddings.Embeddings', 'Embeddings', (['args.emsize', 'args.tgt_vocab_size', 'constants.PAD'], {}), '(args.emsize, args.tgt_vocab_size, constants.PAD)\n', (1369, 1418), False, 'from c2nl.modules.embeddings import Embeddings\n'), ((1791, 1879), 'c2nl.modules.char_embedding.CharEmbedding', 'CharEmbedding', (['args.n_characters', 'args.char_emsize', 'args.filter_size', 'args.nfilters'], {}), '(args.n_characters, args.char_emsize, args.filter_size, args.\n nfilters)\n', (1804, 1879), False, 'from c2nl.modules.char_embedding import CharEmbedding\n'), ((2139, 2181), 'c2nl.modules.highway.Highway', 'Highway', (['self.enc_input_size'], {'num_layers': '(2)'}), '(self.enc_input_size, num_layers=2)\n', (2146, 2181), False, 'from c2nl.modules.highway import Highway\n'), ((2315, 2403), 'c2nl.modules.char_embedding.CharEmbedding', 'CharEmbedding', (['args.n_characters', 'args.char_emsize', 'args.filter_size', 'args.nfilters'], {}), '(args.n_characters, args.char_emsize, args.filter_size, args.\n nfilters)\n', (2328, 2403), False, 'from c2nl.modules.char_embedding import CharEmbedding\n'), ((2663, 2705), 'c2nl.modules.highway.Highway', 'Highway', (['self.dec_input_size'], {'num_layers': '(2)'}), '(self.dec_input_size, num_layers=2)\n', (2670, 2705), False, 'from c2nl.modules.highway import Highway\n'), ((3180, 3231), 'torch.nn.Embedding', 'nn.Embedding', (['args.max_src_len', 'self.enc_input_size'], {}), '(args.max_src_len, self.enc_input_size)\n', (3192, 3231), True, 'import torch.nn as nn\n'), ((3351, 3406), 'torch.nn.Embedding', 'nn.Embedding', (['(args.max_tgt_len + 2)', 'self.dec_input_size'], {}), '(args.max_tgt_len + 2, self.dec_input_size)\n', (3363, 3406), True, 'import torch.nn as nn\n'), ((6965, 7001), 'torch.nn.Linear', 'nn.Linear', (['input_size', '(1)'], {'bias': '(False)'}), '(input_size, 1, bias=False)\n', (6974, 7001), True, 'import torch.nn as nn\n'), ((7298, 7331), 'torch.stack', 'torch.stack', (['layer_outputs'], 
{'dim': '(2)'}), '(layer_outputs, dim=2)\n', (7309, 7331), False, 'import torch\n'), ((7453, 7484), 'torch.nn.functional.softmax', 'f.softmax', (['layer_scores'], {'dim': '(-1)'}), '(layer_scores, dim=-1)\n', (7462, 7484), True, 'import torch.nn.functional as f\n'), ((8086, 8287), 'c2nl.decoders.transformer.TransformerDecoder', 'TransformerDecoder', ([], {'num_layers': 'args.nlayers', 'd_model': 'self.input_size', 'heads': 'args.num_head', 'd_k': 'args.d_k', 'd_v': 'args.d_v', 'd_ff': 'args.d_ff', 'coverage_attn': 'args.coverage_attn', 'dropout': 'args.trans_drop'}), '(num_layers=args.nlayers, d_model=self.input_size, heads=\n args.num_head, d_k=args.d_k, d_v=args.d_v, d_ff=args.d_ff,\n coverage_attn=args.coverage_attn, dropout=args.trans_drop)\n', (8104, 8287), False, 'from c2nl.decoders.transformer import TransformerDecoder\n'), ((8454, 8622), 'c2nl.decoders.transformer.TransformerDecoder', 'TransformerDecoder', ([], {'num_layers': 'args.nlayers', 'd_model': 'self.input_size', 'heads': 'args.num_head', 'd_k': 'args.d_k', 'd_v': 'args.d_v', 'd_ff': 'args.d_ff', 'dropout': 'args.trans_drop'}), '(num_layers=args.nlayers, d_model=self.input_size, heads=\n args.num_head, d_k=args.d_k, d_v=args.d_v, d_ff=args.d_ff, dropout=args\n .trans_drop)\n', (8472, 8622), False, 'from c2nl.decoders.transformer import TransformerDecoder\n'), ((9177, 9378), 'c2nl.decoders.transformer.TransformerDecoder', 'TransformerDecoder', ([], {'num_layers': 'args.nlayers', 'd_model': 'self.input_size', 'heads': 'args.num_head', 'd_k': 'args.d_k', 'd_v': 'args.d_v', 'd_ff': 'args.d_ff', 'coverage_attn': 'args.coverage_attn', 'dropout': 'args.trans_drop'}), '(num_layers=args.nlayers, d_model=self.input_size, heads=\n args.num_head, d_k=args.d_k, d_v=args.d_v, d_ff=args.d_ff,\n coverage_attn=args.coverage_attn, dropout=args.trans_drop)\n', (9195, 9378), False, 'from c2nl.decoders.transformer import TransformerDecoder\n'), ((9576, 9661), 'torch.load', 'torch.load', (['args.reload_decoder_state'], {'map_location': '(lambda storage, loc: storage)'}), '(args.reload_decoder_state, map_location=lambda storage, loc: storage\n )\n', (9586, 9661), False, 'import torch\n'), ((13417, 13487), 'c2nl.modules.global_attention.GlobalAttention', 'GlobalAttention', ([], {'dim': 'self.decoder.input_size', 'attn_type': 'args.attn_type'}), '(dim=self.decoder.input_size, attn_type=args.attn_type)\n', (13432, 13487), False, 'from c2nl.modules.global_attention import GlobalAttention\n'), ((13567, 13631), 'c2nl.modules.copy_generator.CopyGenerator', 'CopyGenerator', (['self.decoder.input_size', 'tgt_dict', 'self.generator'], {}), '(self.decoder.input_size, tgt_dict, self.generator)\n', (13580, 13631), False, 'from c2nl.modules.copy_generator import CopyGenerator, CopyGeneratorCriterion\n'), ((13929, 13966), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'reduction': '"""none"""'}), "(reduction='none')\n", (13948, 13966), True, 'import torch.nn as nn\n'), ((16110, 16139), 'torch.nn.functional.softmax', 'f.softmax', (['copy_score'], {'dim': '(-1)'}), '(copy_score, dim=-1)\n', (16119, 16139), True, 'import torch.nn.functional as f\n'), ((19557, 19590), 'torch.LongTensor', 'torch.LongTensor', (['[constants.BOS]'], {}), '([constants.BOS])\n', (19573, 19590), False, 'import torch\n'), ((25677, 25706), 'torch.stack', 'torch.stack', (['copy_info'], {'dim': '(1)'}), '(copy_info, dim=1)\n', (25688, 25706), False, 'import torch\n'), ((25816, 25846), 'torch.stack', 'torch.stack', (['attentions'], {'dim': '(1)'}), '(attentions, dim=1)\n', (25827, 
25846), False, 'import torch\n'), ((8889, 8936), 'torch.nn.Linear', 'nn.Linear', (['(self.input_size * 2)', 'self.input_size'], {}), '(self.input_size * 2, self.input_size)\n', (8898, 8936), True, 'import torch.nn as nn\n'), ((8954, 8966), 'torch.nn.Sigmoid', 'nn.Sigmoid', ([], {}), '()\n', (8964, 8966), True, 'import torch.nn as nn\n'), ((9043, 9090), 'torch.nn.Linear', 'nn.Linear', (['(self.input_size * 2)', 'self.input_size'], {}), '(self.input_size * 2, self.input_size)\n', (9052, 9090), True, 'import torch.nn as nn\n'), ((9108, 9117), 'torch.nn.ReLU', 'nn.ReLU', ([], {}), '()\n', (9115, 9117), True, 'import torch.nn as nn\n'), ((11301, 11341), 'torch.cat', 'torch.cat', (['[copier_out, dec_out]'], {'dim': '(-1)'}), '([copier_out, dec_out], dim=-1)\n', (11310, 11341), False, 'import torch\n'), ((22077, 22106), 'torch.nn.functional.softmax', 'f.softmax', (['copy_score'], {'dim': '(-1)'}), '(copy_score, dim=-1)\n', (22086, 22106), True, 'import torch.nn.functional as f\n'), ((23066, 23094), 'torch.nn.functional.softmax', 'f.softmax', (['prediction'], {'dim': '(1)'}), '(prediction, dim=1)\n', (23075, 23094), True, 'import torch.nn.functional as f\n'), ((23163, 23205), 'torch.max', 'torch.max', (['prediction'], {'dim': '(1)', 'keepdim': '(True)'}), '(prediction, dim=1, keepdim=True)\n', (23172, 23205), False, 'import torch\n'), ((23233, 23260), 'torch.log', 'torch.log', (['(tgt_prob + 1e-20)'], {}), '(tgt_prob + 1e-20)\n', (23242, 23260), False, 'import torch\n'), ((23655, 23687), 'torch.stack', 'torch.stack', (["attns['std']"], {'dim': '(1)'}), "(attns['std'], dim=1)\n", (23666, 23687), False, 'import torch\n'), ((4112, 4146), 'torch.cat', 'torch.cat', (['(word_rep, char_rep)', '(2)'], {}), '((word_rep, char_rep), 2)\n', (4121, 4146), False, 'import torch\n'), ((11391, 11414), 'torch.mul', 'torch.mul', (['f_t', 'dec_out'], {}), '(f_t, dec_out)\n', (11400, 11414), False, 'import torch\n'), ((24292, 24311), 'torch.Tensor', 'torch.Tensor', (['words'], {}), '(words)\n', (24304, 24311), False, 'import torch\n'), ((5245, 5279), 'torch.cat', 'torch.cat', (['(word_rep, char_rep)', '(2)'], {}), '((word_rep, char_rep), 2)\n', (5254, 5279), False, 'import torch\n'), ((5626, 5650), 'torch.LongTensor', 'torch.LongTensor', (['[step]'], {}), '([step])\n', (5642, 5650), False, 'import torch\n'), ((22481, 22517), 'torch.LongTensor', 'torch.LongTensor', (["params['blank'][b]"], {}), "(params['blank'][b])\n", (22497, 22517), False, 'import torch\n'), ((22551, 22586), 'torch.LongTensor', 'torch.LongTensor', (["params['fill'][b]"], {}), "(params['fill'][b])\n", (22567, 22586), False, 'import torch\n'), ((24167, 24190), 'torch.Tensor', 'torch.Tensor', (['tgt_chars'], {}), '(tgt_chars)\n', (24179, 24190), False, 'import torch\n')] |
import logging
from cattle import Config
from cattle.utils import reply, popen
from .compute import DockerCompute
from cattle.agent.handler import BaseHandler
from cattle.progress import Progress
from cattle.type_manager import get_type, MARSHALLER
from . import docker_client
import subprocess
import os
import time
log = logging.getLogger('docker')
def ns_exec(pid, event):
script = os.path.join(Config.home(), 'events', event.name.split(';')[0])
cmd = ['nsenter',
'-F',
'-m',
'-u',
'-i',
'-n',
'-p',
'-t', str(pid),
'--', script]
marshaller = get_type(MARSHALLER)
input = marshaller.to_string(event)
data = None
env = {}
with open('/proc/{}/environ'.format(pid)) as f:
for line in f.read().split('\0'):
if not len(line):
continue
kv = line.split('=', 1)
if kv[0].startswith('CATTLE'):
env[kv[0]] = kv[1]
env['PATH'] = os.environ['PATH']
env['CATTLE_CONFIG_URL'] = Config.config_url()
for i in range(3):
p = popen(cmd,
env=env,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT)
output, error = p.communicate(input=input)
retcode = p.poll()
if retcode == 0:
break
exists_cmd = cmd[:-1] + ['/usr/bin/test', '-e', script]
if popen(exists_cmd, env=env).wait() == 0:
break
# Sleep and try again if missing
time.sleep(1)
if retcode:
return retcode, output, None
text = []
for line in output.splitlines():
if line.startswith('{'):
data = marshaller.from_string(line)
break
text.append(line)
return retcode, ''.join(text), data
class DockerDelegate(BaseHandler):
def __init__(self):
self.compute = DockerCompute()
pass
def events(self):
return ['delegate.request']
def delegate_request(self, req=None, event=None, instanceData=None, **kw):
if instanceData.kind != 'container' or \
instanceData.get('token') is None:
return
container = self.compute.get_container(docker_client(), instanceData,
by_agent=True)
if container is None:
            log.info('Can not call [%s], container does not exist',
instanceData.uuid)
return
inspect = self.compute.inspect(container)
try:
running = inspect['State']['Running']
if not running:
log.error('Can not call [%s], container is not running',
instanceData.uuid)
return
except KeyError:
log.error('Can not call [%s], container is not running',
instanceData.uuid)
return
progress = Progress(event, parent=req)
exit_code, output, data = ns_exec(inspect['State']['Pid'], event)
if exit_code == 0:
return reply(event, data, parent=req)
else:
progress.update('Update failed', data={
'exitCode': exit_code,
'output': output
})
| [
"logging.getLogger",
"cattle.Config.config_url",
"cattle.type_manager.get_type",
"time.sleep",
"cattle.utils.popen",
"cattle.progress.Progress",
"cattle.Config.home",
"cattle.utils.reply"
] | [((326, 353), 'logging.getLogger', 'logging.getLogger', (['"""docker"""'], {}), "('docker')\n", (343, 353), False, 'import logging\n'), ((652, 672), 'cattle.type_manager.get_type', 'get_type', (['MARSHALLER'], {}), '(MARSHALLER)\n', (660, 672), False, 'from cattle.type_manager import get_type, MARSHALLER\n'), ((1075, 1094), 'cattle.Config.config_url', 'Config.config_url', ([], {}), '()\n', (1092, 1094), False, 'from cattle import Config\n'), ((407, 420), 'cattle.Config.home', 'Config.home', ([], {}), '()\n', (418, 420), False, 'from cattle import Config\n'), ((1131, 1228), 'cattle.utils.popen', 'popen', (['cmd'], {'env': 'env', 'stdin': 'subprocess.PIPE', 'stdout': 'subprocess.PIPE', 'stderr': 'subprocess.STDOUT'}), '(cmd, env=env, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=\n subprocess.STDOUT)\n', (1136, 1228), False, 'from cattle.utils import reply, popen\n'), ((1602, 1615), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (1612, 1615), False, 'import time\n'), ((3011, 3038), 'cattle.progress.Progress', 'Progress', (['event'], {'parent': 'req'}), '(event, parent=req)\n', (3019, 3038), False, 'from cattle.progress import Progress\n'), ((3160, 3190), 'cattle.utils.reply', 'reply', (['event', 'data'], {'parent': 'req'}), '(event, data, parent=req)\n', (3165, 3190), False, 'from cattle.utils import reply, popen\n'), ((1494, 1520), 'cattle.utils.popen', 'popen', (['exists_cmd'], {'env': 'env'}), '(exists_cmd, env=env)\n', (1499, 1520), False, 'from cattle.utils import reply, popen\n')] |
import sys
import pytz
#import xml.utils.iso8601
import time
import numpy
from datetime import date, datetime, timedelta
from matplotlib import pyplot as plt
from exchange import cb_exchange as cb_exchange
from exchange import CoinbaseExchangeAuth
from abc import ABCMeta, abstractmethod
class strategy(object):
"""`strategy` defines an abstract base strategy class. Minimum required to create a strategy is a file with a class which inherits from strategy containing a backtest_strategy function. As a bonus, strategy includes utility functions like calculate_historic_data.
"""
__metaclass__ = ABCMeta
def __init__(name="default name", interval=5):
"""Constructor for an abstract strategy. You can modify it as needed.
\n`interval`: a.k.a timeslice the amount of time in seconds for each 'tick' default is 5
\n`name`: a string name for the strategy
"""
self.name = name
self.interval = interval
self.times_recalculated = 0
@abstractmethod
def trade(self, timeslice):
"""Perform operations on a timeslice.
\n`timeslice`: a section of trade data with time length equal to the strategy's interval, formatted as follows:
\n[time, low, high, open, close, volume]
"""
return
def backtest_strategy(self, historic_data):
"""Returns performance of a strategy vs market performance.
"""
# Reverse the data since Coinbase returns it in reverse chronological
# now historic_data strarts with the oldest entry
historic_data = list(reversed(historic_data))
earliest_time = float(historic_data[0][0])
latest_time = float(historic_data[-1][0])
start_price = float(historic_data[0][4])
end_price = float(historic_data[-1][4])
market_performance = ((end_price-start_price)/start_price)*100
print("Running simulation on historic data. This may take some time....")
for timeslice in historic_data:
# Display what percent through the data we are
idx = historic_data.index(timeslice)
percent = (float(idx)/float(len(historic_data)))*100 + 1
sys.stdout.write("\r%d%%" % percent)
sys.stdout.flush()
self.trade(timeslice)
# Calculate performance
end_amt_no_trades = (float(self.exchange.start_usd)/float(end_price)) + float(self.exchange.start_btc)
end_amt = (float(self.exchange.usd_bal)/float(end_price)) + float(self.exchange.btc_bal)
start_amt = (float(self.exchange.start_usd)/float(start_price)) + float(self.exchange.start_btc)
strategy_performance = ((end_amt-start_amt)/start_amt)*100
print("\n")
print("Times recalculated: "+str(self.times_recalculated))
print("Times bought: "+str(self.exchange.times_bought))
print("Times sold: "+str(self.exchange.times_sold))
print("The Market's performance: "+str(market_performance)+" %")
print("Strategy's performance: "+str(strategy_performance)+" %")
print("Account's ending value if no trades were made: "+str(end_amt_no_trades)+" BTC")
print("Account's ending value with this strategy: "+str(end_amt)+" BTC")
strategy_performance_vs_market = strategy_performance - market_performance
if strategy_performance > market_performance:
print("Congratulations! This strategy has beat the market by: "+str(strategy_performance_vs_market)+" %")
elif strategy_performance < market_performance:
print("This strategy has preformed: "+str(strategy_performance_vs_market)+" % worse than market.")
return strategy_performance_vs_market, strategy_performance, market_performance
@staticmethod
def calculate_historic_data(data, pivot):
"""Returns average price weighted according to volume, and the number of bitcoins traded
above and below a price point, called a pivot.\n
\npivot: the price used for returning volume above and below
\ndata: a list of lists formated as follows [time, low, high, open, close]
\n[
\n\t["2014-11-07 22:19:28.578544+00", "0.32", "4.2", "0.35", "4.2", "12.3"],
\n\t\t...
\n]
"""
price_list = []
weights = []
        if data is None:
            # Nothing to analyze; bail out early instead of crashing below.
            return None, 0, 0
min_price = float(data[0][1])
max_price = float(data[0][2])
discrete_prices = {}
for timeslice in data:
timeslice = [float(i) for i in timeslice]
if max_price < timeslice[2]:
                max_price = timeslice[2]
if min_price > timeslice[1]:
min_price = timeslice[1]
closing_price = timeslice[4]
volume = timeslice[5]
if closing_price not in discrete_prices.keys():
discrete_prices[str(closing_price)] = volume
else:
                discrete_prices[str(closing_price)] += volume
idx = data.index(timeslice)
price_list.append(closing_price)
weights.append(volume)
fltprices = [float(i) for i in discrete_prices.keys()]
fltvolumes = [float(i) for i in discrete_prices.values()]
np_discrete_prices = numpy.array(fltprices)
np_volume_per_price = numpy.array(fltvolumes)
weighted_avg = numpy.average(np_discrete_prices, weights=np_volume_per_price)
num_above = 0
num_below = 0
num_at = 0
for key in discrete_prices.keys():
value = discrete_prices[key]
if float(key) > pivot:
num_above+=value
elif float(key) < pivot:
num_below+=value
elif float(key) == pivot:
num_at+=value
total_volume = 0.0
for volume in fltvolumes:
total_volume+=volume
fltprops = []
for volume in fltvolumes:
fltprops.append((volume/total_volume))
#print("num_below: "+str(num_below))
#print("num_above: "+str(num_above))
#print("num_at: "+str(num_at))
#print("weighted_average: "+str(weighted_avg))
#plt.title("Price distribution")
#plt.xlabel("Price (USD)")
#plt.ylabel("Volume")
#plt.bar(fltprices, fltprops)
#plt.show()
return weighted_avg, num_above, num_below
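# Illustrative usage of calculate_historic_data (added for clarity; the rows
# below are made-up numeric values in [time, low, high, open, close, volume]
# order, since the implementation converts every field with float()):
#
#   rows = [[1415398768, 0.32, 4.2, 0.35, 4.2, 12.3],
#           [1415398773, 0.30, 4.1, 4.2, 0.31, 8.0]]
#   weighted_avg, vol_above, vol_below = strategy.calculate_historic_data(rows, pivot=1.0)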
| [
"sys.stdout.write",
"numpy.array",
"sys.stdout.flush",
"numpy.average"
] | [((5274, 5296), 'numpy.array', 'numpy.array', (['fltprices'], {}), '(fltprices)\n', (5285, 5296), False, 'import numpy\n'), ((5327, 5350), 'numpy.array', 'numpy.array', (['fltvolumes'], {}), '(fltvolumes)\n', (5338, 5350), False, 'import numpy\n'), ((5374, 5436), 'numpy.average', 'numpy.average', (['np_discrete_prices'], {'weights': 'np_volume_per_price'}), '(np_discrete_prices, weights=np_volume_per_price)\n', (5387, 5436), False, 'import numpy\n'), ((2197, 2233), 'sys.stdout.write', 'sys.stdout.write', (["('\\r%d%%' % percent)"], {}), "('\\r%d%%' % percent)\n", (2213, 2233), False, 'import sys\n'), ((2246, 2264), 'sys.stdout.flush', 'sys.stdout.flush', ([], {}), '()\n', (2262, 2264), False, 'import sys\n')] |
"""
Learns a matrix of Z-Space directions using a pre-trained BigGAN Generator.
Modified from train.py in the PyTorch BigGAN repo.
"""
import os
from tqdm import tqdm
import torch
import torch.nn as nn
import torch.optim
import utils
import train_fns
from sync_batchnorm import patch_replication_callback
from torch.utils.tensorboard import SummaryWriter
from orojar import orojar
from direction_utils import visualize_directions, load_G, get_direction_padding_fn, init_wandb, download_G
from layers import fast_gram_schmidt, norm
class DataParallelLoss(nn.Module):
"""
This is simply a wrapper class to compute the OroJaR efficiently over several GPUs
"""
def __init__(self, G):
super(DataParallelLoss, self).__init__()
self.G = G
def forward(self, z, y, w, Q):
penalty = orojar(self.G, z, c=y, w=w, G_z=None, Q=Q, multiple_layers=False)
return penalty
# The main training file. Config is a dictionary specifying the configuration
# of this training run.
def run(config):
if config['wandb_entity'] is not None:
init_wandb(config, config['experiment_name'], config['wandb_entity'], 'imagenet')
if config["G_path"] is None: # Download a pre-trained G if necessary
download_G()
config["G_path"] = 'checkpoints/138k'
G, state_dict, device, experiment_name = load_G(config)
# If parallel, parallelize the GD module
if config['parallel']:
G = nn.DataParallel(DataParallelLoss(G))
if config['cross_replica']:
patch_replication_callback(G)
num_gpus = torch.cuda.device_count()
print(f'Using {num_gpus} GPUs')
# If search_space != 'all', then we need to pad the z components that we are leaving alone:
pad = get_direction_padding_fn(config)
direction_size = config['dim_z'] if config['search_space'] == 'all' else config['ndirs']
# A is our (ndirs, |z|) matrix of directions, where ndirs indicates the number of directions we want to learn
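    # (Illustrative shapes, not from the original file: if dim_z were 120 and
    #  ndirs were 30, then with search_space == 'all' A would be 30 x 120 and
    #  Q = A after Gram-Schmidt + normalization; with a restricted search space
    #  A would be 30 x 30 and pad() embeds each direction into the full z.)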
if config['load_A'] == 'coords':
print('Initializing with standard basis directions')
A = torch.nn.Parameter(torch.eye(config['ndirs'], direction_size, device=device), requires_grad=True)
elif config['load_A'] == 'random':
print('Initializing with random directions')
A = torch.nn.Parameter(torch.empty(config['ndirs'], direction_size, device=device), requires_grad=True)
torch.nn.init.kaiming_normal_(A)
else:
raise NotImplementedError
# We only learn A; G is left frozen during training:
optim = torch.optim.Adam(params=[A], lr=config['A_lr'])
# Allow for different batch sizes in G
G_batch_size = max(config['G_batch_size'], config['batch_size'])
z_, y_ = utils.prepare_z_y(G_batch_size, G.module.G.dim_z, config['n_classes'],
device=device, fp16=config['G_fp16'])
# Prepare a fixed z & y to see individual sample evolution throghout training
fixed_z, fixed_y = utils.prepare_z_y(G_batch_size, G.module.G.dim_z,
config['n_classes'], device=device,
fp16=config['G_fp16'])
fixed_z.sample_()
fixed_y.sample_()
interp_z, interp_y = utils.prepare_z_y(config["n_samples"], G.module.G.dim_z,
config['n_classes'], device=device,
fp16=config['G_fp16'])
interp_z.sample_()
interp_y.sample_()
if config['fix_class'] is not None:
y_ = y_.new_full(y_.size(), config['fix_class'])
fixed_y = fixed_y.new_full(fixed_y.size(), config['fix_class'])
interp_y = interp_y.new_full(interp_y.size(), config['fix_class'])
print('Beginning training at epoch %d...' % state_dict['epoch'])
# Train for specified number of epochs, although we mostly track G iterations.
iters_per_epoch = 1000
dummy_loader = [None] * iters_per_epoch # We don't need any real data
path_size = config['path_size']
# Simply stores a |z|-dimensional one-hot vector indicating each direction we are learning:
direction_indicators = torch.eye(config['ndirs']).to(device)
G.eval()
G.module.optim = optim
writer = SummaryWriter('%s/%s' % (config['logs_root'], experiment_name))
sample_sheet = train_fns.save_and_sample(G.module.G, None, G.module.G, z_, y_, fixed_z, fixed_y,
state_dict, config, experiment_name)
writer.add_image('samples', sample_sheet, 0)
interp_y_ = G.module.G.shared(interp_y)
norm_fn = norm
    # Make directions orthonormal via Gram-Schmidt followed by a normalization:
Q = pad(norm_fn(fast_gram_schmidt(A))) if not config["no_ortho"] else pad(A)
if config["vis_during_training"]:
print("Generating initial visualizations...")
interp_vis = visualize_directions(G.module.G, interp_z, interp_y_, path_sizes=path_size, Q=Q,
high_quality=False, npv=1)
for w_ix in range(config['ndirs']):
writer.add_video('G_ema/w%03d' % w_ix, interp_vis[w_ix], 0, fps=24)
for epoch in range(state_dict['epoch'], config['num_epochs']):
if config['pbar'] == 'mine':
pbar = utils.progress(dummy_loader, displaytype='s1k' if config['use_multiepoch_sampler'] else 'eta')
else:
pbar = tqdm(dummy_loader)
for i, _ in enumerate(pbar):
state_dict['itr'] += 1
z_.sample_()
if config['fix_class'] is None:
y_.sample_()
y = G.module.G.shared(y_)
# OroJaR taken w.r.t. w_sampled, NOT z:
w = torch.zeros((G_batch_size, config['ndirs'])) # equal to the one-hot w
penalty = G(z_, y, w=w, Q=Q.repeat(num_gpus, 1)).mean()
optim.zero_grad()
penalty.backward()
optim.step()
# re-orthogonalize A for visualizations and the next training iteration:
Q = pad(norm_fn(fast_gram_schmidt(A))) if not config["no_ortho"] else pad(A)
# Log metrics to TensorBoard/WandB:
cur_training_iter = epoch * iters_per_epoch + i
writer.add_scalar(f'Metrics/orojar', penalty.item(), cur_training_iter)
writer.add_scalar('Metrics/direction_norm', A.pow(2).mean().pow(0.5).item(), cur_training_iter)
# Save directions and log visuals:
if not (state_dict['itr'] % config['save_every']):
torch.save(A.cpu().detach(), '%s/%s/A_%06d.pt' %
(config['weights_root'], experiment_name, cur_training_iter))
if config["vis_during_training"]:
interp_vis = visualize_directions(G.module.G, interp_z, interp_y_, path_sizes=path_size, Q=Q,
high_quality=False, npv=1)
for w_ix in range(config['ndirs']):
writer.add_video('G_ema/w%03d' % w_ix, interp_vis[w_ix], cur_training_iter, fps=24)
state_dict['epoch'] += 1
def main():
# parse command line and run
parser = utils.prepare_parser()
config = vars(parser.parse_args())
print(config)
run(config)
if __name__ == '__main__':
main()
| [
"torch.cuda.device_count",
"direction_utils.init_wandb",
"layers.fast_gram_schmidt",
"torch.utils.tensorboard.SummaryWriter",
"torch.eye",
"torch.nn.init.kaiming_normal_",
"direction_utils.download_G",
"train_fns.save_and_sample",
"sync_batchnorm.patch_replication_callback",
"utils.progress",
"utils.prepare_parser",
"torch.empty",
"torch.optim.Adam",
"direction_utils.get_direction_padding_fn",
"orojar.orojar",
"utils.prepare_z_y",
"tqdm.tqdm",
"direction_utils.visualize_directions",
"torch.zeros",
"direction_utils.load_G"
] | [((1355, 1369), 'direction_utils.load_G', 'load_G', (['config'], {}), '(config)\n', (1361, 1369), False, 'from direction_utils import visualize_directions, load_G, get_direction_padding_fn, init_wandb, download_G\n'), ((1585, 1610), 'torch.cuda.device_count', 'torch.cuda.device_count', ([], {}), '()\n', (1608, 1610), False, 'import torch\n'), ((1754, 1786), 'direction_utils.get_direction_padding_fn', 'get_direction_padding_fn', (['config'], {}), '(config)\n', (1778, 1786), False, 'from direction_utils import visualize_directions, load_G, get_direction_padding_fn, init_wandb, download_G\n'), ((2560, 2607), 'torch.optim.Adam', 'torch.optim.Adam', ([], {'params': '[A]', 'lr': "config['A_lr']"}), "(params=[A], lr=config['A_lr'])\n", (2576, 2607), False, 'import torch\n'), ((2734, 2846), 'utils.prepare_z_y', 'utils.prepare_z_y', (['G_batch_size', 'G.module.G.dim_z', "config['n_classes']"], {'device': 'device', 'fp16': "config['G_fp16']"}), "(G_batch_size, G.module.G.dim_z, config['n_classes'],\n device=device, fp16=config['G_fp16'])\n", (2751, 2846), False, 'import utils\n'), ((2980, 3092), 'utils.prepare_z_y', 'utils.prepare_z_y', (['G_batch_size', 'G.module.G.dim_z', "config['n_classes']"], {'device': 'device', 'fp16': "config['G_fp16']"}), "(G_batch_size, G.module.G.dim_z, config['n_classes'],\n device=device, fp16=config['G_fp16'])\n", (2997, 3092), False, 'import utils\n'), ((3241, 3361), 'utils.prepare_z_y', 'utils.prepare_z_y', (["config['n_samples']", 'G.module.G.dim_z', "config['n_classes']"], {'device': 'device', 'fp16': "config['G_fp16']"}), "(config['n_samples'], G.module.G.dim_z, config['n_classes'\n ], device=device, fp16=config['G_fp16'])\n", (3258, 3361), False, 'import utils\n'), ((4243, 4306), 'torch.utils.tensorboard.SummaryWriter', 'SummaryWriter', (["('%s/%s' % (config['logs_root'], experiment_name))"], {}), "('%s/%s' % (config['logs_root'], experiment_name))\n", (4256, 4306), False, 'from torch.utils.tensorboard import SummaryWriter\n'), ((4326, 4448), 'train_fns.save_and_sample', 'train_fns.save_and_sample', (['G.module.G', 'None', 'G.module.G', 'z_', 'y_', 'fixed_z', 'fixed_y', 'state_dict', 'config', 'experiment_name'], {}), '(G.module.G, None, G.module.G, z_, y_, fixed_z,\n fixed_y, state_dict, config, experiment_name)\n', (4351, 4448), False, 'import train_fns\n'), ((7167, 7189), 'utils.prepare_parser', 'utils.prepare_parser', ([], {}), '()\n', (7187, 7189), False, 'import utils\n'), ((825, 890), 'orojar.orojar', 'orojar', (['self.G', 'z'], {'c': 'y', 'w': 'w', 'G_z': 'None', 'Q': 'Q', 'multiple_layers': '(False)'}), '(self.G, z, c=y, w=w, G_z=None, Q=Q, multiple_layers=False)\n', (831, 890), False, 'from orojar import orojar\n'), ((1087, 1172), 'direction_utils.init_wandb', 'init_wandb', (['config', "config['experiment_name']", "config['wandb_entity']", '"""imagenet"""'], {}), "(config, config['experiment_name'], config['wandb_entity'],\n 'imagenet')\n", (1097, 1172), False, 'from direction_utils import visualize_directions, load_G, get_direction_padding_fn, init_wandb, download_G\n'), ((1251, 1263), 'direction_utils.download_G', 'download_G', ([], {}), '()\n', (1261, 1263), False, 'from direction_utils import visualize_directions, load_G, get_direction_padding_fn, init_wandb, download_G\n'), ((4877, 4988), 'direction_utils.visualize_directions', 'visualize_directions', (['G.module.G', 'interp_z', 'interp_y_'], {'path_sizes': 'path_size', 'Q': 'Q', 'high_quality': '(False)', 'npv': '(1)'}), '(G.module.G, interp_z, interp_y_, path_sizes=path_size,\n Q=Q, 
high_quality=False, npv=1)\n', (4897, 4988), False, 'from direction_utils import visualize_directions, load_G, get_direction_padding_fn, init_wandb, download_G\n'), ((1539, 1568), 'sync_batchnorm.patch_replication_callback', 'patch_replication_callback', (['G'], {}), '(G)\n', (1565, 1568), False, 'from sync_batchnorm import patch_replication_callback\n'), ((2123, 2180), 'torch.eye', 'torch.eye', (["config['ndirs']", 'direction_size'], {'device': 'device'}), "(config['ndirs'], direction_size, device=device)\n", (2132, 2180), False, 'import torch\n'), ((2414, 2446), 'torch.nn.init.kaiming_normal_', 'torch.nn.init.kaiming_normal_', (['A'], {}), '(A)\n', (2443, 2446), False, 'import torch\n'), ((4149, 4175), 'torch.eye', 'torch.eye', (["config['ndirs']"], {}), "(config['ndirs'])\n", (4158, 4175), False, 'import torch\n'), ((5275, 5374), 'utils.progress', 'utils.progress', (['dummy_loader'], {'displaytype': "('s1k' if config['use_multiepoch_sampler'] else 'eta')"}), "(dummy_loader, displaytype='s1k' if config[\n 'use_multiepoch_sampler'] else 'eta')\n", (5289, 5374), False, 'import utils\n'), ((5403, 5421), 'tqdm.tqdm', 'tqdm', (['dummy_loader'], {}), '(dummy_loader)\n', (5407, 5421), False, 'from tqdm import tqdm\n'), ((5699, 5743), 'torch.zeros', 'torch.zeros', (["(G_batch_size, config['ndirs'])"], {}), "((G_batch_size, config['ndirs']))\n", (5710, 5743), False, 'import torch\n'), ((2325, 2384), 'torch.empty', 'torch.empty', (["config['ndirs']", 'direction_size'], {'device': 'device'}), "(config['ndirs'], direction_size, device=device)\n", (2336, 2384), False, 'import torch\n'), ((4702, 4722), 'layers.fast_gram_schmidt', 'fast_gram_schmidt', (['A'], {}), '(A)\n', (4719, 4722), False, 'from layers import fast_gram_schmidt, norm\n'), ((6747, 6858), 'direction_utils.visualize_directions', 'visualize_directions', (['G.module.G', 'interp_z', 'interp_y_'], {'path_sizes': 'path_size', 'Q': 'Q', 'high_quality': '(False)', 'npv': '(1)'}), '(G.module.G, interp_z, interp_y_, path_sizes=path_size,\n Q=Q, high_quality=False, npv=1)\n', (6767, 6858), False, 'from direction_utils import visualize_directions, load_G, get_direction_padding_fn, init_wandb, download_G\n'), ((6037, 6057), 'layers.fast_gram_schmidt', 'fast_gram_schmidt', (['A'], {}), '(A)\n', (6054, 6057), False, 'from layers import fast_gram_schmidt, norm\n')] |
import xlsxwriter
import pandas as pd
import numpy as np
import mysql.connector
australia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Australia')
brunei=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Brunei')
cambodia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Cambodia')
china=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='China')
indonesia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Indonesia')
japan=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Japan')
lao=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Lao')
malaysia=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Malaysia')
myanmar=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Myanmar')
new_zeland=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='New Zeland')
philipines=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Philipines')
singapore=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Singapore')
thailand=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Thailand')
vietnam=pd.read_excel(r'\Users\jesica\Desktop\RCEP_economic_analysis.xlsx', sheet_name='Vietnam')
'''
mydb = mysql.connector.connect(
host = "localhost",
user = "root",
passwd = "",
database = ""
)
mycursor = mydb.cursor()
sqlformula1 = "INSERT INTO australia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(australia['Year'], australia['RGDP'], australia['NGDP'], australia['GDP_pc'], australia['Inflation'], australia['Unemployment_Rate'], australia['Net_LB'], australia['Account_Balance']):
mycursor.execute(sqlformula1, [a, b, c, d, e, f, g, h])
sqlformula2 = "INSERT INTO brunei VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(brunei['Year'], brunei['RGDP'], brunei['NGDP'], brunei['GDP_pc'], brunei['Inflation'], brunei['Unemployment_Rate'], brunei['Net_LB'], brunei['Account_Balance']):
mycursor.execute(sqlformula2, [a, b, c, d, e, f, g, h])
sqlformula3 = "INSERT INTO cambodia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(cambodia['Year'], cambodia['RGDP'], cambodia['NGDP'], cambodia['GDP_pc'], cambodia['Inflation'], cambodia['Unemployment_Rate'], cambodia['Net_LB'], cambodia['Account_Balance']):
mycursor.execute(sqlformula3, [a, b, c, d, e, f, g, h])
sqlformula4 = "INSERT INTO china VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(china['Year'], china['RGDP'], china['NGDP'], china['GDP_pc'], china['Inflation'], china['Unemployment_Rate'], china['Net_LB'], china['Account_Balance']):
mycursor.execute(sqlformula4, [a, b, c, d, e, f, g, h])
sqlformula5 = "INSERT INTO indonesia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(indonesia['Year'], indonesia['RGDP'], indonesia['NGDP'], indonesia['GDP_pc'], indonesia['Inflation'], indonesia['Unemployment_Rate'], indonesia['Net_LB'], indonesia['Account_Balance']):
mycursor.execute(sqlformula5, [a, b, c, d, e, f, g, h])
sqlformula6 = "INSERT INTO japan VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(japan['Year'], japan['RGDP'], japan['NGDP'], japan['GDP_pc'], japan['Inflation'], japan['Unemployment_Rate'], japan['Net_LB'], japan['Account_Balance']):
mycursor.execute(sqlformula6, [a, b, c, d, e, f, g, h])
sqlformula7 = "INSERT INTO lao VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(lao['Year'], lao['RGDP'], lao['NGDP'], lao['GDP_pc'], lao['Inflation'], lao['Unemployment_Rate'], lao['Net_LB'], lao['Account_Balance']):
mycursor.execute(sqlformula7, [a, b, c, d, e, f, g, h])
sqlformula8 = "INSERT INTO malaysia VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(malaysia['Year'], malaysia['RGDP'], malaysia['NGDP'], malaysia['GDP_pc'], malaysia['Inflation'], malaysia['Unemployment_Rate'], malaysia['Net_LB'], malaysia['Account_Balance']):
mycursor.execute(sqlformula8, [a, b, c, d, e, f, g, h])
sqlformula9 = "INSERT INTO myanmar VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(myanmar['Year'], myanmar['RGDP'], myanmar['NGDP'], myanmar['GDP_pc'], myanmar['Inflation'], myanmar['Unemployment_Rate'], myanmar['Net_LB'], myanmar['Account_Balance']):
mycursor.execute(sqlformula9, [a, b, c, d, e, f, g, h])
sqlformula10 = "INSERT INTO new_zeland VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(new_zeland['Year'], new_zeland['RGDP'], new_zeland['NGDP'], new_zeland['GDP_pc'], new_zeland['Inflation'], new_zeland['Unemployment_Rate'], new_zeland['Net_LB'], new_zeland['Account_Balance']):
mycursor.execute(sqlformula10, [a, b, c, d, e, f, g, h])
sqlformula11 = "INSERT INTO philipines VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(philipines['Year'], philipines['RGDP'], philipines['NGDP'], philipines['GDP_pc'], philipines['Inflation'], philipines['Unemployment_Rate'], philipines['Net_LB'], philipines['Account_Balance']):
mycursor.execute(sqlformula11, [a, b, c, d, e, f, g, h])
sqlformula12 = "INSERT INTO singapore VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(singapore['Year'], singapore['RGDP'], singapore['NGDP'], singapore['GDP_pc'], singapore['Inflation'], singapore['Unemployment_Rate'], singapore['Net_LB'], singapore['Account_Balance']):
mycursor.execute(sqlformula12, [a, b, c, d, e, f, g, h])
sqlformula13 = "INSERT INTO thailand VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(thailand['Year'], thailand['RGDP'], thailand['NGDP'], thailand['GDP_pc'], thailand['Inflation'], thailand['Unemployment_Rate'], thailand['Net_LB'], thailand['Account_Balance']):
mycursor.execute(sqlformula13, [a, b, c, d, e, f, g, h])
sqlformula14 = "INSERT INTO vietnam VALUES(%s, %s, %s, %s, %s, %s, %s, %s)"
for a, b, c, d, e, f, g, h in zip(vietnam['Year'], vietnam['RGDP'], vietnam['NGDP'], vietnam['GDP_pc'], vietnam['Inflation'], vietnam['Unemployment_Rate'], vietnam['Net_LB'], vietnam['Account_Balance']):
mycursor.execute(sqlformula14, [a, b, c, d, e, f, g, h])
'''
#mydb.commit()
| [
"pandas.read_excel"
] | [((91, 189), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Australia"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Australia')\n", (104, 189), True, 'import pandas as pd\n'), ((190, 285), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Brunei"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Brunei')\n", (203, 285), True, 'import pandas as pd\n'), ((288, 385), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Cambodia"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Cambodia')\n", (301, 385), True, 'import pandas as pd\n'), ((385, 479), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""China"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='China')\n", (398, 479), True, 'import pandas as pd\n'), ((483, 581), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Indonesia"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Indonesia')\n", (496, 581), True, 'import pandas as pd\n'), ((581, 675), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Japan"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Japan')\n", (594, 675), True, 'import pandas as pd\n'), ((673, 765), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Lao"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Lao')\n", (686, 765), True, 'import pandas as pd\n'), ((768, 865), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Malaysia"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Malaysia')\n", (781, 865), True, 'import pandas as pd\n'), ((867, 963), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Myanmar"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Myanmar')\n", (880, 963), True, 'import pandas as pd\n'), ((968, 1067), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""New Zeland"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='New Zeland')\n", (981, 1067), True, 'import pandas as pd\n'), ((1072, 1171), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Philipines"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Philipines')\n", (1085, 1171), True, 'import pandas as pd\n'), ((1175, 1273), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Singapore"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Singapore')\n", 
(1188, 1273), True, 'import pandas as pd\n'), ((1276, 1373), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Thailand"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Thailand')\n", (1289, 1373), True, 'import pandas as pd\n'), ((1375, 1471), 'pandas.read_excel', 'pd.read_excel', (['"""\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx"""'], {'sheet_name': '"""Vietnam"""'}), "('\\\\Users\\\\jesica\\\\Desktop\\\\RCEP_economic_analysis.xlsx',\n sheet_name='Vietnam')\n", (1388, 1471), True, 'import pandas as pd\n')] |
#!/usr/bin/env python3
import os
import re
import glob
import boto3
import requests
import subprocess
from time import sleep
AWS_REGION = os.environ['AWS_REGION']
DEPLOY_UUID = os.environ['DEPLOY_UUID']
SERVICE_NAME = os.environ['SERVICE_NAME']
MOUNT_POINT = "/var/lib/" + SERVICE_NAME
NIC_IP = os.environ['NIC_IP']
TAG_KEY = os.environ['TAG_KEY']
def retrieve_eni_ids():
ec2 = boto3.resource('ec2')
enis = []
for eni in ec2.network_interfaces.all():
for tag in eni.tag_set:
if tag['Key'] == TAG_KEY:
if tag['Value'] == DEPLOY_UUID:
enis.append(eni.network_interface_id)
return enis if len(enis) > 0 else None
def attach_eni_ids():
c_ec2 = boto3.client('ec2')
r_ec2 = boto3.resource('ec2')
i_id = requests.get('http://169.254.169.254/latest/meta-data/instance-id').text
eni_ids = retrieve_eni_ids()
device_number = len(r_ec2.Instance(i_id).network_interfaces) + 1
for eni_id in eni_ids:
c_ec2.attach_network_interface(DeviceIndex=device_number, InstanceId=i_id, NetworkInterfaceId=eni_id)
def retrieve_ebs_ids():
ec2 = boto3.resource('ec2')
ebss = []
for volume in ec2.volumes.all():
if volume.tags is not None:
for tag in volume.tags:
if tag['Key'] == TAG_KEY:
if tag['Value'] == DEPLOY_UUID:
ebss.append(volume.volume_id)
return ebss if len(ebss) > 0 else None
def attach_ebs():
ec2 = boto3.client('ec2')
i_id = requests.get('http://169.254.169.254/latest/meta-data/instance-id').text
volume_ids = retrieve_ebs_ids()
i = 0
device_char = 'z'
while i < len(volume_ids):
v_id = volume_ids[i]
device = '/dev/xvd{0}'.format(device_char)
ec2.attach_volume(Device=device, InstanceId=i_id, VolumeId=v_id)
# Wait to ensure device is attached
sleep(3)
if not check_ebs(v_id):
prepare_ebs(v_id)
add_fstab_entries(v_id, MOUNT_POINT)
p_mount = subprocess.Popen('mount -a'.split(), stdout=subprocess.PIPE)
stdout, stderr = p_mount.communicate()
p_chown = subprocess.Popen('chown -R {0}:{0} {1}'.format(SERVICE_NAME, MOUNT_POINT).split(),
stdout=subprocess.PIPE)
stdout, stderr = p_chown.communicate()
device_char = chr(ord(device_char) - 1)
i += 1
def check_ebs(volume_id):
v_id = volume_id.replace('vol-', 'vol')
pattern = '/dev/disk/by-id/*{0}-part1'.format(v_id)
return bool(len(glob.glob(pattern)))
def prepare_ebs(volume_id):
v_id = volume_id.replace('vol-', 'vol')
pattern = '/dev/disk/by-id/*{0}'.format(v_id)
device = glob.glob(pattern)[0]
gdisk_commands = '\n'.join([
'n',
'1',
'34',
'',
'',
'w',
'Y',
''
])
p_echo = subprocess.Popen('echo -ne {0}'.format(gdisk_commands).split(' '), stdout=subprocess.PIPE)
p_fdisk = subprocess.Popen('gdisk {0}'.format(device).split(), stdin=p_echo.stdout, stdout=subprocess.PIPE)
stdout, stderr = p_fdisk.communicate()
print(stdout)
print(stderr)
# p_partprobe = subprocess.Popen('partprobe'.split(' '), stdout=subprocess.PIPE)
# stdout, stderr = p_partprobe.communicate()
# print(stdout)
# print(stderr)
sleep(3)
pattern = '/dev/disk/by-id/*{0}-part1'.format(v_id)
partition = glob.glob(pattern)[0]
p_xfs = subprocess.Popen('mkfs.xfs {0}'.format(partition).split(), stdout=subprocess.PIPE)
stdout, stderr = p_xfs.communicate()
print(stdout)
print(stderr)
def add_fstab_entries(volume_id, mount_point):
v_id = volume_id.replace('vol-', 'vol')
pattern = '/dev/disk/by-id/*{0}-part1'.format(v_id)
partition = glob.glob(pattern)[0]
fstab_entries = [
mount_point,
'xfs',
'defaults',
'0',
'0'
]
with open('/etc/fstab', 'a') as f:
f.write('{0} {1}\n'.format(partition, ' '.join(fstab_entries)))
f.flush()
f.close()
def wait_device_ready(timeout=3):
c = 0
while c < timeout:
sleep(1)
p_ip = subprocess.Popen('ip a'.split(), stdout=subprocess.PIPE)
stdout, stderr = p_ip.communicate()
for line in stdout.decode().splitlines():
res = re.match('.*inet {0}/[0-9]{{2}}'.format(NIC_IP), line)
if res is not None:
return None
c += 1
raise Exception('Device with address {0} not ready'.format(NIC_IP))
def change_default_route():
wait_device_ready(10)
p_ip = subprocess.Popen('ip r'.split(), stdout=subprocess.PIPE)
stdout, stderr = p_ip.communicate()
r_subnet_rules = []
    for line in stdout.decode().splitlines():
res = re.match('(.* ){2}eth[0-9](?! $).*', line)
if res is not None:
subnet_rule = res.group(0)
l_subnet_rule = subnet_rule.split()
device = l_subnet_rule[2]
ip = l_subnet_rule[-1]
r_subnet_rules.append(
{
'device': device,
'ip': ip,
'subnet_rule': subnet_rule
}
)
r_default_route = ''
for line in stdout.decode().splitlines():
res = re.match('default .*', line)
if res is not None:
r_default_route = res.group(0)
break
with open('/etc/rc.local', 'a') as f:
f.write('#!/bin/bash\n\n')
rule_index = 128
default_route_device = ''
for rule in r_subnet_rules:
default_route = re.sub('eth.', rule['device'], r_default_route)
f.write('ip rule add from {0} table {1}\n'.format(rule['ip'], rule_index))
f.write('ip r add {0} table {1}\n'.format(default_route, rule_index))
f.write('ip r add {0} table {1}\n\n'.format(rule['subnet_rule'], rule_index))
if rule['ip'] == NIC_IP:
default_route_device = rule['device']
rule_index += 1
default_route = re.sub('eth.', default_route_device, r_default_route)
f.write('ip r del default\n')
f.write('ip r add {0}\n\n'.format(default_route))
f.write('exit 0\n')
f.flush()
f.close()
os.chmod('/etc/rc.local', 0o0755)
p_rc_local = subprocess.Popen('/etc/rc.local'.split(), stdout=subprocess.PIPE)
stdout, stderr = p_rc_local.communicate()
if __name__ == '__main__':
boto3.setup_default_session(region_name=AWS_REGION)
# uses: DEPLOY_UUID, TAG_KEY
attach_eni_ids()
# uses: MOUNT_POINT, SERVICE_NAME, DEPLOY_UUID, TAG_KEY
attach_ebs()
# uses: NIC_IP
change_default_route()
| [
"boto3.setup_default_session",
"boto3.client",
"re.match",
"time.sleep",
"os.chmod",
"requests.get",
"boto3.resource",
"re.sub",
"glob.glob"
] | [((444, 465), 'boto3.resource', 'boto3.resource', (['"""ec2"""'], {}), "('ec2')\n", (458, 465), False, 'import boto3\n'), ((782, 801), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (794, 801), False, 'import boto3\n'), ((814, 835), 'boto3.resource', 'boto3.resource', (['"""ec2"""'], {}), "('ec2')\n", (828, 835), False, 'import boto3\n'), ((1197, 1218), 'boto3.resource', 'boto3.resource', (['"""ec2"""'], {}), "('ec2')\n", (1211, 1218), False, 'import boto3\n'), ((1565, 1584), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (1577, 1584), False, 'import boto3\n'), ((3441, 3449), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (3446, 3449), False, 'from time import sleep\n'), ((6408, 6438), 'os.chmod', 'os.chmod', (['"""/etc/rc.local"""', '(493)'], {}), "('/etc/rc.local', 493)\n", (6416, 6438), False, 'import os\n'), ((6607, 6658), 'boto3.setup_default_session', 'boto3.setup_default_session', ([], {'region_name': 'AWS_REGION'}), '(region_name=AWS_REGION)\n', (6634, 6658), False, 'import boto3\n'), ((848, 915), 'requests.get', 'requests.get', (['"""http://169.254.169.254/latest/meta-data/instance-id"""'], {}), "('http://169.254.169.254/latest/meta-data/instance-id')\n", (860, 915), False, 'import requests\n'), ((1597, 1664), 'requests.get', 'requests.get', (['"""http://169.254.169.254/latest/meta-data/instance-id"""'], {}), "('http://169.254.169.254/latest/meta-data/instance-id')\n", (1609, 1664), False, 'import requests\n'), ((1977, 1985), 'time.sleep', 'sleep', (['(3)'], {}), '(3)\n', (1982, 1985), False, 'from time import sleep\n'), ((2801, 2819), 'glob.glob', 'glob.glob', (['pattern'], {}), '(pattern)\n', (2810, 2819), False, 'import glob\n'), ((3523, 3541), 'glob.glob', 'glob.glob', (['pattern'], {}), '(pattern)\n', (3532, 3541), False, 'import glob\n'), ((3883, 3901), 'glob.glob', 'glob.glob', (['pattern'], {}), '(pattern)\n', (3892, 3901), False, 'import glob\n'), ((4241, 4249), 'time.sleep', 'sleep', (['(1)'], {}), '(1)\n', (4246, 4249), False, 'from time import sleep\n'), ((4892, 4934), 're.match', 're.match', (['"""(.* ){2}eth[0-9](?! $).*"""', 'line'], {}), "('(.* ){2}eth[0-9](?! $).*', line)\n", (4900, 4934), False, 'import re\n'), ((5411, 5439), 're.match', 're.match', (['"""default .*"""', 'line'], {}), "('default .*', line)\n", (5419, 5439), False, 'import re\n'), ((6187, 6240), 're.sub', 're.sub', (['"""eth."""', 'default_route_device', 'r_default_route'], {}), "('eth.', default_route_device, r_default_route)\n", (6193, 6240), False, 'import re\n'), ((2643, 2661), 'glob.glob', 'glob.glob', (['pattern'], {}), '(pattern)\n', (2652, 2661), False, 'import glob\n'), ((5733, 5780), 're.sub', 're.sub', (['"""eth."""', "rule['device']", 'r_default_route'], {}), "('eth.', rule['device'], r_default_route)\n", (5739, 5780), False, 'import re\n')] |
import csv
import datetime
import random
import os
from parsers.parser_base import ParserBase
FILE_TIME_EPOCH = datetime.datetime(1601, 1, 1)
FILE_TIME_MICROSECOND = 10
def filetime_to_epoch_datetime(file_time):
if isinstance(file_time, int):
microseconds_since_file_time_epoch = file_time / FILE_TIME_MICROSECOND
else:
microseconds_since_file_time_epoch = int(file_time) / FILE_TIME_MICROSECOND
return FILE_TIME_EPOCH + datetime.timedelta(microseconds=microseconds_since_file_time_epoch)
class SrumParser(ParserBase):
CSV_FIELDS = {
"Unknown1.csv": ["TimeStamp", "AppId", "UserId", "EndTime", "DurationMS"],
"Unknown2.csv": [],
"Unknown3.csv": [],
"Unknown4.csv": ["TimeStamp", "AppId", "UserId"],
"SruDbCheckpointTable.csv": [],
"SruDbIdMapTable.csv": [],
"Network Usage.csv": ["TimeStamp", "AppId", "UserId", "InterfaceLuid", "L2ProfileId", "BytesSent",
"BytesRecvd"],
"Network Connections.csv": [],
"Energy Usage.csv": [],
"Energy Usage(Long - Term).csv": [],
"Application Resources.csv": ["TimeStamp", "AppId", "UserId"],
"Application Resource Usage.csv": ["TimeStamp", "AppId", "UserId"]
}
PARSING_TOOL = r"Tools\ese-analyst-master\ese2csv.exe"
PARSE_COMMAND = "{parser_path} -o {output_path} -p srudb_plugin {srum_db} --plugin-args {software_hive}"
def __init__(self, temp, config):
super().__init__(config)
self.temp_result_path = temp
def parse(self, args):
srum_db, software_hive = args
output = r"{}\srum_{}".format(self.temp_result_path, random.randint(1, 1000000))
os.mkdir(output)
command = self.PARSE_COMMAND.format(parser_path=self.PARSING_TOOL, output_path=output, srum_db=srum_db,
software_hive=software_hive)
self._run_command(command)
for csv_file in os.listdir(output):
srum_records = []
full_path = os.path.join(output, csv_file)
headers = self.CSV_FIELDS.get(csv_file)
if not headers:
continue
if csv_file == "Unknown1.csv":
with open(full_path, "r") as f:
reader = csv.DictReader(f)
for line in reader:
cur_record = {}
endTime = line.get("EndTime")
duration = line.get("DurationMS")
if endTime and duration:
cur_record["time"] = filetime_to_epoch_datetime(int(endTime) - int(duration)).isoformat()
cur_record["EndTime"] = filetime_to_epoch_datetime(endTime).isoformat()
cur_record["DurationMS"] = duration
else:
cur_record["time"] = datetime.datetime(1970, 1, 1).isoformat()
cur_record["AppId"] = line.get("AppId")
cur_record["UserId"] = line.get("UserId")
srum_records.append(cur_record)
else:
with open(full_path, "r") as f:
reader = csv.DictReader(f)
for line in reader:
cur_record = {}
for header in headers:
if header == "TimeStamp":
cur_record["time"] = line.get("TimeStamp").replace(" ", "T")
line.pop("TimeStamp")
value = line.get(header)
if value:
if isinstance(value, bytes):
cur_record[header.lower().replace(" ", "_")] = value.decode()
elif str.isdigit(value):
cur_record[header.lower().replace(" ", "_")] = int(value)
else:
cur_record[header.lower().replace(" ", "_")] = value
else:
cur_record[header.lower().replace(" ", "_")] = ""
srum_records.append(cur_record)
self._write_results_list([("srum-{}".format(csv_file.split(".")[0].lower().replace(" ", "_")), srum_records)])
| [
"datetime.datetime",
"os.listdir",
"csv.DictReader",
"os.path.join",
"os.mkdir",
"datetime.timedelta",
"random.randint"
] | [((113, 142), 'datetime.datetime', 'datetime.datetime', (['(1601)', '(1)', '(1)'], {}), '(1601, 1, 1)\n', (130, 142), False, 'import datetime\n'), ((452, 519), 'datetime.timedelta', 'datetime.timedelta', ([], {'microseconds': 'microseconds_since_file_time_epoch'}), '(microseconds=microseconds_since_file_time_epoch)\n', (470, 519), False, 'import datetime\n'), ((1704, 1720), 'os.mkdir', 'os.mkdir', (['output'], {}), '(output)\n', (1712, 1720), False, 'import os\n'), ((1966, 1984), 'os.listdir', 'os.listdir', (['output'], {}), '(output)\n', (1976, 1984), False, 'import os\n'), ((1668, 1694), 'random.randint', 'random.randint', (['(1)', '(1000000)'], {}), '(1, 1000000)\n', (1682, 1694), False, 'import random\n'), ((2040, 2070), 'os.path.join', 'os.path.join', (['output', 'csv_file'], {}), '(output, csv_file)\n', (2052, 2070), False, 'import os\n'), ((2297, 2314), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (2311, 2314), False, 'import csv\n'), ((3242, 3259), 'csv.DictReader', 'csv.DictReader', (['f'], {}), '(f)\n', (3256, 3259), False, 'import csv\n'), ((2917, 2946), 'datetime.datetime', 'datetime.datetime', (['(1970)', '(1)', '(1)'], {}), '(1970, 1, 1)\n', (2934, 2946), False, 'import datetime\n')] |
from django.http import HttpRequest
from django.middleware.csrf import _compare_salted_tokens as equivalent_tokens
from django.template.context_processors import csrf
from django.test import SimpleTestCase
class TestContextProcessor(SimpleTestCase):
def test_force_token_to_string(self):
request = HttpRequest()
test_token = '<KEY>'
request.META['CSRF_COOKIE'] = test_token
token = csrf(request).get('csrf_token')
self.assertTrue(equivalent_tokens(str(token), test_token))
| [
"django.template.context_processors.csrf",
"django.http.HttpRequest"
] | [((322, 335), 'django.http.HttpRequest', 'HttpRequest', ([], {}), '()\n', (333, 335), False, 'from django.http import HttpRequest\n'), ((433, 446), 'django.template.context_processors.csrf', 'csrf', (['request'], {}), '(request)\n', (437, 446), False, 'from django.template.context_processors import csrf\n')] |
#! /usr/bin/python3
from help import *
import time
# short-forms are used, so as to reduce the .json file size
# t : type - d or f
# d : directory
# f : file
# ts : timestamp
# dirs : The dictionary containing info about directory contents
# time : edit time of the file/folder
# s : size of the file/folder
# p : full path of the file/folder
# n : name of the main file/folder in the .json file
# i : info about the contents in the .json file
# folder = {'t': 'd', 's': get_size(dir_dict), 'p': full_path + '/' + entity, 'time': get_time(stats), 'dirs': dir_dict}
# file = {'t': 'f', 's': stats.st_size, 'p': full_path + '/' + entity, 'time': get_time(stats)}
# info = {'t': 'd', 's': size, 'p': base_path, 'time': get_time(stats), 'dirs': info}
# write = {'n': examine_name, 'ts': time.time(), 'i': info}
# info = {'t': 'f', 's': stats.st_size, 'p': base_path, 'time': get_time(stats)}
# write = {'n': examine_name, 'ts': time.time(), 'i': info}
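# Illustrative example (made-up names, sizes and timestamps) of the structure
# the short forms above produce for a folder containing a single file:
# {'n': 'photos', 'ts': 1600000000.0,
#  'i': {'t': 'd', 's': 2048, 'p': '/home/user/photos', 'time': 1599999000.0,
#        'dirs': {'cat.jpg': {'t': 'f', 's': 2048,
#                             'p': '/home/user/photos/cat.jpg',
#                             'time': 1599999000.0}}}}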
no_of_files = 0
no_of_dirs = 0
examine_name = ''
save_filename = ''
_base_path = None
_ignore = False
errors = []
def get_save_config(base_path: str) -> None:
global examine_name, save_filename
examine_name = base_path.strip().split('/')[-1]
save_filename = examine_name + '.json'
if not os.path.lexists(constants.save_folder_name):
execute_bash("mkdir " + constants.save_folder_name)
def get_info_dict(sub_path: str) -> dict:
global no_of_files, no_of_dirs, _base_path, _ignore, errors
full_path = _base_path + '/' + sub_path
full_path = full_path.strip()
if full_path.endswith('/'):
full_path = full_path[:-1]
edit_dict = dict()
try:
entity_list = os.listdir(full_path)
for entity in entity_list:
ignore_it = False
if _ignore and to_be_ignored(full_path + '/' + entity): # ignoring cache temp etc files
ignore_it = True
if not ignore_it:
try:
stats = os.stat(full_path + '/' + entity)
if not os.path.islink(full_path + '/' + entity):
if os.path.isdir(full_path + '/' + entity):
no_of_dirs += 1
new_sub_path = sub_path + '/' + entity
dir_dict = get_info_dict(new_sub_path)
edit_dict[entity] = {'t': 'd', 's': get_size(dir_dict), 'p': full_path + '/' + entity,
'time': get_time(stats), 'dirs': dir_dict}
if os.path.isfile(full_path + '/' + entity):
no_of_files += 1
edit_dict[entity] = {'t': 'f', 's': stats.st_size, 'p': full_path + '/' + entity,
'time': get_time(stats)}
except FileNotFoundError:
errors.append(full_path + '/' + entity)
except PermissionError:
errors.append(full_path)
return edit_dict
def track(base_path: str, dir_path: str, output: bool = False, ignore: bool = False) -> list:
global _base_path, no_of_dirs, no_of_files, save_filename, _ignore, errors
no_of_dirs = 0
no_of_files = 0
print("Tracking...")
_base_path = base_path
_ignore = ignore
get_save_config(base_path)
if _ignore:
get_ignore_list()
if os.path.isdir(base_path):
info = get_info_dict('')
size = get_size(info)
no_of_dirs += 1
stats = os.stat(base_path)
info = {'t': 'd', 's': size, 'p': base_path, 'time': get_time(stats), 'dirs': info}
write = {'n': examine_name, 'ts': time.time(), 'i': info}
write_to_json_file(write, constants.save_folder_name + "/" + save_filename)
if output:
print("Successfully analysed the folder " + base_path)
print("Found {} folder(s)".format(no_of_dirs))
print("Found {} file(s)".format(no_of_files))
print("The directory is of size {}".format(get_size_format(size)))
print("A detailed report can be found using the 'file_tb.py print [FILE/FOLDER]' command ")
else:
no_of_files += 1
stats = os.stat(base_path)
info = {'t': 'f', 's': stats.st_size, 'p': base_path, 'time': get_time(stats)}
write = {'n': examine_name, 'ts': time.time(), 'i': info}
write_to_json_file(write, constants.save_folder_name + "/" + save_filename)
if output:
print("Successfully analysed the file")
print("The file is of size {}".format(get_size_format(stats.st_size)))
print("A detailed report can be found using the 'file_tb.py print [FILE/FOLDER]' command ")
# pp(info)
return errors
if __name__ == '__main__':
track(os.getcwd(), os.getcwd(), output=True)
| [
"time.time"
] | [((3659, 3670), 'time.time', 'time.time', ([], {}), '()\n', (3668, 3670), False, 'import time\n'), ((4352, 4363), 'time.time', 'time.time', ([], {}), '()\n', (4361, 4363), False, 'import time\n')] |
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Simple service for SL (Storstockholms Lokaltrafik)."""
import datetime
import json
import logging
from datetime import timedelta
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (ATTR_FRIENDLY_NAME, CONF_SCAN_INTERVAL,
CONF_SENSOR_TYPE, CONF_SENSORS, STATE_OFF,
STATE_ON)
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.event import (async_track_point_in_utc_time,
async_track_utc_time_change,
track_time_interval)
from homeassistant.util import Throttle
from homeassistant.util.dt import now
from hasl import (haslapi, fpapi, tl2api, ri4api, si2api,
HASL_Error, HASL_API_Error, HASL_HTTP_Error)
__version__ = '2.2.0'
_LOGGER = logging.getLogger(__name__)
DOMAIN = 'hasl'
# Keys used in the configuration.
CONF_RI4_KEY = 'ri4key'
CONF_SI2_KEY = 'si2key'
CONF_TL2_KEY = 'tl2key'
CONF_SITEID = 'siteid'
CONF_LINES = 'lines'
CONF_DIRECTION = 'direction'
CONF_ENABLED_SENSOR = 'sensor'
CONF_TIMEWINDOW = 'timewindow'
CONF_SENSORPROPERTY = 'property'
CONF_TRAIN_TYPE = 'train_type'
CONF_TRAFFIC_CLASS = 'traffic_class'
CONF_VERSION = 'version_sensor'
CONF_USE_MINIMIZATION = 'api_minimization'
LIST_SENSOR_TYPES = ['departures', 'status', 'trainlocation', 'comb', 'tl2']
LIST_SENSOR_PROPERTIES = ['min', 'time', 'deviations', 'refresh', 'updated']
LIST_TRAIN_TYPES = ['PT', 'RB', 'TVB', 'SB', 'LB', 'SpvC', 'TB1', 'TB2', 'TB3']
# Default values for configuration.
DEFAULT_INTERVAL = timedelta(minutes=10)
DEFAULT_TIMEWINDOW = 30
DEFAULT_DIRECTION = 0
DEFAULT_SENSORPROPERTY = 'min'
DEFAULT_TRAIN_TYPE = 'PT'
DEFAULT_TRAFFIC_CLASS = ['metro', 'train', 'local', 'tram', 'bus', 'fer']
DEFAULT_SENSORTYPE = 'departures'
DEFAULT_CACHE_FILE = '.storage/haslcache.json'
# Defining the configuration schema.
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
# API Keys
vol.Optional(CONF_RI4_KEY): cv.string,
vol.Optional(CONF_SI2_KEY): cv.string,
vol.Optional(CONF_TL2_KEY): cv.string,
vol.Optional(CONF_VERSION, default=False): cv.boolean,
vol.Optional(CONF_USE_MINIMIZATION, default=True): cv.boolean,
vol.Required(CONF_SENSORS, default=[]):
vol.All(cv.ensure_list, [vol.All({
vol.Required(ATTR_FRIENDLY_NAME): cv.string,
vol.Required(CONF_SENSOR_TYPE, default=DEFAULT_SENSORTYPE):
vol.In(LIST_SENSOR_TYPES),
vol.Optional(CONF_ENABLED_SENSOR): cv.string,
vol.Optional(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL):
vol.Any(cv.time_period, cv.positive_timedelta),
vol.Optional(CONF_SITEID): cv.string,
vol.Optional(CONF_LINES, default=[]):
vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_DIRECTION, default=DEFAULT_DIRECTION):
vol.All(vol.Coerce(int), vol.Range(min=0, max=2)),
vol.Optional(CONF_TIMEWINDOW, default=DEFAULT_TIMEWINDOW):
vol.All(vol.Coerce(int), vol.Range(min=0, max=60)),
vol.Optional(CONF_SENSORPROPERTY, default=DEFAULT_SENSORPROPERTY):
vol.In(LIST_SENSOR_PROPERTIES),
vol.Optional(CONF_TRAFFIC_CLASS, default=DEFAULT_TRAFFIC_CLASS):
vol.All(cv.ensure_list, [vol.In(DEFAULT_TRAFFIC_CLASS)]),
vol.Optional(CONF_TRAIN_TYPE, default=DEFAULT_TRAIN_TYPE):
vol.In(LIST_TRAIN_TYPES)
})]),
}, extra=vol.ALLOW_EXTRA)
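# Illustrative configuration.yaml entry matching the schema above; the keys
# follow the constants used in the schema, while the platform name, API keys
# and site id shown here are assumed placeholders, not real values:
#
# sensor:
#   - platform: hasl
#     ri4key: YOUR_RI4_KEY
#     si2key: YOUR_SI2_KEY
#     tl2key: YOUR_TL2_KEY
#     sensors:
#       - friendly_name: My departures
#         sensor_type: departures
#         siteid: '1234'
#         lines: [53, 76]
#         direction: 1
#         timewindow: 30
#         property: min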
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the sensors."""
if not hass.data.get(DOMAIN):
hass.data[DOMAIN] = {}
sensors = []
if config[CONF_VERSION]:
sensors.append(SLVersionSensor(hass))
_LOGGER.info("Created version sensor for HASL")
for sensorconf in config[CONF_SENSORS]:
if sensorconf[CONF_SENSOR_TYPE] == 'departures' or \
sensorconf[CONF_SENSOR_TYPE] == 'comb':
sitekey = sensorconf.get(CONF_SITEID)
si2key = config.get(CONF_SI2_KEY)
ri4key = config.get(CONF_RI4_KEY)
if sitekey and ri4key:
sensorname = sensorconf[ATTR_FRIENDLY_NAME]
sensors.append(SLDeparturesSensor(
hass,
si2key,
ri4key,
sitekey,
sensorconf.get(CONF_LINES),
sensorname,
sensorconf.get(CONF_ENABLED_SENSOR),
sensorconf.get(CONF_SCAN_INTERVAL),
sensorconf.get(CONF_DIRECTION),
sensorconf.get(CONF_TIMEWINDOW),
sensorconf.get(CONF_SENSORPROPERTY),
config.get(CONF_USE_MINIMIZATION)
))
_LOGGER.info("Created departures sensor %s...", sensorname)
else:
_LOGGER.error("Sensor %s is missing site, si2key or ri4key",
sensorconf[ATTR_FRIENDLY_NAME])
if sensorconf[CONF_SENSOR_TYPE] == 'status' or \
sensorconf[CONF_SENSOR_TYPE] == 'tl2':
tl2key = config.get(CONF_TL2_KEY)
if tl2key:
sensorname = sensorconf[ATTR_FRIENDLY_NAME]
sensors.append(SLStatusSensor(
hass,
tl2key,
sensorname,
sensorconf.get(CONF_ENABLED_SENSOR),
sensorconf.get(CONF_SCAN_INTERVAL),
sensorconf.get(CONF_TRAFFIC_CLASS),
config.get(CONF_USE_MINIMIZATION)
))
_LOGGER.info("Created status sensor %s...", sensorname)
else:
_LOGGER.error("Sensor %s is missing tl2key attribute",
sensorconf[ATTR_FRIENDLY_NAME])
if sensorconf[CONF_SENSOR_TYPE] == 'trainlocation':
train_type = sensorconf.get(CONF_TRAIN_TYPE)
if train_type:
sensorname = sensorconf[ATTR_FRIENDLY_NAME]
sensors.append(SLTrainLocationSensor(
hass,
sensorname,
train_type,
sensorconf.get(CONF_SCAN_INTERVAL),
sensorconf.get(CONF_ENABLED_SENSOR),
))
_LOGGER.info("Created train sensor %s...", sensorname)
else:
_LOGGER.error("Sensor %s is missing train_type attribute",
sensorconf[ATTR_FRIENDLY_NAME])
add_devices(sensors)
class SLTrainLocationSensor(Entity):
"""Trafic Situation Sensor."""
def __init__(self, hass, friendly_name, train_type,
interval, enabled_sensor):
self._hass = hass
self._fpapi = fpapi()
self._name = friendly_name
self._interval = interval
self._enabled_sensor = enabled_sensor
self._train_type = train_type
self._data = {}
self.update = Throttle(interval)(self._update)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
""" Return the icon for the frontend."""
return None
@property
def device_state_attributes(self):
""" Return the sensor attributes."""
return {'type': self._train_type, 'data': json.dumps(self._data)}
@property
def state(self):
""" Return the state of the sensor."""
return self._train_type
def _update(self):
if self._enabled_sensor is not None:
sensor_state = self._hass.states.get(self._enabled_sensor)
        if self._enabled_sensor is None or sensor_state.state == STATE_ON:
try:
apidata = self._fpapi.request(self._train_type)
except HASL_Error as e:
_LOGGER.error("A communication error occured while "
"updating train location sensor: %s", e.details)
return
except Exception as e:
_LOGGER.error("A error occured while"
"updating train location sensor: %s", e)
return
self._data = apidata
_LOGGER.info("Update completed %s...", self._name)
class SLVersionSensor(Entity):
"""HASL Version Sensor."""
def __init__(self, hass):
self._hass = hass
self._haslapi = haslapi()
self._name = 'HASL Version'
self._version = __version__
self._py_version = self._haslapi.version()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
""" Return the icon for the frontend."""
return None
@property
def device_state_attributes(self):
""" Return the sensor attributes."""
return {'hasl': self._version, 'pyHasl': self._py_version}
@property
def state(self):
""" Return the state of the sensor."""
return self._version + "/" + self._py_version
class SLStatusSensor(Entity):
"""Trafic Situation Sensor."""
def __init__(self, hass, tl2key, friendly_name,
enabled_sensor, interval, type,
minimization):
self._tl2api = tl2api(tl2key)
self._datakey = 'tl2_' + tl2key
self._interval = interval
self._hass = hass
self._name = friendly_name
self._enabled_sensor = enabled_sensor
self._type = type
self._sensordata = []
self._lastupdate = '-'
self._cachefile = hass.config.path(DEFAULT_CACHE_FILE)
self._minimization = minimization
if not hass.data[DOMAIN].get(self._datakey):
hass.data[DOMAIN][self._datakey] = ''
self.update = Throttle(interval)(self._update)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
""" Return the icon for the frontend."""
return 'mdi:train-car'
@property
def device_state_attributes(self):
""" Return the sensor attributes."""
return self._sensordata
@property
def state(self):
""" Return the state of the sensor."""
return self._lastupdate
def getCache(self, key):
try:
jsonFile = open(self._cachefile, 'r')
data = json.load(jsonFile)
jsonFile.close()
return data.get(key)
except:
return {}
def putCache(self, key, value):
try:
jsonFile = open(self._cachefile, 'r')
data = json.load(jsonFile)
jsonFile.close()
data[key] = value
except:
data = {'' + key + '': value}
jsonFile = open(self._cachefile, 'w')
jsonFile.write(json.dumps(data))
jsonFile.close()
def _update(self):
if self._enabled_sensor is not None:
sensor_state = self._hass.states.get(self._enabled_sensor)
        if self._enabled_sensor is None or sensor_state.state == STATE_ON:
_LOGGER.info("Starting to update TL2 for %s...",
self._name)
            # Dict used to collect the rebuilt sensor data.
newdata = {}
# Use some nice translations for the statuses etc.
statuses = {
'EventGood': 'Good',
'EventMinor': 'Minor',
'EventMajor': 'Closed',
'EventPlanned': 'Planned',
}
# Icon table used for HomeAssistant.
statusIcons = {
'EventGood': 'mdi:check',
'EventMinor': 'mdi:clock-alert-outline',
'EventMajor': 'mdi:close',
'EventPlanned': 'mdi:triangle-outline'
}
trafficTypeIcons = {
'ferry': 'mdi:ferry',
'bus': 'mdi:bus',
'tram': 'mdi:tram',
'train': 'mdi:train',
'local': 'mdi:train-variant',
'metro': 'mdi:subway-variant'
}
            # If the same API has already made the request within
            # the specified interval, then use that data instead of
            # requesting it again and spare some innocent credits from dying.
cacheage = self._hass.data[DOMAIN][self._datakey]
if not cacheage or now(self._hass.config.time_zone) \
- self._interval > cacheage or not self._minimization:
try:
apidata = self._tl2api.request()
apidata = apidata['ResponseData']['TrafficTypes']
self.putCache(self._datakey, apidata)
self._hass.data[DOMAIN][self._datakey] = \
now(self._hass.config.time_zone)
_LOGGER.info("Updated cache for %s...", self._name)
except HASL_Error as e:
_LOGGER.error("A communication error occured while "
"updating TL2 sensor: %s", e.details)
return
except Exception as e:
_LOGGER.error("A error occured while "
"updating TL4 API: %s", e)
return
else:
apidata = self.getCache(self._datakey)
_LOGGER.info("Reusing data from cache for %s...",
self._name)
# Return only the relevant portion of the results.
for response in apidata:
type = response['Type']
if self._type is None or type in self._type:
statustype = ('ferry' if type == 'fer' else type)
newdata[statustype + '_status'] = \
statuses.get(response['StatusIcon'])
newdata[statustype + '_status_icon'] = \
statusIcons.get(response['StatusIcon'])
newdata[statustype + '_icon'] = \
trafficTypeIcons.get(statustype)
for event in response['Events']:
event['Status'] = statuses.get(event['StatusIcon'])
event['StatusIcon'] = \
statusIcons.get(event['StatusIcon'])
newdata[statustype + '_events'] = response['Events']
# Attribution and update sensor data.
newdata['attribution'] = "Stockholms Lokaltrafik"
newdata['last_updated'] = \
                self._hass.data[DOMAIN][self._datakey].strftime('%Y-%m-%d ' +
'%H:%M:%S')
self._sensordata = newdata
self._lastupdate = newdata['last_updated']
_LOGGER.info("TL2 update completed for %s...", self._name)
class SLDeparturesSensor(Entity):
"""Departure board for one SL site."""
def __init__(self, hass, si2key, ri4key, siteid,
lines, friendly_name, enabled_sensor,
interval, direction, timewindow, sensorproperty,
minimization):
"""Initialize"""
# The table of resulttypes and the corresponding units of measure.
unit_table = {
'min': 'min',
'time': '',
'deviations': '',
'refresh': '',
'update': '',
}
if si2key:
self._si2key = si2key
self._si2api = si2api(si2key, siteid, '')
self._si2datakey = 'si2_' + si2key + '_' + siteid
self._ri4key = ri4key
self._ri4api = ri4api(ri4key, siteid, 60)
self._ri4datakey = 'ri2_' + ri4key + '_' + siteid
self._hass = hass
self._name = friendly_name
self._lines = lines
self._siteid = siteid
self._enabled_sensor = enabled_sensor
self._sensorproperty = sensorproperty
self._departure_table = []
self._deviations_table = []
self._direction = direction
self._timewindow = timewindow
self._nextdeparture_minutes = '0'
self._nextdeparture_expected = '-'
self._lastupdate = '-'
self._interval = interval
self._unit_of_measure = unit_table.get(self._sensorproperty, 'min')
self._cachefile = hass.config.path(DEFAULT_CACHE_FILE)
self._minimization = minimization
if not hass.data[DOMAIN].get(self._ri4datakey):
hass.data[DOMAIN][self._ri4datakey] = ''
if self._si2key:
if not hass.data[DOMAIN].get(self._si2datakey):
hass.data[DOMAIN][self._si2datakey] = ''
# Setup updating of the sensor.
self.update = Throttle(interval)(self._update)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def icon(self):
""" Return the icon for the frontend."""
if self._deviations_table:
return 'mdi:bus-alert'
return 'mdi:bus'
@property
def state(self):
""" Return number of minutes to the next departure """
        # If the sensor should return minutes to next departure.
        if self._sensorproperty == 'min':
            if not self._departure_table:
                return '-'
            return self._departure_table[0]['time']
        # If the sensor should return the time at which next departure occurs.
        if self._sensorproperty == 'time':
            if not self._departure_table:
                return '-'
            expected = self._departure_table[0]['expected'] or '-'
            if expected != '-':
                expected = \
                    datetime.datetime.strptime(expected,
                                               '%Y-%m-%dT%H:%M:%S')
                expected = expected.strftime('%H:%M:%S')
            return expected
        # If the sensor should return the number of deviations.
        if self._sensorproperty == 'deviations':
            return len(self._deviations_table)
        # If the sensor should return if it is updating or not.
        if self._sensorproperty == 'refresh':
            if self._enabled_sensor is not None:
                sensor_state = self._hass.states.get(self._enabled_sensor)
            if self._enabled_sensor is None or sensor_state.state == STATE_ON:
                return STATE_ON
            return STATE_OFF
        if self._sensorproperty == 'updated':
            if self._lastupdate == '-':
                return '-'
            return self._lastupdate.strftime('%Y-%m-%d %H:%M:%S')
        # Failsafe
        return '-'
@property
def device_state_attributes(self):
""" Return the sensor attributes ."""
# Initialize the state attributes.
val = {}
        # Format the next expected time.
if self._departure_table:
expected_time = self._departure_table[0]['expected'] or '-'
expected_minutes = self._departure_table[0]['time'] or '-'
            if expected_time != '-':
expected_time = \
datetime.datetime.strptime(expected_time,
'%Y-%m-%dT%H:%M:%S')
expected_time = expected_time.strftime('%H:%M:%S')
else:
expected_time = '-'
expected_minutes = '-'
# Format the last refresh time.
refresh = self._lastupdate
        if self._lastupdate != '-':
refresh = refresh.strftime('%Y-%m-%d %H:%M:%S')
# Setup the unit of measure.
        if self._unit_of_measure != '':
val['unit_of_measurement'] = self._unit_of_measure
# Check if sensor is currently updating or not.
if self._enabled_sensor is not None:
sensor_state = self._hass.states.get(self._enabled_sensor)
        if self._enabled_sensor is None or sensor_state.state == STATE_ON:
val['refresh_enabled'] = STATE_ON
else:
val['refresh_enabled'] = STATE_OFF
# Set values of the sensor.
val['attribution'] = 'Stockholms Lokaltrafik'
val['departures'] = self._departure_table
val['deviations'] = self._deviations_table
val['last_refresh'] = refresh
val['next_departure_minutes'] = expected_minutes
val['next_departure_time'] = expected_time
val['deviation_count'] = len(self._deviations_table)
return val
def parseDepartureTime(self, t):
""" weird time formats from the API,
do some quick and dirty conversions. """
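        # Illustrative inputs handled by the parsing below:
        #   'Nu' -> 0, '5 min' -> 5, '14:32' -> minutes from now until 14:32.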
try:
if t == 'Nu':
return 0
s = t.split()
if len(s) > 1 and s[1] == 'min':
return int(s[0])
s = t.split(':')
if len(s) > 1:
rightnow = now(self._hass.config.time_zone)
min = int(s[0]) * 60 + int(s[1]) - (rightnow.hour * 60 +
rightnow.minute)
if min < 0:
min = min + 1440
return min
except Exception:
_LOGGER.warning("Failed to parse departure time (%s) ", t)
return 0
def getCache(self, key):
try:
jsonFile = open(self._cachefile, 'r')
data = json.load(jsonFile)
jsonFile.close()
return data.get(key)
except:
return {}
def putCache(self, key, value):
try:
jsonFile = open(self._cachefile, 'r')
data = json.load(jsonFile)
jsonFile.close()
data[key] = value
except:
data = {'' + key + '': value}
jsonFile = open(self._cachefile, 'w')
jsonFile.write(json.dumps(data))
jsonFile.close()
def _update(self):
"""Get the departure board."""
# If using external sensor, get its value.
if self._enabled_sensor is not None:
sensor_state = self._hass.states.get(self._enabled_sensor)
        # If we don't have an external sensor or it is ON, then proceed.
        if self._enabled_sensor is None or sensor_state.state == STATE_ON:
self._update_ri4()
if self._si2key:
self._update_si2()
self._lastupdate = now(self._hass.config.time_zone)
def _update_ri4(self):
errorOccured = False
_LOGGER.info("Starting to update RI4 for %s...", self._name)
cacheage = self._hass.data[DOMAIN][self._ri4datakey]
if not cacheage or now(self._hass.config.time_zone) \
- self._interval > cacheage or not self._minimization:
try:
departuredata = self._ri4api.request()
departuredata = departuredata['ResponseData']
self.putCache(self._ri4datakey, departuredata)
self._hass.data[DOMAIN][self._ri4datakey] = \
now(self._hass.config.time_zone)
_LOGGER.info("Updated cache for %s...", self._name)
except HASL_Error as e:
_LOGGER.error("A communication error occured while "
"updating SI2 sensor: %s", e.details)
errorOccured = True
except Exception as e:
_LOGGER.error("A communication error occured while "
"updating RI4 API: %s", e)
errorOccured = True
else:
try:
departuredata = self.getCache(self._ri4datakey)
_LOGGER.info("Reusing data from cache for %s...",
self._name)
except Exception as e:
_LOGGER.error("A error occured while retreiving "
"cached RI4 sensor data: %s", e)
errorOccured = True
if not errorOccured:
departures = []
iconswitcher = {
'Buses': 'mdi:bus',
'Trams': 'mdi:tram',
'Ships': 'mdi:ferry',
'Metros': 'mdi:subway-variant',
'Trains': 'mdi:train',
}
for (i, traffictype) in enumerate(['Metros', 'Buses', 'Trains',
'Trams', 'Ships']):
for (idx, value) in enumerate(departuredata[traffictype]):
direction = value['JourneyDirection'] or 0
displaytime = value['DisplayTime'] or ''
destination = value['Destination'] or ''
linenumber = value['LineNumber'] or ''
expected = value['ExpectedDateTime'] or ''
groupofline = value['GroupOfLine'] or ''
icon = iconswitcher.get(traffictype, 'mdi:train-car')
if int(self._direction) == 0 or int(direction) \
== int(self._direction):
if self._lines == [] or linenumber \
in self._lines:
diff = self.parseDepartureTime(displaytime)
if diff < self._timewindow:
departures.append({
'line': linenumber,
'direction': direction,
'departure': displaytime,
'destination': destination,
'time': diff,
'expected': expected,
'type': traffictype,
'groupofline': groupofline,
'icon': icon,
})
self._departure_table = sorted(departures,
key=lambda k: k['time'])
_LOGGER.info("RI4 update completed for %s...", self._name)
def _update_si2(self):
errorOccured = False
_LOGGER.info("Starting to update SI2 for %s...", self._name)
cacheage = self._hass.data[DOMAIN][self._si2datakey]
if not cacheage or now(self._hass.config.time_zone) \
- self._interval > cacheage or not self._minimization:
try:
deviationdata = self._si2api.request()
deviationdata = deviationdata['ResponseData']
self.putCache(self._si2datakey, deviationdata)
self._hass.data[DOMAIN][self._si2datakey] = \
now(self._hass.config.time_zone)
_LOGGER.info('Updated cache for %s...', self._name)
except HASL_Error as e:
_LOGGER.error("A communication error occured while "
"updating SI2 sensor: %s", e.details)
errorOccured = True
except Exception as e:
_LOGGER.error("A error occured while "
"updating SI2 sensor: %s", e)
errorOccured = True
else:
try:
deviationdata = self.getCache(self._si2datakey)
_LOGGER.info("Reusing data from cache for %s...",
self._name)
except Exception as e:
_LOGGER.error("A error occured while retreiving "
"cached SI2 sensor: %s", e.details)
errorOccured = True
if not errorOccured:
deviations = []
for (idx, value) in enumerate(deviationdata):
deviations.append({
'updated': value['Updated'],
'title': value['Header'],
'fromDate': value['FromDateTime'],
'toDate': value['UpToDateTime'],
'details': value['Details'],
'sortOrder': value['SortOrder'],
})
self._deviations_table = \
sorted(deviations, key=lambda k: k['sortOrder'])
_LOGGER.info("SI2 update completed for %s...", self._name)
| [
"logging.getLogger",
"voluptuous.Any",
"hasl.tl2api",
"datetime.timedelta",
"voluptuous.Optional",
"hasl.haslapi",
"voluptuous.All",
"json.dumps",
"homeassistant.util.Throttle",
"homeassistant.util.dt.now",
"hasl.fpapi",
"hasl.si2api",
"voluptuous.Required",
"voluptuous.Coerce",
"voluptuous.Range",
"hasl.ri4api",
"datetime.datetime.strptime",
"json.load",
"voluptuous.In"
] | [((990, 1017), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1007, 1017), False, 'import logging\n'), ((1743, 1764), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(10)'}), '(minutes=10)\n', (1752, 1764), False, 'from datetime import timedelta\n'), ((2124, 2150), 'voluptuous.Optional', 'vol.Optional', (['CONF_RI4_KEY'], {}), '(CONF_RI4_KEY)\n', (2136, 2150), True, 'import voluptuous as vol\n'), ((2167, 2193), 'voluptuous.Optional', 'vol.Optional', (['CONF_SI2_KEY'], {}), '(CONF_SI2_KEY)\n', (2179, 2193), True, 'import voluptuous as vol\n'), ((2210, 2236), 'voluptuous.Optional', 'vol.Optional', (['CONF_TL2_KEY'], {}), '(CONF_TL2_KEY)\n', (2222, 2236), True, 'import voluptuous as vol\n'), ((2253, 2294), 'voluptuous.Optional', 'vol.Optional', (['CONF_VERSION'], {'default': '(False)'}), '(CONF_VERSION, default=False)\n', (2265, 2294), True, 'import voluptuous as vol\n'), ((2312, 2361), 'voluptuous.Optional', 'vol.Optional', (['CONF_USE_MINIMIZATION'], {'default': '(True)'}), '(CONF_USE_MINIMIZATION, default=True)\n', (2324, 2361), True, 'import voluptuous as vol\n'), ((2380, 2418), 'voluptuous.Required', 'vol.Required', (['CONF_SENSORS'], {'default': '[]'}), '(CONF_SENSORS, default=[])\n', (2392, 2418), True, 'import voluptuous as vol\n'), ((7044, 7051), 'hasl.fpapi', 'fpapi', ([], {}), '()\n', (7049, 7051), False, 'from hasl import haslapi, fpapi, tl2api, ri4api, si2api, HASL_Error, HASL_API_Error, HASL_HTTP_Error\n'), ((8709, 8718), 'hasl.haslapi', 'haslapi', ([], {}), '()\n', (8716, 8718), False, 'from hasl import haslapi, fpapi, tl2api, ri4api, si2api, HASL_Error, HASL_API_Error, HASL_HTTP_Error\n'), ((9579, 9593), 'hasl.tl2api', 'tl2api', (['tl2key'], {}), '(tl2key)\n', (9585, 9593), False, 'from hasl import haslapi, fpapi, tl2api, ri4api, si2api, HASL_Error, HASL_API_Error, HASL_HTTP_Error\n'), ((16028, 16054), 'hasl.ri4api', 'ri4api', (['ri4key', 'siteid', '(60)'], {}), '(ri4key, siteid, 60)\n', (16034, 16054), False, 'from hasl import haslapi, fpapi, tl2api, ri4api, si2api, HASL_Error, HASL_API_Error, HASL_HTTP_Error\n'), ((7252, 7270), 'homeassistant.util.Throttle', 'Throttle', (['interval'], {}), '(interval)\n', (7260, 7270), False, 'from homeassistant.util import Throttle\n'), ((7644, 7666), 'json.dumps', 'json.dumps', (['self._data'], {}), '(self._data)\n', (7654, 7666), False, 'import json\n'), ((10094, 10112), 'homeassistant.util.Throttle', 'Throttle', (['interval'], {}), '(interval)\n', (10102, 10112), False, 'from homeassistant.util import Throttle\n'), ((10706, 10725), 'json.load', 'json.load', (['jsonFile'], {}), '(jsonFile)\n', (10715, 10725), False, 'import json\n'), ((10946, 10965), 'json.load', 'json.load', (['jsonFile'], {}), '(jsonFile)\n', (10955, 10965), False, 'import json\n'), ((11154, 11170), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (11164, 11170), False, 'import json\n'), ((15885, 15911), 'hasl.si2api', 'si2api', (['si2key', 'siteid', '""""""'], {}), "(si2key, siteid, '')\n", (15891, 15911), False, 'from hasl import haslapi, fpapi, tl2api, ri4api, si2api, HASL_Error, HASL_API_Error, HASL_HTTP_Error\n'), ((17116, 17134), 'homeassistant.util.Throttle', 'Throttle', (['interval'], {}), '(interval)\n', (17124, 17134), False, 'from homeassistant.util import Throttle\n'), ((21647, 21666), 'json.load', 'json.load', (['jsonFile'], {}), '(jsonFile)\n', (21656, 21666), False, 'import json\n'), ((21887, 21906), 'json.load', 'json.load', (['jsonFile'], {}), '(jsonFile)\n', (21896, 21906), False, 'import json\n'), 
((22095, 22111), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (22105, 22111), False, 'import json\n'), ((22660, 22692), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (22663, 22692), False, 'from homeassistant.util.dt import now\n'), ((18108, 18185), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['self._nextdeparture_expected', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(self._nextdeparture_expected, '%Y-%m-%dT%H:%M:%S')\n", (18134, 18185), False, 'import datetime\n'), ((19416, 19478), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['expected_time', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(expected_time, '%Y-%m-%dT%H:%M:%S')\n", (19442, 19478), False, 'import datetime\n'), ((21154, 21186), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (21157, 21186), False, 'from homeassistant.util.dt import now\n'), ((23295, 23327), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (23298, 23327), False, 'from homeassistant.util.dt import now\n'), ((26944, 26976), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (26947, 26976), False, 'from homeassistant.util.dt import now\n'), ((13146, 13178), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (13149, 13178), False, 'from homeassistant.util.dt import now\n'), ((22908, 22940), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (22911, 22940), False, 'from homeassistant.util.dt import now\n'), ((26557, 26589), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (26560, 26589), False, 'from homeassistant.util.dt import now\n'), ((2476, 2508), 'voluptuous.Required', 'vol.Required', (['ATTR_FRIENDLY_NAME'], {}), '(ATTR_FRIENDLY_NAME)\n', (2488, 2508), True, 'import voluptuous as vol\n'), ((2534, 2592), 'voluptuous.Required', 'vol.Required', (['CONF_SENSOR_TYPE'], {'default': 'DEFAULT_SENSORTYPE'}), '(CONF_SENSOR_TYPE, default=DEFAULT_SENSORTYPE)\n', (2546, 2592), True, 'import voluptuous as vol\n'), ((2650, 2683), 'voluptuous.Optional', 'vol.Optional', (['CONF_ENABLED_SENSOR'], {}), '(CONF_ENABLED_SENSOR)\n', (2662, 2683), True, 'import voluptuous as vol\n'), ((2709, 2767), 'voluptuous.Optional', 'vol.Optional', (['CONF_SCAN_INTERVAL'], {'default': 'DEFAULT_INTERVAL'}), '(CONF_SCAN_INTERVAL, default=DEFAULT_INTERVAL)\n', (2721, 2767), True, 'import voluptuous as vol\n'), ((2846, 2871), 'voluptuous.Optional', 'vol.Optional', (['CONF_SITEID'], {}), '(CONF_SITEID)\n', (2858, 2871), True, 'import voluptuous as vol\n'), ((2897, 2933), 'voluptuous.Optional', 'vol.Optional', (['CONF_LINES'], {'default': '[]'}), '(CONF_LINES, default=[])\n', (2909, 2933), True, 'import voluptuous as vol\n'), ((3002, 3057), 'voluptuous.Optional', 'vol.Optional', (['CONF_DIRECTION'], {'default': 'DEFAULT_DIRECTION'}), '(CONF_DIRECTION, default=DEFAULT_DIRECTION)\n', (3014, 3057), True, 'import voluptuous as vol\n'), ((3139, 3196), 'voluptuous.Optional', 'vol.Optional', (['CONF_TIMEWINDOW'], {'default': 'DEFAULT_TIMEWINDOW'}), '(CONF_TIMEWINDOW, default=DEFAULT_TIMEWINDOW)\n', (3151, 3196), True, 'import voluptuous as vol\n'), ((3279, 3344), 'voluptuous.Optional', 'vol.Optional', (['CONF_SENSORPROPERTY'], {'default': 'DEFAULT_SENSORPROPERTY'}), 
'(CONF_SENSORPROPERTY, default=DEFAULT_SENSORPROPERTY)\n', (3291, 3344), True, 'import voluptuous as vol\n'), ((3407, 3470), 'voluptuous.Optional', 'vol.Optional', (['CONF_TRAFFIC_CLASS'], {'default': 'DEFAULT_TRAFFIC_CLASS'}), '(CONF_TRAFFIC_CLASS, default=DEFAULT_TRAFFIC_CLASS)\n', (3419, 3470), True, 'import voluptuous as vol\n'), ((3559, 3616), 'voluptuous.Optional', 'vol.Optional', (['CONF_TRAIN_TYPE'], {'default': 'DEFAULT_TRAIN_TYPE'}), '(CONF_TRAIN_TYPE, default=DEFAULT_TRAIN_TYPE)\n', (3571, 3616), True, 'import voluptuous as vol\n'), ((2610, 2635), 'voluptuous.In', 'vol.In', (['LIST_SENSOR_TYPES'], {}), '(LIST_SENSOR_TYPES)\n', (2616, 2635), True, 'import voluptuous as vol\n'), ((2785, 2831), 'voluptuous.Any', 'vol.Any', (['cv.time_period', 'cv.positive_timedelta'], {}), '(cv.time_period, cv.positive_timedelta)\n', (2792, 2831), True, 'import voluptuous as vol\n'), ((2951, 2987), 'voluptuous.All', 'vol.All', (['cv.ensure_list', '[cv.string]'], {}), '(cv.ensure_list, [cv.string])\n', (2958, 2987), True, 'import voluptuous as vol\n'), ((3362, 3392), 'voluptuous.In', 'vol.In', (['LIST_SENSOR_PROPERTIES'], {}), '(LIST_SENSOR_PROPERTIES)\n', (3368, 3392), True, 'import voluptuous as vol\n'), ((3634, 3658), 'voluptuous.In', 'vol.In', (['LIST_TRAIN_TYPES'], {}), '(LIST_TRAIN_TYPES)\n', (3640, 3658), True, 'import voluptuous as vol\n'), ((12745, 12777), 'homeassistant.util.dt.now', 'now', (['self._hass.config.time_zone'], {}), '(self._hass.config.time_zone)\n', (12748, 12777), False, 'from homeassistant.util.dt import now\n'), ((3083, 3098), 'voluptuous.Coerce', 'vol.Coerce', (['int'], {}), '(int)\n', (3093, 3098), True, 'import voluptuous as vol\n'), ((3100, 3123), 'voluptuous.Range', 'vol.Range', ([], {'min': '(0)', 'max': '(2)'}), '(min=0, max=2)\n', (3109, 3123), True, 'import voluptuous as vol\n'), ((3222, 3237), 'voluptuous.Coerce', 'vol.Coerce', (['int'], {}), '(int)\n', (3232, 3237), True, 'import voluptuous as vol\n'), ((3239, 3263), 'voluptuous.Range', 'vol.Range', ([], {'min': '(0)', 'max': '(60)'}), '(min=0, max=60)\n', (3248, 3263), True, 'import voluptuous as vol\n'), ((3513, 3542), 'voluptuous.In', 'vol.In', (['DEFAULT_TRAFFIC_CLASS'], {}), '(DEFAULT_TRAFFIC_CLASS)\n', (3519, 3542), True, 'import voluptuous as vol\n')] |
"""
A quick library for querying SIMBAD for information about a SN
and parsing the results.
Author: <NAME>, <EMAIL>, 2014
example SIMBAD uri query:
http://simbad.u-strasbg.fr/simbad/sim-id?output.format=ASCII&Ident=sn%201998S
"""
import re
from urllib2 import urlopen
def get_SN_info( name ):
"""
Queries simbad for SN coords, redshift, and host galaxy.
    If no redshift is given for the SN, attempts to resolve the link to the
    host galaxy and report its redshift.
Returns ( (ra,dec), redshift, host_name, redshift_citation ), with
values of None inserted whenever it cannot resolve the value.
"""
simbad_uri = "http://simbad.u-strasbg.fr/simbad/sim-id?output.format=ASCII&Ident=%s"
    regex_coords = r"Coordinates\(FK5.+\): .+"
    regex_redshift = r"Redshift:\s+\d+\.\d+.+"
    regex_host = r"apparent\s+host\s+galaxy\s+.+?\{(.*?)\}"
result = urlopen( simbad_uri % name.replace(' ','%20') ).read()
rescoords = re.search( regex_coords, result )
resred = re.search( regex_redshift, result )
reshost = re.search( regex_host, result )
try:
cs = rescoords.group().split(':')[1].strip()
ra = cs[:12].strip()
dec = cs[12:].strip()
    except AttributeError:
ra,dec = None,None
try:
redshift = float(resred.group().strip('Redshift: ').split(' ')[0])
citation = resred.group().split(' ')[-1]
except AttributeError:
redshift = None
citation = None
try:
host = reshost.group().split('{')[1].split('}')[0]
except AttributeError:
host = None
    if (redshift is None) and (host is not None):
# get the redshift from the host galaxy
result = urlopen( simbad_uri % host.replace(' ','%20') ).read()
resred = re.search( regex_redshift, result )
try:
redshift = float(resred.group().strip('Redshift: ').split(' ')[0])
citation = resred.group().split(' ')[-1]
except AttributeError:
pass
return ((ra,dec), redshift, host, citation)
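# Usage sketch (illustrative addition, not part of the original module): this runs a
# live query against SIMBAD and assumes the ASCII output format the regexes above were
# written for (circa 2014), so fields may come back as None if that format has changed.
if __name__ == '__main__':
    coords, redshift, host, citation = get_SN_info('SN 1998S')
    print('coords:   %s' % (coords,))
    print('redshift: %s (citation: %s)' % (redshift, citation))
    print('host:     %s' % host)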
| [
"re.search"
] | [((950, 981), 're.search', 're.search', (['regex_coords', 'result'], {}), '(regex_coords, result)\n', (959, 981), False, 'import re\n'), ((997, 1030), 're.search', 're.search', (['regex_redshift', 'result'], {}), '(regex_redshift, result)\n', (1006, 1030), False, 'import re\n'), ((1047, 1076), 're.search', 're.search', (['regex_host', 'result'], {}), '(regex_host, result)\n', (1056, 1076), False, 'import re\n'), ((1765, 1798), 're.search', 're.search', (['regex_redshift', 'result'], {}), '(regex_redshift, result)\n', (1774, 1798), False, 'import re\n')] |
import os
from math import cos
from math import sin
import Sofa.Core
from splib.numerics import Quat, Vec3
from sofacontrol import measurement_models
path = os.path.dirname(os.path.abspath(__file__))
class TemplateEnvironment:
def __init__(self, name='Template', rayleighMass=0.1, rayleighStiffness=0.1, dt=0.01):
self.name = name
self.robot = Sofa.Core.Node(name)
# set-up solvers
self.robot.addObject('EulerImplicitSolver', name='odesolver', firstOrder="0", rayleighMass=str(rayleighMass),
rayleighStiffness=str(rayleighStiffness))
self.robot.addObject('SparseLDLSolver', name='preconditioner')
self.robot.addObject('GenericConstraintCorrection', solverName="preconditioner")
self.actuator_list = []
self.nb_nodes = None
self.gravity = [0., -9810., 0.] # default
self.dt = dt
def get_measurement_model(self, nodes=None, pos=True, vel=True):
if nodes is None:
return measurement_models.linearModel(range(self.nb_nodes), self.nb_nodes, pos=pos, vel=vel)
else:
return measurement_models.linearModel(nodes, self.nb_nodes, pos=pos, vel=vel)
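# Usage sketch (illustrative addition; the node indices below are arbitrary): the concrete
# environments defined below inherit get_measurement_model, so given a SOFA runtime a
# position-only measurement model over selected mesh nodes could be requested as:
#
#   env = Trunk()
#   meas_model = env.get_measurement_model(nodes=[100, 200, 300], pos=True, vel=False)
#
# Omitting `nodes` measures all `nb_nodes` nodes of the robot mesh, with positions and/or
# velocities selected by the `pos` and `vel` flags.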
class Trunk(TemplateEnvironment):
def __init__(self, name='Trunk', all_cables=True):
super(Trunk, self).__init__(name=name)
self.nb_nodes = 709
self.gravity = [0., 0., 9810.]
self.robot.min_force = [0.] * 8 # Without premultiplication with dt
self.robot.addObject('MeshVTKLoader', name='loader', filename=path + '/mesh/trunk.vtk')
self.robot.addObject('TetrahedronSetTopologyContainer', src='@loader', name='container')
self.robot.addObject('TetrahedronSetTopologyModifier')
self.robot.addObject('TetrahedronSetTopologyAlgorithms')
self.robot.addObject('TetrahedronSetGeometryAlgorithms')
# Option 1:
self.robot.addObject('MechanicalObject', name='tetras', template='Vec3d', showIndices='false',
showIndicesScale='4e-5')
# Option 2: Equivalent to option 1 (we believe)
# self.robot.addObject('MechanicalObject', src='@loader')
# Gives a mass to the model
self.robot.addObject('UniformMass', totalMass=0.042)
        # Add a TetrahedronFEMForceField component, which implements an elastic material model solved using the
        # Finite Element Method on tetrahedra.
self.robot.addObject('TetrahedronFEMForceField', template='Vec3d', name='FEM', method='large',
poissonRatio=0.45,
youngModulus=450)
# Fix the base of the trunk by adding constraints in a region of interest (ROI)
self.robot.addObject('BoxROI', name='boxROI', box=[[-20, -20, 0], [20, 20, 20]], drawBoxes=False)
self.robot.addObject('RestShapeSpringsForceField', points='@boxROI.indices', stiffness='1e12')
##########################################
# Cable #
##########################################
actuator_names = ''
length1 = 10.
length2 = 2.
lengthTrunk = 195.
pullPoint = [[0., length1, 0.], [-length1, 0., 0.], [0., -length1, 0.], [length1, 0., 0.]]
direction = Vec3(0., length2 - length1, lengthTrunk)
direction.normalize()
nbCables = 4
actuators = self.robot.addChild('actuators')
for i in range(0, nbCables):
childname = 'cableL' + str(i)
theta = 1.57 * i
q = Quat(0., 0., sin(theta / 2.), cos(theta / 2.))
position = [[0., 0., 0.]] * 20
for k in range(0, 20, 2):
v = Vec3(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * 17.5 * (k / 2) + 21)
position[k] = v.rotateFromQuat(q)
v = Vec3(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * 17.5 * (k / 2) + 27)
position[k + 1] = v.rotateFromQuat(q)
cableL = actuators.addChild(childname)
cableL.addObject('MechanicalObject', name='meca',
position=pullPoint[i] + [pos.toList() for pos in position])
cableL.addObject('CableConstraint', template='Vec3d', name="cable",
hasPullPoint="0",
indices=list(range(21)),
maxPositiveDisp='70',
maxDispVariation="1",
valueType='force',
minForce=self.robot.min_force[i] * self.robot.dt.value)
cableL.addObject('BarycentricMapping', name='mapping', mapForces='false', mapMasses='false')
actuator_names += childname + '/cable,'
self.actuator_list.append(cableL.cable)
if all_cables:
for i in range(0, nbCables):
childname = 'cableS' + str(i)
theta = 1.57 * i
q = Quat(0., 0., sin(theta / 2.), cos(theta / 2.))
position = [[0., 0., 0.]] * 10
for k in range(0, 9, 2):
v = Vec3(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * 17.5 * (k / 2) + 21)
position[k] = v.rotateFromQuat(q)
v = Vec3(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * 17.5 * (k / 2) + 27)
position[k + 1] = v.rotateFromQuat(q)
cableS = actuators.addChild(childname)
cableS.addObject('MechanicalObject', name='meca',
position=pullPoint[i] + [pos.toList() for pos in position])
cableS.addObject('CableConstraint', template='Vec3d', name="cable",
hasPullPoint="0",
indices=list(range(10)),
maxPositiveDisp='40',
maxDispVariation="1",
valueType='force',
minForce=self.robot.min_force[i + 4] * self.robot.dt.value)
cableS.addObject('BarycentricMapping', name='mapping', mapForces='false', mapMasses='false')
actuator_names += childname + '/cable,'
self.actuator_list.append(cableS.cable)
self.robot.actuator_list = self.actuator_list
##########################################
# Visualization #
##########################################
trunkVisu = self.robot.addChild('VisualModel')
trunkVisu.addObject('MeshSTLLoader', filename=path + "/mesh/trunk.stl")
trunkVisu.addObject('OglModel', template='Vec3d', color=[1., 1., 1., 0.8])
trunkVisu.addObject('BarycentricMapping')
class Trunk4Cables(Trunk):
def __init__(self, name='Trunk4Cables'):
super(Trunk4Cables, self).__init__(name=name, all_cables=False)
self.robot.min_force = [0, 0, 0, 0] # Without premultiplication with dt
class Finger(TemplateEnvironment):
def __init__(self, name='Finger'):
super(Finger, self).__init__(name=name)
self.nb_nodes = 158
self.robot.min_force = [0.] # Without premultiplication with dt
self.robot.addObject('MeshVTKLoader', name='loader', filename=path + '/mesh/finger.vtk')
self.robot.addObject('TetrahedronSetTopologyContainer', src='@loader', name='container')
self.robot.addObject('TetrahedronSetTopologyModifier')
self.robot.addObject('TetrahedronSetTopologyAlgorithms')
self.robot.addObject('TetrahedronSetGeometryAlgorithms')
self.robot.addObject('MechanicalObject', name='tetras', template='Vec3d', showIndices='false',
showIndicesScale='4e-5')
self.robot.addObject('UniformMass', totalMass=0.075)
        # Add a TetrahedronFEMForceField component, which implements an elastic material model solved using the Finite Element Method on tetrahedra.
self.robot.addObject('TetrahedronFEMForceField', template='Vec3d', name='FEM', method='large',
poissonRatio=0.45,
youngModulus=600)
# Fix the base of the trunk by adding constraints in a region of interest (ROI)
self.robot.addObject('BoxROI', name='boxROI', box=[[-15, 0, 0], [5, 10, 15]], drawBoxes=False)
self.robot.addObject('RestShapeSpringsForceField', points='@boxROI.indices', stiffness='1e12')
##########################################
# Cable #
##########################################
# This creates a new node in the scene. This node is appended to the finger's node.
actuators = self.robot.addChild('actuators')
cable = actuators.addChild('cable')
        # This creates a MechanicalObject, a component holding the degrees of freedom of our
        # mechanical model. In the case of a cable it is the set of positions specifying
        # the points the cable passes through.
cable.addObject('MechanicalObject', name='meca',
position=(
"-17.5 12.5 2.5 " +
"-32.5 12.5 2.5 " +
"-47.5 12.5 2.5 " +
"-62.5 12.5 2.5 " +
"-77.5 12.5 2.5 " +
"-83.5 12.5 4.5 " +
"-85.5 12.5 6.5 " +
"-85.5 12.5 8.5 " +
"-83.5 12.5 10.5 " +
"-77.5 12.5 12.5 " +
"-62.5 12.5 12.5 " +
"-47.5 12.5 12.5 " +
"-32.5 12.5 12.5 " +
"-17.5 12.5 12.5 "))
        # Create a CableConstraint object with a name.
        # The indices refer to the MechanicalObject's positions.
        # The last index is where the pullPoint is connected.
cable.addObject('CableConstraint', name="cable",
indices=list(range(14)),
pullPoint="0.0 12.5 2.5", valueType='force',
minForce=self.robot.min_force[0] * self.robot.dt.value)
        # This creates a BarycentricMapping. A BarycentricMapping is a key element as it creates a bi-directional link
        # between the cable's DoFs and the finger's, so that movements of the cable's DoFs are mapped
        # to the finger and vice versa.
cable.addObject('BarycentricMapping', name='mapping', mapForces='false', mapMasses='false')
self.actuator_list.append(cable.cable)
self.robot.actuator_list = self.actuator_list
##########################################
# Visualization #
##########################################
# In Sofa, visualization is handled by adding a rendering model.
# Create an empty child node to store this rendering model.
fingerVisu = self.robot.addChild('VisualModel')
        # Add to this empty node a rendering model made of triangles and loaded from an STL file.
fingerVisu.addObject('MeshSTLLoader', filename=path + "/mesh/finger.stl")
fingerVisu.addObject('OglModel', template='Vec3d', color=[1., 1., 1., 0.8])
        # Add a BarycentricMapping so that the rendering model deforms in a way that follows the parent mechanical model.
fingerVisu.addObject('BarycentricMapping')
class Diamond(TemplateEnvironment):
def __init__(self, name='Diamond', totalMass=0.5, poissonRatio=0.45, youngModulus=450, rayleighMass=0.1, rayleighStiffness=0.1, dt=0.01):
super(Diamond, self).__init__(name=name, rayleighMass=rayleighMass, rayleighStiffness=rayleighStiffness, dt=dt)
self.nb_nodes = 1628
self.gravity = [0., 0., -9810.]
rotation = [90, 0.0, 0.0]
translation = [0.0, 0.0, 35]
self.robot.min_force = [0, 0, 0, 0] # Without premultiplication with dt
self.robot.addObject('MeshVTKLoader', name='loader', filename=path + "/mesh/diamond.vtu", rotation=rotation,
translation=translation)
self.robot.addObject('TetrahedronSetTopologyContainer', src='@loader', name='container')
self.robot.addObject('TetrahedronSetTopologyModifier')
self.robot.addObject('TetrahedronSetTopologyAlgorithms')
self.robot.addObject('TetrahedronSetGeometryAlgorithms')
self.robot.addObject('MechanicalObject', template='Vec3d', name='tetras', showIndices='false',
showIndicesScale='4e-5')
self.robot.addObject('UniformMass', totalMass=totalMass, name='mass')
self.robot.addObject('TetrahedronFEMForceField', template='Vec3d',
method='large', name='forcefield',
poissonRatio=poissonRatio, youngModulus=youngModulus)
# Fix the base of the trunk by adding constraints in a region of interest (ROI)
self.robot.addObject('BoxROI', name='boxROI', box=[-15, -15, -40, 15, 15, 10], drawBoxes=True)
        self.robot.addObject('RestShapeSpringsForceField', points='@boxROI.indices', stiffness='1e12')
##########################################
# Cable #
##########################################
self.actuatorsParam = [
{'withName': 'A',
'withCableGeometry': [[0, 97, 45]],
'withAPullPointLocation': [0, 10, 30]
},
{'withName': 'B',
'withCableGeometry': [[-97, 0, 45]],
'withAPullPointLocation': [-10, 0, 30]
},
{'withName': 'C',
'withCableGeometry': [[0, -97, 45]],
'withAPullPointLocation': [0, -10, 30]
},
{'withName': 'D',
'withCableGeometry': [[97, 0, 45]],
'withAPullPointLocation': [10, 0, 30]
}
]
actuators = self.robot.addChild('actuators')
for i in range(len(self.actuatorsParam)):
cable = actuators.addChild(self.actuatorsParam[i]['withName'])
cable.addObject('MechanicalObject', position=self.actuatorsParam[i]['withCableGeometry'])
cable.addObject('CableConstraint',
name='cable',
indices=list(range(len(self.actuatorsParam[i]['withCableGeometry']))),
pullPoint=self.actuatorsParam[i]['withAPullPointLocation'],
valueType='force',
hasPullPoint=True,
minForce=self.robot.min_force[i] * self.robot.dt.value
)
cable.addObject('BarycentricMapping', name="Mapping", mapForces=False, mapMasses=False)
self.actuator_list.append(cable.cable)
self.robot.actuator_list = self.actuator_list
##########################################
# Visualization #
##########################################
diamondVisu = self.robot.addChild('VisualModel')
diamondVisu.addObject('MeshSTLLoader', filename=path + "/mesh/diamond.stl")
diamondVisu.addObject('OglModel', template='Vec3d', color=[0.7, 0.7, 0.7, 0.7], updateNormals=False)
diamondVisu.addObject('BarycentricMapping')
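# Construction sketch (illustrative addition): building any of these environments requires
# a SOFA/SofaPython3 runtime with the plugins referenced above (e.g. SoftRobots for
# 'CableConstraint'), so only the intended calling pattern is sketched here:
#
#   env = Diamond(totalMass=0.5, youngModulus=450, dt=0.01)
#   # env.robot is a Sofa.Core.Node ready to be attached to the caller's scene root;
#   # env.gravity and env.dt record the values the scene is expected to use, and
#   # env.robot.actuator_list holds the four CableConstraint cables created above.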
| [
"sofacontrol.measurement_models.linearModel",
"splib.numerics.Vec3",
"math.cos",
"os.path.abspath",
"math.sin"
] | [((176, 201), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (191, 201), False, 'import os\n'), ((3298, 3339), 'splib.numerics.Vec3', 'Vec3', (['(0.0)', '(length2 - length1)', 'lengthTrunk'], {}), '(0.0, length2 - length1, lengthTrunk)\n', (3302, 3339), False, 'from splib.numerics import Quat, Vec3\n'), ((1131, 1201), 'sofacontrol.measurement_models.linearModel', 'measurement_models.linearModel', (['nodes', 'self.nb_nodes'], {'pos': 'pos', 'vel': 'vel'}), '(nodes, self.nb_nodes, pos=pos, vel=vel)\n', (1161, 1201), False, 'from sofacontrol import measurement_models\n'), ((3581, 3597), 'math.sin', 'sin', (['(theta / 2.0)'], {}), '(theta / 2.0)\n', (3584, 3597), False, 'from math import sin\n'), ((3598, 3614), 'math.cos', 'cos', (['(theta / 2.0)'], {}), '(theta / 2.0)\n', (3601, 3614), False, 'from math import cos\n'), ((3716, 3816), 'splib.numerics.Vec3', 'Vec3', (['direction[0]', '(direction[1] * 17.5 * (k / 2) + length1)', '(direction[2] * 17.5 * (k / 2) + 21)'], {}), '(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * \n 17.5 * (k / 2) + 21)\n', (3720, 3816), False, 'from splib.numerics import Quat, Vec3\n'), ((3882, 3982), 'splib.numerics.Vec3', 'Vec3', (['direction[0]', '(direction[1] * 17.5 * (k / 2) + length1)', '(direction[2] * 17.5 * (k / 2) + 27)'], {}), '(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * \n 17.5 * (k / 2) + 27)\n', (3886, 3982), False, 'from splib.numerics import Quat, Vec3\n'), ((5037, 5053), 'math.sin', 'sin', (['(theta / 2.0)'], {}), '(theta / 2.0)\n', (5040, 5053), False, 'from math import sin\n'), ((5054, 5070), 'math.cos', 'cos', (['(theta / 2.0)'], {}), '(theta / 2.0)\n', (5057, 5070), False, 'from math import cos\n'), ((5184, 5284), 'splib.numerics.Vec3', 'Vec3', (['direction[0]', '(direction[1] * 17.5 * (k / 2) + length1)', '(direction[2] * 17.5 * (k / 2) + 21)'], {}), '(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * \n 17.5 * (k / 2) + 21)\n', (5188, 5284), False, 'from splib.numerics import Quat, Vec3\n'), ((5358, 5458), 'splib.numerics.Vec3', 'Vec3', (['direction[0]', '(direction[1] * 17.5 * (k / 2) + length1)', '(direction[2] * 17.5 * (k / 2) + 27)'], {}), '(direction[0], direction[1] * 17.5 * (k / 2) + length1, direction[2] * \n 17.5 * (k / 2) + 27)\n', (5362, 5458), False, 'from splib.numerics import Quat, Vec3\n')] |